repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/ts-bookkeeping/build/message_parser.rs | utils/ts-bookkeeping/build/message_parser.rs | use std::default::Default;
use itertools::Itertools;
use t4rust_derive::Template;
use tsproto_structs::messages::*;
use tsproto_structs::{indent, messages, InnerRustType, RustType};
/// Template wrapper rendering message struct declarations from
/// `MessageDeclarations.tt` for the wrapped set of declarations.
#[derive(Template)]
#[TemplatePath = "build/MessageDeclarations.tt"]
#[derive(Debug, Clone)]
pub struct MessageDeclarations<'a>(pub &'a messages::MessageDeclarations);
impl MessageDeclarations<'static> {
	/// Declarations restricted to groups sent from server to client.
	pub fn s2c() -> messages::MessageDeclarations { Self::filtered(|g| g.default.s2c) }

	/// Declarations restricted to groups sent from client to server.
	pub fn c2s() -> messages::MessageDeclarations { Self::filtered(|g| g.default.c2s) }

	/// Clone the full declarations and retain only the groups matching `keep`.
	fn filtered(keep: impl Fn(&messages::MessageGroup) -> bool) -> messages::MessageDeclarations {
		let mut res = DATA.clone();
		res.msg_group.retain(|g| keep(g));
		res
	}
}
impl Default for MessageDeclarations<'static> {
	/// Render all message declarations, without any direction filter.
	fn default() -> Self { MessageDeclarations(&DATA) }
}
/// Build the deserializer code snippet for `field`.
///
/// Array fields are deserialized element-wise into a `Vec`, everything else
/// goes through the single value deserializer.
pub fn generate_deserializer(field: &Field) -> String {
	let rust_type = field.get_type("").unwrap();
	match rust_type.inner {
		InnerRustType::Vec(inner) => vector_value_deserializer(field, (*inner).into()),
		_ => single_value_deserializer(field, &rust_type.to_string()),
	}
}
/// Generate a rust expression (as source text) that parses the in-scope string
/// variable `val` into a single value of `rust_type`.
///
/// Errors are reported as `ParseError` variants carrying `field.pretty` as the
/// argument name; the generated expression uses `?`/`return Err` and must be
/// placed in a function returning a compatible `Result`.
///
/// # Panics
/// Panics for unknown `rust_type`s and unknown `Duration` source types.
pub fn single_value_deserializer(field: &Field, rust_type: &str) -> String {
	let res = match rust_type {
		// Integers parse via FromStr.
		"i8" | "u8" | "i16" | "u16" | "i32" | "u32" | "i64" | "u64" => format!(
			"val.parse().map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?",
			field.pretty
		),
		"f32" | "f64" => format!(
			"val.parse().map_err(|e| ParseError::ParseFloat {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?",
			field.pretty
		),
		// Booleans are transmitted as "0"/"1".
		"bool" => format!(
			"match val {{ \"0\" => false, \"1\" => true, _ => return Err(ParseError::ParseBool {{
	arg: \"{}\",
	value: val.to_string(),
}}), }}",
			field.pretty
		),
		// Uids are base64 encoded; fall back to the raw bytes if decoding fails.
		"UidBuf" => "UidBuf(if let Ok(uid) = BASE64_STANDARD.decode(val) { uid } else { \
		             val.as_bytes().to_vec() })"
			.into(),
		"&str" => "val".into(),
		"String" => "val.to_string()".into(),
		// Icon ids may arrive negative (parse as i32) or large (parse as u64),
		// both are stored as u32.
		"IconId" => format!(
			"IconId(if val.starts_with('-') {{
	val.parse::<i32>().map(|i| i as u32)
}} else {{
	val.parse::<u64>().map(|i| i as u32)
}}.map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?)",
			field.pretty
		),
		// Id newtypes wrap a parsed integer.
		"ClientId" | "ClientDbId" | "ChannelId" | "ServerGroupId" | "ChannelGroupId" => format!(
			"{}(val.parse().map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?)",
			rust_type, field.pretty
		),
		"IpAddr" | "SocketAddr" => format!(
			"val.parse().map_err(|e| ParseError::ParseAddr {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?",
			field.pretty
		),
		"ClientType" => format!(
			"match val {{
	\"0\" => ClientType::Normal,
	\"1\" => ClientType::Query {{ admin: false }},
	_ => return Err(ParseError::InvalidValue {{
		arg: \"{}\",
		value: val.to_string(),
	}}),
}}",
			field.pretty
		),
		// Enums convert through from_u32; unknown discriminants are invalid values.
		"TextMessageTargetMode"
		| "HostMessageMode"
		| "HostBannerMode"
		| "LicenseType"
		| "LogLevel"
		| "Codec"
		| "CodecEncryptionMode"
		| "Reason"
		| "GroupNamingMode"
		| "GroupType"
		| "Permission"
		| "PermissionType"
		| "TokenType"
		| "PluginTargetMode"
		| "Error" => format!(
			"{}::from_u32(val.parse().map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?).ok_or(ParseError::InvalidValue {{
	arg: \"{1}\",
	value: val.to_string(),
}})?",
			rust_type, field.pretty
		),
		// Bitflags convert through from_bits.
		"ChannelPermissionHint" | "ClientPermissionHint" => format!(
			"{}::from_bits(val.parse().map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?).ok_or(ParseError::InvalidValue {{
	arg: \"{1}\",
	value: val.to_string(),
}})?",
			rust_type, field.pretty
		),
		// Durations depend on the unit of the declared (original) type.
		"Duration" => {
			if field.type_s == "DurationSeconds" {
				// Reject second counts that would overflow a later conversion
				// to milliseconds.
				format!(
					"let val = val.parse::<i64>().map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?;
if val.checked_mul(1000).is_some() {{ Duration::seconds(val) }}
else {{ return Err(ParseError::InvalidValue {{
	arg: \"{0}\",
	value: val.to_string(),
}}); }}",
					field.pretty
				)
			} else if field.type_s == "DurationMilliseconds" {
				format!(
					"Duration::milliseconds(val.parse::<i64>().map_err(|e| ParseError::ParseInt \
					 {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?)",
					field.pretty
				)
			} else if field.type_s == "DurationMillisecondsFloat" {
				format!(
					"Duration::microseconds((1000.0 * val.parse::<f32>().map_err(|e| \
					 ParseError::ParseFloat {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?) as i64)",
					field.pretty
				)
			} else {
				panic!("Unknown original time type {} found.", field.type_s);
			}
		}
		"OffsetDateTime" => format!(
			"OffsetDateTime::from_unix_timestamp(
	val.parse().map_err(|e| ParseError::ParseInt {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?).map_err(|e| ParseError::ParseDate {{
	arg: \"{}\",
	value: val.to_string(),
	source: e,
}})?",
			field.pretty, field.pretty
		),
		_ => panic!("Unknown type '{}' when trying to deserialize {:?}", rust_type, field),
	};
	// Multi-line snippets get indented to match their insertion point in the template.
	if res.contains('\n') { indent(&res, 2) } else { res }
}
/// Generate a rust expression (as source text) that splits the in-scope string
/// `val` on commas and parses every non-empty entry into a `Vec<rust_type>`.
///
/// The entries yielded by the generated `filter_map` are already trimmed, so —
/// unlike an earlier version — the `map` closure does not trim a second time.
pub fn vector_value_deserializer(field: &Field, rust_type: RustType) -> String {
	format!(
		"val.split(',')
	.filter_map(|val| {{
		let val = val.trim();
		if val.is_empty() {{
			None
		}} else {{
			Some(val)
		}}
	}}).map(|val| {{
		Ok({})
	}}).collect::<Result<Vec<{}>>>()?",
		single_value_deserializer(field, &rust_type.to_string()),
		rust_type,
	)
}
/// Build the serializer code snippet for `field`, serializing the expression `name`.
///
/// Array fields are joined into a comma separated string, everything else goes
/// through the single value serializer.
pub fn generate_serializer(field: &Field, name: &str) -> String {
	let rust_type = field.get_type("").unwrap();
	match rust_type.inner {
		InnerRustType::Vec(inner) => {
			let element_type: RustType = (*inner).into();
			vector_value_serializer(field, &element_type.to_string(), name)
		}
		_ => single_value_serializer(field, &rust_type.to_string(), name, false),
	}
}
/// Generate a rust expression (as source text) that serializes `name` (an
/// expression of type `rust_type`) into something the template can `write!`.
///
/// `is_ref` states whether `name` is already a reference; it decides whether
/// the generated code borrows (`&`) or dereferences (`*`) the value.
///
/// # Panics
/// Panics for unknown `rust_type`s and unknown `Duration` source types.
pub fn single_value_serializer(field: &Field, rust_type: &str, name: &str, is_ref: bool) -> String {
	let ref_amp = if is_ref { "" } else { "&" };
	let ref_star = if is_ref { "*" } else { "" };
	match rust_type {
		// Types that already format as desired are just borrowed.
		"i8" | "u8" | "i16" | "u16" | "i32" | "u32" | "i64" | "u64" | "f32" | "f64" | "String"
		| "IpAddr" | "SocketAddr" => format!("{}{}", ref_amp, name),
		// Booleans are transmitted as "0"/"1".
		"bool" => format!("if {}{} {{ &\"1\" }} else {{ &\"0\" }}", ref_star, name),
		"&str" => name.to_string(),
		// The server admin uid is sent verbatim, everything else base64 encoded.
		"UidBuf" | "&Uid" => format!(
			"&if {0}.is_server_admin() {{ Cow::Borrowed(\"ServerAdmin\") }}
else {{ Cow::<str>::Owned(BASE64_STANDARD.encode(&{0}.0)) }}",
			name,
		),
		// Id newtypes serialize their inner integer.
		"ClientId" | "ClientDbId" | "ChannelId" | "ServerGroupId" | "ChannelGroupId" | "IconId" => {
			format!("&{}.0", name)
		}
		"ClientType" => format!(
			"match {} {{
	ClientType::Normal => &\"0\",
	ClientType::Query {{ .. }} => &\"1\",
}}",
			name
		),
		// Enums serialize as their numeric discriminant.
		"TextMessageTargetMode"
		| "HostMessageMode"
		| "HostBannerMode"
		| "LicenseType"
		| "LogLevel"
		| "Codec"
		| "CodecEncryptionMode"
		| "Reason"
		| "GroupNamingMode"
		| "GroupType"
		| "Permission"
		| "PermissionType"
		| "TokenType"
		| "PluginTargetMode"
		| "Error" => format!("&{}.to_u32().unwrap()", name),
		// Bitflags serialize as their raw bits.
		"ChannelPermissionHint" | "ClientPermissionHint" => format!("&{}.bits()", name),
		// Durations serialize in the unit of the declared (original) type.
		"Duration" => {
			if field.type_s == "DurationSeconds" {
				format!("&{}.whole_seconds()", name)
			} else if field.type_s == "DurationMilliseconds" {
				format!("&{}.whole_milliseconds()", name)
			} else if field.type_s == "DurationMillisecondsFloat" {
				format!("&({}.whole_microseconds() as f32 / 1000.0)", name)
			} else {
				panic!("Unknown original time type {} found.", field.type_s);
			}
		}
		"OffsetDateTime" => format!("&{}.unix_timestamp()", name),
		_ => panic!("Unknown type '{}'", rust_type),
	}
}
/// Generate a rust expression (as source text) that serializes the vector
/// `name` of `inner_type` elements into a single comma separated string.
pub fn vector_value_serializer(field: &Field, inner_type: &str, name: &str) -> String {
	// TODO Vector serialization creates an intermediate string which is not necessary
	format!(
		"&{{ let mut s = String::new();
for val in {}.as_ref() {{
	if !s.is_empty() {{ s += \",\" }}
	write!(&mut s, \"{{}}\", {}).unwrap();
}}
s
}}",
		name,
		single_value_serializer(field, inner_type, "val", true)
	)
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/ts-bookkeeping/build/build.rs | utils/ts-bookkeeping/build/build.rs | use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
mod book_parser;
mod book_to_messages_parser;
mod events;
mod message_parser;
mod messages_to_book_parser;
mod properties;
use crate::book_parser::BookDeclarations;
use crate::book_to_messages_parser::BookToMessagesDeclarations;
use crate::events::EventDeclarations;
use crate::message_parser::MessageDeclarations;
use crate::messages_to_book_parser::MessagesToBookDeclarations;
use crate::properties::Properties;
/// Build script: render every template into a generated source file in `OUT_DIR`.
fn main() {
	let out_dir = env::var("OUT_DIR").unwrap();
	let out_path = Path::new(&out_dir);
	// Render one displayable template into the named file inside OUT_DIR.
	let render = |file_name: &str, template: &dyn std::fmt::Display| {
		let mut f = File::create(out_path.join(file_name)).unwrap();
		write!(&mut f, "{}", template).unwrap();
	};
	// Book declarations
	render("structs.rs", &BookDeclarations::default());
	// Messages to book
	render("m2bdecls.rs", &MessagesToBookDeclarations::default());
	// Write messages, split by direction
	render("s2c_messages.rs", &MessageDeclarations(&MessageDeclarations::s2c()));
	render("c2s_messages.rs", &MessageDeclarations(&MessageDeclarations::c2s()));
	// Book to messages
	render("b2mdecls.rs", &BookToMessagesDeclarations::default());
	// Events
	render("events.rs", &EventDeclarations::default());
	// Properties
	render("properties.rs", &Properties::default());
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/ts-bookkeeping/build/events.rs | utils/ts-bookkeeping/build/events.rs | use std::collections::HashSet;
use std::default::Default;
use std::ops::Deref;
use t4rust_derive::Template;
use tsproto_structs::book::*;
/// Template wrapper rendering event declarations from `Events.tt`.
#[derive(Template)]
#[TemplatePath = "build/Events.tt"]
#[derive(Debug)]
pub struct EventDeclarations<'a>(&'a BookDeclarations);
impl<'a> Deref for EventDeclarations<'a> {
	type Target = BookDeclarations;
	// Forward to the wrapped book declarations so the template can use them directly.
	fn deref(&self) -> &Self::Target { self.0 }
}
impl Default for EventDeclarations<'static> {
	/// Wrap the statically parsed book declarations.
	fn default() -> Self { EventDeclarations(&DATA) }
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/ts-bookkeeping/build/book_parser.rs | utils/ts-bookkeeping/build/book_parser.rs | use std::default::Default;
use t4rust_derive::Template;
use tsproto_structs::*;
/// Template wrapper rendering the book structs from `BookDeclarations.tt`.
#[derive(Template)]
#[TemplatePath = "build/BookDeclarations.tt"]
#[derive(Debug)]
pub struct BookDeclarations<'a>(pub &'a book::BookDeclarations);
impl Default for BookDeclarations<'static> {
	/// Wrap the statically parsed book declarations.
	fn default() -> Self { BookDeclarations(&tsproto_structs::book::DATA) }
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/ts-bookkeeping/build/book_to_messages_parser.rs | utils/ts-bookkeeping/build/book_to_messages_parser.rs | use std::default::Default;
use std::ops::Deref;
use heck::*;
use t4rust_derive::Template;
use tsproto_structs::book_to_messages::*;
use tsproto_structs::messages::Field;
use tsproto_structs::*;
/// Template wrapper rendering book-to-messages conversions from `BookToMessages.tt`.
#[derive(Template)]
#[TemplatePath = "build/BookToMessages.tt"]
#[derive(Debug)]
pub struct BookToMessagesDeclarations<'a>(&'a book_to_messages::BookToMessagesDeclarations<'a>);
impl<'a> Deref for BookToMessagesDeclarations<'a> {
	type Target = book_to_messages::BookToMessagesDeclarations<'a>;
	// Forward to the wrapped declarations so the template can use them directly.
	fn deref(&self) -> &Self::Target { self.0 }
}
impl Default for BookToMessagesDeclarations<'static> {
	/// Wrap the statically parsed book-to-messages declarations.
	fn default() -> Self { BookToMessagesDeclarations(&DATA) }
}
/// Render the list of target fields as rust source: a single name stays bare,
/// multiple names are wrapped into a tuple `(a, b, …)`.
fn get_to_list(to: &[&Field]) -> String {
	let names = to.iter().map(|t| t.get_rust_name()).collect::<Vec<_>>().join(", ");
	if to.len() > 1 { format!("({})", names) } else { names }
}
/// Returns whether rule `r` targets the message field `field` — either as the
/// single `to` of a map rule or as one of the `to` fields of a function rule.
/// (The previous comment here described a different function and was replaced.)
fn rule_has_to(r: &RuleKind, field: &Field) -> bool {
	match r {
		RuleKind::Map { to, .. } | RuleKind::ArgumentMap { to, .. } => to == &field,
		RuleKind::ArgumentFunction { to, .. } | RuleKind::Function { to, .. } => {
			to.contains(&field)
		}
	}
}
/// Finds a matching rule in either the event ids or the given rule.
///
/// The returned flag is `true` only when the match is the explicitly passed
/// rule `r` rather than one of the event id rules.
fn find_rule<'a>(
	e: &'a Event, r: Option<&'a RuleKind>, field: &Field,
) -> (bool, Option<&'a RuleKind<'a>>) {
	match r {
		Some(r) if rule_has_to(r, field) => (true, Some(r)),
		_ => (false, e.ids.iter().find(|&r| rule_has_to(r, field))),
	}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/errors.rs | utils/tsproto-structs/src/errors.rs | use std::result::Result;
use crate::*;
use once_cell::sync::Lazy;
/// Raw contents of the embedded error declarations CSV.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/Errors.csv"));
/// Error declarations, lazily deserialized from the embedded CSV.
/// Panics on first access if the CSV cannot be deserialized.
pub static DATA: Lazy<Errors> = Lazy::new(|| {
	Errors(
		csv::Reader::from_reader(DATA_STR.as_bytes())
			.deserialize()
			.collect::<Result<Vec<_>, _>>()
			.unwrap(),
	)
});
/// All error declarations as a list of enum values.
#[derive(Default, Debug)]
pub struct Errors(pub Vec<EnumValue>);
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/messages.rs | utils/tsproto-structs/src/messages.rs | use heck::*;
use once_cell::sync::Lazy;
use serde::Deserialize;
use crate::*;
/// Raw contents of the embedded message declarations TOML.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/Messages.toml"));
/// Message declarations, lazily parsed from the embedded TOML; panics on first access if invalid.
pub static DATA: Lazy<MessageDeclarations> = Lazy::new(|| toml::from_str(DATA_STR).unwrap());
/// All message declarations: the known fields and the message groups using them.
#[derive(Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct MessageDeclarations {
	/// Every field that can appear in any message.
	pub fields: Vec<Field>,
	/// Messages, grouped by shared transmission defaults.
	pub msg_group: Vec<MessageGroup>,
}
impl MessageDeclarations {
	/// Find a message by name, returning `None` if it does not exist.
	pub fn get_message_opt(&self, name: &str) -> Option<&Message> {
		self.msg_group.iter().flat_map(|g| g.msg.iter()).find(|m| m.name == name)
	}
	/// Find a message by name.
	///
	/// # Panics
	/// Panics if no message with this name exists.
	pub fn get_message(&self, name: &str) -> &Message {
		self.get_message_opt(name).unwrap_or_else(|| panic!("Cannot find message {}", name))
	}
	/// Find the group containing `msg`, compared by address.
	///
	/// # Panics
	/// Panics if `msg` is not part of these declarations.
	pub fn get_message_group(&self, msg: &Message) -> &MessageGroup {
		self.msg_group
			.iter()
			.find(|g| g.msg.iter().any(|m| std::ptr::eq(m, msg)))
			.expect("Cannot find message group for message")
	}
	/// Look up a field by its `map` name; a trailing `?` (optional marker) is ignored.
	///
	/// # Panics
	/// Panics if no such field exists.
	pub fn get_field(&self, map: &str) -> &Field {
		// strip_suffix replaces the previous manual length-based slicing.
		let map = map.strip_suffix('?').unwrap_or(map);
		self.fields
			.iter()
			.find(|f| f.map == map)
			.unwrap_or_else(|| panic!("Cannot find field {}", map))
	}
	/// Returns `true` if any attribute of `msg` has a type that borrows (needs a lifetime).
	pub fn uses_lifetime(&self, msg: &Message) -> bool {
		msg.attributes
			.iter()
			.any(|a| self.get_field(a).get_type(a).unwrap().to_ref(true).uses_lifetime())
	}
}
/// Declaration of a single message field/argument.
#[derive(Deserialize, Debug, Clone, Eq, Hash, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct Field {
	/// Internal name of this declarations file to map fields to messages.
	pub map: String,
	/// The name as called by TeamSpeak in messages.
	pub ts: String,
	/// The pretty name in PascalCase. This will be used for the fields in rust.
	pub pretty: String,
	/// The declared (original) type name, e.g. `DurationSeconds`.
	#[serde(rename = "type")]
	pub type_s: String,
	/// Optional modifier, e.g. `array`.
	#[serde(rename = "mod")]
	pub modifier: Option<String>,
}
/// A group of messages sharing the same transmission defaults.
#[derive(Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct MessageGroup {
	pub default: MessageGroupDefaults,
	pub msg: Vec<Message>,
}
/// Transmission properties shared by all messages of a group.
#[derive(Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct MessageGroupDefaults {
	/// Sent from server to client.
	pub s2c: bool,
	/// Sent from client to server.
	pub c2s: bool,
	/// The message is answered with a response.
	pub response: bool,
	// NOTE(review): presumably the "low priority" packet flag — confirm against tsdeclarations.
	pub low: bool,
	// NOTE(review): presumably the "newprotocol" packet flag — confirm against tsdeclarations.
	pub np: bool,
}
/// A single message/command declaration.
#[derive(Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields)]
pub struct Message {
	/// How we call this message.
	pub name: String,
	/// How TeamSpeak calls this message.
	pub notify: Option<String>,
	/// The `map` names of this message's fields; a trailing `?` marks optional fields.
	pub attributes: Vec<String>,
}
impl Field {
	/// The rust field name: `pretty` converted to snake_case.
	pub fn get_rust_name(&self) -> String { self.pretty.to_snake_case() }
	/// Get the rust type of this field; the attribute `a` marks optionality
	/// with a trailing `?`.
	pub fn get_type(&self, a: &str) -> Result<RustType> {
		RustType::with(&self.type_s, a.ends_with('?'), None, false, self.is_array())
	}
	/// Returns if this field is optional in the message.
	pub fn is_opt(&self, msg: &Message) -> bool { !msg.attributes.iter().any(|a| *a == self.map) }
	/// Returns `true` if the modifier marks this field as an array.
	pub fn is_array(&self) -> bool { self.modifier.as_deref() == Some("array") }
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/messages_to_book.rs | utils/tsproto-structs/src/messages_to_book.rs | use std::collections::HashSet;
use std::fmt::Write;
use std::str::FromStr;
use once_cell::sync::Lazy;
use serde::Deserialize;
use crate::book::{BookDeclarations, Property, Struct};
use crate::messages::{Field, Message, MessageDeclarations};
use crate::*;
/// Raw contents of the embedded messages-to-book declarations TOML.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/MessagesToBook.toml"));
/// Messages-to-book declarations, lazily built from the embedded TOML plus the
/// book and message declarations.
///
/// Besides the explicitly declared property rules, every message field whose
/// `pretty` name matches a property of the target book struct gets mapped
/// automatically. Panics on first access if the TOML is invalid or references
/// unknown messages, fields or structs.
pub static DATA: Lazy<MessagesToBookDeclarations<'static>> = Lazy::new(|| {
	let rules: TomlStruct = toml::from_str(DATA_STR).unwrap();
	let book = &book::DATA;
	let messages = &messages::DATA;
	let mut decls: Vec<_> = rules
		.rule
		.into_iter()
		.map(|r| {
			let msg = messages.get_message(&r.from);
			let msg_fields =
				msg.attributes.iter().map(|a| messages.get_field(a)).collect::<Vec<_>>();
			let book_struct = book
				.structs
				.iter()
				.find(|s| s.name == r.to)
				.unwrap_or_else(|| panic!("Cannot find struct {}", r.to));
			let mut ev = Event {
				op: r.operation.parse().expect("Failed to parse operation"),
				id: r.id.iter().map(|s| find_field(s, &msg_fields)).collect(),
				msg,
				book_struct,
				rules: r
					.properties
					.into_iter()
					.map(|p| {
						// `is_valid` guarantees either a function rule or a map rule below.
						assert!(p.is_valid());
						let find_prop = |name, book_struct: &'static Struct| -> &'static Property {
							if let Some(prop) =
								book_struct.properties.iter().find(|p| p.name == name)
							{
								return prop;
							}
							panic!(
								"No such (nested) property {} found in struct {}",
								name, book_struct.name,
							);
						};
						if p.function.is_some() {
							RuleKind::Function {
								name: p.function.unwrap(),
								to: p
									.tolist
									.unwrap()
									.into_iter()
									.map(|p| find_prop(p, book_struct))
									.collect(),
							}
						} else {
							RuleKind::Map {
								from: find_field(&p.from.unwrap(), &msg_fields),
								to: find_prop(p.to.unwrap(), book_struct),
								op: p
									.operation
									.map(|s| s.parse().expect("Invalid operation for property"))
									.unwrap_or(RuleOp::Update),
							}
						}
					})
					.collect(),
			};
			// Add attributes with the same name automatically (if they are not
			// yet there).
			let used_flds = ev
				.rules
				.iter()
				.filter_map(|f| match *f {
					RuleKind::Map { from, .. } => Some(from),
					_ => None,
				})
				.collect::<HashSet<_>>();
			// Properties already filled by a function rule must not be mapped again.
			let mut used_props = vec![];
			for rule in &ev.rules {
				if let RuleKind::Function { to, .. } = rule {
					for p in to {
						used_props.push(p.name.clone());
					}
				}
			}
			for fld in &msg_fields {
				if used_flds.contains(fld) {
					continue;
				}
				if let Some(prop) = book
					.get_struct(&ev.book_struct.name)
					.properties
					.iter()
					.find(|p| p.name == fld.pretty)
				{
					if used_props.contains(&prop.name) {
						continue;
					}
					ev.rules.push(RuleKind::Map { from: fld, to: prop, op: RuleOp::Update });
				}
			}
			ev
		})
		.collect();
	// InitServer is done manually
	decls.retain(|ev| ev.msg.name != "InitServer");
	MessagesToBookDeclarations { book, messages, decls }
});
/// All messages-to-book conversions together with the declarations they reference.
#[derive(Debug)]
pub struct MessagesToBookDeclarations<'a> {
	pub book: &'a BookDeclarations,
	pub messages: &'a MessageDeclarations,
	/// One event per handled message.
	pub decls: Vec<Event<'a>>,
}
/// How one message updates one book struct.
#[derive(Debug)]
pub struct Event<'a> {
	/// What the message does to the struct (add/remove/update).
	pub op: RuleOp,
	/// Unique access tuple to get the property
	pub id: Vec<&'a Field>,
	pub msg: &'a Message,
	pub book_struct: &'a Struct,
	/// How individual message fields map to struct properties.
	pub rules: Vec<RuleKind<'a>>,
}
/// A single field-to-property conversion rule.
#[derive(Debug)]
pub enum RuleKind<'a> {
	/// Copy one message field into one property.
	Map { from: &'a Field, to: &'a Property, op: RuleOp },
	/// Call a hand-written function that fills the listed properties.
	Function { name: String, to: Vec<&'a Property> },
}
/// Operation performed on the target struct or property.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum RuleOp {
	Add,
	Remove,
	Update,
}
/// Top level layout of `MessagesToBook.toml`.
#[derive(Clone, Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct TomlStruct {
	rule: Vec<Rule>,
}
/// One conversion rule as written in the TOML file.
#[derive(Clone, Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct Rule {
	/// Field names forming the unique id of the target struct instance.
	id: Vec<String>,
	/// Source message name.
	from: String,
	/// Target book struct name.
	to: String,
	operation: String,
	#[serde(default = "Vec::new")]
	properties: Vec<RuleProperty>,
}
/// One property mapping of a rule; either `from`/`to` (map rule) or
/// `function`/`tolist` (function rule) — see `RuleProperty::is_valid`.
#[derive(Clone, Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct RuleProperty {
	from: Option<String>,
	to: Option<String>,
	operation: Option<String>,
	function: Option<String>,
	tolist: Option<Vec<String>>,
}
impl Event<'_> {
	/// Fill the id of a `PropertyId`.
	///
	/// `msg` is the name of the message object.
	pub fn get_id_args(&self, msg: &str) -> String {
		let mut res = String::new();
		for f in &self.id {
			if !res.is_empty() {
				res.push_str(", ");
			}
			// Non-primitive id parts are passed by reference.
			if !f.get_type("").unwrap().is_primitive() {
				res.push('&');
			}
			let _ = write!(res, "{}.{}", msg, f.get_rust_name());
		}
		res
	}
	/// Create a `PropertyId` from a message struct.
	///
	/// `msg` is the name of the message object.
	pub fn get_property_id(&self, p: &Property, from: &Field, msg: &str) -> String {
		let mut ids = self.get_id_args(msg);
		// Container properties additionally need the key/index from the message field.
		if let Some(m) = &p.modifier {
			if !ids.is_empty() {
				ids.push_str(", ");
			}
			if m == "map" || m == "array" || m == "set" {
				let _ = write!(ids, "{}.{}", msg, from.get_rust_name());
			} else {
				panic!("Unknown modifier {}", m);
			}
		}
		format!("PropertyId::{}{}{}", self.book_struct.name, p.get_name(), embrace(&ids))
	}
}
impl RuleProperty {
	/// A property rule is valid when it is either a plain mapping
	/// (`from` + `to`, no function parts) or a function rule
	/// (`function` + `tolist`, nothing else).
	fn is_valid(&self) -> bool {
		match self.from {
			Some(_) => self.to.is_some() && self.function.is_none() && self.tolist.is_none(),
			None => {
				self.to.is_none()
					&& self.operation.is_none()
					&& self.function.is_some()
					&& self.tolist.is_some()
			}
		}
	}
}
impl FromStr for RuleOp {
	type Err = fmt::Error;
	/// Parse `add`/`remove`/`update`; anything else logs to stderr and fails.
	fn from_str(s: &str) -> Result<Self> {
		match s {
			"add" => Ok(RuleOp::Add),
			"remove" => Ok(RuleOp::Remove),
			"update" => Ok(RuleOp::Update),
			_ => {
				eprintln!("Cannot parse operation, needs to be add, remove or update");
				Err(fmt::Error)
			}
		}
	}
}
/// Find the field whose `pretty` (PascalCase) name equals `name`.
///
/// # Panics
/// Panics if `msg_fields` contains no such field.
fn find_field<'a>(name: &str, msg_fields: &[&'a Field]) -> &'a Field {
	msg_fields
		.iter()
		.find(|f| f.pretty == name)
		.unwrap_or_else(|| panic!("Cannot find field '{}'", name))
}
impl<'a> RuleKind<'a> {
	/// Returns `true` for function rules.
	pub fn is_function(&self) -> bool { matches!(self, RuleKind::Function { .. }) }
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/lib.rs | utils/tsproto-structs/src/lib.rs | //! `tsproto-structs` contains machine readable data for several TeamSpeak related topics.
//!
//! The underlying data files can be found in the
//! [tsdeclarations](https://github.com/ReSpeak/tsdeclarations) repository.
use std::fmt;
use std::str::FromStr;
use heck::*;
use serde::Deserialize;
type Result<T> = std::result::Result<T, fmt::Error>;
pub mod book;
pub mod book_to_messages;
pub mod enums;
pub mod errors;
pub mod messages;
pub mod messages_to_book;
pub mod permissions;
pub mod versions;
/// One value of an enum-like declaration: name, documentation and numeric value.
#[derive(Debug, Deserialize)]
pub struct EnumValue {
	pub name: String,
	pub doc: String,
	/// The numeric value, kept as a string exactly as written in the declarations.
	pub num: String,
}
/// Structured representation of a rust type as used in the declarations.
#[derive(Clone, Debug)]
pub enum InnerRustType {
	/// A copyable built-in-like type (integers, ids, enums, …).
	Primitive(String),
	/// Any other named type.
	Struct(String),
	Ref(Box<InnerRustType>),
	Option(Box<InnerRustType>),
	/// Key, value
	Map(Box<InnerRustType>, Box<InnerRustType>),
	Set(Box<InnerRustType>),
	Vec(Box<InnerRustType>),
	Cow(Box<InnerRustType>),
}
/// A rust type together with the information whether rendering it needs a
/// lifetime parameter.
#[derive(Clone, Debug)]
pub struct RustType {
	pub inner: InnerRustType,
	/// Include a lifetime specifier 'a
	pub lifetime: bool,
}
impl InnerRustType {
	/// Write this type as rust source.
	///
	/// `lifetime`: Include 'a for references
	/// `is_ref`: If this is a reference, used to emit either `str` or `String` or slices
	pub fn fmt(&self, f: &mut fmt::Formatter, lifetime: bool, is_ref: bool) -> fmt::Result {
		let lifetime_str = if lifetime { "'a " } else { "" };
		match self {
			Self::Struct(s) if s == "str" => {
				if is_ref {
					write!(f, "str")?;
				} else {
					write!(f, "String")?;
				}
			}
			Self::Struct(s) if s == "Uid" => {
				write!(f, "{}", s)?;
			}
			Self::Primitive(s) | Self::Struct(s) => write!(f, "{}", s)?,
			Self::Ref(i) => {
				write!(f, "&{}", lifetime_str)?;
				i.fmt(f, lifetime, true)?;
			}
			Self::Option(i) => {
				write!(f, "Option<")?;
				i.fmt(f, lifetime, false)?;
				write!(f, ">")?;
			}
			Self::Map(k, v) => {
				write!(f, "HashMap<")?;
				k.fmt(f, lifetime, false)?;
				write!(f, ", ")?;
				v.fmt(f, lifetime, false)?;
				write!(f, ">")?;
			}
			Self::Set(i) => {
				write!(f, "HashSet<")?;
				i.fmt(f, lifetime, false)?;
				write!(f, ">")?;
			}
			Self::Vec(i) => {
				// Borrowed vectors are emitted as slices.
				if is_ref {
					write!(f, "[")?;
					i.fmt(f, lifetime, false)?;
					write!(f, "]")?;
				} else {
					write!(f, "Vec<")?;
					i.fmt(f, lifetime, false)?;
					write!(f, ">")?;
				}
			}
			Self::Cow(i) => {
				write!(f, "Cow<{}, ", lifetime_str.trim())?;
				i.fmt(f, false, true)?;
				write!(f, ">")?;
			}
		}
		Ok(())
	}
	/// Returns a converted type.
	pub fn to_ref(&self) -> Self {
		match self {
			Self::Struct(s) if s == "UidBuf" => Self::Ref(Box::new(Self::Struct("Uid".into()))),
			Self::Struct(s) if s == "String" => Self::Ref(Box::new(Self::Struct("str".into()))),
			Self::Struct(_) | Self::Map(_, _) | Self::Set(_) | Self::Vec(_) => {
				Self::Ref(Box::new(self.clone()))
			}
			Self::Primitive(_) | Self::Ref(_) | Self::Cow(_) => self.clone(),
			Self::Option(i) => Self::Option(Box::new(i.to_ref())),
		}
	}
	/// Returns a converted type.
	pub fn to_cow(&self) -> Self {
		match self {
			Self::Struct(s) if s == "UidBuf" => Self::Cow(Box::new(Self::Struct("Uid".into()))),
			Self::Struct(s) if s == "String" => Self::Cow(Box::new(Self::Struct("str".into()))),
			Self::Struct(_) | Self::Map(_, _) | Self::Set(_) | Self::Vec(_) => {
				Self::Cow(Box::new(self.clone()))
			}
			Self::Primitive(_) | Self::Ref(_) | Self::Cow(_) => self.clone(),
			Self::Option(i) => Self::Option(Box::new(i.to_cow())),
		}
	}
	/// Get code snippet for `as_ref`.
	pub fn code_as_ref(&self, name: &str) -> String {
		match self {
			Self::Struct(s) if s == "UidBuf" || s == "str" => format!("{}.as_ref()", name),
			Self::Struct(s) if s == "String" => format!("{}.as_str()", name),
			// NOTE(review): a `Self::Struct(s) if s == "str" => name.into()` arm that
			// stood here was unreachable ("str" is caught by the first arm above) and
			// was removed; behavior is unchanged. If a bare `name` was intended for
			// "str", the first arm has to be changed instead.
			Self::Struct(_) => format!("&{}", name),
			Self::Map(_, _) | Self::Set(_) | Self::Vec(_) | Self::Cow(_) => {
				format!("{}.as_ref()", name)
			}
			Self::Primitive(_) => name.into(),
			Self::Ref(i) => {
				let inner = i.code_as_ref(name);
				if inner == name {
					// e.g. a referenced primitive: dereference it.
					format!("*{}", name)
				} else if inner.starts_with('&') && &inner[1..] == name {
					// Already a reference: avoid `&` on `&name`.
					name.into()
				} else {
					inner
				}
			}
			Self::Option(i) => {
				match &**i {
					// Shortcut
					Self::Struct(s) if s == "String" => format!("{}.as_deref()", name),
					_ => {
						let inner = i.code_as_ref(name);
						if inner == name {
							inner
						} else if inner.starts_with('&') && &inner[1..] == name {
							format!("{0}.as_ref()", name)
						} else {
							format!("{0}.as_ref().map(|{0}| {1})", name, inner)
						}
					}
				}
			}
		}
	}
	/// Returns `true` if rendering this type needs a lifetime parameter.
	pub fn uses_lifetime(&self) -> bool {
		match self {
			Self::Struct(s) if s == "Uid" => true,
			Self::Ref(_) | Self::Cow(_) => true,
			Self::Map(_, i) | Self::Set(i) | Self::Vec(i) | Self::Option(i) => i.uses_lifetime(),
			_ => false,
		}
	}
}
impl FromStr for InnerRustType {
	type Err = fmt::Error;
	/// Parse a type as written in the declarations into its structured form.
	///
	/// Handles renames (`DateTime`, `Duration*`, `PermissionId`, `Ts3ErrorCode`),
	/// primitives, references (optionally with a lifetime), `?`/`Option<…>`,
	/// arrays (`T[]`, `[T]`, `Vec<T>`), `HashMap<K, V>`, `HashSet<T>` and a
	/// trailing `T` marker; everything else becomes a plain struct type.
	fn from_str(s: &str) -> Result<Self> {
		if s == "DateTime" {
			Ok(Self::Primitive("OffsetDateTime".into()))
		} else if s.starts_with("Duration") {
			Ok(Self::Primitive("Duration".into()))
		} else if s == "PermissionId" {
			Ok(Self::Primitive("Permission".into()))
		} else if s == "Ts3ErrorCode" {
			Ok(Self::Primitive("Error".into()))
		} else if s == "bool"
			|| s.starts_with('i')
			|| s.starts_with('u')
			|| s.starts_with('f')
			|| s.ends_with("Id")
			|| s.ends_with("Type")
			|| s.ends_with("Mode")
			|| s == "ChannelPermissionHint"
			|| s == "ClientPermissionHint"
			|| s == "Codec"
			|| s == "LogLevel"
			|| s == "MaxClients"
			|| s == "IpAddr"
			|| s == "Reason"
			|| s == "SocketAddr"
		{
			Ok(Self::Primitive(s.into()))
		} else if s == "Uid" {
			Ok(Self::Struct("UidBuf".into()))
		} else if s == "str" || s == "String" {
			Ok(Self::Struct("String".into()))
		} else if let Some(rest) = s.strip_prefix('&') {
			// Strip an optional lifetime like `&'a T`.
			let rest = if rest.starts_with('\'') {
				let i = rest.find(' ').ok_or_else(|| {
					eprintln!("Reference type with lifetime has no inner type: {:?}", s);
					fmt::Error
				})?;
				&rest[i + 1..]
			} else {
				rest
			};
			Ok(Self::Ref(Box::new(rest.parse()?)))
		} else if let Some(rest) = s.strip_suffix('?') {
			Ok(Self::Option(Box::new(rest.parse()?)))
		} else if s.starts_with("Option<") {
			let rest = &s[7..s.len() - 1];
			Ok(Self::Option(Box::new(rest.parse()?)))
		} else if let Some(rest) = s.strip_suffix("[]") {
			Ok(Self::Vec(Box::new(rest.parse()?)))
		} else if s.starts_with("HashMap<") {
			let rest = &s[8..s.len() - 1];
			let i = rest.find(',').ok_or_else(|| {
				eprintln!("HashMap without key: {:?}", s);
				fmt::Error
			})?;
			// Fix: the value part starts AFTER the comma (`i + 1`); the previous
			// `rest[i..]` kept the comma, so "HashMap<K, V>" parsed its value as
			// the bogus struct type ", V".
			Ok(Self::Map(Box::new(rest[..i].parse()?), Box::new(rest[i + 1..].trim().parse()?)))
		} else if s.starts_with("HashSet<") {
			let rest = &s[8..s.len() - 1];
			Ok(Self::Set(Box::new(rest.parse()?)))
		} else if s.starts_with("Vec<") {
			let rest = &s[4..s.len() - 1];
			Ok(Self::Vec(Box::new(rest.parse()?)))
		} else if s.starts_with('[') {
			let rest = &s[1..s.len() - 1];
			if rest.contains(';') {
				// Slice with explicit length, take as struct
				Ok(Self::Struct(s.into()))
			} else {
				Ok(Self::Vec(Box::new(rest.parse()?)))
			}
		} else if let Some(rest) = s.strip_suffix('T') {
			rest.parse()
		} else {
			Ok(Self::Struct(s.into()))
		}
	}
}
impl FromStr for RustType {
	type Err = fmt::Error;
	/// Parse a declaration type; a written-out lifetime (`&'…`) enables the lifetime flag.
	fn from_str(s: &str) -> Result<Self> {
		Ok(Self { inner: s.parse()?, lifetime: s.contains("&'") })
	}
}
impl RustType {
	/// Parse `s`, wrapping the result into an `Option` if `opt` is set.
	pub fn with_opt(s: &str, opt: bool) -> Result<Self> {
		let inner = s.parse()?;
		let inner = if opt { InnerRustType::Option(Box::new(inner)) } else { inner };
		Ok(Self { inner, lifetime: s.contains("&'") })
	}
	/// Parse `s` and apply at most one container modifier plus optionality.
	/// `map` has a key
	pub fn with(s: &str, opt: bool, map: Option<&str>, set: bool, vec: bool) -> Result<Self> {
		assert!(
			[map.is_some(), set, vec].iter().filter(|b| **b).count() <= 1,
			"Too many modifiers active (map: {:?}, set: {:?}, vec: {:?})",
			map,
			set,
			vec
		);
		let mut inner = s.parse()?;
		if let Some(key) = map {
			inner = InnerRustType::Map(Box::new(key.parse()?), Box::new(inner));
		}
		if set {
			inner = InnerRustType::Set(Box::new(inner));
		}
		if vec {
			inner = InnerRustType::Vec(Box::new(inner));
		}
		inner = if opt { InnerRustType::Option(Box::new(inner)) } else { inner };
		Ok(Self { inner, lifetime: s.contains("&'") })
	}
	/// Wrap into an `Option` if `opt` is set, otherwise return a clone.
	pub fn to_opt(&self, opt: bool) -> Self {
		if opt {
			Self {
				inner: InnerRustType::Option(Box::new(self.inner.clone())),
				lifetime: self.lifetime,
			}
		} else {
			self.clone()
		}
	}
	/// Convert to the borrowed form if `as_ref` is set, otherwise return a clone.
	pub fn to_ref(&self, as_ref: bool) -> Self {
		if as_ref {
			Self { inner: self.inner.to_ref(), lifetime: self.lifetime }
		} else {
			self.clone()
		}
	}
	/// Convert to the `Cow` form.
	pub fn to_cow(&self) -> Self { Self { inner: self.inner.to_cow(), lifetime: self.lifetime } }
	/// Return a clone with the lifetime flag set to `lifetime`.
	pub fn lifetime(&self, lifetime: bool) -> Self {
		let mut r = self.clone();
		r.lifetime = lifetime;
		r
	}
	/// Wrap the whole type in a reference.
	pub fn wrap_ref(&self) -> Self {
		Self { inner: InnerRustType::Ref(Box::new(self.inner.clone())), lifetime: self.lifetime }
	}
	/// Wrap the whole type in an `Option`.
	pub fn wrap_opt(&self) -> Self {
		Self { inner: InnerRustType::Option(Box::new(self.inner.clone())), lifetime: self.lifetime }
	}
	pub fn is_opt(&self) -> bool { matches!(self.inner, InnerRustType::Option(_)) }
	/// Returns `true` for primitives, looking through one level of `Option`.
	pub fn is_primitive(&self) -> bool {
		let inner = if let InnerRustType::Option(t) = &self.inner { t } else { &self.inner };
		matches!(inner, InnerRustType::Primitive(_))
	}
	pub fn is_vec(&self) -> bool { matches!(self.inner, InnerRustType::Vec(_)) }
	/// Returns `true` for `Cow`s, looking through one level of `Option`.
	pub fn is_cow(&self) -> bool {
		let inner = if let InnerRustType::Option(i) = &self.inner { i } else { &self.inner };
		matches!(inner, InnerRustType::Cow(_))
	}
	pub fn uses_lifetime(&self) -> bool { self.inner.uses_lifetime() }
	/// Returns an identifier from this type in camelCase.
	pub fn to_name(&self) -> String {
		self.to_string().replace('<', "_").replace('>', "").to_upper_camel_case()
	}
	/// Get code snippet for `as_ref`.
	pub fn code_as_ref(&self, name: &str) -> String { self.inner.code_as_ref(name) }
}
impl From<InnerRustType> for RustType {
fn from(inner: InnerRustType) -> Self { Self { inner, lifetime: false } }
}
impl fmt::Display for RustType {
	// Delegates to the inner type, forwarding the lifetime flag.
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.inner.fmt(f, self.lifetime, false) }
}
fn get_false() -> bool { false }
/// Turn a string into a doc comment by prefixing every line with `/// `.
pub fn doc_comment(s: &str) -> String {
	let mut res = String::new();
	for line in s.lines() {
		res.push_str("/// ");
		res.push_str(line);
		res.push('\n');
	}
	res
}
/// Indent every non-empty line of a string by `count` tabs.
///
/// Each input line ends up terminated with a newline; empty lines are kept
/// but not indented.
pub fn indent<S: AsRef<str>>(s: S, count: usize) -> String {
	let text = s.as_ref();
	// Build the prefix once instead of per line.
	let prefix = "\t".repeat(count);
	let mut result = String::with_capacity(text.len() + text.lines().count() * count * 4);
	for line in text.lines() {
		if !line.is_empty() {
			result.push_str(&prefix);
		}
		result.push_str(line);
		result.push('\n');
	}
	result
}
/// Remove one level of tab indentation from a string, in place.
///
/// Strips one leading tab from the first line and one tab after every
/// newline. Lines that are not indented stay unchanged.
pub fn unindent(s: &mut String) {
	// Plain assignment instead of the previous swap-with-temporary trick;
	// both replace the string contents, this is just the direct form.
	*s = s.replace("\n\t", "\n");
	// The first line has no preceding newline, handle it separately.
	if s.starts_with('\t') {
		s.remove(0);
	}
}
/// Wrap `s` in parentheses; an empty input yields an empty result.
pub fn embrace(s: &str) -> String {
	if s.is_empty() {
		return String::new();
	}
	format!("({})", s)
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/permissions.rs | utils/tsproto-structs/src/permissions.rs | use std::result::Result;
use crate::*;
use once_cell::sync::Lazy;
/// Raw CSV source of the permission declarations.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/Permissions.csv"));
/// Lazily parsed permission list.
///
/// Parsing happens on first access and panics on malformed CSV, which is a
/// build-time declaration error.
pub static DATA: Lazy<Permissions> = Lazy::new(|| {
	Permissions(
		csv::Reader::from_reader(DATA_STR.as_bytes())
			.deserialize()
			.collect::<Result<Vec<_>, _>>()
			.unwrap(),
	)
});
/// One permission entry from the CSV file.
#[derive(Debug, Deserialize)]
pub struct EnumValue {
	/// Permission name as declared in the CSV.
	pub name: String,
	/// Human readable description.
	pub doc: String,
}
/// All known permissions.
#[derive(Default, Debug)]
pub struct Permissions(pub Vec<EnumValue>);
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/enums.rs | utils/tsproto-structs/src/enums.rs | use once_cell::sync::Lazy;
use serde::Deserialize;
/// Raw TOML source of the enum declarations.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/Enums.toml"));
/// Lazily parsed declarations; panics on malformed TOML at first access.
pub static DATA: Lazy<Enums> = Lazy::new(|| toml::from_str(DATA_STR).unwrap());
/// Top level of `Enums.toml`.
#[derive(Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct Enums {
	/// Plain enums (`[[enum]]` tables).
	#[serde(rename = "enum")]
	pub enums: Vec<Enum>,
	/// Bitflag enums (`[[bitflag]]` tables); same shape as plain enums.
	#[serde(rename = "bitflag")]
	pub bitflags: Vec<Enum>,
}
/// A single enum declaration.
#[derive(Deserialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct Enum {
	pub name: String,
	pub doc: Option<String>,
	// Explicit underlying type (TOML key `type`); presumably overrides the
	// automatically computed width — TODO confirm against the generators.
	#[serde(rename = "type")]
	pub use_type: Option<String>,
	pub variants: Vec<Variant>,
}
/// A single enum variant.
#[derive(Deserialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct Variant {
	pub name: String,
	pub doc: String,
	/// Explicit discriminant; variants without a value count up from the
	/// previous one (see `Enum::bitwidth`).
	pub value: Option<u64>,
}
impl Enum {
	/// Computes the needed number of bits to store all possible variants.
	///
	/// Tracks the largest discriminant reached (explicit values reset the
	/// counter, implicit variants increment it) and returns the smallest
	/// power-of-two bit width that can represent it.
	pub fn bitwidth(&self) -> u32 {
		// Start with the leading zeros of 0 (all 64 bits). The previous
		// initializer of 0 made every `min` below a no-op, so the function
		// always returned 64 regardless of the variant values.
		let mut leading_zeros = 64;
		let mut value: u64 = 0;
		for v in &self.variants {
			if let Some(v) = v.value {
				// The counter is about to be reset, record its maximum first.
				leading_zeros = std::cmp::min(leading_zeros, value.leading_zeros());
				value = v;
			} else {
				value += 1;
			}
		}
		// Account for the final counter value.
		leading_zeros = std::cmp::min(leading_zeros, value.leading_zeros());
		(64 - leading_zeros).next_power_of_two()
	}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/versions.rs | utils/tsproto-structs/src/versions.rs | use std::collections::HashMap;
use std::fmt::Write;
use std::result::Result;
use crate::*;
use base64::prelude::*;
use once_cell::sync::Lazy;
/// Raw CSV source of the version declarations.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/Versions.csv"));
/// Lazily parsed version list.
///
/// After parsing, versions that share the same version number and platform
/// get an increasing `count` so their generated enum names stay unique.
pub static DATA: Lazy<Versions> = Lazy::new(|| {
	let mut table = csv::Reader::from_reader(DATA_STR.as_bytes());
	let mut vs = Versions(table.deserialize().collect::<Result<Vec<_>, _>>().unwrap());
	// Add count if necessary
	let mut counts: HashMap<_, u32> = HashMap::new();
	for v in &vs.0 {
		let key = VersionKey::new(v);
		*counts.entry(key).or_default() += 1;
	}
	// Only keys that occur more than once need disambiguation.
	counts.retain(|_, c| *c > 1);
	// Iterate in reverse so the last duplicate gets the highest count.
	for v in vs.0.iter_mut().rev() {
		let key = VersionKey::new(v);
		if let Some(count) = counts.get_mut(&key) {
			v.count = *count;
			*count -= 1;
		}
	}
	vs
});
/// One client version entry from `Versions.csv`.
#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub struct Version {
	pub version: String,
	pub platform: String,
	/// Base64 encoded version signature (decoded by `get_sign_array`).
	pub hash: String,
	// Disambiguation counter for versions sharing number and platform;
	// 0 means the name is unique. Filled in by the `DATA` initializer.
	#[serde(default)]
	count: u32,
}
impl Version {
	/// Build the Rust enum variant name for this version: the platform
	/// (spaces and dots replaced), followed by the version parts (`?`
	/// becomes `X`), followed by a `__<count>` suffix for duplicates.
	pub fn get_enum_name(&self) -> String {
		let mut res = self.platform.replace([' ', '.'], "_");
		let ver = self.version.split(' ').next().unwrap().replace('-', "_");
		for num in ver.split('.') {
			res.push('_');
			res.push_str(if num == "?" { "X" } else { num });
		}
		if self.count != 0 {
			res.push_str("__");
			res.push_str(&self.count.to_string());
		}
		res
	}
	/// Decode the base64 signature into a `0x.., 0x.., …` byte list literal.
	pub fn get_sign_array(&self) -> String {
		let bytes = BASE64_STANDARD.decode(&self.hash).unwrap();
		let parts: Vec<String> = bytes.iter().map(|b| format!("{:#x}", b)).collect();
		parts.join(", ")
	}
}
/// Key used to detect duplicate versions: version number (without the build
/// suffix) plus platform.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct VersionKey {
	pub version: String,
	pub platform: String,
}
impl VersionKey {
	fn new(v: &Version) -> Self {
		Self {
			// Only the first word of the version string, e.g. "3.5.6".
			version: v.version.split(' ').next().unwrap().to_string(),
			platform: v.platform.clone(),
		}
	}
}
/// All known client versions.
#[derive(Default, Debug)]
pub struct Versions(pub Vec<Version>);
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/book.rs | utils/tsproto-structs/src/book.rs | use heck::*;
use once_cell::sync::Lazy;
use serde::Deserialize;
use crate::*;
/// Raw TOML source of the bookkeeping declarations.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/Book.toml"));
/// Lazily parsed declarations; panics on malformed TOML at first access.
pub static DATA: Lazy<BookDeclarations> = Lazy::new(|| toml::from_str(DATA_STR).unwrap());
/// Top level of `Book.toml`: the list of bookkeeping structs.
#[derive(Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct BookDeclarations {
	#[serde(rename = "struct")]
	pub structs: Vec<Struct>,
}
impl BookDeclarations {
	/// Look up a struct declaration by name.
	///
	/// Panics when no struct with this name exists; declarations are static,
	/// so a missing name is a build-time error.
	pub fn get_struct(&self, name: &str) -> &Struct {
		self.structs
			.iter()
			.find(|s| s.name == name)
			.unwrap_or_else(|| panic!("Cannot find bookkeeping struct {}", name))
	}
}
/// A reference to a property of some struct that serves as (part of) an id.
#[derive(Deserialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct Id {
	/// Name of the struct that owns the property.
	#[serde(rename = "struct")]
	pub struct_name: String,
	/// Name of the property inside that struct.
	pub prop: String,
}
impl Id {
	/// Resolve this id reference to the property it points at.
	///
	/// Panics when the referenced struct or its property does not exist.
	/// Declarations are static, so this is a build-time error. The previous
	/// version reported "Cannot find struct" even when the struct existed
	/// but the property was missing; the two cases are now distinguished.
	pub fn find_property<'a>(&self, structs: &'a [Struct]) -> &'a Property {
		// Find struct
		let s = structs
			.iter()
			.find(|s| s.name == self.struct_name)
			.unwrap_or_else(|| panic!("Cannot find struct {} of id", self.struct_name));
		// Find property
		s.properties.iter().find(|p| p.name == self.prop).unwrap_or_else(|| {
			panic!("Cannot find property {} in struct {} of id", self.prop, self.struct_name)
		})
	}
}
/// A bookkeeping struct declaration.
#[derive(Deserialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct Struct {
	pub name: String,
	/// If the whole struct is optional.
	#[serde(default = "get_false")]
	pub opt: bool,
	/// The properties that make up the id of this struct.
	pub id: Vec<Id>,
	pub doc: String,
	pub properties: Vec<Property>,
}
/// A property of a bookkeeping struct.
#[derive(Deserialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct Property {
	/// The name of this property (in PascalCase) which can be called from rust
	/// when generated.
	pub name: String,
	/// The rust declaration type.
	#[serde(rename = "type")]
	pub type_s: String,
	pub doc: Option<String>,
	/// If this property is optional.
	#[serde(default = "get_false")]
	pub opt: bool,
	/// Container modifier: `map`, `array` or `set`.
	#[serde(rename = "mod")]
	pub modifier: Option<String>,
	/// Key type when `modifier` is `map`.
	pub key: Option<String>,
}
impl Struct {
	/// The Rust type of this struct, honoring the `opt` flag.
	pub fn get_type(&self) -> Result<RustType> { RustType::with_opt(&self.name, self.opt) }
	/// Render the id tuple of this struct, e.g. `(TypeA, TypeB)`; empty when
	/// there are no ids.
	pub fn get_ids(&self, structs: &[Struct]) -> String {
		let mut parts = Vec::with_capacity(self.id.len());
		for id in &self.id {
			let prop = id.find_property(structs);
			parts.push(prop.get_type().unwrap().to_string());
		}
		embrace(&parts.join(", "))
	}
	/// All properties whose type is not itself a declared struct.
	pub fn get_properties(&self, structs: &[Struct]) -> Vec<&Property> {
		self.properties
			.iter()
			.filter(|p| structs.iter().all(|s| s.name != p.type_s))
			.collect()
	}
	/// Get all properties, including foreign ids (own ids are listed in properties).
	pub fn get_all_properties(&self) -> impl Iterator<Item = PropId> {
		// Only foreign ids: ids of this struct are already in `properties`.
		let foreign_ids =
			self.id.iter().filter(move |i| i.struct_name != self.name).map(PropId::from);
		foreign_ids.chain(self.properties.iter().map(PropId::from))
	}
}
impl Property {
	/// The element type, ignoring any container modifier.
	pub fn get_inner_type(&self) -> Result<RustType> { RustType::with_opt(&self.type_s, self.opt) }
	/// The full type including the map/set/array modifier and `Option`.
	pub fn get_type(&self) -> Result<RustType> {
		let key = if self.is_map() {
			Some(self.key.as_deref().ok_or_else(|| {
				eprintln!("Specified map without key");
				fmt::Error
			})?)
		} else {
			None
		};
		RustType::with(&self.type_s, self.opt, key, self.is_set(), self.is_array())
	}
	/// Gets the type as a name, used for storing it in an enum.
	pub fn get_inner_type_as_name(&self) -> Result<String> { Ok(self.get_inner_type()?.to_name()) }
	/// The id tuple for this property: the owning struct's ids, extended by
	/// the map key or element type when the property is a container.
	pub fn get_ids(&self, structs: &[Struct], struc: &Struct) -> String {
		let mut ids = struc.get_ids(structs);
		// `get_ids` returns the tuple with parentheses; strip them so more
		// elements can be appended before re-embracing below.
		if !ids.is_empty() {
			ids.remove(0);
			ids.pop();
		}
		if let Some(m) = &self.modifier {
			if !ids.is_empty() {
				ids.push_str(", ");
			}
			if m == "map" {
				// The key is part of the id
				ids.push_str(self.key.as_ref().unwrap());
			} else if m == "array" || m == "set" {
				// Take the element itself as part of the id.
				// It has to be copied but most of the times it is an id itself.
				ids.push_str(&self.get_inner_type().unwrap().to_string());
			} else {
				panic!("Unknown modifier {}", m);
			}
		}
		embrace(&ids)
	}
	/// Get the name without trailing `s`.
	pub fn get_name(&self) -> &str {
		if self.modifier.is_some() && self.name.ends_with('s') {
			&self.name[..self.name.len() - 1]
		} else {
			&self.name
		}
	}
	pub fn is_array(&self) -> bool { self.modifier.as_ref().map(|s| s == "array").unwrap_or(false) }
	pub fn is_set(&self) -> bool { self.modifier.as_ref().map(|s| s == "set").unwrap_or(false) }
	pub fn is_map(&self) -> bool { self.modifier.as_ref().map(|s| s == "map").unwrap_or(false) }
}
/// Either a plain property or an id reference; used to iterate uniformly
/// over everything a struct stores.
pub enum PropId<'a> {
	Prop(&'a Property),
	Id(&'a Id),
}
impl<'a> PropId<'a> {
	/// Attribute name in snake_case; foreign ids get their struct name
	/// prefixed to stay unambiguous.
	pub fn get_attr_name(&self, struc: &Struct) -> String {
		match self {
			PropId::Prop(p) => p.name.to_snake_case(),
			PropId::Id(id) if struc.name == id.struct_name => id.prop.to_snake_case(),
			PropId::Id(id) => {
				format!("{}_{}", id.struct_name.to_snake_case(), id.prop.to_snake_case())
			}
		}
	}
	/// Documentation string, if any (id references carry none).
	pub fn get_doc(&self) -> Option<&str> {
		if let PropId::Prop(p) = self { p.doc.as_deref() } else { None }
	}
	/// Rust type of the underlying property.
	pub fn get_type(&self, structs: &[Struct]) -> Result<RustType> {
		match self {
			PropId::Prop(p) => p.get_type(),
			PropId::Id(id) => id.find_property(structs).get_type(),
		}
	}
}
// Allow treating properties and ids uniformly as `PropId`s.
impl<'a> From<&'a Property> for PropId<'a> {
	fn from(p: &'a Property) -> Self { PropId::Prop(p) }
}
impl<'a> From<&'a Id> for PropId<'a> {
	fn from(p: &'a Id) -> Self { PropId::Id(p) }
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/utils/tsproto-structs/src/book_to_messages.rs | utils/tsproto-structs/src/book_to_messages.rs | use std::str::FromStr;
use heck::*;
use once_cell::sync::Lazy;
use serde::Deserialize;
use crate::book::{BookDeclarations, Property, Struct};
use crate::messages::{Field, Message, MessageDeclarations};
use crate::*;
/// Raw TOML source of the book-to-messages rules.
pub const DATA_STR: &str =
	include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/declarations/BookToMessages.toml"));
/// Lazily resolved rules: the TOML is parsed and every rule is linked
/// against the book struct and message declarations. Any inconsistency in
/// the declarations panics at first access (a build-time error).
pub static DATA: Lazy<BookToMessagesDeclarations<'static>> = Lazy::new(|| {
	let rules: TomlStruct = toml::from_str(DATA_STR).unwrap();
	let book = &book::DATA;
	let messages = &messages::DATA;
	let decls: Vec<_> = rules
		.rule
		.into_iter()
		.map(|r| {
			let msg = messages.get_message(&r.to);
			let msg_fields =
				msg.attributes.iter().map(|a| messages.get_field(a)).collect::<Vec<_>>();
			let book_struct = book
				.structs
				.iter()
				.find(|s| s.name == r.from)
				.unwrap_or_else(|| panic!("Cannot find struct {}", r.from));
			let find_prop =
				|name: &str, book_struct: &'static Struct| -> Option<&'static Property> {
					book_struct.properties.iter().find(|p| p.name == *name)
				};
			// Map RuleProperty to RuleKind
			let to_rule_kind = |p: RuleProperty| {
				p.assert_valid();
				if p.function.is_some() {
					if p.type_s.is_some() {
						RuleKind::ArgumentFunction {
							type_s: p.type_s.unwrap(),
							from: p.from.unwrap(),
							name: p.function.unwrap(),
							to: p
								.tolist
								.unwrap()
								.into_iter()
								.map(|p| find_field(&p, &msg_fields))
								.collect(),
						}
					} else {
						RuleKind::Function {
							from: p.from.as_ref().map(|p| {
								find_prop(p, book_struct).unwrap_or_else(|| {
									panic!("No such (nested) property {} found in struct", p)
								})
							}),
							name: p.function.unwrap(),
							to: p
								.tolist
								.unwrap()
								.into_iter()
								.map(|p| find_field(&p, &msg_fields))
								.collect(),
						}
					}
				} else if let Some(prop) = find_prop(p.from.as_ref().unwrap(), book_struct) {
					RuleKind::Map { from: prop, to: find_field(&p.to.unwrap(), &msg_fields) }
				} else {
					// `from` is not a book property, so it becomes an argument.
					RuleKind::ArgumentMap {
						from: p.from.unwrap(),
						to: find_field(&p.to.unwrap(), &msg_fields),
					}
				}
			};
			let mut ev = Event {
				op: r.operation.parse().expect("Failed to parse operation"),
				ids: r.ids.into_iter().map(to_rule_kind).collect(),
				msg,
				book_struct,
				rules: r.properties.into_iter().map(to_rule_kind).collect(),
			};
			// Add ids, which are required fields in the message.
			// The filter checks that the message is not optional.
			for field in msg_fields.iter().filter(|f| msg.attributes.iter().any(|a| *a == f.map)) {
				if !ev.ids.iter().any(|i| match i {
					RuleKind::Map { to, .. } => to == field,
					RuleKind::ArgumentMap { to, .. } => to == field,
					RuleKind::Function { to, .. } | RuleKind::ArgumentFunction { to, .. } => {
						to.contains(field)
					}
				}) {
					// Try to find matching property
					if let Some(prop) = book
						.get_struct(&ev.book_struct.name)
						.properties
						.iter()
						.find(|p| !p.opt && p.name == field.pretty)
					{
						ev.ids.push(RuleKind::Map { from: prop, to: field })
					}
					// The property may be in the properties
				}
			}
			// Add properties
			for field in msg_fields.iter().filter(|f| !msg.attributes.iter().any(|a| *a == f.map)) {
				if !ev.ids.iter().chain(ev.rules.iter()).any(|i| match i {
					RuleKind::Map { to, .. } => to == field,
					RuleKind::ArgumentMap { to, .. } => to == field,
					RuleKind::Function { to, .. } | RuleKind::ArgumentFunction { to, .. } => {
						to.contains(field)
					}
				}) {
					// We ignore that properties are set as option. In all current cases, it
					// makes no sense to set them to `None`, so we handle them the same way as
					// non-optional properties.
					if let Some(prop) = book
						.get_struct(&ev.book_struct.name)
						.properties
						.iter()
						.find(|p| p.name == field.pretty)
					{
						if !ev.ids.iter().chain(ev.rules.iter()).any(|i| i.from_name() == prop.name)
						{
							ev.rules.push(RuleKind::Map { from: prop, to: field })
						}
					}
				}
			}
			ev
		})
		.collect();
	BookToMessagesDeclarations { book, messages, decls }
});
/// The fully resolved book-to-messages rules.
#[derive(Debug)]
pub struct BookToMessagesDeclarations<'a> {
	pub book: &'a BookDeclarations,
	pub messages: &'a MessageDeclarations,
	pub decls: Vec<Event<'a>>,
}
/// One resolved rule: how a change of `book_struct` is turned into the
/// message `msg`.
#[derive(Debug)]
pub struct Event<'a> {
	pub op: RuleOp,
	pub msg: &'a Message,
	pub book_struct: &'a Struct,
	/// Rules filling the id fields of the message.
	pub ids: Vec<RuleKind<'a>>,
	/// Rules filling the remaining message fields.
	pub rules: Vec<RuleKind<'a>>,
}
/// How one or more message fields get their value.
#[derive(Debug)]
pub enum RuleKind<'a> {
	/// Copy a book property into a message field.
	Map { from: &'a Property, to: &'a Field },
	/// Copy a caller supplied argument into a message field.
	ArgumentMap { from: String, to: &'a Field },
	/// Call a function, optionally fed from a book property.
	Function { from: Option<&'a Property>, name: String, to: Vec<&'a Field> },
	/// Call a function fed from a caller supplied argument of type `type_s`.
	ArgumentFunction { from: String, type_s: String, name: String, to: Vec<&'a Field> },
}
/// The kind of change an event describes.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum RuleOp {
	Add,
	Remove,
	Update,
}
/// Top level of `BookToMessages.toml`.
#[derive(Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct TomlStruct {
	rule: Vec<Rule>,
}
/// An unresolved rule as written in the TOML file.
#[derive(Clone, Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct Rule {
	/// Name of the book struct.
	from: String,
	/// Name of the message.
	to: String,
	/// `add`, `remove` or `update` (parsed into `RuleOp`).
	operation: String,
	#[serde(default = "Vec::new")]
	ids: Vec<RuleProperty>,
	#[serde(default = "Vec::new")]
	properties: Vec<RuleProperty>,
}
/// An unresolved property mapping; its shape is validated by `assert_valid`.
#[derive(Clone, Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct RuleProperty {
	from: Option<String>,
	to: Option<String>,
	#[serde(rename = "type")]
	type_s: Option<String>,
	function: Option<String>,
	tolist: Option<Vec<String>>,
}
impl RuleProperty {
	/// Check that this property has one of the valid shapes — either
	/// `from`+`to`, or `function`+`tolist` (optionally with `type`+`from`) —
	/// and panic with a descriptive message otherwise.
	fn assert_valid(&self) {
		if let Some(to) = &self.to {
			assert!(self.from.is_some(), "to-property '{}' is invalid. It needs a 'from'", to);
			assert!(
				self.function.is_none(),
				"to-property '{}' is invalid. It must not have a 'function'",
				to
			);
			assert!(
				self.tolist.is_none(),
				"to-property '{}' is invalid. It must not have a 'tolist'",
				to
			);
			assert!(
				self.type_s.is_none(),
				"to-property '{}' is invalid. It must not have a 'type'",
				to
			);
		} else if let Some(fun) = &self.function {
			assert!(
				self.tolist.is_some(),
				"function-property '{}' is invalid. It needs 'tolist'",
				fun
			);
			assert!(
				self.type_s.is_none() || self.from.is_some(),
				"function-property '{}' is invalid. If the type ({:?}) is set, from must be set \
				 too",
				fun,
				self.type_s
			);
		} else {
			// Message previously read "…'function'.Info:" — missing space fixed.
			panic!(
				"Property is invalid. It needs either a 'to' or 'tolist'+'function'. Info: \
				 tolist={:?} type={:?} from={:?}",
				self.tolist, self.type_s, self.from
			);
		}
	}
}
impl FromStr for RuleOp {
	type Err = fmt::Error;
	/// Parse `add`, `remove` or `update` as written in the TOML file.
	fn from_str(s: &str) -> Result<Self> {
		match s {
			"add" => Ok(RuleOp::Add),
			"remove" => Ok(RuleOp::Remove),
			"update" => Ok(RuleOp::Update),
			_ => {
				eprintln!("Cannot parse operation, needs to be add, remove or update");
				Err(fmt::Error)
			}
		}
	}
}
/// Look up a message field by its pretty (PascalCase, rust-callable) name.
///
/// Panics when the field does not exist; declarations are static, so a
/// missing name is a build-time error.
fn find_field<'a>(name: &str, msg_fields: &[&'a Field]) -> &'a Field {
	match msg_fields.iter().find(|f| f.pretty == name) {
		Some(field) => field,
		None => panic!("Cannot find field '{}'", name),
	}
}
impl<'a> RuleKind<'a> {
	/// Name of the book property or argument this rule reads from.
	///
	/// Panics for a `Function` rule without a `from` property.
	pub fn from_name(&'a self) -> &'a str {
		match self {
			RuleKind::Map { from, .. } => &from.name,
			RuleKind::ArgumentMap { from, .. } => from,
			RuleKind::Function { from, name, .. } => {
				&from.unwrap_or_else(|| panic!("From not set for function {}", name)).name
			}
			RuleKind::ArgumentFunction { from, .. } => from,
		}
	}
	/// Like `from_name` but with a trailing `s` removed.
	pub fn from_name_singular(&'a self) -> &'a str {
		let name = self.from_name();
		if let Some(s) = name.strip_suffix('s') { s } else { name }
	}
	/// The book property this rule reads from.
	///
	/// Panics for argument rules, which have no backing property.
	pub fn from(&self) -> &'a Property {
		match self {
			RuleKind::Map { from, .. } => from,
			RuleKind::Function { from, name, .. } => {
				from.unwrap_or_else(|| panic!("From not set for function {}", name))
			}
			RuleKind::ArgumentMap { .. } | RuleKind::ArgumentFunction { .. } => {
				panic!("From is not a property for argument functions")
			}
		}
	}
	pub fn is_function(&self) -> bool {
		matches!(self, RuleKind::Function { .. } | RuleKind::ArgumentFunction { .. })
	}
	/// The Rust type of the source value.
	pub fn get_type(&self) -> RustType {
		match self {
			RuleKind::Map { .. } | RuleKind::Function { .. } => self.from().get_type().unwrap(),
			RuleKind::ArgumentMap { to, .. } => to.get_type("").unwrap(),
			RuleKind::ArgumentFunction { type_s, .. } => type_s.parse().unwrap(),
		}
	}
	/// Like `get_type`, but for `Map` rules the `Option` wrapper is dropped.
	pub fn get_type_no_option(&self) -> RustType {
		match self {
			RuleKind::Map { .. } => {
				// Clone the property so its `opt` flag can be cleared before
				// rendering the type.
				let mut rust_type = self.from().clone();
				rust_type.opt = false;
				rust_type.get_type().unwrap()
			}
			_ => self.get_type(),
		}
	}
	/// Render `name: &Type` for use in a generated function signature.
	pub fn get_argument(&self) -> String {
		format!("{}: {}", self.from_name().to_snake_case(), self.get_type().to_ref(true))
	}
	/// Like `get_argument` but without the `Option` wrapper.
	pub fn get_argument_no_option(&self) -> String {
		format!("{}: {}", self.from_name().to_snake_case(), self.get_type_no_option().to_ref(true))
	}
}
impl<'a> Event<'a> {
	/// The name of the change, could be a keyword.
	pub fn get_small_name(&self) -> String {
		// Strip own struct name and 'Request'
		let without_struct = self.msg.name.replace(&self.book_struct.name, "");
		without_struct.replace("Request", "")
	}
	/// The small name, not a keyword
	pub fn get_change_name(&self) -> String {
		let small = self.get_small_name();
		if small == "Move" {
			// Fall back to the full message name for the colliding case.
			self.msg.name.clone()
		} else {
			small
		}
	}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/src/prelude.rs | tsclientlib/src/prelude.rs | pub use ts_bookkeeping::data::exts::*;
pub use ts_bookkeeping::messages::OutMessageTrait;
pub use crate::OutCommandExt as _;
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/src/lib.rs | tsclientlib/src/lib.rs | //! tsclientlib is a library which makes it simple to create TeamSpeak clients
//! and bots.
//!
//! For a full client application, you might want to have a look at [Qint].
//!
//! If more power over the internals of a connection is needed, the `unstable` feature can be
//! enabled. Beware that functionality behind this feature may change on any minor release.
//!
//! The base class of this library is the [`Connection`]. One instance of this
//! struct manages a single connection to a server.
//!
//! [Qint]: https://github.com/ReSpeak/Qint
// Needed for futures on windows.
#![recursion_limit = "128"]
use std::borrow::Cow;
use std::collections::VecDeque;
use std::convert::TryInto;
use std::iter;
use std::mem;
use std::net::SocketAddr;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll};
use std::time::Duration;
use base64::prelude::*;
use futures::prelude::*;
use thiserror::Error;
use time::OffsetDateTime;
use tokio::io::AsyncWriteExt;
use tokio::net::{TcpStream, UdpSocket};
use tokio::sync::oneshot;
use tracing::{debug, info, info_span, warn, Instrument, Span};
use ts_bookkeeping::messages::c2s;
use ts_bookkeeping::messages::OutMessageTrait;
use tsproto::client;
use tsproto::connection::StreamItem as ProtoStreamItem;
use tsproto::resend::ResenderState;
use tsproto_packets::commands::{CommandItem, CommandParser};
#[cfg(feature = "audio")]
use tsproto_packets::packets::InAudioBuf;
use tsproto_packets::packets::{InCommandBuf, OutCommand, OutPacket, PacketType};
#[cfg(feature = "audio")]
pub mod audio;
pub mod prelude;
pub mod resolver;
pub mod sync;
// The build environment of tsclientlib.
git_testament::git_testament!(TESTAMENT);
#[cfg(test)]
mod tests;
// Reexports
pub use ts_bookkeeping::messages::s2c::InMessage;
// TODO This is bad because it re-exports ConnectOptions
pub use ts_bookkeeping::*;
pub use tsproto::resend::{ConnectionStats, PacketStat};
pub use tsproto::Identity;
pub use tsproto_types::errors::Error as TsError;
/// Wait this time for initserver, in seconds.
const INITSERVER_TIMEOUT: u64 = 5;
/// Crate wide result type defaulting to this crate's [`Error`].
type Result<T> = std::result::Result<T, Error>;
/// Handle identifying a sent command, returned by
/// [`OutCommandExt::send_with_result`].
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct MessageHandle(pub u16);
/// Handle identifying a running file transfer in the event stream.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct FiletransferHandle(pub u16);
/// An error answer of the server to a sent command.
#[derive(Clone, Debug, Error, Eq, Hash, PartialEq)]
#[error("{}", error)]
pub struct CommandError {
	/// The error code the server answered with.
	#[source]
	pub error: TsError,
	/// The missing permission, if the server reported one.
	pub missing_permission: Option<Permission>,
}
/// All errors that can occur on a connection.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum Error {
	/// A command returned an error.
	#[error(transparent)]
	CommandError(#[from] CommandError),
	/// The low level connection setup failed.
	#[error("Failed to connect: {0}")]
	Connect(#[source] tsproto::client::Error),
	/// None of the resolved addresses accepted a connection; contains the
	/// error for every attempt.
	#[error("Failed to connect to server at {address:?}: {errors:?}")]
	ConnectFailed { address: String, errors: Vec<Error> },
	/// An established connection failed.
	#[error("Connection aborted: {0}")]
	ConnectionFailed(#[source] tsproto::client::Error),
	/// The connection was destroyed.
	#[error("Connection does not exist anymore")]
	ConnectionGone,
	/// The server answered the connection attempt with an error.
	#[error("Server refused connection: {0}")]
	ConnectTs(#[source] tsproto_types::errors::Error),
	/// Io error on the file transfer tcp stream.
	#[error("File transfer failed: {0}")]
	FiletransferIo(#[source] std::io::Error),
	#[error("Failed to create identity: {0}")]
	IdentityCreate(#[source] tsproto::Error),
	#[error("The server needs an identity of level {0}, please increase your identity level")]
	IdentityLevel(u8),
	#[error("The server requested an identity of level {needed}, but we already have level {have}")]
	IdentityLevelCorrupted { needed: u8, have: u8 },
	#[error("Failed to increase identity level: Thread died")]
	IdentityLevelIncreaseFailedThread,
	#[error("We should be connected but the connection params do not exist")]
	InitserverParamsMissing,
	#[error("Failed to parse initserver: {0}")]
	InitserverParse(#[source] ts_bookkeeping::messages::ParseError),
	/// No initserver was received within [`INITSERVER_TIMEOUT`].
	#[error("Timeout while waiting for initserver")]
	InitserverTimeout,
	#[error("Failed to receive initserver: {0}")]
	InitserverWait(#[source] tsproto::client::Error),
	#[error("Io error: {0}")]
	Io(#[source] tokio::io::Error),
	/// The connection is currently not connected to a server but is in the process of connecting.
	#[error("Currently not connected")]
	NotConnected,
	#[error("Failed to resolve address: {0}")]
	ResolveAddress(#[source] Box<resolver::Error>),
	#[error("Failed to send clientinit: {0}")]
	SendClientinit(#[source] tsproto::client::Error),
	#[error("Failed to send packet: {0}")]
	SendPacket(#[source] tsproto::client::Error),
	/// On reconnect, the server presented a different identity than before.
	#[error("The server changed its identity")]
	ServerUidMismatch(UidBuf),
}
/// The reason for a temporary disconnect.
#[derive(Clone, Copy, Debug)]
pub enum TemporaryDisconnectReason {
	/// Timed out because the server did not respond to packets in time.
	///
	/// The contained string is a static description of the timeout
	/// (presumably which packet type timed out — confirm at the
	/// construction sites).
	Timeout(&'static str),
	/// The server terminated our connection because it shut down.
	///
	/// This corresponds to the `Serverstop` and `ClientdisconnectServerShutdown` reasons.
	/// This often happens on server restarts, so the connection will try to reconnect.
	Serverstop,
}
/// Extension trait to send outgoing commands over a [`Connection`].
pub trait OutCommandExt {
	/// Adds a `return_code` to the command and returns if the corresponding
	/// answer is received. If an error occurs, the future will return an error.
	fn send_with_result(self, con: &mut Connection) -> Result<MessageHandle>;
	/// Sends the command without asking for an answer.
	fn send(self, con: &mut Connection) -> Result<()>;
}
/// The result of a download request.
///
/// A file download can be started by [`Connection::download_file`]; the
/// result is delivered in [`StreamItem::FileDownload`].
#[derive(Debug)]
pub struct FileDownloadResult {
	/// The size of the requested file.
	// TODO The rest of the size when a seek_position is specified?
	pub size: u64,
	/// The stream where the file can be downloaded.
	pub stream: TcpStream,
}
/// The result of an upload request.
///
/// A file upload can be started by [`Connection::upload_file`]; the result
/// is delivered in [`StreamItem::FileUpload`].
#[derive(Debug)]
pub struct FileUploadResult {
	/// The size of the already uploaded part when `resume` was set to `true`
	/// in [`Connection::upload_file`].
	pub seek_position: u64,
	/// The stream where the file can be uploaded.
	pub stream: TcpStream,
}
/// Signals audio related changes.
///
/// Delivered in [`StreamItem::AudioChange`].
#[derive(Debug)]
pub enum AudioEvent {
	/// If this client can send audio or is muted.
	///
	/// If the client is muted, the server will drop any sent audio packets. A client counts as
	/// muted if input or output is muted, he is marked away or the talk power is less than the
	/// needed talk power in the current channel. Temporary disconnects also count as mutes.
	///
	/// Every time the mute state changes, this event is emitted.
	CanSendAudio(bool),
	/// If this client can receive audio or the output is muted.
	///
	/// Audio packets might still be received but should not be played.
	///
	/// Every time the mute state changes, this event is emitted.
	CanReceiveAudio(bool),
}
/// An event that gets returned by the connection.
///
/// A stream of these events is returned by [`Connection::events`].
#[derive(Debug)]
pub enum StreamItem {
	/// All the incoming book events.
	///
	/// If a connection to the server was established this will contain an added event of a server.
	BookEvents(Vec<events::Event>),
	/// All incoming messages that are not related to the book.
	///
	/// This contains messages like `ChannelListFinished` or `ClientChatComposing`.
	/// All events related to channels or clients are returned as events in the `BookEvents`
	/// variant. Other messages handled by tsclientlib, e.g. for filetransfer are also not included
	/// in these events.
	MessageEvent(InMessage),
	/// Received an audio packet.
	///
	/// Audio packets can be handled by the [`AudioHandler`](audio::AudioHandler), which builds a
	/// queue per client and handles packet loss and jitter.
	#[cfg(feature = "audio")]
	Audio(InAudioBuf),
	/// The needed level.
	IdentityLevelIncreasing(u8),
	/// The identity level was successfully increased.
	///
	/// This event may occur without an `IdentityLevelIncreasing` event before
	/// if a new identity is created because no identity was supplied.
	IdentityLevelIncreased,
	/// The connection timed out or the server shut down. The connection will be
	/// rebuilt automatically.
	DisconnectedTemporarily(TemporaryDisconnectReason),
	/// The result of sending a message.
	///
	/// The [`MessageHandle`] is the return value of [`OutCommandExt::send_with_result`].
	MessageResult(MessageHandle, std::result::Result<(), CommandError>),
	/// A file download succeeded. This event contains the `TcpStream` where the
	/// file can be downloaded.
	///
	/// The [`FiletransferHandle`] is the return value of [`Connection::download_file`].
	FileDownload(FiletransferHandle, FileDownloadResult),
	/// A file upload succeeded. This event contains the `TcpStream` where the
	/// file can be uploaded.
	///
	/// The [`FiletransferHandle`] is the return value of [`Connection::upload_file`].
	FileUpload(FiletransferHandle, FileUploadResult),
	/// A file download or upload failed.
	///
	/// This can happen if either the TeamSpeak server denied the file transfer or the tcp
	/// connection failed.
	///
	/// The [`FiletransferHandle`] is the return value of [`Connection::download_file`] or
	/// [`Connection::upload_file`].
	FiletransferFailed(FiletransferHandle, Error),
	/// The network statistics were updated.
	///
	/// This means e.g. the packet loss got a new value. Clients with audio probably want to update
	/// the packet loss option of opus.
	NetworkStatsUpdated,
	/// A change related to audio.
	AudioChange(AudioEvent),
}
/// The `Connection` is the main interaction point with this library.
///
/// It represents a connection to a TeamSpeak server. It will reconnect automatically when the
/// connection times out. It will not reconnect when the client is kicked or banned from the
/// server.
pub struct Connection {
	/// Current phase: connecting, increasing the identity level or connected.
	state: ConnectionState,
	/// Tracing span for all logs of this connection.
	span: Span,
	options: ConnectOptions,
	/// Queued events that get drained by [`Connection::events`].
	stream_items: VecDeque<Result<StreamItem>>,
}
/// State of an established connection.
struct ConnectedConnection {
	client: client::Client,
	// NOTE(review): presumably a counter for generating unique command
	// return codes — confirm in the impl outside this view.
	cur_return_code: u16,
	// NOTE(review): presumably a counter for client side file transfer ids.
	cur_filetransfer_id: u16,
	/// If we are subscribed to the server. This will automatically subscribe to new channels.
	subscribed: bool,
	/// If a file stream can be opened, it gets put in here until the tcp
	/// connection is ready and the key is sent.
	///
	/// Afterwards we can directly return a `TcpStream` in the event stream.
	filetransfers: Vec<future::BoxFuture<'static, StreamItem>>,
	/// When the connection was established.
	connection_time: time::OffsetDateTime,
}
/// The phases a [`Connection`] moves through.
enum ConnectionState {
	/// The future that resolves to a connection and a boolean if we should reconnect on failure.
	///
	/// If the `bool` is `false`, the connection will abort on error. Otherwise it will try to
	/// connect again on timeout.
	Connecting(future::BoxFuture<'static, Result<(client::Client, data::Connection)>>, bool),
	/// A higher level identity is being computed in the background.
	IdentityLevelIncreasing {
		/// We get the improved identity here.
		recv: oneshot::Receiver<Identity>,
		/// Shared state used to cancel the computation (see
		/// [`IdentityIncreaseLevelState`]).
		state: Arc<Mutex<IdentityIncreaseLevelState>>,
	},
	/// Fully connected to a server.
	Connected {
		con: ConnectedConnection,
		/// The bookkeeping data of the server (clients, channels, …).
		book: data::Connection,
	},
}
/// A wrapper to poll events from a connection. This is used so a user can drop
/// and filter the stream of events without problems.
///
/// Holds only a mutable borrow; all state lives in the [`Connection`] itself.
struct EventStream<'a>(&'a mut Connection);
/// State shared with the background thread that improves the identity level.
enum IdentityIncreaseLevelState {
    /// The computation is running.
    Computing,
    /// Set to this state to cancel the computation.
    Canceled,
}
/// The main type of this crate, which represents a connection to a server.
///
/// After creating a connection with [`Connection::new`], the main way to interact with it is
/// [`get_state`](Connection::get_state). It stores currently visible clients and channels on the
/// server. The setter methods e.g. for the nickname or channel create a command that can be sent
/// to the server. The send method returns a handle which can then be used to check if
/// the action succeeded or not.
///
/// The second way of interaction is polling with [`events()`](Connection::events), which returns a
/// stream of [`StreamItem`]s.
///
/// The connection will not do anything unless the event stream is polled. Even
/// sending packets will only happen while polling. Make sure to always wait for
/// events when awaiting other futures.
///
/// # Examples
/// This will open a connection to the TeamSpeak server at `localhost`.
///
/// ```no_run
/// use futures::prelude::*;
/// use tsclientlib::{Connection, StreamItem};
///
/// #[tokio::main]
/// async fn main() {
/// let mut con = Connection::build("localhost").connect().unwrap();
/// // Wait until connected
/// con.events()
/// // We are connected when we receive the first BookEvents
/// .try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
/// .next()
/// .await
/// .unwrap();
/// }
/// ```
impl Connection {
/// Start creating the configuration of a new connection.
///
/// # Arguments
/// The address of the server has to be supplied. The address can be a
/// [`SocketAddr`](std::net::SocketAddr), a string or directly a [`ServerAddress`]. A string
/// will automatically be resolved from all formats supported by TeamSpeak.
/// For details, see [`resolver::resolve`].
///
/// # Examples
/// This will open a connection to the TeamSpeak server at `localhost`.
///
/// ```no_run
/// # use futures::prelude::*;
/// # use tsclientlib::{Connection, StreamItem};
///
/// # #[tokio::main]
/// # async fn main() {
/// let mut con = Connection::build("localhost").connect().unwrap();
/// // Wait until connected
/// con.events()
///     // We are connected when we receive the first BookEvents
///     .try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
///     .next()
///     .await
///     .unwrap();
/// # }
/// ```
#[inline]
pub fn build<A: Into<ServerAddress>>(address: A) -> ConnectOptions {
    ConnectOptions {
        address: address.into(),
        local_address: None,
        identity: None,
        server: None,
        // Default nickname, overridable on the returned options.
        name: "TeamSpeakUser".into(),
        // Default reported client version (Windows 3.x).
        version: Version::Windows_3_X_X__1,
        // Fixed default hardware id.
        hardware_id: "923f136fb1e22ae6ce95e60255529c00,d13231b1bc33edfecfb9169cc7a63bcc".into(),
        channel: None,
        channel_password: None,
        password: None,
        input_muted: false,
        output_muted: false,
        input_hardware_enabled: true,
        output_hardware_enabled: true,
        away: None,
        log_commands: false,
        log_packets: false,
        log_udp_packets: false,
    }
}
/// Create a connection
///
/// This function opens a new connection to a server. The returned future
/// resolves, when the connection is established successfully.
///
/// Settings like nickname of the user can be set using the
/// [`ConnectOptions`] parameter.
///
/// Equivalent to calling [`ConnectOptions::connect`] on `options`.
///
/// # Examples
/// This will open a connection to the TeamSpeak server at `localhost`.
///
/// ```no_run
/// # use futures::prelude::*;
/// # use tsclientlib::{Connection, ConnectOptions, StreamItem};
///
/// # #[tokio::main]
/// # async fn main() {
/// let mut con = Connection::new(ConnectOptions::new("localhost")).unwrap();
/// // Wait until connected
/// con.events()
///     // We are connected when we receive the first BookEvents
///     .try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
///     .next()
///     .await
///     .unwrap();
/// # }
/// ```
// TODO Remove
#[deprecated(since = "0.2.0", note = "ConnectOptions::connect should be used instead")]
pub fn new(options: ConnectOptions) -> Result<Self> { options.connect() }
/// Get the options which were used to create this connection.
///
/// The identity of the options is updated while connecting if the identity
/// level needs to be improved.
pub fn get_options(&self) -> &ConnectOptions { &self.options }
/// Get a stream of events. The event stream needs to be polled, otherwise
/// nothing happens in the connection; even sending packets only makes
/// progress while polling.
///
/// The returned stream can be dropped and recreated if needed.
pub fn events(&mut self) -> impl Stream<Item = Result<StreamItem>> + '_ {
    let stream = EventStream(self);
    stream
}
/// Connect to a server.
///
/// If `is_reconnect` is `true`, wait 10 seconds before sending the first packet. This is to not
/// spam unnecessary packets if the internet or server is down.
///
/// Tries every resolved address in turn and returns the first successful
/// connection; identity-level and server-refusal errors abort immediately.
async fn connect(
    options: ConnectOptions, is_reconnect: bool,
) -> Result<(client::Client, data::Connection)> {
    if is_reconnect {
        tokio::time::sleep(Duration::from_secs(10)).await;
    }
    // A concrete socket address yields exactly one candidate; anything else
    // goes through the TeamSpeak resolver.
    let resolved = match &options.address {
        ServerAddress::SocketAddr(a) => stream::once(future::ok(*a)).left_stream(),
        ServerAddress::Other(s) => resolver::resolve(s.into()).right_stream(),
    };
    // `pin_mut!` already rebinds `resolved` as a pinned mutable value; the
    // previous extra `let mut resolved: Pin<_> = resolved;` was a no-op and
    // has been removed.
    pin_utils::pin_mut!(resolved);
    let mut errors = Vec::new();
    while let Some(addr) = resolved.next().await {
        let addr = addr.map_err(|e| Error::ResolveAddress(Box::new(e)))?;
        match Self::connect_to(&options, addr).await {
            Ok(res) => return Ok(res),
            Err(e @ Error::IdentityLevel(_)) | Err(e @ Error::ConnectTs(_)) => {
                // Either increase identity level or the server refused us
                return Err(e);
            }
            Err(error) => {
                info!(%error, "Connecting failed, trying next address");
                errors.push(error);
            }
        }
    }
    // Every candidate address failed; report them all.
    Err(Error::ConnectFailed { address: options.address.to_string(), errors })
}
/// Open a UDP connection to `addr` and perform the handshake up to sending
/// the `clientinit` packet, then wait for the server's `initserver` answer.
///
/// Returns the raw tsproto client together with the initial bookkeeping data.
async fn connect_to(
    options: &ConnectOptions, addr: SocketAddr,
) -> Result<(client::Client, data::Connection)> {
    let counter = options.identity.as_ref().unwrap().counter();
    // Bind to an unspecified local address of the matching IP version if
    // none was configured.
    let socket = Box::new(
        UdpSocket::bind(options.local_address.unwrap_or_else(|| {
            if addr.is_ipv4() {
                "0.0.0.0:0".parse().unwrap()
            } else {
                "[::]:0".parse().unwrap()
            }
        }))
        .await
        .map_err(Error::Io)?,
    );
    let mut client =
        client::Client::new(addr, socket, options.identity.as_ref().unwrap().key().clone());

    // Logging
    tsproto::log::add_logger(
        options.log_commands,
        options.log_packets,
        options.log_udp_packets,
        &mut *client,
    );

    // Create a connection
    debug!(address = %addr, "Connecting");
    client.connect().await.map_err(Error::Connect)?;

    // If a server uid was pinned in the options, verify it against the
    // public key of the server we actually reached.
    if let Some(server_uid) = &options.server {
        let params = if let Some(r) = &client.params {
            r
        } else {
            return Err(Error::InitserverParamsMissing);
        };
        let real_uid = UidBuf(params.public_key.get_uid_no_base64());
        if real_uid != *server_uid {
            return Err(Error::ServerUidMismatch(real_uid));
        }
    }

    // Create clientinit packet
    let client_version = options.version.get_version_string();
    let client_platform = options.version.get_platform();
    let client_version_sign = BASE64_STANDARD.encode(options.version.get_signature());
    // Passwords are encoded before being sent.
    let default_channel_password = options
        .channel_password
        .as_ref()
        .map(|p| tsproto_types::crypto::encode_password(p.as_bytes()))
        .unwrap_or_default();
    let password = options
        .password
        .as_ref()
        .map(|p| tsproto_types::crypto::encode_password(p.as_bytes()))
        .unwrap_or_default();
    let packet = c2s::OutClientInitMessage::new(&mut iter::once(c2s::OutClientInitPart {
        name: Cow::Borrowed(options.name.as_ref()),
        version: Cow::Borrowed(client_version),
        platform: Cow::Borrowed(client_platform),
        // Muted/away flags are only included when set.
        input_muted: if options.input_muted { Some(true) } else { None },
        output_muted: if options.output_muted { Some(true) } else { None },
        input_hardware_enabled: options.input_hardware_enabled,
        output_hardware_enabled: options.output_hardware_enabled,
        is_away: if options.away.is_some() { Some(true) } else { None },
        away_message: options.away.as_deref().map(Cow::Borrowed),
        default_channel: Cow::Borrowed(options.channel.as_deref().unwrap_or_default()),
        default_channel_password: Cow::Borrowed(default_channel_password.as_ref()),
        password: Cow::Borrowed(password.as_ref()),
        metadata: "".into(),
        version_sign: Cow::Borrowed(client_version_sign.as_ref()),
        client_key_offset: counter,
        phonetic_name: "".into(),
        default_token: "".into(),
        hardware_id: Cow::Borrowed(options.hardware_id.as_ref()),
        badges: None,
        signed_badges: None,
        integrations: None,
        active_integrations_info: None,
        my_team_speak_avatar: None,
        my_team_speak_id: None,
        security_hash: None,
    }));
    client.send_packet(packet.into_packet()).map_err(Error::SendClientinit)?;

    // Bound the wait for the initserver answer.
    match tokio::time::timeout(
        Duration::from_secs(INITSERVER_TIMEOUT),
        Self::wait_initserver(client),
    )
    .await
    {
        Ok(r) => r,
        Err(_) => Err(Error::InitserverTimeout),
    }
}
/// Wait until the server answers the `clientinit` with an `initserver`
/// message.
///
/// Command errors abort the connection attempt; other messages are dropped
/// until `initserver` arrives.
async fn wait_initserver(
    mut client: client::Client,
) -> Result<(client::Client, data::Connection)> {
    // Wait until we received the initserver packet.
    loop {
        let cmd = client
            .filter_commands(|_, cmd| Ok(Some(cmd)))
            .await
            .map_err(Error::InitserverWait)?;
        let msg = InMessage::new(cmd.data().packet().header(), cmd.data().packet().content())
            .map_err(Error::InitserverParse);
        match msg {
            Ok(InMessage::CommandError(e)) => {
                let e = e.iter().next().unwrap();
                if e.id == ts_bookkeeping::TsError::ClientCouldNotValidateIdentity {
                    // The required identity level is transported in the extra
                    // message of the error.
                    if let Some(needed) =
                        e.extra_message.as_ref().and_then(|m| m.parse::<u8>().ok())
                    {
                        return Err(Error::IdentityLevel(needed));
                    }
                }
                return Err(Error::ConnectTs(e.id));
            }
            Ok(InMessage::InitServer(initserver)) => {
                let public_key = {
                    let params = if let Some(r) = &client.params {
                        r
                    } else {
                        return Err(Error::InitserverParamsMissing);
                    };
                    params.public_key.clone()
                };
                // Create connection
                let data = data::Connection::new(public_key, &initserver);
                return Ok((client, data));
            }
            Ok(msg) => {
                // TODO Save instead of drop
                warn!(message = ?msg, "Expected initserver, dropping command");
            }
            Err(error) => {
                warn!(%error, "Expected initserver, failed to parse command");
            }
        }
    }
}
/// Start increasing the identity level in a background thread.
///
/// Returns `Ok(())` if the computation was started; the connection state is
/// switched to `IdentityLevelIncreasing` and connecting should be retried
/// once the improved identity is received.
/// (The previous doc claimed this "returns `None`" — it returns a `Result`.)
///
/// Fails if the needed level is above 20 or if the current level is already
/// sufficient (which indicates a corrupted identity).
fn increase_identity_level(&mut self, needed: u8) -> Result<()> {
    if needed > 20 {
        return Err(Error::IdentityLevel(needed));
    }
    let identity = self.options.identity.as_ref().unwrap().clone();
    let level = identity.level();
    if level >= needed {
        // The server should never ask for a level we already have.
        return Err(Error::IdentityLevelCorrupted { needed, have: level });
    }

    // Increase identity level
    let state = Arc::new(Mutex::new(IdentityIncreaseLevelState::Computing));
    let (send, recv) = oneshot::channel();
    // TODO Use tokio::blocking
    // TODO Time estimate
    std::thread::spawn(move || {
        let mut identity = identity;
        // TODO Check if canceled in between
        identity.upgrade_level(needed);
        // The receiver may be gone if the connection was dropped meanwhile.
        let _ = send.send(identity);
    });
    self.state = ConnectionState::IdentityLevelIncreasing { recv, state };
    Ok(())
}
/// Adds a `return_code` to the command and returns if the corresponding
/// answer is received. If an error occurs, the future will return an error.
fn send_command_with_result(&mut self, packet: OutCommand) -> Result<MessageHandle> {
self.update_on_outgoing_command(&packet);
if let ConnectionState::Connected { con, .. } = &mut self.state {
con.send_command_with_result(packet)
} else {
Err(Error::NotConnected)
}
}
fn send_command(&mut self, packet: OutCommand) -> Result<()> {
self.update_on_outgoing_command(&packet);
if let ConnectionState::Connected { con, .. } = &mut self.state {
con.send_command(packet)
} else {
Err(Error::NotConnected)
}
}
/// Update for outgoing commands.
///
/// Updates subscription and muted state.
///
/// Muted state needs to be handled at the following places:
/// - Outgoing packet to change mute/away state: Immediately apply and add book event, update
///   CanTalk, save in ConnectOptions
/// - Incoming event to change channel/own name, edit current channel/own client permissions,
///   server settings: Update CanTalk/Play, save in ConnectOptions
/// - Incoming packet to change mute/away state: Ignore for own client
/// - Connect, temporary disconnect: Update CanTalk/Play
fn update_on_outgoing_command(&mut self, cmd: &OutCommand) {
    if let ConnectionState::Connected { con, book } = &mut self.state {
        let (cmd_name, parser) = CommandParser::new(cmd.0.content());
        if cmd_name == b"channelsubscribeall" {
            con.subscribed = true;
        } else if cmd_name == b"channelunsubscribeall" {
            con.subscribed = false;
        } else if cmd_name == b"clientupdate" {
            // Remember the audio capabilities before applying the update so
            // change events can be emitted afterwards.
            let prev_can_send = Self::intern_can_send_audio(book, &self.options);
            let prev_can_receive = Self::intern_can_receive_audio(book, &self.options);
            let mut own_client = book.clients.get_mut(&book.own_client);
            let mut has_away_message = false;
            let mut has_is_away = false;
            let mut events = Vec::new();
            for arg in parser {
                if let CommandItem::Argument(arg) = arg {
                    // Apply a boolean property to the options and, if it
                    // changed, to the own client in the book, recording a
                    // PropertyChanged event.
                    macro_rules! set_prop {
                        ($prop:ident, $event:ident, $event_value:ident) => {
                            self.options.$prop = arg.value().get_raw() == b"1";
                            if let Some(own_client) = &mut own_client {
                                if own_client.$prop != self.options.$prop {
                                    let old = own_client.$prop;
                                    own_client.$prop = self.options.$prop;
                                    events.push(events::Event::PropertyChanged {
                                        id: events::PropertyId::$event(own_client.id),
                                        old: events::PropertyValue::$event_value(old),
                                        invoker: None,
                                        extra: Default::default(),
                                    });
                                }
                            }
                        };
                    }
                    match arg.name() {
                        b"client_input_muted" => {
                            set_prop!(input_muted, ClientInputMuted, Bool);
                        }
                        b"client_output_muted" => {
                            set_prop!(output_muted, ClientOutputMuted, Bool);
                        }
                        b"client_away" => {
                            if arg.value().get_raw() == b"1" {
                                if !has_away_message {
                                    self.options.away = Some("".into());
                                }
                            } else {
                                self.options.away = None;
                            }
                            has_is_away = true;
                        }
                        b"client_away_message" => {
                            // The message is only applied if `client_away`
                            // appeared earlier in this command — argument
                            // order matters here.
                            if has_is_away && self.options.away.is_some() {
                                match arg.value().get_str() {
                                    Ok(r) => self.options.away = Some(r.to_string().into()),
                                    Err(error) => {
                                        warn!(%error, message = ?arg.value(),
                                            "Failed to parse sent away message");
                                    }
                                }
                                has_away_message = true;
                            }
                        }
                        b"client_input_hardware" => {
                            set_prop!(input_hardware_enabled, ClientInputHardwareEnabled, Bool);
                        }
                        b"client_output_hardware" => {
                            set_prop!(
                                output_hardware_enabled,
                                ClientOutputHardwareEnabled,
                                Bool
                            );
                        }
                        _ => {}
                    }
                    // Keep the away message in the book in sync with the
                    // options and emit an event when it changed.
                    if let Some(own_client) = &mut own_client {
                        if own_client.away_message.as_deref()
                            != self.options.away.as_ref().map(|s| s.as_ref())
                        {
                            let old = mem::replace(
                                &mut own_client.away_message,
                                self.options.away.as_ref().map(|s| s.to_string()),
                            );
                            events.push(events::Event::PropertyChanged {
                                id: events::PropertyId::ClientAwayMessage(own_client.id),
                                old: events::PropertyValue::OptionString(old),
                                invoker: None,
                                extra: Default::default(),
                            });
                        }
                    }
                }
            }
            if !events.is_empty() {
                self.stream_items.push_back(Ok(StreamItem::BookEvents(events)));
            }
            // Emit audio capability changes caused by this update.
            let new_can_send = Self::intern_can_send_audio(book, &self.options);
            let new_can_receive = Self::intern_can_receive_audio(book, &self.options);
            if new_can_send != prev_can_send {
                self.stream_items.push_back(Ok(StreamItem::AudioChange(
                    AudioEvent::CanSendAudio(new_can_send),
                )));
            }
            if new_can_receive != prev_can_receive {
                self.stream_items.push_back(Ok(StreamItem::AudioChange(
                    AudioEvent::CanReceiveAudio(new_can_receive),
                )));
            }
        }
    }
}
/// Cancels the computation to increase the identity level.
///
/// Only the cancellation flag is set; this returns immediately and does not
/// wait for the background thread to quit.
///
/// Does nothing if no identity level increase is in progress.
pub fn cancel_identity_level_increase(&mut self) {
    match &mut self.state {
        ConnectionState::IdentityLevelIncreasing { state, .. } => {
            let mut guard = state.lock().unwrap();
            *guard = IdentityIncreaseLevelState::Canceled;
        }
        _ => {}
    }
}
/// Get access to the raw connection.
///
/// Fails if the connection is currently not connected to the server.
#[cfg(feature = "unstable")]
pub fn get_tsproto_client(&self) -> Result<&client::Client> {
    match &self.state {
        ConnectionState::Connected { con, .. } => Ok(&con.client),
        _ => Err(Error::NotConnected),
    }
}
/// Get mutable access to the raw connection.
///
/// Fails if the connection is currently not connected to the server.
#[cfg(feature = "unstable")]
pub fn get_tsproto_client_mut(&mut self) -> Result<&mut client::Client> {
    match &mut self.state {
        ConnectionState::Connected { con, .. } => Ok(&mut con.client),
        _ => Err(Error::NotConnected),
    }
}
/// Returns the public key of the server, fails if disconnected.
#[cfg(feature = "unstable")]
pub fn get_server_key(&self) -> Result<tsproto_types::crypto::EccKeyPubP256> {
    let client = self.get_tsproto_client()?;
    match &client.params {
        Some(params) => Ok(params.public_key.clone()),
        None => Err(Error::NotConnected),
    }
}
/// Get the current state of clients and channels of this connection.
///
/// Fails if the connection is currently not connected to the server.
pub fn get_state(&self) -> Result<&data::Connection> {
if let ConnectionState::Connected { book, .. } = &self.state {
Ok(book)
} else {
Err(Error::NotConnected)
}
}
/// Disconnect from the server.
///
/// # Arguments
/// - `options`: Either `None` or `DisconnectOptions`.
///
/// # Examples
///
/// Use default options:
///
/// ```no_run
/// # use futures::prelude::*;
/// # use tsclientlib::{Connection, DisconnectOptions, StreamItem};
///
/// # #[tokio::main]
/// # async fn main() {
/// let mut con = Connection::build("localhost").connect().unwrap();
/// // Wait until connected
/// con.events()
///     // We are connected when we receive the first BookEvents
///     .try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
///     .next()
///     .await
///     .unwrap();
///
/// // Disconnect
/// con.disconnect(DisconnectOptions::new()).unwrap();
/// con.events().for_each(|_| future::ready(())).await;
/// # }
/// ```
///
/// Specify a reason and a quit message:
///
/// ```no_run
/// # use futures::prelude::*;
/// # use tsclientlib::{Connection, DisconnectOptions, Reason, StreamItem};
///
/// # #[tokio::main]
/// # async fn main() {
/// let mut con = Connection::build("localhost").connect().unwrap();
/// // Wait until connected
/// con.events()
///     // We are connected when we receive the first BookEvents
///     .try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
///     .next()
///     .await
///     .unwrap();
///
/// // Disconnect
/// let options = DisconnectOptions::new()
///     .reason(Reason::Clientdisconnect)
///     .message("Away for a while");
/// con.disconnect(options).unwrap();
/// con.events().for_each(|_| future::ready(())).await;
/// # }
/// ```
pub fn disconnect(&mut self, options: DisconnectOptions) -> Result<()> {
    match &mut self.state {
        ConnectionState::Connected { con, book } => {
            let packet = book.disconnect(options);
            con.client.send_packet(packet.into_packet()).map_err(Error::SendPacket)?;
            Ok(())
        }
        // Not connected: there is nothing to tell the server.
        _ => Ok(()),
    }
}
/// Send audio to the server.
///
/// This function does only accept `Voice` and `VoiceWhisper` packets. Commands should be send
/// with [`command.send(&mut connection)`](OutCommandExt::send) or
/// [`command.send_with_result(&mut connection)`](OutCommandExt::send_with_result).
///
/// # Examples
///
/// ```no_run
/// # let con: tsclientlib::Connection = panic!();
/// use tsproto_packets::packets::{AudioData, CodecType, OutAudio};
/// let codec = CodecType::OpusVoice;
/// // Send an empty packet to signal audio end
/// let packet = OutAudio::new(&AudioData::C2S { id: 0, codec, data: &[] });
/// con.send_audio(packet).unwrap();
/// ```
pub fn send_audio(&mut self, packet: OutPacket) -> Result<()> {
assert!(
[PacketType::Voice, PacketType::VoiceWhisper].contains(&packet.header().packet_type()),
"Can only send audio packets with send_audio"
);
if let ConnectionState::Connected { con, book } = &mut self.state {
if !Self::intern_can_send_audio(book, &self.options) {
let span = self.span.clone();
warn!(parent: span, "Sending audio while muted");
}
con.client.send_packet(packet).map_err(Error::SendPacket)?;
Ok(())
} else {
Err(Error::NotConnected)
}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | true |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/src/tests.rs | tsclientlib/src/tests.rs | use once_cell::sync::Lazy;
use ts_bookkeeping::messages::s2c::InMessage;
use tsproto_packets::packets::{Direction, Flags, OutPacket, PacketType};
// Initialize the tracing subscriber exactly once for all tests;
// `with_test_writer` routes output to the test harness so it is captured.
static TRACING: Lazy<()> = Lazy::new(|| tracing_subscriber::fmt().with_test_writer().init());

/// Ensure the test logger is initialized (idempotent across tests).
pub(crate) fn create_logger() { Lazy::force(&TRACING); }
/// Parse `msg` as an incoming (server-to-client) command message.
///
/// A dummy command packet header is constructed so the message parser gets
/// valid header data.
fn parse_msg(msg: &str) -> InMessage {
    let header = OutPacket::new_with_dir(Direction::S2C, Flags::empty(), PacketType::Command);
    InMessage::new(&header.header(), msg.as_bytes()).unwrap()
}
/// Parse a full `initserver` command with `virtualserver_icon_id` set to
/// `input` and assert that the icon id is decoded as `expected`.
fn test_iconid(input: &str, expected: u32) {
    create_logger();
    // The icon id is injected at the end of the (otherwise fixed) command.
    let msg = parse_msg(&format!(
        r#"initserver virtualserver_name=TeamSpeak\s]I[\sServer virtualserver_welcomemessage=Welcome\sto\sTeamSpeak,\scheck\s[URL]www.teamspeak.com[\/URL]\sfor\slatest\sinformation virtualserver_platform=Linux virtualserver_version=3.11.0\s[Build:\s1578903157] virtualserver_maxclients=32 virtualserver_created=1571572631 virtualserver_codec_encryption_mode=2 virtualserver_hostmessage virtualserver_hostmessage_mode=0 virtualserver_default_server_group=8 virtualserver_default_channel_group=8 virtualserver_hostbanner_url virtualserver_hostbanner_gfx_url virtualserver_hostbanner_gfx_interval=0 virtualserver_priority_speaker_dimm_modificator=-18.0000 virtualserver_id=1 virtualserver_hostbutton_tooltip virtualserver_hostbutton_url virtualserver_hostbutton_gfx_url virtualserver_name_phonetic virtualserver_ip=0.0.0.0,\s:: virtualserver_ask_for_privilegekey=0 virtualserver_hostbanner_mode=0 virtualserver_channel_temp_delete_delay_default=0 virtualserver_nickname client_nickname=TeamSpeakUser client_version=3.?.?\s[Build:\s5680278000] client_platform=Windows client_input_muted=0 client_output_muted=0 client_outputonly_muted=0 client_input_hardware=1 client_output_hardware=1 client_default_channel client_default_channel_password client_server_password client_meta_data client_version_sign=DX5NIYLvfJEUjuIbCidnoeozxIDRRkpq3I9vVMBmE9L2qnekOoBzSenkzsg2lC9CMv8K5hkEzhr2TYUYSwUXCg== client_security_hash client_key_offset=354 client_away=0 client_away_message client_nickname_phonetic client_default_token client_badges client_myteamspeak_id client_integrations client_active_integrations_info client_myteamspeak_avatar client_signed_badges acn=TeamSpeakUser aclid=2 pv=7 client_talk_power=75 client_needed_serverquery_view_power=75 virtualserver_icon_id={}"#,
        input
    ));
    if let InMessage::InitServer(list) = msg {
        let cmd = list.iter().next().unwrap();
        assert_eq!(cmd.icon, ts_bookkeeping::IconId(expected));
    } else {
        panic!("Failed to parse as initserver");
    }
}
#[test]
fn normal_iconid() {
    // A plain positive id is parsed as-is.
    test_iconid("96136942", 96136942);
}

#[test]
fn negative_iconid() {
    // Negative ids wrap to their u32 two's-complement representation.
    test_iconid("-96136942", 4198830354);
}

#[test]
fn big_iconid() {
    // Ids beyond u32 range are truncated to their low 32 bits.
    test_iconid("18446744073225738240", 3811153920);
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/src/sync.rs | tsclientlib/src/sync.rs | //! The `sync` module contains an easier to use interface for a connection.
//!
//! It makes it easier to use a connection from multiple threads and use
//! `async`/`await` syntax for the cost of a little bit performance.
use std::collections::HashMap;
use std::ops::{Deref, DerefMut};
use std::pin::Pin;
use std::task::{Context, Poll};
use futures::prelude::*;
use tokio::sync::{mpsc, oneshot};
use tracing::{error, info};
use ts_bookkeeping::ChannelId;
#[cfg(feature = "audio")]
use tsproto_packets::packets::InAudioBuf;
#[cfg(feature = "unstable")]
use tsproto_packets::packets::OutCommand;
use crate::{
events, AudioEvent, DisconnectOptions, Error, InMessage, Result, StreamItem,
TemporaryDisconnectReason,
};
/// Requests sent from a [`SyncConnectionHandle`] to the owning
/// [`SyncConnection`]; results travel back through the oneshot senders.
enum SyncConMessage {
    /// Run an arbitrary closure on the connection.
    RunFn(Box<dyn FnOnce(&mut SyncConnection) + Send>),
    /// Send a command and report the result of its answer.
    #[cfg(feature = "unstable")]
    SendCommand(OutCommand, oneshot::Sender<Result<()>>),
    /// Resolve the channel once the connection is established.
    WaitConnected(oneshot::Sender<Result<()>>),
    /// Start a disconnect; the channel resolves when the stream ends.
    Disconnect(DisconnectOptions, oneshot::Sender<Result<()>>),
    /// Request a file download from a channel.
    DownloadFile {
        channel_id: ChannelId,
        path: String,
        channel_password: Option<String>,
        seek_position: Option<u64>,
        send: oneshot::Sender<Result<super::FileDownloadResult>>,
    },
    /// Request a file upload to a channel.
    UploadFile {
        channel_id: ChannelId,
        path: String,
        channel_password: Option<String>,
        size: u64,
        overwrite: bool,
        resume: bool,
        send: oneshot::Sender<Result<super::FileUploadResult>>,
    },
}
/// This is a subset of [`StreamItem`](crate::StreamItem).
///
/// Command results and finished or failed file transfers have no variant
/// here; they are delivered through the oneshot channels of the waiting
/// handles instead.
pub enum SyncStreamItem {
    /// All the incoming book events.
    ///
    /// If a connection to the server was established this will contain an added event of a server.
    BookEvents(Vec<events::Event>),
    /// All incoming messages that are not related to the book.
    ///
    /// This contains messages like `ChannelListFinished` or `ClientChatComposing`.
    /// All events related to channels or clients are returned as events in the `BookEvents`
    /// variant. Other messages handled by tsclientlib, e.g. for filetransfer are also not included
    /// in these events.
    MessageEvent(InMessage),
    /// Received an audio packet.
    ///
    /// Audio packets can be handled by the [`AudioHandler`](crate::audio::AudioHandler), which
    /// builds a queue per client and handles packet loss and jitter.
    #[cfg(feature = "audio")]
    Audio(InAudioBuf),
    /// The identity level needs to be improved; contains the needed level.
    IdentityLevelIncreasing(u8),
    /// This event may occur without an `IdentityLevelIncreasing` event before
    /// if a new identity is created because no identity was supplied.
    IdentityLevelIncreased,
    /// The connection timed out or the server shut down. The connection will be
    /// rebuilt automatically.
    DisconnectedTemporarily(TemporaryDisconnectReason),
    /// The network statistics were updated.
    ///
    /// This means e.g. the packet loss got a new value. Clients with audio probably want to update
    /// the packet loss option of opus.
    NetworkStatsUpdated,
    /// A change related to audio.
    AudioChange(AudioEvent),
}
/// A handle for a [`SyncConnection`] which can be sent across threads.
///
/// All actions like sending messages, downloading and uploading happens through
/// a handle.
#[derive(Clone)]
pub struct SyncConnectionHandle {
    /// Channel to the owning [`SyncConnection`].
    send: mpsc::Sender<SyncConMessage>,
}
/// A [`Connection`](super::Connection) wrapper that is polled as a stream and
/// can be driven from other threads through [`SyncConnectionHandle`]s.
pub struct SyncConnection {
    /// The wrapped connection.
    con: super::Connection,
    /// Incoming requests from handles.
    recv: mpsc::Receiver<SyncConMessage>,
    /// Kept so new handles can be created with `get_handle`.
    send: mpsc::Sender<SyncConMessage>,
    /// Pending command results, keyed by message handle.
    commands: HashMap<super::MessageHandle, oneshot::Sender<Result<()>>>,
    /// Waiters that resolve once the connection is established.
    connects: Vec<oneshot::Sender<Result<()>>>,
    /// Waiters that resolve once the connection stream ends.
    disconnects: Vec<oneshot::Sender<Result<()>>>,
    /// Pending downloads, keyed by file transfer handle.
    downloads:
        HashMap<super::FiletransferHandle, oneshot::Sender<Result<super::FileDownloadResult>>>,
    /// Pending uploads, keyed by file transfer handle.
    uploads: HashMap<super::FiletransferHandle, oneshot::Sender<Result<super::FileUploadResult>>>,
}
impl From<super::Connection> for SyncConnection {
fn from(con: super::Connection) -> Self {
let (send, recv) = mpsc::channel(1);
Self {
con,
recv,
send,
commands: Default::default(),
connects: Default::default(),
disconnects: Default::default(),
downloads: Default::default(),
uploads: Default::default(),
}
}
}
impl Deref for SyncConnection {
    type Target = super::Connection;

    /// Allow calling [`Connection`](super::Connection) methods directly.
    #[inline]
    fn deref(&self) -> &Self::Target { &self.con }
}
impl DerefMut for SyncConnection {
    /// Allow calling mutating [`Connection`](super::Connection) methods directly.
    #[inline]
    fn deref_mut(&mut self) -> &mut <Self as Deref>::Target { &mut self.con }
}
impl Stream for SyncConnection {
    type Item = Result<SyncStreamItem>;

    /// Drive the connection: first drain all pending handle requests, then
    /// poll the inner connection and translate its items into
    /// [`SyncStreamItem`]s, routing command and file transfer results to the
    /// oneshot channels of the waiting handles.
    fn poll_next(mut self: Pin<&mut Self>, ctx: &mut Context) -> Poll<Option<Self::Item>> {
        let _span = self.con.span.clone().entered();
        // Handle all queued messages from handles first.
        loop {
            if let Poll::Ready(msg) = self.recv.poll_recv(ctx) {
                if let Some(msg) = msg {
                    match msg {
                        SyncConMessage::RunFn(f) => f(&mut self),
                        #[cfg(feature = "unstable")]
                        SyncConMessage::SendCommand(arg, send) => {
                            let handle = match self.con.send_command_with_result(arg) {
                                Ok(r) => r,
                                Err(e) => {
                                    // The handle may already be dropped;
                                    // ignore a failed send.
                                    let _ = send.send(Err(e));
                                    continue;
                                }
                            };
                            self.commands.insert(handle, send);
                        }
                        SyncConMessage::WaitConnected(send) => {
                            if self.con.get_state().is_ok() {
                                // Already connected, answer immediately.
                                let _ = send.send(Ok(()));
                            } else {
                                self.connects.push(send);
                            }
                        }
                        SyncConMessage::Disconnect(arg, send) => {
                            match self.con.disconnect(arg) {
                                Ok(r) => r,
                                Err(e) => {
                                    let _ = send.send(Err(e));
                                    continue;
                                }
                            }
                            self.disconnects.push(send);
                        }
                        SyncConMessage::DownloadFile {
                            channel_id,
                            path,
                            channel_password,
                            seek_position,
                            send,
                        } => {
                            let handle = match self.con.download_file(
                                channel_id,
                                &path,
                                channel_password.as_deref(),
                                seek_position,
                            ) {
                                Ok(r) => r,
                                Err(e) => {
                                    let _ = send.send(Err(e));
                                    continue;
                                }
                            };
                            self.downloads.insert(handle, send);
                        }
                        SyncConMessage::UploadFile {
                            channel_id,
                            path,
                            channel_password,
                            size,
                            overwrite,
                            resume,
                            send,
                        } => {
                            let handle = match self.con.upload_file(
                                channel_id,
                                &path,
                                channel_password.as_deref(),
                                size,
                                overwrite,
                                resume,
                            ) {
                                Ok(r) => r,
                                Err(e) => {
                                    let _ = send.send(Err(e));
                                    continue;
                                }
                            };
                            self.uploads.insert(handle, send);
                        }
                    }
                    continue;
                } else {
                    // All senders gone — should not happen, we keep one
                    // sender ourselves.
                    error!("Message stream ended unexpectedly");
                }
            }
            break;
        }

        // Poll the inner connection and translate its items.
        loop {
            break if let Poll::Ready(item) = self.con.poll_next(ctx) {
                Poll::Ready(match item {
                    Some(Ok(item)) => Some(Ok(match item {
                        StreamItem::BookEvents(i) => {
                            // The first book events mean we are connected;
                            // resolve all waiters.
                            self.connects.drain(..).for_each(|send| {
                                let _ = send.send(Ok(()));
                            });
                            SyncStreamItem::BookEvents(i)
                        }
                        StreamItem::MessageEvent(i) => SyncStreamItem::MessageEvent(i),
                        #[cfg(feature = "audio")]
                        StreamItem::Audio(i) => SyncStreamItem::Audio(i),
                        StreamItem::IdentityLevelIncreasing(i) => {
                            SyncStreamItem::IdentityLevelIncreasing(i)
                        }
                        StreamItem::IdentityLevelIncreased => {
                            SyncStreamItem::IdentityLevelIncreased
                        }
                        StreamItem::DisconnectedTemporarily(reason) => {
                            SyncStreamItem::DisconnectedTemporarily(reason)
                        }
                        // Results below are routed to waiting handles instead
                        // of being surfaced on the stream.
                        StreamItem::MessageResult(handle, res) => {
                            if let Some(send) = self.commands.remove(&handle) {
                                let _ = send.send(res.map_err(|e| e.into()));
                            } else {
                                info!("Got untracked message result");
                            }
                            continue;
                        }
                        StreamItem::FileDownload(handle, res) => {
                            if let Some(send) = self.downloads.remove(&handle) {
                                let _ = send.send(Ok(res));
                            } else {
                                info!("Got untracked download");
                            }
                            continue;
                        }
                        StreamItem::FileUpload(handle, res) => {
                            if let Some(send) = self.uploads.remove(&handle) {
                                let _ = send.send(Ok(res));
                            } else {
                                info!("Got untracked upload");
                            }
                            continue;
                        }
                        StreamItem::FiletransferFailed(handle, res) => {
                            // The handle alone does not tell us whether it
                            // was a download or an upload, so try both maps.
                            if let Some(send) = self.downloads.remove(&handle) {
                                let _ = send.send(Err(res));
                            } else if let Some(send) = self.uploads.remove(&handle) {
                                let _ = send.send(Err(res));
                            } else {
                                info!("Got untracked file transfer");
                            }
                            continue;
                        }
                        StreamItem::NetworkStatsUpdated => SyncStreamItem::NetworkStatsUpdated,
                        StreamItem::AudioChange(change) => SyncStreamItem::AudioChange(change),
                    })),
                    Some(Err(e)) => Some(Err(e)),
                    None => {
                        // The stream ended: the disconnect is complete.
                        self.disconnects.drain(..).for_each(|send| {
                            let _ = send.send(Ok(()));
                        });
                        None
                    }
                })
            } else {
                Poll::Pending
            };
        }
    }
}
impl SyncConnection {
    /// Create a new thread-safe handle to this connection.
    #[inline]
    pub fn get_handle(&self) -> SyncConnectionHandle {
        let send = self.send.clone();
        SyncConnectionHandle { send }
    }
}
impl SyncConnectionHandle {
/// Run a function on the connection.
pub async fn with_connection<
T: Send + 'static,
F: FnOnce(&mut SyncConnection) -> T + Send + 'static,
>(
&mut self, f: F,
) -> Result<T> {
let (send, recv) = oneshot::channel();
self.send
.send(SyncConMessage::RunFn(Box::new(move |con| {
let _ = send.send(f(con));
})))
.await
.map_err(|_| Error::ConnectionGone)?;
recv.await.map_err(|_| Error::ConnectionGone)
}
/// Adds a `return_code` to the command and resolves when the corresponding
/// answer is received. If an error occurs, the future will return an error.
#[cfg(feature = "unstable")]
pub async fn send_command(&mut self, arg: OutCommand) -> Result<()> {
    let (result_send, result_recv) = oneshot::channel();
    let msg = SyncConMessage::SendCommand(arg, result_send);
    self.send.send(msg).await.map_err(|_| Error::ConnectionGone)?;
    result_recv.await.map_err(|_| Error::ConnectionGone)?
}
/// This future resolves once the connection is connected to the server.
pub async fn wait_until_connected(&mut self) -> Result<()> {
let (send, recv) = oneshot::channel();
self.send
.send(SyncConMessage::WaitConnected(send))
.await
.map_err(|_| Error::ConnectionGone)?;
recv.await.map_err(|_| Error::ConnectionGone)?
}
/// Disconnect from the server.
///
/// # Arguments
/// - `options`: Either `None` or `DisconnectOptions`.
///
/// # Examples
///
/// Use default options:
///
/// ```no_run
/// # use futures::prelude::*;
/// # use tsclientlib::{Connection, ConnectOptions, DisconnectOptions, StreamItem};
/// # use tsclientlib::sync::SyncConnection;
///
/// # #[tokio::main]
/// # async fn main() {
/// let con: SyncConnection = Connection::build("localhost").connect().unwrap().into();
/// let mut handle = con.get_handle();
/// tokio::spawn(con.for_each(|_| future::ready(())));
/// // Wait until connected
/// handle.wait_until_connected().await.unwrap();
///
/// // Disconnect
/// handle.disconnect(DisconnectOptions::new()).await.unwrap();
/// # }
/// ```
///
/// Specify a reason and a quit message:
///
/// ```no_run
/// # use futures::prelude::*;
/// # use tsclientlib::{Connection, ConnectOptions, DisconnectOptions, Reason, StreamItem};
/// # use tsclientlib::sync::SyncConnection;
///
/// # #[tokio::main]
/// # async fn main() {
/// let con: SyncConnection = Connection::build("localhost").connect().unwrap().into();
/// let mut handle = con.get_handle();
/// tokio::spawn(con.for_each(|_| future::ready(())));
/// // Wait until connected
/// handle.wait_until_connected().await.unwrap();
///
/// // Disconnect
/// let options = DisconnectOptions::new()
///     .reason(Reason::Clientdisconnect)
///     .message("Away for a while");
/// handle.disconnect(options).await.unwrap();
/// # }
/// ```
pub async fn disconnect(&mut self, arg: DisconnectOptions) -> Result<()> {
    let (send, recv) = oneshot::channel();
    self.send
        .send(SyncConMessage::Disconnect(arg, send))
        .await
        .map_err(|_| Error::ConnectionGone)?;
    recv.await.map_err(|_| Error::ConnectionGone)?
}
/// Download a file from a channel of the connected TeamSpeak server.
///
/// Returns the size of the file and a tcp stream of the requested file.
///
/// # Example
/// Download an icon.
///
/// ```no_run
/// # use tsclientlib::ChannelId;
/// # let handle: tsclientlib::sync::SyncConnectionHandle = panic!();
/// # let id = 0;
/// let download = handle.download_file(ChannelId(0), format!("/icon_{}", id), None, None);
/// ```
pub async fn download_file(
&mut self, channel_id: ChannelId, path: String, channel_password: Option<String>,
seek_position: Option<u64>,
) -> Result<super::FileDownloadResult> {
let (send, recv) = oneshot::channel();
self.send
.send(SyncConMessage::DownloadFile {
channel_id,
path,
channel_password,
seek_position,
send,
})
.await
.map_err(|_| Error::ConnectionGone)?;
recv.await.map_err(|_| Error::ConnectionGone)?
}
/// Upload a file to a channel of the connected TeamSpeak server.
///
/// Returns the size of the part which is already uploaded (when resume is
/// specified) and a tcp stream where the requested file should be uploaded.
///
/// # Example
/// Upload an avatar.
///
/// ```no_run
/// # use tsclientlib::ChannelId;
/// # let handle: tsclientlib::sync::SyncConnectionHandle = panic!();
/// # let size = 0;
/// let upload = handle.upload_file(ChannelId(0), "/avatar".to_string(), None, size, true, false);
/// ```
pub async fn upload_file(
&mut self, channel_id: ChannelId, path: String, channel_password: Option<String>,
size: u64, overwrite: bool, resume: bool,
) -> Result<super::FileUploadResult> {
let (send, recv) = oneshot::channel();
self.send
.send(SyncConMessage::UploadFile {
channel_id,
path,
channel_password,
size,
overwrite,
resume,
send,
})
.await
.map_err(|_| Error::ConnectionGone)?;
recv.await.map_err(|_| Error::ConnectionGone)?
}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/src/resolver.rs | tsclientlib/src/resolver.rs | //! Resolve TeamSpeak server addresses of any kind.
// Changes with TeamSpeak client 3.1:
// https://support.teamspeakusa.com/index.php?/Knowledgebase/Article/View/332
use std::net::{IpAddr, Ipv6Addr, SocketAddr};
use std::str::{self, FromStr};
use futures::prelude::*;
use hickory_resolver::config::{ResolverConfig, ResolverOpts};
use hickory_resolver::{Name, TokioAsyncResolver};
use itertools::Itertools;
use rand::Rng;
use thiserror::Error;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{self, TcpStream};
use tokio::time::Duration;
use tracing::{debug, instrument, warn};
const DEFAULT_PORT: u16 = 9987;
const DNS_PREFIX_TCP: &str = "_tsdns._tcp.";
const DNS_PREFIX_UDP: &str = "_ts3._udp.";
const NICKNAME_LOOKUP_ADDRESS: &str = "https://named.myteamspeak.com/lookup";
/// Wait this amount of seconds before giving up.
const TIMEOUT_SECONDS: u64 = 10;
type Result<T> = std::result::Result<T, Error>;
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum Error {
#[error("Failed to create resolver ({system})")]
CreateResolver {
#[source]
system: hickory_resolver::error::ResolveError,
},
#[error("Failed to composed domain from {0:?} and {1:?}: {2}")]
InvalidComposedDomain(String, String, #[source] hickory_proto::error::ProtoError),
#[error("Failed to parse domain {0:?}: {1}")]
InvalidDomain(String, #[source] hickory_proto::error::ProtoError),
#[error("Invalid IPv4 address")]
InvalidIp4Address,
#[error("Invalid IPv6 address")]
InvalidIp6Address,
#[error("Invalid IP address")]
InvalidIpAddress,
#[error("Not a valid nickname")]
InvalidNickname,
#[error("Failed to parse port: {0}")]
InvalidPort(#[source] std::num::ParseIntError),
#[error("Failed to contact {0} server: {1}")]
Io(&'static str, #[source] std::io::Error),
#[error("Failed to parse url: {0}")]
NicknameParseUrl(#[source] url::ParseError),
#[error("Failed to resolve nickname: {0}")]
NicknameResolve(#[source] reqwest::Error),
#[error("Found no SRV entry")]
NoSrvEntry,
#[error("Failed to resolve hostname: {0}")]
ResolveHost(#[source] tokio::io::Error),
#[error("Failed to get SRV record")]
SrvLookup(#[source] hickory_resolver::error::ResolveError),
#[error("tsdns did not return an ip address but {0:?}")]
TsdnsAddressInvalidResponse(String),
#[error("tsdns server does not know the address")]
TsdnsAddressNotFound,
#[error("Failed to parse tsdns response: {0}")]
TsdnsParseResponse(#[source] std::str::Utf8Error),
}
#[derive(Debug, PartialEq, Eq)]
enum ParseIpResult<'a> {
Addr(SocketAddr),
Other(&'a str, Option<u16>),
}
/// Beware that this may be slow because it tries all available methods.
///
/// The following methods are tried:
/// 1. If the address is an ip, the ip is returned
/// 1. Server nicknames are resolved by a http request to TeamSpeak
/// 1. The SRV record at `_ts3._udp.<address>`
/// 1. The SRV record at `_tsdns._tcp.address.tld` to get the address of a tsdns
/// server, e.g. when the address is `ts3.subdomain.from.com`, the SRV record
/// at `_tsdns._tcp.from.com` is requested
/// 1. Directly resolve the address to an ip address
///
/// If a port is given with `:port`, it overwrites the automatically determined
/// port. IPv6 addresses are put in square brackets when a port is present:
/// `[::1]:9987`
#[instrument]
pub fn resolve(address: String) -> impl Stream<Item = Result<SocketAddr>> {
debug!("Starting resolve");
let addr;
let port;
match parse_ip(&address) {
Ok(ParseIpResult::Addr(res)) => {
return stream::once(future::ok(res)).left_stream();
}
Ok(ParseIpResult::Other(a, p)) => {
addr = a.to_string();
port = p;
if let Some(port) = port {
debug!(port, "Found port");
}
}
Err(res) => return stream::once(future::err(res)).left_stream(),
}
// Resolve as nickname
let res = if !address.contains('.') && addr != "localhost" {
debug!("Resolving nickname");
// Could be a server nickname
resolve_nickname(address.clone())
.map_ok(move |mut addr| {
if let Some(port) = port {
addr.set_port(port);
}
addr
})
.left_stream()
} else {
stream::once(future::err(Error::InvalidNickname)).right_stream()
};
// The system config does not yet work on android:
// https://github.com/bluejekyll/trust-dns/issues/652
let addr2 = addr.clone();
// TODO Move current span into stream
let res = res.chain(
stream::once(async move {
let resolver = create_resolver();
// Try to get the address by an SRV record
let prefix = Name::from_str(DNS_PREFIX_UDP).expect("Cannot parse udp domain prefix");
let mut name =
Name::from_str(&addr2).map_err(|e| Error::InvalidDomain(addr2.clone(), e))?;
name.set_fqdn(true);
Result::<_>::Ok(resolve_srv(
resolver,
prefix.append_name(&name).map_err(|e| {
Error::InvalidComposedDomain(DNS_PREFIX_UDP.to_string(), addr2.clone(), e)
})?,
))
})
.try_flatten(),
);
// Try to get the address of a tsdns server by an SRV record
let addr2 = addr.clone();
// TODO Move current span into stream
let res = res.chain(
stream::once(async move {
let resolver = create_resolver();
let prefix = Name::from_str(DNS_PREFIX_TCP).expect("Cannot parse udp domain prefix");
let mut name =
Name::from_str(&addr2).map_err(|e| Error::InvalidDomain(addr2.clone(), e))?;
name.set_fqdn(true);
let name = name.trim_to(2);
// Pick the first srv record of the first server that answers
Result::<_>::Ok(
resolve_srv(
resolver,
prefix.append_name(&name).map_err(|e| {
Error::InvalidComposedDomain(DNS_PREFIX_UDP.to_string(), addr2.clone(), e)
})?,
)
.and_then(move |srv| {
let address = address.clone();
async move {
// Got tsdns server
let mut addr = resolve_tsdns(srv, &address).await?;
if let Some(port) = port {
// Overwrite port if it was specified
addr.set_port(port);
}
Ok(addr)
}
}),
)
})
.try_flatten(),
);
// Interpret as normal address and resolve with system resolver
let res = res.chain(
stream::once(async move {
let res = net::lookup_host((addr.as_str(), port.unwrap_or(DEFAULT_PORT)))
.await
.map_err(Error::ResolveHost)?
.map(Ok)
.collect::<Vec<_>>();
Result::<_>::Ok(stream::iter(res))
})
.try_flatten(),
);
// TODO Move current span into stream
tokio_stream::StreamExt::timeout(res, Duration::from_secs(TIMEOUT_SECONDS))
.filter_map(move |r: std::result::Result<Result<SocketAddr>, _>| {
future::ready(match r {
// Timeout
Err(_) => None,
// Error
Ok(Err(error)) => {
debug!(%error, "Resolver failed in one step");
None
}
// Success
Ok(Ok(r)) => Some(Ok(r)),
})
})
.right_stream()
}
// Windows for some reason automatically adds a link-local address to the dns
// resolver. These addresses are usually not reachable and should be filtered out.
// See: https://superuser.com/questions/638566/strange-value-in-dns-shown-in-ipconfig
const FILTERED_IPS: &[IpAddr] = &[
IpAddr::V6(Ipv6Addr::new(0xfec0, 0, 0, 0xffff, 0, 0, 0, 1)),
IpAddr::V6(Ipv6Addr::new(0xfec0, 0, 0, 0xffff, 0, 0, 0, 2)),
IpAddr::V6(Ipv6Addr::new(0xfec0, 0, 0, 0xffff, 0, 0, 0, 3)),
];
fn create_resolver() -> TokioAsyncResolver {
let (config, options) = create_resolver_config();
TokioAsyncResolver::tokio(config, options)
}
fn create_resolver_config() -> (ResolverConfig, ResolverOpts) {
match hickory_resolver::system_conf::read_system_conf() {
Ok(r) => {
let mut rc = ResolverConfig::from_parts(
None,
vec![],
hickory_resolver::config::NameServerConfigGroup::new(),
);
for ns in
r.0.name_servers().iter().filter(|ns| !FILTERED_IPS.contains(&ns.socket_addr.ip()))
{
rc.add_name_server(ns.clone());
}
(rc, r.1)
}
Err(error) => {
warn!(%error, "Failed to use system dns resolver config");
// Fallback
(ResolverConfig::cloudflare(), ResolverOpts::default())
}
}
}
fn parse_ip(address: &str) -> Result<ParseIpResult> {
let mut addr = address;
let mut port = None;
if let Some(pos) = address.rfind(':') {
// Either with port or IPv6 address
if address.find(':').unwrap() == pos {
// Port is appended
addr = &address[..pos];
port = Some(&address[pos + 1..]);
if addr.chars().all(|c| c.is_ascii_digit() || c == '.') {
// IPv4 address
return Ok(ParseIpResult::Addr(
std::net::ToSocketAddrs::to_socket_addrs(address)
.map_err(|_| Error::InvalidIp4Address)?
.next()
.ok_or(Error::InvalidIp4Address)?,
));
}
} else if let Some(pos_bracket) = address.rfind(']') {
if pos_bracket < pos {
// IPv6 address and port
return Ok(ParseIpResult::Addr(
std::net::ToSocketAddrs::to_socket_addrs(address)
.map_err(|_| Error::InvalidIp6Address)?
.next()
.ok_or(Error::InvalidIp6Address)?,
));
} else if pos_bracket == address.len() - 1 && address.starts_with('[') {
// IPv6 address
return Ok(ParseIpResult::Addr(
std::net::ToSocketAddrs::to_socket_addrs(&(
&address[1..pos_bracket],
DEFAULT_PORT,
))
.map_err(|_| Error::InvalidIp6Address)?
.next()
.ok_or(Error::InvalidIp6Address)?,
));
} else {
return Err(Error::InvalidIpAddress);
}
} else {
// IPv6 address
return Ok(ParseIpResult::Addr(
std::net::ToSocketAddrs::to_socket_addrs(&(address, DEFAULT_PORT))
.map_err(|_| Error::InvalidIp6Address)?
.next()
.ok_or(Error::InvalidIp6Address)?,
));
}
} else if address.chars().all(|c| c.is_ascii_digit() || c == '.') {
// IPv4 address
return Ok(ParseIpResult::Addr(
std::net::ToSocketAddrs::to_socket_addrs(&(address, DEFAULT_PORT))
.map_err(|_| Error::InvalidIp4Address)?
.next()
.ok_or(Error::InvalidIp4Address)?,
));
}
let port = if let Some(port) = port.map(|p| p.parse().map_err(Error::InvalidPort)) {
Some(port?)
} else {
None
};
Ok(ParseIpResult::Other(addr, port))
}
pub fn resolve_nickname(nickname: String) -> impl Stream<Item = Result<SocketAddr>> {
stream::once(async {
let nickname = nickname;
let url =
reqwest::Url::parse_with_params(NICKNAME_LOOKUP_ADDRESS, Some(("name", &nickname)))
.map_err(Error::NicknameParseUrl)?;
let body = reqwest::get(url)
.await
.map_err(Error::NicknameResolve)?
.error_for_status()
.map_err(Error::NicknameResolve)?
.text()
.await
.map_err(Error::NicknameResolve)?;
let addrs = body
.split(&['\r', '\n'][..])
.filter(|s| !s.is_empty())
.map(|s| Result::<_>::Ok(s.to_string()))
.collect::<Vec<_>>();
Result::<_>::Ok(
stream::iter(addrs)
.and_then(|addr| async move {
match parse_ip(&addr)? {
ParseIpResult::Addr(a) => Ok(stream::once(future::ok(a)).left_stream()),
ParseIpResult::Other(a, p) => {
let addrs = net::lookup_host((a, p.unwrap_or(DEFAULT_PORT)))
.await
.map_err(Error::ResolveHost)?
.collect::<Vec<_>>();
Ok(stream::iter(addrs).map(Result::<_>::Ok).right_stream())
}
}
})
.try_flatten(),
)
})
.try_flatten()
}
pub async fn resolve_tsdns<A: net::ToSocketAddrs>(server: A, addr: &str) -> Result<SocketAddr> {
let mut stream = TcpStream::connect(server).await.map_err(|e| Error::Io("tsdns", e))?;
stream.write_all(addr.as_bytes()).await.map_err(|e| Error::Io("tsdns", e))?;
let mut data = Vec::new();
stream.read_to_end(&mut data).await.map_err(|e| Error::Io("tsdns", e))?;
let addr = str::from_utf8(&data).map_err(Error::TsdnsParseResponse)?;
if addr.starts_with("404") {
return Err(Error::TsdnsAddressNotFound);
}
match parse_ip(addr)? {
ParseIpResult::Addr(a) => Ok(a),
_ => Err(Error::TsdnsAddressInvalidResponse(addr.to_string())),
}
}
fn resolve_srv(resolver: TokioAsyncResolver, addr: Name) -> impl Stream<Item = Result<SocketAddr>> {
stream::once(async {
let lookup = resolver.srv_lookup(addr).await.map_err(Error::SrvLookup)?;
let mut entries = Vec::new();
let mut max_prio = if let Some(e) = lookup.iter().next() {
e.priority()
} else {
return Err(Error::NoSrvEntry);
};
// Move all SRV records into entries and only retain the ones with
// the lowest priority.
for srv in lookup.iter() {
if srv.priority() < max_prio {
max_prio = srv.priority();
entries.clear();
entries.push(srv);
} else if srv.priority() == max_prio {
entries.push(srv);
}
}
let prios = lookup.iter().chunk_by(|e| e.priority());
let entries = prios.into_iter().sorted_by_key(|(p, _)| *p);
// Select by weight
let mut sorted_entries = Vec::new();
for (_, es) in entries {
let mut zero_entries = Vec::new();
// All non-zero entries
let mut entries = es
.filter_map(|e| {
if e.weight() == 0 {
zero_entries.push(e);
None
} else {
Some(e)
}
})
.collect::<Vec<_>>();
while !entries.is_empty() {
let weight: u32 = entries.iter().map(|e| e.weight() as u32).sum();
let mut w = rand::thread_rng().gen_range(0..=weight);
if w == 0 {
// Pick the first entry with weight 0
if let Some(i) = entries.iter().position(|e| e.weight() == 0) {
sorted_entries.push(entries.remove(i));
}
}
for i in 0..entries.len() {
let weight = entries[i].weight() as u32;
if w <= weight {
sorted_entries.push(entries.remove(i));
break;
}
w -= weight;
}
}
}
let res = sorted_entries
.into_iter()
.map(|e| Ok((e.target().to_ascii(), e.port())))
.collect::<Vec<Result<(String, u16)>>>();
drop(resolver);
Ok(stream::iter(res)
.and_then(|(e, port)| async move {
let res = net::lookup_host((e.as_str(), port))
.await
.map_err(Error::ResolveHost)?
.map(Ok)
.collect::<Vec<_>>();
Ok(stream::iter(res))
})
.try_flatten())
})
.try_flatten()
}
#[cfg(test)]
mod test {
use super::*;
use crate::tests::create_logger;
#[test]
fn parse_ip_without_port() {
let res = parse_ip("127.0.0.1");
assert_eq!(
res.unwrap(),
ParseIpResult::Addr(format!("127.0.0.1:{}", DEFAULT_PORT).parse().unwrap())
);
}
#[test]
fn parse_ip_with_port() {
let res = parse_ip("127.0.0.1:1");
assert_eq!(res.unwrap(), ParseIpResult::Addr("127.0.0.1:1".parse().unwrap()));
}
#[test]
fn parse_ip6_without_port() {
let res = parse_ip("::");
assert_eq!(
res.unwrap(),
ParseIpResult::Addr(format!("[::]:{}", DEFAULT_PORT).parse().unwrap())
);
}
#[test]
fn parse_ip6_without_port2() {
let res = parse_ip("[::]");
assert_eq!(
res.unwrap(),
ParseIpResult::Addr(format!("[::]:{}", DEFAULT_PORT).parse().unwrap())
);
}
#[test]
fn parse_ip6_with_port() {
let res = parse_ip("[::]:1");
assert_eq!(res.unwrap(), ParseIpResult::Addr("[::]:1".parse().unwrap()));
}
#[test]
fn parse_ip_address_without_port() {
assert_eq!(parse_ip("localhost").unwrap(), ParseIpResult::Other("localhost", None));
}
#[test]
fn parse_ip_address_with_port() {
assert_eq!(parse_ip("localhost:1").unwrap(), ParseIpResult::Other("localhost", Some(1)));
}
#[test]
fn parse_ip_with_large_port() {
assert!(parse_ip("127.0.0.1:65536").is_err());
}
#[tokio::test]
async fn resolve_localhost() {
create_logger();
let res: Vec<_> = resolve("127.0.0.1".into()).map(|r| r.unwrap()).collect().await;
let addr = format!("127.0.0.1:{}", DEFAULT_PORT).parse::<SocketAddr>().unwrap();
assert_eq!(res.as_slice(), &[addr]);
}
#[tokio::test]
async fn resolve_localhost2() {
create_logger();
let res: Vec<_> = resolve("localhost".into()).map(|r| r.unwrap()).collect().await;
assert!(res.contains(&format!("127.0.0.1:{}", DEFAULT_PORT).parse().unwrap()));
}
#[tokio::test]
async fn resolve_example() {
create_logger();
let res: Vec<_> = resolve("example.com".into()).map(|r| r.unwrap()).collect().await;
assert!(!res.is_empty());
}
#[tokio::test]
async fn resolve_splamy_de() {
create_logger();
let res: Vec<_> = tokio::time::timeout(
Duration::from_secs(5),
resolve("splamy.de".into()).map(|r| r.unwrap()).collect(),
)
.await
.expect("Resolve takes unacceptable long");
assert!(res.contains(&format!("37.120.179.68:{}", DEFAULT_PORT).parse().unwrap()));
}
#[tokio::test]
async fn resolve_loc() {
create_logger();
let res: Vec<_> = resolve("loc".into()).map(|r| r.unwrap()).collect().await;
assert!(res.contains(&format!("127.0.0.1:{}", DEFAULT_PORT).parse().unwrap()));
}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/src/audio.rs | tsclientlib/src/audio.rs | //! Handle receiving audio.
//!
//! The [`AudioHandler`] collects all incoming audio packets and queues them per
//! client. It decodes the audio, handles out-of-order packets and missing
//! packets. It automatically adjusts the queue length based on the jitter of
//! incoming packets.
use std::cmp::Reverse;
use std::collections::{HashMap, VecDeque};
use std::convert::TryInto;
use std::fmt::Debug;
use std::hash::Hash;
use audiopus::coder::Decoder;
#[cfg(feature = "audiopus-unstable")]
use audiopus::coder::GenericCtl;
use audiopus::{Channels, SampleRate, packet};
use thiserror::Error;
use tracing::{debug, info_span, trace, warn, Span};
use tsproto_packets::packets::{AudioData, CodecType, InAudioBuf};
use crate::ClientId;
const SAMPLE_RATE: SampleRate = SampleRate::Hz48000;
const CHANNELS: Channels = Channels::Stereo;
const CHANNEL_NUM: usize = 2;
/// If this amount of packets is lost consecutively, we assume the stream stopped.
const MAX_PACKET_LOSSES: usize = 3;
/// Store the buffer sizes for the last `LAST_BUFFER_SIZE_COUNT` packets.
const LAST_BUFFER_SIZE_COUNT: u8 = 255;
/// The amount of samples to maximally buffer. Equivalent to 0.5 s.
const MAX_BUFFER_SIZE: usize = 48_000 / 2;
/// Maximum number of packets in the queue.
const MAX_BUFFER_PACKETS: usize = 50;
/// Buffer for maximal 0.5 s without playing anything.
const MAX_BUFFER_TIME: usize = 48_000 / 2;
/// Duplicate or remove every `step` sample when speeding-up.
const SPEED_CHANGE_STEPS: usize = 100;
/// The usual amount of samples in a frame.
///
/// Use 48 kHz, 20 ms frames (50 per second) and mono data (1 channel).
/// This means 1920 samples and 7.5 kiB.
const USUAL_FRAME_SIZE: usize = 48000 / 50;
type Result<T> = std::result::Result<T, Error>;
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum Error {
#[error("Failed to create opus decoder: {0}")]
CreateDecoder(#[source] audiopus::Error),
#[error("Opus decode failed: {error} (packet: {packet:?})")]
Decode {
#[source]
error: audiopus::Error,
packet: Option<Vec<u8>>,
},
#[error("Get duplicate packet id {0}")]
Duplicate(u16),
#[error("Failed to get packet samples: {0}")]
GetPacketSample(#[source] audiopus::Error),
#[error("Audio queue is full, dropping")]
QueueFull,
#[error("Audio packet is too late, dropping (wanted {wanted}, got {got})")]
TooLate { wanted: u16, got: u16 },
#[error("Packet has too many samples")]
TooManySamples,
#[error("Only opus audio is supported, ignoring {0:?}")]
UnsupportedCodec(CodecType),
}
#[derive(Clone, Debug)]
struct SlidingWindowMinimum<T: Copy + Default + Ord> {
/// How long a value stays in the sliding window.
size: u8,
/// This is a sliding window minimum, it contains
/// `(insertion time, value)`.
///
/// When we insert a value, we can remove all bigger sample counts,
/// thus the queue always stays sorted with the minimum at the front
/// and the maximum at the back (latest entry).
///
/// Provides amortized O(1) minimum.
/// Source: https://people.cs.uct.ac.za/~ksmith/articles/sliding_window_minimum.html#sliding-window-minimum-algorithm
queue: VecDeque<(u8, T)>,
/// The current insertion time.
cur_time: u8,
}
#[derive(Debug)]
struct QueuePacket {
packet: InAudioBuf,
samples: usize,
id: u16,
}
/// A queue for audio packets for one audio stream.
pub struct AudioQueue {
span: Span,
decoder: Decoder,
pub volume: f32,
/// The id of the next packet that should be decoded.
///
/// Used to check for packet loss.
next_id: u16,
/// If the last packet was a whisper packet.
whispering: bool,
packet_buffer: VecDeque<QueuePacket>,
/// Amount of samples in the `packet_buffer`.
packet_buffer_samples: usize,
/// Temporary buffer that contains the samples of one decoded packet.
decoded_buffer: Vec<f32>,
/// The current position in the `decoded_buffer`.
decoded_pos: usize,
/// The number of samples in the last packet.
last_packet_samples: usize,
/// The last `packet_loss_num` packet decodes were a loss.
packet_loss_num: usize,
/// The amount of samples to buffer until this queue is ready to play.
buffering_samples: usize,
/// The amount of packets in the buffer when a packet was decoded.
///
/// Uses the amount of samples in the `packet_buffer` / `USUAL_PACKET_SAMPLES`.
/// Used to expand or reduce the buffer.
last_buffer_size_min: SlidingWindowMinimum<u8>,
last_buffer_size_max: SlidingWindowMinimum<Reverse<u8>>,
/// Buffered for this duration.
buffered_for_samples: usize,
}
/// Handles incoming audio, has one [`AudioQueue`] per sending client.
pub struct AudioHandler<Id: Clone + Debug + Eq + Hash + PartialEq = ClientId> {
queues: HashMap<Id, AudioQueue>,
/// Buffer this amount of samples for new queues before starting to play.
///
/// Updated when a new queue gets added.
avg_buffer_samples: usize,
}
impl<T: Copy + Default + Ord> SlidingWindowMinimum<T> {
fn new(size: u8) -> Self { Self { size, queue: Default::default(), cur_time: 0 } }
fn push(&mut self, value: T) {
while self.queue.back().map(|(_, s)| *s >= value).unwrap_or_default() {
self.queue.pop_back();
}
let i = self.cur_time;
self.queue.push_back((i, value));
while self
.queue
.front()
.map(|(i, _)| self.cur_time.wrapping_sub(*i) >= self.size)
.unwrap_or_default()
{
self.queue.pop_front();
}
self.cur_time = self.cur_time.wrapping_add(1);
}
fn get_min(&self) -> T { self.queue.front().map(|(_, s)| *s).unwrap_or_default() }
}
impl AudioQueue {
fn new(packet: InAudioBuf) -> Result<Self> {
let data = packet.data().data();
let opus_packet = data.data().try_into().map_err(Error::GetPacketSample)?;
let last_packet_samples =
packet::nb_samples(opus_packet, SAMPLE_RATE).map_err(Error::GetPacketSample)?;
if last_packet_samples > MAX_BUFFER_SIZE {
return Err(Error::TooManySamples);
}
let last_packet_samples = last_packet_samples * CHANNEL_NUM;
let whispering = matches!(data, AudioData::S2CWhisper { .. });
let mut decoder = Decoder::new(SAMPLE_RATE, CHANNELS).map_err(Error::CreateDecoder)?;
// Enable DRED and NoLACE, ignore errors e.g. if unsupported
#[cfg(feature = "audiopus-unstable")]
if let Err(error) = decoder.set_complexity(7) {
debug!(%error, "Failed setting opus decoder complexity, ignoring");
}
let mut res = Self {
span: Span::current(),
decoder,
volume: 1.0,
next_id: data.id(),
whispering,
packet_buffer: Default::default(),
packet_buffer_samples: 0,
decoded_buffer: Default::default(),
decoded_pos: 0,
last_packet_samples,
packet_loss_num: 0,
buffering_samples: 0,
last_buffer_size_min: SlidingWindowMinimum::new(LAST_BUFFER_SIZE_COUNT),
last_buffer_size_max: SlidingWindowMinimum::<Reverse<u8>>::new(LAST_BUFFER_SIZE_COUNT),
buffered_for_samples: 0,
};
res.add_buffer_size(0);
res.add_packet(packet)?;
Ok(res)
}
pub fn get_decoder(&self) -> &Decoder { &self.decoder }
pub fn is_whispering(&self) -> bool { self.whispering }
/// Size is in samples.
fn add_buffer_size(&mut self, size: usize) {
if let Ok(size) = (size / USUAL_FRAME_SIZE).try_into() {
self.last_buffer_size_min.push(size);
self.last_buffer_size_max.push(Reverse(size));
} else {
warn!(parent: &self.span, size, "Failed to put amount of packets into an u8");
}
}
/// The approximate deviation of the buffer size.
fn get_deviation(&self) -> u8 {
let min = self.last_buffer_size_min.get_min();
let max = self.last_buffer_size_max.get_min();
max.0 - min
}
fn add_packet(&mut self, packet: InAudioBuf) -> Result<()> {
let _span = self.span.enter();
if self.packet_buffer.len() >= MAX_BUFFER_PACKETS {
return Err(Error::QueueFull);
}
let samples;
if packet.data().data().data().len() <= 1 {
// End of stream
samples = 0;
} else {
let opus_packet =
packet.data().data().data().try_into().map_err(Error::GetPacketSample)?;
samples =
packet::nb_samples(opus_packet, SAMPLE_RATE).map_err(Error::GetPacketSample)?;
if samples > MAX_BUFFER_SIZE {
return Err(Error::TooManySamples);
}
}
let id = packet.data().data().id();
let packet = QueuePacket { packet, samples, id };
if id.wrapping_sub(self.next_id) > MAX_BUFFER_PACKETS as u16 {
return Err(Error::TooLate { wanted: self.next_id, got: id });
}
// Put into first spot where the id is smaller
let i = self.packet_buffer.len()
- self
.packet_buffer
.iter()
.enumerate()
.rev()
.take_while(|(_, p)| p.id.wrapping_sub(id) <= MAX_BUFFER_PACKETS as u16)
.count();
// Check for duplicate packet
if let Some(p) = self.packet_buffer.get(i) {
if p.id == packet.id {
return Err(Error::Duplicate(p.id));
}
}
trace!("Insert packet {} at {}", id, i);
let last_id = self.packet_buffer.back().map(|p| p.id.wrapping_add(1)).unwrap_or(id);
if last_id <= id {
self.buffering_samples = self.buffering_samples.saturating_sub(samples);
// Reduce buffering counter by lost packets if there are some
self.buffering_samples = self
.buffering_samples
.saturating_sub(usize::from(id - last_id) * self.last_packet_samples);
}
self.packet_buffer_samples += packet.samples;
self.packet_buffer.insert(i, packet);
Ok(())
}
fn decode_packet(&mut self, packet: Option<&QueuePacket>, fec: bool) -> Result<()> {
let _span = self.span.clone().entered();
trace!(has_packet = packet.is_some(), fec, "Decoding packet");
let packet_data;
let len;
if let Some(p) = packet {
packet_data =
Some(p.packet.data().data().data().try_into().map_err(Error::GetPacketSample)?);
len = p.samples;
self.whispering = matches!(p.packet.data().data(), AudioData::S2CWhisper { .. });
} else {
packet_data = None;
len = self.last_packet_samples;
}
self.packet_loss_num += 1;
self.decoded_buffer.resize(self.decoded_pos + len * CHANNEL_NUM, 0.0);
let len = self
.decoder
.decode_float(
packet_data,
(&mut self.decoded_buffer[self.decoded_pos..])
.try_into()
.map_err(Error::GetPacketSample)?,
fec,
)
.map_err(|e| Error::Decode {
error: e,
packet: packet.map(|p| p.packet.raw_data().to_vec()),
})?;
self.last_packet_samples = len;
self.decoded_buffer.truncate(self.decoded_pos + len * CHANNEL_NUM);
self.decoded_pos += len * CHANNEL_NUM;
// Update packet_loss_num
if packet.is_some() && !fec {
self.packet_loss_num = 0;
}
// Update last_buffer_size
let mut count = self.packet_buffer_samples;
if let Some(last) = self.packet_buffer.back() {
// Lost packets
trace!(
last.id,
next_id = self.next_id,
first_id = self.packet_buffer.front().unwrap().id,
buffer_len = self.packet_buffer.len(),
"Ids"
);
count += (usize::from(last.id.wrapping_sub(self.next_id)) + 1
- self.packet_buffer.len())
* self.last_packet_samples;
}
self.add_buffer_size(count);
Ok(())
}
/// Decode data and return the requested length of buffered data.
///
/// Returns `true` in the second return value when the stream ended,
/// `false` when it continues normally.
pub fn get_next_data(&mut self, len: usize) -> Result<(&[f32], bool)> {
let _span = self.span.clone().entered();
if self.buffering_samples > 0 {
if self.buffered_for_samples >= MAX_BUFFER_TIME {
self.buffering_samples = 0;
self.buffered_for_samples = 0;
trace!(
buffered_for_samples = self.buffered_for_samples,
buffering_samples = self.buffering_samples,
"Buffered for too long"
);
} else {
self.buffered_for_samples += len;
trace!(
buffered_for_samples = self.buffered_for_samples,
buffering_samples = self.buffering_samples,
"Buffering"
);
return Ok((&[], false));
}
}
// Need to refill buffer
if self.decoded_pos < self.decoded_buffer.len() {
if self.decoded_pos > 0 {
self.decoded_buffer.drain(..self.decoded_pos);
self.decoded_pos = 0;
}
} else {
self.decoded_buffer.clear();
self.decoded_pos = 0;
}
while self.decoded_buffer.len() < len {
trace!(
decoded_buffer = self.decoded_buffer.len(),
decoded_pos = self.decoded_pos,
len,
"get_next_data"
);
// Decode a packet
if let Some(packet) = self.packet_buffer.pop_front() {
if packet.packet.data().data().data().len() <= 1 {
// End of stream
return Ok((&self.decoded_buffer, true));
}
self.packet_buffer_samples -= packet.samples;
let cur_id = self.next_id;
self.next_id = self.next_id.wrapping_add(1);
if packet.id != cur_id {
debug_assert!(
packet.id.wrapping_sub(cur_id) < MAX_BUFFER_PACKETS as u16,
"Invalid packet queue state: {} < {}",
packet.id,
cur_id
);
// Packet loss
debug!(need = cur_id, have = packet.id, "Audio packet loss");
if packet.id == self.next_id {
// Can use forward-error-correction
self.decode_packet(Some(&packet), true)?;
} else {
self.decode_packet(None, false)?;
}
self.packet_buffer_samples += packet.samples;
self.packet_buffer.push_front(packet);
} else {
self.decode_packet(Some(&packet), false)?;
}
} else {
debug!("No packets in queue");
// Packet loss or end of stream
self.decode_packet(None, false)?;
}
if self.last_packet_samples == 0 {
break;
}
// Check if we should speed-up playback
let min = self.last_buffer_size_min.get_min();
let dev = self.get_deviation();
if min > (MAX_BUFFER_SIZE / USUAL_FRAME_SIZE) as u8 {
debug!(min, "Truncating buffer");
// Throw out all but min samples
let mut keep_samples = 0;
let keep = self
.packet_buffer
.iter()
.rev()
.take_while(|p| {
keep_samples += p.samples;
keep_samples < usize::from(min) + USUAL_FRAME_SIZE
})
.count();
let len = self.packet_buffer.len() - keep;
self.packet_buffer.drain(..len);
self.packet_buffer_samples = self.packet_buffer.iter().map(|p| p.samples).sum();
if let Some(p) = self.packet_buffer.front() {
self.next_id = p.id;
}
} else if min > dev {
// Speed-up
debug!(
min,
cur_packet_count = self.packet_buffer.len(),
last_packet_samples = self.last_packet_samples,
dev,
"Speed-up buffer"
);
let start = self.decoded_buffer.len() - self.last_packet_samples * CHANNEL_NUM;
for i in 0..(self.last_packet_samples / SPEED_CHANGE_STEPS) {
let i = start + i * (SPEED_CHANGE_STEPS - 1) * CHANNEL_NUM;
self.decoded_buffer.drain(i..(i + CHANNEL_NUM));
}
}
}
self.decoded_pos = len;
Ok((&self.decoded_buffer[..len], false))
}
}
impl<Id: Clone + Debug + Eq + Hash + PartialEq> Default for AudioHandler<Id> {
fn default() -> Self { Self { queues: Default::default(), avg_buffer_samples: 0 } }
}
impl<Id: Clone + Debug + Eq + Hash + PartialEq> AudioHandler<Id> {
	/// Creates an empty handler; equivalent to [`Default::default`].
	pub fn new() -> Self { Default::default() }
	/// Delete all queues
	pub fn reset(&mut self) { self.queues.clear(); }
	/// Borrow the per-client queues (read-only).
	pub fn get_queues(&self) -> &HashMap<Id, AudioQueue> { &self.queues }
	/// Borrow the per-client queues mutably.
	pub fn get_mut_queues(&mut self) -> &mut HashMap<Id, AudioQueue> { &mut self.queues }
	/// `buf` is not cleared before filling it.
	///
	/// Returns the clients that are not talking anymore.
	pub fn fill_buffer(&mut self, buf: &mut [f32]) -> Vec<Id> {
		self.fill_buffer_with_proc(buf, |_, _| {})
	}
	/// `buf` is not cleared before filling it.
	///
	/// Same as [`fill_buffer`] but before merging a queue into the output buffer, a preprocessor
	/// function is called. The queue volume is applied after calling the preprocessor.
	///
	/// Returns the clients that are not talking anymore.
	pub fn fill_buffer_with_proc<F: FnMut(&Id, &[f32])>(
		&mut self, buf: &mut [f32], mut handle: F,
	) -> Vec<Id> {
		trace!(len = buf.len(), "Filling audio buffer");
		let mut to_remove = Vec::new();
		for (id, queue) in self.queues.iter_mut() {
			// Drop talkers that lost too many packets in a row.
			if queue.packet_loss_num >= MAX_PACKET_LOSSES {
				debug!(packet_loss_num = queue.packet_loss_num, "Removing talker");
				to_remove.push(id.clone());
				continue;
			}
			let vol = queue.volume;
			match queue.get_next_data(buf.len()) {
				Err(error) => {
					// Decoding failed for this queue; skip it this round.
					warn!(%error, "Failed to decode audio packet");
				}
				Ok((r, is_end)) => {
					// Preprocess first, then mix into `buf` with the queue
					// volume applied.
					handle(id, r);
					for i in 0..r.len() {
						buf[i] += r[i] * vol;
					}
					if is_end {
						to_remove.push(id.clone());
					}
				}
			}
		}
		for id in &to_remove {
			self.queues.remove(id);
		}
		to_remove
	}
	/// Add a packet to the audio queue.
	///
	/// If a new client started talking, returns the id of this client.
	pub fn handle_packet(&mut self, id: Id, packet: InAudioBuf) -> Result<Option<Id>> {
		// A payload of at most one byte marks the end of a stream.
		let empty = packet.data().data().data().len() <= 1;
		let codec = packet.data().data().codec();
		if codec != CodecType::OpusMusic && codec != CodecType::OpusVoice {
			return Err(Error::UnsupportedCodec(codec));
		}
		if let Some(queue) = self.queues.get_mut(&id) {
			queue.add_packet(packet)?;
			Ok(None)
		} else {
			// Do not create a new queue just for an end-of-stream marker.
			if empty {
				return Ok(None);
			}
			// NOTE(review): this span is created but never entered (no
			// `.entered()`), so it likely does not apply to the log calls
			// below — confirm intent.
			let _span = info_span!("audio queue", client = ?id);
			trace!("Adding talker");
			let mut queue = AudioQueue::new(packet)?;
			if !self.queues.is_empty() {
				// Update avg_buffer_samples
				self.avg_buffer_samples = USUAL_FRAME_SIZE
					+ self
						.queues
						.values()
						.map(|q| usize::from(q.last_buffer_size_min.get_min()))
						.sum::<usize>() / self.queues.len();
			}
			queue.buffering_samples = self.avg_buffer_samples;
			self.queues.insert(id.clone(), queue);
			Ok(Some(id))
		}
	}
}
#[cfg(test)]
mod test {
	use anyhow::{bail, Result};
	use audiopus::coder::Encoder;
	use tsproto_packets::packets::{Direction, OutAudio};

	use super::*;
	use crate::tests::create_logger;

	/// Scripted steps for the [`simulate`] test driver.
	enum SimulateAction {
		CreateEncoder,
		/// Create packet with id.
		///
		/// The `bool` is `false` if packet handling should fail.
		ReceivePacket(u16, bool),
		ReceiveRaw(u16, Vec<u8>),
		/// Fetch audio of this sample count and expect a certain packet id.
		FillBuffer(usize, Option<u16>),
		/// Custom check
		Check(Box<dyn FnOnce(&AudioHandler<ClientId>)>),
	}

	/// Helper function to check an audio packet.
	#[allow(dead_code)]
	fn check_packet(data: &[u8]) -> Result<()> {
		create_logger();
		let mut handler = AudioHandler::<ClientId>::new();
		let id = ClientId(0);
		let mut buf = vec![0.0; 48_000 / 100 * 2];
		// Sometimes, TS sends short, non-opus packets
		let packet =
			OutAudio::new(&AudioData::S2C { id: 30, codec: CodecType::OpusMusic, from: 0, data });
		let input = InAudioBuf::try_new(Direction::S2C, packet.into_vec()).unwrap();
		handler.handle_packet(id, input)?;
		handler.fill_buffer(&mut buf);
		handler.fill_buffer(&mut buf);
		Ok(())
	}

	/// Run a list of [`SimulateAction`]s against a fresh [`AudioHandler`].
	fn simulate(actions: Vec<SimulateAction>) -> Result<()> {
		let mut encoder = None;
		let mut opus_output = [0; 1275]; // Max size for an opus packet
		let id = ClientId(0);
		create_logger();
		let mut handler = AudioHandler::<ClientId>::new();
		for a in actions {
			// Dump the current queue state for debugging failed runs.
			println!("\nCurrent state");
			for q in &handler.queues {
				print!("Queue {:?}:", q.0);
				for p in &q.1.packet_buffer {
					print!(" {:?},", p);
				}
				println!();
			}
			match a {
				SimulateAction::CreateEncoder => {
					encoder = Some(Encoder::new(
						audiopus::SampleRate::Hz48000,
						audiopus::Channels::Mono,
						audiopus::Application::Voip,
					)?);
				}
				SimulateAction::ReceivePacket(i, success) => {
					let e = encoder.as_mut().unwrap();
					// Encode a constant signal derived from the packet id.
					let data = vec![i as f32; USUAL_FRAME_SIZE];
					let len = e.encode_float(&data, &mut opus_output[..])?;
					let packet = OutAudio::new(&AudioData::S2C {
						id: i,
						codec: CodecType::OpusMusic,
						from: 0,
						data: &opus_output[..len],
					});
					let input = InAudioBuf::try_new(Direction::S2C, packet.into_vec()).unwrap();
					if handler.handle_packet(id, input).is_ok() != success {
						bail!("handle_packet returned {:?} but expected {:?}", !success, success);
					}
				}
				SimulateAction::ReceiveRaw(i, data) => {
					// Inject an arbitrary (possibly non-opus) payload.
					let packet = OutAudio::new(&AudioData::S2C {
						id: i,
						codec: CodecType::OpusMusic,
						from: 0,
						data: &data,
					});
					let input = InAudioBuf::try_new(Direction::S2C, packet.into_vec()).unwrap();
					let _ = handler.handle_packet(id, input);
				}
				SimulateAction::FillBuffer(size, expect) => {
					let mut buf = vec![0.0; size * 2]; // Stereo
					// Compare the front packet id before and after filling so
					// we can tell which packet got consumed.
					let cur_packet_id =
						handler.queues.get(&id).and_then(|q| q.packet_buffer.front()).map(|p| p.id);
					handler.fill_buffer(&mut buf);
					let next_packet_id =
						handler.queues.get(&id).and_then(|q| q.packet_buffer.front()).map(|p| p.id);
					if expect.is_some() {
						assert_eq!(expect, cur_packet_id);
						assert_ne!(cur_packet_id, next_packet_id);
					}
					/*if let Some(e) = expect {
						let e = *e as f32;
						for b in &buf {
							if (*b - e).abs() > 0.01 {
								bail!("Buffer contains wrong value, \
									expected {}: {:?}", e, buf);
							}
						}
					}*/
				}
				SimulateAction::Check(f) => {
					f(&handler);
				}
			}
		}
		Ok(())
	}

	#[test]
	fn sliding_window_minimum() {
		// (pushed value, expected minimum after the push)
		let data = &[
			(5, 5),
			(6, 5),
			(3, 3),
			(4, 3),
			(6, 3),
			(5, 3),
			(6, 3),
			(6, 4),
			(6, 5),
			(6, 5),
			(6, 6),
			(7, 6),
			(5, 5),
		];
		let mut window = SlidingWindowMinimum::new(5);
		assert_eq!(window.get_min(), 0);
		for (i, (val, min)) in data.iter().enumerate() {
			println!("{:?}", window);
			window.push(*val);
			assert_eq!(window.get_min(), *min, "Failed in iteration {} ({:?})", i, window);
		}
	}

	#[test]
	fn sliding_window_minimum_full() {
		let mut window = SlidingWindowMinimum::new(255);
		window.push(1);
		assert_eq!(window.get_min(), 1);
		for _ in 0..254 {
			window.push(2);
		}
		// The initial 1 is still inside the window.
		assert_eq!(window.get_min(), 1);
		window.push(2);
		// Now the 1 fell out of the window.
		assert_eq!(window.get_min(), 2);
	}

	#[test]
	fn packets_wrapping() {
		create_logger();
		let mut handler = AudioHandler::<ClientId>::new();
		let id = ClientId(0);
		let mut buf = vec![0.0; 48_000 / 100 * 2];
		// Packet ids cross the u16 boundary.
		for i in 0..100 {
			let packet = OutAudio::new(&AudioData::S2C {
				id: 65_500u16.wrapping_add(i),
				codec: CodecType::OpusMusic,
				from: 0,
				data: &[0, 0, 0, 0, 0, 0, 0],
			});
			let input = InAudioBuf::try_new(Direction::S2C, packet.into_vec()).unwrap();
			handler.handle_packet(id, input).unwrap();
			if i > 5 {
				handler.fill_buffer(&mut buf);
			}
		}
	}

	#[quickcheck_macros::quickcheck]
	fn short_packet_quickcheck(data: Vec<Vec<u8>>) {
		create_logger();
		let mut handler = AudioHandler::<ClientId>::new();
		let id = ClientId(0);
		let mut buf = vec![0.0; 48_000 / 100 * 2];
		for p in data {
			// Sometimes, TS sends short, non-opus packets
			let packet = OutAudio::new(&AudioData::S2C {
				id: 30,
				codec: CodecType::OpusMusic,
				from: 0,
				data: &p,
			});
			let input = InAudioBuf::try_new(Direction::S2C, packet.into_vec()).unwrap();
			let _ = handler.handle_packet(id, input);
			handler.fill_buffer(&mut buf);
			handler.fill_buffer(&mut buf);
		}
	}

	#[test]
	fn packets_wrapping2() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		for i in 0..100 {
			let i = 65_500u16.wrapping_add(i);
			a.push(SimulateAction::ReceivePacket(i, true));
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, Some(i)));
		}
		for _ in 0..4 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		simulate(a)
	}

	#[test]
	fn silence() -> Result<()> {
		// Filling without ever receiving a packet must not create queues.
		let mut a = vec![SimulateAction::CreateEncoder];
		for _ in 0..100 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		simulate(a)
	}

	#[test]
	fn reversed() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		for i in 0..5 {
			a.push(SimulateAction::ReceivePacket(i, true));
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, Some(i)));
		}
		// Packets older than the current position must be rejected.
		a.push(SimulateAction::ReceivePacket(4, false));
		a.push(SimulateAction::ReceivePacket(3, false));
		for _ in 0..4 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		simulate(a)
	}

	#[test]
	fn duplicate() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		a.push(SimulateAction::ReceivePacket(0, true));
		// The same packet id twice must be rejected.
		a.push(SimulateAction::ReceivePacket(0, false));
		a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, Some(0)));
		a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		simulate(a)
	}

	#[test]
	fn big_whole() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		for i in 27120..27124 {
			a.push(SimulateAction::ReceivePacket(i, true));
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, Some(i)));
		}
		a.push(SimulateAction::ReceiveRaw(27124, vec![2]));
		for _ in 0..10 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		// Continue after a large gap in packet ids.
		for i in 27339..27349 {
			a.push(SimulateAction::ReceivePacket(i, true));
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		for _ in 0..4 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		simulate(a)
	}

	#[test]
	fn end_packet() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		for i in 0..10 {
			a.push(SimulateAction::ReceivePacket(i, true));
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, Some(i)));
		}
		// An empty payload signals the end of the stream.
		a.push(SimulateAction::ReceiveRaw(10, vec![]));
		for _ in 0..4 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		simulate(a)
	}

	#[test]
	fn packet_loss() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		a.push(SimulateAction::ReceivePacket(50, true));
		// Ids 51 and 52 are lost.
		a.push(SimulateAction::ReceivePacket(53, true));
		for _ in 0..8 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		simulate(a)
	}

	#[test]
	fn packet_wrapping_loss() -> Result<()> {
		let mut a = vec![SimulateAction::CreateEncoder];
		// Id 65535 is lost right at the wrapping point.
		a.push(SimulateAction::ReceivePacket(65534, true));
		a.push(SimulateAction::ReceivePacket(0, true));
		for _ in 0..7 {
			a.push(SimulateAction::FillBuffer(USUAL_FRAME_SIZE, None));
		}
		a.push(SimulateAction::Check(Box::new(|h| assert!(h.queues.is_empty()))));
		simulate(a)
	}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/simple-sync.rs | tsclientlib/examples/simple-sync.rs | use anyhow::Result;
use clap::Parser;
use futures::prelude::*;
use tokio::time::{self, Duration};
use tsclientlib::sync::SyncConnection;
use tsclientlib::{Connection, DisconnectOptions, Identity};
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The address of the server to connect to
#[arg(short, long, default_value = "localhost")]
address: String,
/// Print the content of all packets
///
/// 0. Print nothing
/// 1. Print command string
/// 2. Print packets
/// 3. Print udp packets
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
}
// Thin tokio entry point; all logic lives in `real_main`.
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
/// Connect with the sync API, print the server welcome message and disconnect.
async fn real_main() -> Result<()> {
	tracing_subscriber::fmt::init();
	// Parse command line options
	let args = Args::parse();
	let con_config = Connection::build(args.address)
		.log_commands(args.verbose >= 1)
		.log_packets(args.verbose >= 2)
		.log_udp_packets(args.verbose >= 3);
	// Optionally set the key of this client, otherwise a new key is generated.
	let id = Identity::new_from_str(
		"MG0DAgeAAgEgAiAIXJBlj1hQbaH0Eq0DuLlCmH8bl+veTAO2+\
		 k9EQjEYSgIgNnImcmKo7ls5mExb6skfK2Tw+u54aeDr0OP1ITs\
		 C/50CIA8M5nmDBnmDM/gZ//4AAAAAAAAAAAAAAAAAAAAZRzOI").unwrap();
	let con_config = con_config.identity(id);
	// Connect
	let con: SyncConnection = con_config.connect()?.into();
	let mut handle = con.get_handle();
	// Do event handling in another thread
	tokio::spawn(con.for_each(|_| future::ready(())));
	handle.wait_until_connected().await?;
	// Fetch the welcome message out of the connection state.
	let welcome_message = handle
		.with_connection(|con| Result::<_>::Ok(con.get_state()?.server.welcome_message.clone()))
		.await??;
	println!("Server welcome message: {}", sanitize(&welcome_message));
	// Wait some time
	time::sleep(Duration::from_secs(1)).await;
	// Disconnect
	handle.disconnect(DisconnectOptions::new()).await?;
	Ok(())
}
/// Strip a string down to a whitelisted character set.
///
/// Keeps alphanumeric characters plus a small set of harmless punctuation and
/// whitespace; every other character is dropped.
fn sanitize(s: &str) -> String {
	const ALLOWED: [char; 15] =
		[' ', '\t', '.', ':', '-', '_', '"', '\'', '/', '(', ')', '[', ']', '{', '}'];
	let mut out = String::with_capacity(s.len());
	for c in s.chars() {
		if c.is_alphanumeric() || ALLOWED.contains(&c) {
			out.push(c);
		}
	}
	out
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/sync.rs | tsclientlib/examples/sync.rs | use anyhow::Result;
use clap::Parser;
use futures::prelude::*;
use tokio::time::{self, Duration};
use tsclientlib::prelude::*;
use tsclientlib::sync::SyncConnection;
use tsclientlib::{Connection, DisconnectOptions, Identity, StreamItem};
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The address of the server to connect to
#[arg(short, long, default_value = "localhost")]
address: String,
/// Print the content of all packets
///
/// 0. Print nothing
/// 1. Print command string
/// 2. Print packets
/// 3. Print udp packets
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
}
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
async fn real_main() -> Result<()> {
tracing_subscriber::fmt::init();
// Parse command line options
let args = Args::parse();
let con_config = Connection::build(args.address)
.log_commands(args.verbose >= 1)
.log_packets(args.verbose >= 2)
.log_udp_packets(args.verbose >= 3);
// Optionally set the key of this client, otherwise a new key is generated.
let id = Identity::new_from_str(
"MG0DAgeAAgEgAiAIXJBlj1hQbaH0Eq0DuLlCmH8bl+veTAO2+\
k9EQjEYSgIgNnImcmKo7ls5mExb6skfK2Tw+u54aeDr0OP1ITs\
C/50CIA8M5nmDBnmDM/gZ//4AAAAAAAAAAAAAAAAAAAAZRzOI").unwrap();
let con_config = con_config.identity(id);
// Connect
let mut con = con_config.connect()?;
let r = con
.events()
.try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
.next()
.await;
if let Some(r) = r {
r?;
}
let sync_con: SyncConnection = con.into();
let mut con = sync_con.get_handle();
tokio::spawn(sync_con.for_each(|_| future::ready(())));
con.with_connection(move |mut con| {
let state = con.get_state()?;
state.server.send_textmessage("Hello there").send(&mut con)?;
Result::<_>::Ok(())
})
.await??;
// Wait some time
time::sleep(Duration::from_secs(1)).await;
// Disconnect
con.disconnect(DisconnectOptions::new()).await?;
Ok(())
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/audio-latency.rs | tsclientlib/examples/audio-latency.rs | use anyhow::Result;
use clap::Parser;
use tokio::sync::mpsc;
use tokio::task::LocalSet;
use tracing::{debug, info};
use tsclientlib::ClientId;
use tsproto_packets::packets::{Direction, InAudioBuf};
mod audio_utils;
/// Identifier for a connection; used as part of the playback key.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ConnectionId(u64);
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The volume for the capturing
#[arg(default_value_t = 1.0)]
volume: f32,
}
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
/// Capture microphone audio and play it straight back through the local
/// speakers (presumably to hear the capture -> playback latency — the example
/// never connects to a server).
async fn real_main() -> Result<()> {
	tracing_subscriber::fmt::init();
	// Parse command line options
	let args = Args::parse();
	let con_id = ConnectionId(0);
	// NOTE(review): `local_set` is created and tasks are spawned on it, but it
	// is never driven (`run_until`/awaited) — confirm those tasks actually run.
	let local_set = LocalSet::new();
	let audiodata = audio_utils::start(&local_set)?;
	// Channel over which captured-and-encoded packets arrive.
	let (send, mut recv) = mpsc::channel(1);
	{
		let mut a2t = audiodata.a2ts.lock().unwrap();
		a2t.set_listener(send);
		a2t.set_volume(args.volume);
		a2t.set_playing(true);
	}
	let t2a = audiodata.ts2a.clone();
	loop {
		// Wait for ctrl + c
		tokio::select! {
			send_audio = recv.recv() => {
				if let Some(packet) = send_audio {
					let from = ClientId(0);
					let mut t2a = t2a.lock().unwrap();
					// Re-parse the outgoing packet as an incoming one and feed
					// it into the local playback path.
					let in_audio = InAudioBuf::try_new(Direction::C2S, packet.into_vec()).unwrap();
					if let Err(error) = t2a.play_packet((con_id, from), in_audio) {
						debug!(%error, "Failed to play packet");
					}
				} else {
					info!("Audio sending stream was canceled");
					break;
				}
			}
			_ = tokio::signal::ctrl_c() => { break; }
		};
	}
	Ok(())
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/simple.rs | tsclientlib/examples/simple.rs | use anyhow::{bail, Result};
use clap::Parser;
use futures::prelude::*;
use tokio::time::{self, Duration};
use tsclientlib::{Connection, DisconnectOptions, Identity, StreamItem};
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The address of the server to connect to
#[arg(short, long, default_value = "localhost")]
address: String,
/// Print the content of all packets
///
/// 0. Print nothing
/// 1. Print command string
/// 2. Print packets
/// 3. Print udp packets
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
}
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
async fn real_main() -> Result<()> {
tracing_subscriber::fmt::init();
// Parse command line options
let args = Args::parse();
let con_config = Connection::build(args.address)
.log_commands(args.verbose >= 1)
.log_packets(args.verbose >= 2)
.log_udp_packets(args.verbose >= 3);
// Optionally set the key of this client, otherwise a new key is generated.
let id = Identity::new_from_str(
"MG0DAgeAAgEgAiAIXJBlj1hQbaH0Eq0DuLlCmH8bl+veTAO2+\
k9EQjEYSgIgNnImcmKo7ls5mExb6skfK2Tw+u54aeDr0OP1ITs\
C/50CIA8M5nmDBnmDM/gZ//4AAAAAAAAAAAAAAAAAAAAZRzOI").unwrap();
let con_config = con_config.identity(id);
// Connect
let mut con = con_config.connect()?;
let r = con
.events()
.try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
.next()
.await;
if let Some(r) = r {
r?;
}
println!("Server welcome message: {}", sanitize(&con.get_state()?.server.welcome_message));
// Wait some time
let mut events = con.events().try_filter(|_| future::ready(false));
tokio::select! {
_ = time::sleep(Duration::from_secs(1)) => {}
_ = events.next() => {
bail!("Disconnected");
}
};
drop(events);
// Disconnect
con.disconnect(DisconnectOptions::new())?;
con.events().for_each(|_| future::ready(())).await;
Ok(())
}
/// Only retain a certain set of characters.
///
/// Alphanumeric characters and a fixed list of punctuation/whitespace pass
/// through; anything else is filtered out.
fn sanitize(s: &str) -> String {
	fn keep(c: char) -> bool {
		c.is_alphanumeric()
			|| matches!(
				c,
				' ' | '\t' | '.' | ':' | '-' | '_' | '"' | '\'' | '/' | '(' | ')' | '[' | ']'
					| '{' | '}'
			)
	}
	s.chars().filter(|&c| keep(c)).collect()
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/channeltree.rs | tsclientlib/examples/channeltree.rs | use anyhow::{bail, Result};
use clap::Parser;
use futures::prelude::*;
use tokio::time::{self, Duration};
use tsclientlib::data::{self, Channel, Client};
use tsclientlib::prelude::*;
use tsclientlib::{ChannelId, Connection, DisconnectOptions, Identity, StreamItem};
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The address of the server to connect to
#[arg(short, long, default_value = "localhost")]
address: String,
/// Print the content of all packets
///
/// 0. Print nothing
/// 1. Print command string
/// 2. Print packets
/// 3. Print udp packets
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
}
/// Recursively print the subtree of channels below `parent`, indented by
/// `depth` levels, with the clients of each channel listed beneath it.
///
/// `channels` have to be ordered.
fn print_channels(clients: &[&Client], channels: &[&Channel], parent: ChannelId, depth: usize) {
	let indention = " ".repeat(depth);
	for channel in channels {
		if channel.parent == parent {
			println!("{}- {}", indention, channel.name);
			// Print all clients in this channel
			for client in clients {
				if client.channel == channel.id {
					println!("{} {}", indention, client.name);
				}
			}
			// Recurse into the children of this channel.
			print_channels(clients, channels, channel.id, depth + 1);
		}
	}
}
/// Print the server name followed by the whole channel tree with clients.
fn print_channel_tree(con: &data::Connection) {
	let mut channels: Vec<_> = con.channels.values().collect();
	let mut clients: Vec<_> = con.clients.values().collect();
	// This is not the real sorting order, the order is the ChannelId of the
	// channel on top of this one, but we don't care for this example.
	channels.sort_by_key(|ch| ch.order.0);
	// NOTE(review): clients are ordered by talk power — presumably to mimic
	// the client ordering in the TeamSpeak UI; confirm.
	clients.sort_by_key(|c| c.talk_power);
	println!("{}", con.server.name);
	// Start recursion at the virtual root channel 0.
	print_channels(&clients, &channels, ChannelId(0), 0);
}
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
async fn real_main() -> Result<()> {
tracing_subscriber::fmt::init();
// Parse command line options
let args = Args::parse();
let con_config = Connection::build(args.address)
.log_commands(args.verbose >= 1)
.log_packets(args.verbose >= 2)
.log_udp_packets(args.verbose >= 3);
// Optionally set the key of this client, otherwise a new key is generated.
let id = Identity::new_from_str(
"MG0DAgeAAgEgAiAIXJBlj1hQbaH0Eq0DuLlCmH8bl+veTAO2+\
k9EQjEYSgIgNnImcmKo7ls5mExb6skfK2Tw+u54aeDr0OP1ITs\
C/50CIA8M5nmDBnmDM/gZ//4AAAAAAAAAAAAAAAAAAAAZRzOI").unwrap();
let con_config = con_config.identity(id);
// Connect
let mut con = con_config.connect()?;
let r = con
.events()
.try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
.next()
.await;
if let Some(r) = r {
r?;
}
con.get_state().unwrap().server.set_subscribed(true).send(&mut con)?;
// Wait some time
let mut events = con.events().try_filter(|_| future::ready(false));
tokio::select! {
_ = time::sleep(Duration::from_secs(1)) => {}
_ = events.next() => {
bail!("Disconnected");
}
};
drop(events);
// Print channel tree
print_channel_tree(con.get_state().unwrap());
// Change name
{
let state = con.get_state().unwrap();
let name = state.clients[&state.own_client].name.clone();
state
.client_update()
.set_input_muted(true)
.set_name(&format!("{}1", name))
.send(&mut con)?;
}
// Wait some time
let mut events = con.events().try_filter(|_| future::ready(false));
tokio::select! {
_ = time::sleep(Duration::from_secs(3)) => {}
_ = events.next() => {
bail!("Disconnected");
}
};
drop(events);
// Disconnect
con.disconnect(DisconnectOptions::new())?;
con.events().for_each(|_| future::ready(())).await;
Ok(())
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/many.rs | tsclientlib/examples/many.rs | use anyhow::Result;
use clap::Parser;
use futures::prelude::*;
use tokio::time::{self, Duration};
use tracing::error;
use tsclientlib::{Connection, DisconnectOptions, Identity, StreamItem};
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The address of the server to connect to
#[arg(short, long, default_value = "localhost")]
address: String,
/// Print the content of all packets
///
/// 0. Print nothing
/// 1. Print command string
/// 2. Print packets
/// 3. Print udp packets
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
/// How many connections
#[arg()]
count: usize,
}
// Thin tokio entry point; all logic lives in `real_main`.
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
/// Open `args.count` concurrent connections to the same server, keep each one
/// alive for 15 seconds, then disconnect them all.
async fn real_main() -> Result<()> {
	tracing_subscriber::fmt::init();
	// Parse command line options
	let args = Args::parse();
	let con_config = Connection::build(args.address.as_str())
		.log_commands(args.verbose >= 1)
		.log_packets(args.verbose >= 2)
		.log_udp_packets(args.verbose >= 3);
	// Optionally set the key of this client, otherwise a new key is generated.
	let id = Identity::new_from_str(
		"MG0DAgeAAgEgAiAIXJBlj1hQbaH0Eq0DuLlCmH8bl+veTAO2+\
		 k9EQjEYSgIgNnImcmKo7ls5mExb6skfK2Tw+u54aeDr0OP1ITs\
		 C/50CIA8M5nmDBnmDM/gZ//4AAAAAAAAAAAAAAAAAAAAZRzOI").unwrap();
	let con_config = con_config.identity(id);
	// One tokio task per connection; all share the same config (and identity).
	stream::iter(0..args.count)
		.for_each_concurrent(None, |_| {
			let con_config = con_config.clone();
			tokio::spawn(async move {
				// Connect
				let mut con = con_config.connect().unwrap();
				// Wait until the initial book events arrived, i.e. connected.
				let r = con
					.events()
					.try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
					.next()
					.await;
				if let Some(Err(error)) = r {
					error!(%error, "Connection failed");
					return;
				}
				// Wait some time
				let mut events = con.events().try_filter(|_| future::ready(false));
				tokio::select! {
					_ = time::sleep(Duration::from_secs(15)) => {}
					_ = events.next() => {
						error!("Disconnected unexpectedly");
						return;
					}
				};
				drop(events);
				// Disconnect
				let _ = con.disconnect(DisconnectOptions::new());
				// Drain the remaining events until the connection is gone.
				con.events().for_each(|_| future::ready(())).await;
			})
			.map(|_| ())
		})
		.await;
	Ok(())
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/audio.rs | tsclientlib/examples/audio.rs | //! This example connects to a teamspeak instance and joins a channel.
//! The connected bot-identity will listen on your microphone and send everything to the current
//! channel. In parallel, the program will play back all other clients in the channel via its own
//! audio output.
//! Microphone -> Channel
//! Channel -> PC Speakers
use anyhow::{bail, Result};
use clap::Parser;
use futures::prelude::*;
use tokio::sync::mpsc;
use tokio::task::LocalSet;
use tracing::{debug, info};
use tsclientlib::{ClientId, Connection, DisconnectOptions, Identity, StreamItem};
use tsproto_packets::packets::AudioData;
mod audio_utils;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct ConnectionId(u64);
#[derive(Parser, Debug)]
#[command(author, about)]
struct Args {
/// The address of the server to connect to
#[arg(short, long, default_value = "localhost")]
address: String,
/// The volume for the capturing
#[arg(default_value_t = 1.0)]
volume: f32,
/// Print the content of all packets
///
/// 0. Print nothing
/// 1. Print command string
/// 2. Print packets
/// 3. Print udp packets
#[arg(short, long, action = clap::ArgAction::Count)]
verbose: u8,
}
#[tokio::main]
async fn main() -> Result<()> { real_main().await }
async fn real_main() -> Result<()> {
tracing_subscriber::fmt::init();
// Parse command line options
let args = Args::parse();
let con_id = ConnectionId(0);
let local_set = LocalSet::new();
let audiodata = audio_utils::start(&local_set)?;
let con_config = Connection::build(args.address)
.log_commands(args.verbose >= 1)
.log_packets(args.verbose >= 2)
.log_udp_packets(args.verbose >= 3);
// Optionally set the key of this client, otherwise a new key is generated.
let id = Identity::new_from_str(
"MG0DAgeAAgEgAiAIXJBlj1hQbaH0Eq0DuLlCmH8bl+veTAO2+\
k9EQjEYSgIgNnImcmKo7ls5mExb6skfK2Tw+u54aeDr0OP1ITs\
C/50CIA8M5nmDBnmDM/gZ//4AAAAAAAAAAAAAAAAAAAAZRzOI").unwrap();
let con_config = con_config.identity(id);
// Connect
let mut con = con_config.connect()?;
let r = con
.events()
.try_filter(|e| future::ready(matches!(e, StreamItem::BookEvents(_))))
.next()
.await;
if let Some(r) = r {
r?;
}
let (send, mut recv) = mpsc::channel(5);
{
let mut a2t = audiodata.a2ts.lock().unwrap();
a2t.set_listener(send);
a2t.set_volume(args.volume);
a2t.set_playing(true);
}
loop {
let t2a = audiodata.ts2a.clone();
let events = con.events().try_for_each(|e| async {
if let StreamItem::Audio(packet) = e {
let from = ClientId(match packet.data().data() {
AudioData::S2C { from, .. } => *from,
AudioData::S2CWhisper { from, .. } => *from,
_ => panic!("Can only handle S2C packets but got a C2S packet"),
});
let mut t2a = t2a.lock().unwrap();
if let Err(error) = t2a.play_packet((con_id, from), packet) {
debug!(%error, "Failed to play packet");
}
}
Ok(())
});
// Wait for ctrl + c
tokio::select! {
send_audio = recv.recv() => {
if let Some(packet) = send_audio {
con.send_audio(packet)?;
} else {
info!("Audio sending stream was canceled");
break;
}
}
_ = tokio::signal::ctrl_c() => { break; }
r = events => {
r?;
bail!("Disconnected");
}
};
}
// Disconnect
con.disconnect(DisconnectOptions::new())?;
con.events().for_each(|_| future::ready(())).await;
Ok(())
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/audio_utils/mod.rs | tsclientlib/examples/audio_utils/mod.rs | use std::sync::{Arc, Mutex};
use anyhow::Result;
use tokio::task::LocalSet;
use audio_to_ts::AudioToTs;
use ts_to_audio::TsToAudio;
pub mod audio_to_ts;
pub mod ts_to_audio;
/// The usual frame size.
///
/// Use 48 kHz, 20 ms frames (50 per second) and mono data (1 channel).
/// This means 1920 samples and 7.5 kiB.
const USUAL_FRAME_SIZE: usize = 48000 / 50;
/// The maximum size of an opus frame is 1275 as from RFC6716.
const MAX_OPUS_FRAME_SIZE: usize = 1275;
/// Shared handles to the two halves of the audio pipeline.
#[derive(Clone)]
pub struct AudioData {
	/// Capture side: microphone -> TeamSpeak packets.
	pub a2ts: Arc<Mutex<AudioToTs>>,
	/// Playback side: TeamSpeak packets -> speakers.
	pub ts2a: Arc<Mutex<TsToAudio>>,
}
/// Initialize SDL audio and set up both the playback (`TsToAudio`) and the
/// capture (`AudioToTs`) side, spawning their tasks on `local_set`.
pub(crate) fn start(local_set: &LocalSet) -> Result<AudioData> {
	let sdl_context = sdl2::init().unwrap();
	let audio_subsystem = sdl_context.audio().unwrap();
	// SDL automatically disables the screensaver, enable it again
	if let Ok(video_subsystem) = sdl_context.video() {
		video_subsystem.enable_screen_saver();
	}
	let ts2a = TsToAudio::new(audio_subsystem.clone(), local_set)?;
	let a2ts = AudioToTs::new(audio_subsystem, local_set)?;
	Ok(AudioData { a2ts, ts2a })
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/audio_utils/audio_to_ts.rs | tsclientlib/examples/audio_utils/audio_to_ts.rs | use std::sync::{Arc, Mutex};
use anyhow::{format_err, Result};
use audiopus::coder::Encoder;
use futures::prelude::*;
use sdl2::audio::{AudioCallback, AudioDevice, AudioSpec, AudioSpecDesired, AudioStatus};
use sdl2::AudioSubsystem;
use tokio::sync::mpsc;
use tokio::task::LocalSet;
use tokio::time::{self, Duration};
use tokio_stream::wrappers::IntervalStream;
use tracing::{debug, error, instrument};
use tsproto_packets::packets::{AudioData, CodecType, OutAudio, OutPacket};
use super::*;
/// Captures microphone audio via SDL, encodes it with Opus and hands the
/// resulting packets to a listener channel.
pub struct AudioToTs {
	audio_subsystem: AudioSubsystem,
	/// Receiver of the encoded outgoing packets; `None` until `set_listener`.
	listener: Arc<Mutex<Option<mpsc::Sender<OutPacket>>>>,
	/// The SDL capture device driving [`SdlCallback`].
	device: AudioDevice<SdlCallback>,
	is_playing: bool,
	/// Capture volume, shared with the callback.
	volume: Arc<Mutex<f32>>,
}
/// State of the SDL capture callback: encodes each captured frame with Opus
/// and forwards it to the listener.
struct SdlCallback {
	spec: AudioSpec,
	encoder: Encoder,
	listener: Arc<Mutex<Option<mpsc::Sender<OutPacket>>>>,
	volume: Arc<Mutex<f32>>,
	/// Scratch buffer for the Opus encoder output.
	opus_output: [u8; MAX_OPUS_FRAME_SIZE],
}
impl AudioToTs {
    /// Opens the default capture device and spawns the reconnect watchdog on `local_set`.
    pub fn new(audio_subsystem: AudioSubsystem, local_set: &LocalSet) -> Result<Arc<Mutex<Self>>> {
        let listener = Arc::new(Mutex::new(Default::default()));
        let volume = Arc::new(Mutex::new(1.0));
        let device = Self::open_capture(&audio_subsystem, listener.clone(), volume.clone())?;
        let res = Arc::new(Mutex::new(Self {
            audio_subsystem,
            listener,
            device,
            is_playing: false,
            volume,
        }));
        Self::start(res.clone(), local_set);
        Ok(res)
    }
    /// Opens an SDL capture device (48 kHz mono, 20 ms buffers) and wires it
    /// to an Opus VoIP encoder.
    #[instrument(skip(audio_subsystem, listener, volume))]
    fn open_capture(
        audio_subsystem: &AudioSubsystem, listener: Arc<Mutex<Option<mpsc::Sender<OutPacket>>>>,
        volume: Arc<Mutex<f32>>,
    ) -> Result<AudioDevice<SdlCallback>> {
        let desired_spec = AudioSpecDesired {
            freq: Some(48000),
            channels: Some(1),
            // Default sample size, 20 ms per packet
            samples: Some(48000 / 50),
        };
        audio_subsystem
            .open_capture(None, &desired_spec, |spec| {
                // This spec will always be the desired spec, the sdl wrapper passes
                // zero as `allowed_changes`.
                debug!(?spec, driver = audio_subsystem.current_audio_driver(), "Got capture spec");
                let opus_channels = if spec.channels == 1 {
                    audiopus::Channels::Mono
                } else {
                    audiopus::Channels::Stereo
                };
                let encoder = Encoder::new(
                    audiopus::SampleRate::Hz48000,
                    opus_channels,
                    audiopus::Application::Voip,
                )
                .expect("Could not create encoder");
                SdlCallback {
                    spec,
                    encoder,
                    listener,
                    volume,
                    opus_output: [0; MAX_OPUS_FRAME_SIZE],
                }
            })
            .map_err(|e| format_err!("SDL error: {}", e))
    }
    /// Sets the channel that receives the encoded audio packets.
    pub fn set_listener(&self, sender: mpsc::Sender<OutPacket>) {
        let mut listener = self.listener.lock().unwrap();
        *listener = Some(sender);
    }
    /// Sets the gain applied to captured samples (1.0 = unchanged).
    pub fn set_volume(&mut self, volume: f32) { *self.volume.lock().unwrap() = volume; }
    /// Starts or stops capturing, remembering the desired state for reconnects.
    pub fn set_playing(&mut self, playing: bool) {
        if playing {
            self.device.resume();
        } else {
            self.device.pause();
        }
        self.is_playing = playing;
    }
    /// Spawns a 1 s interval task that reopens the capture device whenever
    /// SDL reports it stopped (e.g. the device disappeared), resuming capture
    /// if it was active.
    #[instrument(skip(a2t, local_set))]
    fn start(a2t: Arc<Mutex<Self>>, local_set: &LocalSet) {
        local_set.spawn_local(
            IntervalStream::new(time::interval(Duration::from_secs(1))).for_each(move |_| {
                let mut a2t = a2t.lock().unwrap();
                if a2t.device.status() == AudioStatus::Stopped {
                    // Try to reconnect to audio
                    match Self::open_capture(
                        &a2t.audio_subsystem,
                        a2t.listener.clone(),
                        a2t.volume.clone(),
                    ) {
                        Ok(d) => {
                            a2t.device = d;
                            debug!("Reconnected to capture device");
                            if a2t.is_playing {
                                a2t.device.resume();
                            }
                        }
                        Err(error) => {
                            error!(%error, "Failed to open capture device");
                        }
                    };
                }
                future::ready(())
            }),
        );
    }
}
impl AudioCallback for SdlCallback {
type Channel = f32;
#[instrument(skip(self, buffer))]
fn callback(&mut self, buffer: &mut [Self::Channel]) {
// Handle volume
let volume = *self.volume.lock().unwrap();
if volume != 1.0 {
for d in &mut *buffer {
*d *= volume;
}
}
match self.encoder.encode_float(buffer, &mut self.opus_output[..]) {
Err(error) => {
error!(%error, "Failed to encode opus");
}
Ok(len) => {
// Create packet
let codec = if self.spec.channels == 1 {
CodecType::OpusVoice
} else {
CodecType::OpusMusic
};
let packet =
OutAudio::new(&AudioData::C2S { id: 0, codec, data: &self.opus_output[..len] });
// Write into packet sink
let mut listener = self.listener.lock().unwrap();
if let Some(lis) = &mut *listener {
match lis.try_send(packet) {
Err(mpsc::error::TrySendError::Closed(_)) => *listener = None,
_ => {}
}
}
}
}
}
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
ReSpeak/tsclientlib | https://github.com/ReSpeak/tsclientlib/blob/3fbfa26ead0d3e5b38288b55abe3e2d636a97115/tsclientlib/examples/audio_utils/ts_to_audio.rs | tsclientlib/examples/audio_utils/ts_to_audio.rs | use std::sync::{Arc, Mutex};
use anyhow::{format_err, Result};
use futures::prelude::*;
use sdl2::audio::{AudioCallback, AudioDevice, AudioSpecDesired, AudioStatus};
use sdl2::AudioSubsystem;
use tokio::task::LocalSet;
use tokio::time::{self, Duration};
use tokio_stream::wrappers::IntervalStream;
use tracing::{debug, error, instrument};
use tsclientlib::ClientId;
use tsproto_packets::packets::InAudioBuf;
use super::*;
use crate::ConnectionId;
/// Identifies one audio stream: the connection plus the talking client.
type Id = (ConnectionId, ClientId);
/// Audio handler keyed by stream [`Id`].
type AudioHandler = tsclientlib::audio::AudioHandler<Id>;
/// Plays audio received from TeamSpeak on an SDL output device.
pub struct TsToAudio {
    audio_subsystem: AudioSubsystem,
    device: AudioDevice<SdlCallback>,
    /// Shared decoder/mixer state: fed by `play_packet`, drained by the SDL callback.
    data: Arc<Mutex<AudioHandler>>,
}
/// SDL playback callback state: mixes all queued client streams into the output buffer.
struct SdlCallback {
    data: Arc<Mutex<AudioHandler>>,
}
impl TsToAudio {
    /// Opens the default playback device and spawns the watchdog task on `local_set`.
    pub fn new(audio_subsystem: AudioSubsystem, local_set: &LocalSet) -> Result<Arc<Mutex<Self>>> {
        let data = Arc::new(Mutex::new(AudioHandler::new()));
        let device = Self::open_playback(&audio_subsystem, data.clone())?;
        let res = Arc::new(Mutex::new(Self { audio_subsystem, device, data }));
        Self::start(res.clone(), local_set);
        Ok(res)
    }
    /// Opens an SDL playback device configured for 48 kHz stereo with 20 ms buffers.
    #[instrument(skip(audio_subsystem, data))]
    fn open_playback(
        audio_subsystem: &AudioSubsystem, data: Arc<Mutex<AudioHandler>>,
    ) -> Result<AudioDevice<SdlCallback>> {
        let desired_spec = AudioSpecDesired {
            freq: Some(48000),
            channels: Some(2),
            samples: Some(USUAL_FRAME_SIZE as u16),
        };
        audio_subsystem
            .open_playback(None, &desired_spec, move |spec| {
                // This spec will always be the desired spec, the sdl wrapper passes
                // zero as `allowed_changes`.
                debug!(?spec, driver = audio_subsystem.current_audio_driver(), "Got playback spec");
                SdlCallback { data }
            })
            .map_err(|e| format_err!("SDL error: {}", e))
    }
    /// Spawns a 1 s interval task that reopens the playback device if SDL
    /// reports it stopped, and pauses/resumes playback depending on whether
    /// any client audio queues currently exist.
    #[instrument(skip(t2a, local_set))]
    fn start(t2a: Arc<Mutex<Self>>, local_set: &LocalSet) {
        local_set.spawn_local(
            IntervalStream::new(time::interval(Duration::from_secs(1))).for_each(move |_| {
                let mut t2a = t2a.lock().unwrap();
                if t2a.device.status() == AudioStatus::Stopped {
                    // Try to reconnect to audio
                    match Self::open_playback(&t2a.audio_subsystem, t2a.data.clone()) {
                        Ok(d) => {
                            t2a.device = d;
                            debug!("Reconnected to playback device");
                        }
                        Err(error) => {
                            error!(%error, "Failed to open playback device");
                        }
                    };
                }
                // Pause the device when there is nothing to play; resume when
                // queues appear again.
                let data_empty = t2a.data.lock().unwrap().get_queues().is_empty();
                if t2a.device.status() == AudioStatus::Paused && !data_empty {
                    debug!("Resuming playback");
                    t2a.device.resume();
                } else if t2a.device.status() == AudioStatus::Playing && data_empty {
                    debug!("Pausing playback");
                    t2a.device.pause();
                }
                future::ready(())
            }),
        );
    }
    /// Feeds an incoming audio packet from stream `id` into the audio handler,
    /// resuming the device if it was paused.
    #[instrument(skip(self, id, packet))]
    pub(crate) fn play_packet(&mut self, id: Id, packet: InAudioBuf) -> Result<()> {
        let mut data = self.data.lock().unwrap();
        data.handle_packet(id, packet)?;
        if self.device.status() == AudioStatus::Paused {
            debug!("Resuming playback");
            self.device.resume();
        }
        Ok(())
    }
}
impl AudioCallback for SdlCallback {
    type Channel = f32;
    /// Called by SDL whenever it needs more samples: start from silence, then
    /// let the audio handler mix all queued client streams into `buffer`.
    fn callback(&mut self, buffer: &mut [Self::Channel]) {
        // Idiomatic `slice::fill` instead of the previous manual zeroing loop.
        buffer.fill(0.0);
        let mut data = self.data.lock().unwrap();
        data.fill_buffer(buffer);
    }
}
| rust | Apache-2.0 | 3fbfa26ead0d3e5b38288b55abe3e2d636a97115 | 2026-01-04T20:19:54.636515Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/lib.rs | backend/src/lib.rs | mod connection;
mod metadata;
mod plugin;
mod task;
mod util;
pub use plugin::ConsolePlugin;
/// Spawns `task` on the tokio runtime via the task `Builder`, attaching
/// `name` so the task shows up with a readable name (e.g. in tokio-console).
///
/// The parameter was previously called `_name` even though it is used; the
/// underscore prefix wrongly suggested an unused argument.
#[track_caller]
fn spawn_named<T>(
    name: &str,
    task: impl std::future::Future<Output = T> + Send + 'static,
) -> tokio::task::JoinHandle<T>
where
    T: Send + 'static,
{
    tokio::task::Builder::new().name(name).spawn(task)
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/connection.rs | backend/src/connection.rs | use console_api::instrument::{
instrument_client::InstrumentClient, InstrumentRequest, PauseRequest, ResumeRequest,
TaskDetailsRequest, Update,
};
use console_api::tasks::TaskDetails;
use futures::stream::StreamExt;
use std::{error::Error, pin::Pin, time::Duration};
use tonic::{transport::Channel, transport::Uri, Streaming};
/// A self-reconnecting client connection to a tokio-console instrumentation server.
#[derive(Debug)]
pub struct Connection {
    /// Address of the console server.
    target: Uri,
    /// Current connection state (live stream, or backoff before the next attempt).
    state: State,
}
/// Connection state machine for [`Connection`].
#[derive(Debug)]
pub(crate) enum State {
    /// Connected: holds the RPC client and the live update stream.
    Connected {
        client: InstrumentClient<Channel>,
        stream: Box<Streaming<Update>>,
    },
    /// Disconnected: the `Duration` is the backoff to wait before reconnecting.
    Disconnected(Duration),
}
/// Runs `$block` with a connected instrument `$client`, reconnecting and
/// retrying when the failure happened at the connection level, and returning
/// the error otherwise.
// NOTE(review): `error.source().iter()` iterates the `Option` returned by
// `source()`, so only the *immediate* error source is checked for an
// `h2::Error`, not the full source chain — confirm this matches the error
// nesting tonic produces.
macro_rules! with_client {
    ($me:ident, $client:ident, $block:expr) => ({
        loop {
            match $me.state {
                State::Connected { client: ref mut $client, .. } => {
                    match $block {
                        Ok(resp) => break Ok(resp),
                        // If the error is a `h2::Error`, that indicates
                        // something went wrong at the connection level, rather
                        // than the server returning an error code. In that
                        // case, let's try reconnecting...
                        Err(error) if error.source().iter().any(|src| src.is::<h2::Error>()) => {
                            tracing::warn!(
                                error = %error,
                                "connection error sending command"
                            );
                            $me.state = State::Disconnected(Self::BACKOFF);
                        }
                        // Otherwise, return the error.
                        Err(e) => {
                            break Err(e);
                        }
                    }
                }
                State::Disconnected(_) => $me.connect().await,
            }
        }
    })
}
impl Connection {
    /// Backoff increment added after each failed connection attempt.
    const BACKOFF: Duration = Duration::from_millis(500);
    /// Creates a new, initially disconnected, connection to `target`.
    pub fn new(target: Uri) -> Self {
        Self {
            target,
            state: State::Disconnected(Duration::from_secs(0)),
        }
    }
    /// The URI of the console server this connection talks to.
    pub fn target(&self) -> &Uri {
        &self.target
    }
    /// Performs a single connection attempt and subscribes to the update stream.
    pub(crate) async fn try_connect(target: Uri) -> Result<State, Box<dyn Error + Send + Sync>> {
        let mut client = InstrumentClient::connect(target.clone()).await?;
        let request = tonic::Request::new(InstrumentRequest {});
        let stream = Box::new(client.watch_updates(request).await?.into_inner());
        Ok(State::Connected { client, stream })
    }
    /// Connects, retrying with a linearly growing backoff capped at 5 s,
    /// until the connection succeeds.
    async fn connect(&mut self) {
        const MAX_BACKOFF: Duration = Duration::from_secs(5);
        while let State::Disconnected(backoff) = self.state {
            if backoff == Duration::from_secs(0) {
                tracing::debug!(to = %self.target, "connecting");
            } else {
                tracing::debug!(reconnect_in = ?backoff, "reconnecting");
                tokio::time::sleep(backoff).await;
            }
            self.state = match Self::try_connect(self.target.clone()).await {
                Ok(connected) => {
                    tracing::debug!("connected successfully!");
                    connected
                }
                Err(error) => {
                    tracing::warn!(%error, "error connecting");
                    // Grow the backoff but cap it at MAX_BACKOFF. This
                    // previously used `std::cmp::max`, which made every retry
                    // (even the first) wait the full MAX_BACKOFF and rendered
                    // the incremental growth dead code.
                    let backoff = std::cmp::min(backoff + Self::BACKOFF, MAX_BACKOFF);
                    State::Disconnected(backoff)
                }
            };
        }
    }
    /// Returns the next update from the stream, transparently reconnecting on
    /// stream errors or when the server closes the stream.
    pub async fn next_update(&mut self) -> Update {
        loop {
            match self.state {
                State::Connected { ref mut stream, .. } => match Pin::new(stream).next().await {
                    Some(Ok(update)) => return update,
                    Some(Err(status)) => {
                        tracing::warn!(%status, "error from stream");
                        self.state = State::Disconnected(Self::BACKOFF);
                    }
                    None => {
                        tracing::error!("stream closed by server");
                        self.state = State::Disconnected(Self::BACKOFF);
                    }
                },
                State::Disconnected(_) => self.connect().await,
            }
        }
    }
    /// Subscribes to detail updates for the task with `task_id`.
    #[tracing::instrument(skip(self))]
    pub async fn watch_details(
        &mut self,
        task_id: u64,
    ) -> Result<Streaming<TaskDetails>, tonic::Status> {
        with_client!(self, client, {
            let request = tonic::Request::new(TaskDetailsRequest {
                id: Some(task_id.into()),
            });
            client.watch_task_details(request).await
        })
        .map(tonic::Response::into_inner)
    }
    /// Asks the server to pause recording; RPC errors are logged, not returned.
    #[tracing::instrument(skip(self))]
    pub async fn pause(&mut self) {
        let res = with_client!(self, client, {
            let request = tonic::Request::new(PauseRequest {});
            client.pause(request).await
        });
        if let Err(e) = res {
            tracing::error!(error = %e, "rpc error sending pause command");
        }
    }
    /// Asks the server to resume recording; RPC errors are logged, not returned.
    #[tracing::instrument(skip(self))]
    pub async fn resume(&mut self) {
        let res = with_client!(self, client, {
            let request = tonic::Request::new(ResumeRequest {});
            client.resume(request).await
        });
        if let Err(e) = res {
            tracing::error!(error = %e, "rpc error sending resume command");
        }
    }
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
/// Expresses one quantity as a percentage of another.
pub trait Percentage {
    // Using an extension trait for this is maybe a bit excessive, but making it
    // a method has the nice advantage of making it *really* obvious which is
    // the total and which is the amount.
    fn percent_of(self, total: Self) -> Self;
}

/// Generates integer impls that round-trip through `f64` arithmetic.
macro_rules! impl_percentage_as_f64 {
    ($($int:ty),+ $(,)?) => {$(
        impl Percentage for $int {
            fn percent_of(self, total: Self) -> Self {
                percentage(total as f64, self as f64) as Self
            }
        }
    )+};
}

impl_percentage_as_f64!(usize, u64);

impl Percentage for f64 {
    fn percent_of(self, total: Self) -> Self {
        percentage(total, self)
    }
}

/// Returns `amount` as a percentage of `total`.
///
/// Debug builds assert that `total >= amount`.
pub fn percentage(total: f64, amount: f64) -> f64 {
    debug_assert!(
        total >= amount,
        "assertion failed: total >= amount; total={total}, amount={amount}",
    );
    (amount / total) * 100.0
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/task.rs | backend/src/task.rs | use std::{
collections::HashMap,
fmt,
time::{Duration, SystemTime},
};
use console_api::{tasks::Stats, Location};
use hdrhistogram::Histogram;
use serde::{Deserialize, Serialize};
use crate::{
metadata::{MetaId, Metadata},
util::Percentage,
};
/// Unique identifier of a task, as assigned by the console server.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Deserialize, Serialize)]
pub struct TaskId(pub u64);
impl fmt::Display for TaskId {
    // Displays the raw numeric id.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// In-memory representation of a single tokio task, built from console
/// protobuf updates.
#[derive(Debug)]
pub struct Task {
    pub id: TaskId,
    /// Remaining user fields; the `task.name` field is extracted into `name` instead.
    pub fields: Vec<Field>,
    pub stats: TaskStats,
    /// The tracing target of the task's metadata.
    pub target: String,
    /// Value of the `task.name` field, if present.
    pub name: Option<String>,
    // Currently active warnings for this task (not implemented yet):
    // pub warnings: Vec<Linter<Task>>,
    /// Source location the task was spawned from, with registry paths truncated.
    pub location: String,
    /// Poll-duration histogram, when one has been received for this task.
    pub histogram: Option<Histogram<u64>>,
}
impl Task {
    /// Builds a [`Task`] from its protobuf representation, resolving its
    /// metadata from `metas` and removing its stats from `stats_update`.
    ///
    /// Returns `None` (with a warning) when the task is missing its id,
    /// metadata, or stats.
    pub fn from_proto(
        metas: &HashMap<MetaId, Metadata>,
        stats_update: &mut HashMap<u64, Stats>,
        mut task: console_api::tasks::Task,
    ) -> Option<Self> {
        let id = match task.id {
            Some(id) => TaskId(id.id),
            None => {
                tracing::warn!(?task, "skipping task with no id");
                return None;
            }
        };
        let meta_id = match task.metadata.as_ref() {
            Some(id) => id.id,
            None => {
                tracing::warn!(?task, "task has no metadata ID, skipping");
                return None;
            }
        };
        let meta = match metas.get(&MetaId(meta_id)) {
            Some(meta) => meta,
            None => {
                tracing::warn!(?task, meta_id, "no metadata for task, skipping");
                return None;
            }
        };
        let stats = match stats_update.remove(&id.0) {
            Some(s) => s.into(),
            None => {
                tracing::warn!(?task, meta_id, "no stats for task, skipping");
                return None;
            }
        };
        let location = format_location(task.location);
        let mut name = None;
        let fields = task
            .fields
            .drain(..)
            .filter_map(|pb| {
                let field = Field::from_proto(pb, meta)?;
                // the `task.name` field gets its own column, if it's present.
                if &*field.name == Field::NAME {
                    name = Some(field.value.to_string());
                    return None;
                }
                Some(field)
            })
            .collect::<Vec<_>>();
        let task = Self {
            name,
            id,
            fields,
            stats,
            target: meta.target.clone(),
            location,
            histogram: None,
        };
        Some(task)
    }
    /// Returns `true` if this task is currently being polled.
    pub(crate) fn is_running(&self) -> bool {
        self.stats.last_poll_started > self.stats.last_poll_ended
    }
    /// Returns `true` if the task has completed (its total runtime is known).
    pub(crate) fn is_completed(&self) -> bool {
        self.stats.total.is_some()
    }
    /// Derives the task's current [`TaskState`] from its stats.
    pub(crate) fn state(&self) -> TaskState {
        if self.is_completed() {
            return TaskState::Completed;
        }
        if self.is_running() {
            return TaskState::Running;
        }
        TaskState::Idle
    }
    /// Total lifetime of the task: the recorded total for completed tasks, or
    /// the time elapsed since creation (relative to `since`) for live ones.
    pub(crate) fn total(&self, since: SystemTime) -> Duration {
        // Fixed: this previously used `ok_or(|| …)`, which stored the fallback
        // closure as the `Err` value and never evaluated it, so live tasks
        // always reported a total of zero. `or_else` actually computes the
        // elapsed-since-created fallback (mirroring `idle` below).
        self.stats
            .total
            .or_else(|| since.duration_since(self.stats.created_at).ok())
            .unwrap_or_default()
    }
    /// Total busy (polling) time, including the in-progress poll if the task
    /// is currently being polled.
    pub(crate) fn busy(&self, since: SystemTime) -> Duration {
        if let (Some(last_poll_started), None) =
            (self.stats.last_poll_started, self.stats.last_poll_ended)
        {
            // in this case the task is being polled at the moment
            let current_time_in_poll = since.duration_since(last_poll_started).unwrap_or_default();
            return self.stats.busy + current_time_in_poll;
        }
        self.stats.busy
    }
    /// Total idle time: the recorded value if known, otherwise total minus busy.
    pub(crate) fn idle(&self, since: SystemTime) -> Duration {
        self.stats
            .idle
            .or_else(|| self.total(since).checked_sub(self.busy(since)))
            .unwrap_or_default()
    }
    /// Returns the total number of times the task has been polled.
    pub(crate) fn total_polls(&self) -> u64 {
        self.stats.polls
    }
    /// Returns the elapsed time since the task was last woken, relative to
    /// given `now` timestamp.
    ///
    /// Returns `None` if the task has never been woken, or if it was last woken
    /// more recently than `now` (which *shouldn't* happen as long as `now` is the
    /// timestamp of the last stats update...)
    pub(crate) fn since_wake(&self, now: SystemTime) -> Option<Duration> {
        now.duration_since(self.last_wake()?).ok()
    }
    /// Returns when the task was last woken, if ever.
    pub(crate) fn last_wake(&self) -> Option<SystemTime> {
        self.stats.last_wake
    }
    /// Returns the current number of wakers for this task.
    pub(crate) fn waker_count(&self) -> u64 {
        self.waker_clones().saturating_sub(self.waker_drops())
    }
    /// Returns the total number of times this task's waker has been cloned.
    pub(crate) fn waker_clones(&self) -> u64 {
        self.stats.waker_clones
    }
    /// Returns the total number of times this task's waker has been dropped.
    pub(crate) fn waker_drops(&self) -> u64 {
        self.stats.waker_drops
    }
    /// Returns the total number of times this task has been woken.
    pub(crate) fn wakes(&self) -> u64 {
        self.stats.wakes
    }
    /// Returns the total number of times this task has woken itself.
    pub(crate) fn self_wakes(&self) -> u64 {
        self.stats.self_wakes
    }
    /// Returns the percentage of this task's total wakeups that were self-wakes.
    pub(crate) fn self_wake_percent(&self) -> u64 {
        self.self_wakes().percent_of(self.wakes())
    }
    /// From the histogram, build a visual representation by trying to make as
    /// many buckets as the width of the render area.
    pub(crate) fn make_chart_data(&self, width: u16) -> (Vec<u64>, HistogramMetadata) {
        self.histogram
            .as_ref()
            .map(|histogram| {
                let step_size =
                    ((histogram.max() - histogram.min()) as f64 / width as f64).ceil() as u64 + 1;
                // `iter_linear` panics if step_size is 0
                let data = if step_size > 0 {
                    let mut found_first_nonzero = false;
                    let data: Vec<u64> = histogram
                        .iter_linear(step_size)
                        .filter_map(|value| {
                            let count = value.count_since_last_iteration();
                            // Remove the 0s from the leading side of the buckets.
                            // Because HdrHistogram can return empty buckets depending
                            // on its internal state, as it approximates values.
                            if count == 0 && !found_first_nonzero {
                                None
                            } else {
                                found_first_nonzero = true;
                                Some(count)
                            }
                        })
                        .collect();
                    data
                } else {
                    Vec::new()
                };
                (
                    data,
                    HistogramMetadata {
                        max_value: histogram.max(),
                        min_value: histogram.min(),
                    },
                )
            })
            .unwrap_or_default()
    }
}
/// Shortens `.cargo` registry / git-checkout prefixes in a path to `<cargo>/`.
fn truncate_registry_path(s: String) -> String {
    use once_cell::sync::OnceCell;
    use regex::Regex;
    // Compile the regex once and reuse it across calls.
    static REGEX: OnceCell<Regex> = OnceCell::new();
    let regex = REGEX.get_or_init(|| {
        Regex::new(r#".*/\.cargo(/registry/src/[^/]*/|/git/checkouts/)"#)
            .expect("failed to compile regex")
    });
    // `Cow::into_owned` clones only when the regex left the string unmodified,
    // replacing the previous explicit `return match` on the `Cow` variants
    // (which cloned in the unmodified case anyway).
    regex.replace(&s, "<cargo>/").into_owned()
}
/// Renders an optional source location as a string, truncating registry
/// paths in the file name; unknown locations get a placeholder.
fn format_location(loc: Option<Location>) -> String {
    match loc {
        Some(mut l) => {
            if let Some(file) = l.file.take() {
                l.file = Some(truncate_registry_path(file));
            }
            format!("{l} ")
        }
        None => "<unknown location>".to_string(),
    }
}
/// Derived statistics for a task, computed from a protobuf `Stats` update.
#[derive(Debug)]
pub struct TaskStats {
    /// Total number of times the task has been polled.
    polls: u64,
    pub created_at: SystemTime,
    /// When the task was dropped, if it has completed.
    pub dropped_at: Option<SystemTime>,
    /// Total time spent being polled.
    busy: Duration,
    pub last_poll_started: Option<SystemTime>,
    pub last_poll_ended: Option<SystemTime>,
    /// Total idle time; only known once the task has been dropped.
    idle: Option<Duration>,
    /// Total lifetime; only known once the task has been dropped.
    total: Option<Duration>,
    // === waker stats ===
    /// Total number of times the task has been woken over its lifetime.
    wakes: u64,
    /// Total number of times the task's waker has been cloned
    waker_clones: u64,
    /// Total number of times the task's waker has been dropped.
    waker_drops: u64,
    /// The timestamp of when the task was last woken.
    last_wake: Option<SystemTime>,
    /// Total number of times the task has woken itself.
    self_wakes: u64,
}
impl From<console_api::tasks::Stats> for TaskStats {
    // Converts a protobuf stats update into derived stats; panics (expect) on
    // malformed updates that violate protocol invariants.
    fn from(pb: console_api::tasks::Stats) -> Self {
        // Converts a protobuf duration; negative components violate the protocol.
        fn pb_duration(dur: prost_types::Duration) -> Duration {
            let secs =
                u64::try_from(dur.seconds).expect("a task should not have a negative duration!");
            let nanos =
                u64::try_from(dur.nanos).expect("a task should not have a negative duration!");
            Duration::from_secs(secs) + Duration::from_nanos(nanos)
        }
        let created_at = pb
            .created_at
            .expect("task span was never created")
            .try_into()
            .unwrap();
        let dropped_at: Option<SystemTime> = pb.dropped_at.map(|v| v.try_into().unwrap());
        // Total lifetime (and therefore idle time) can only be derived once
        // the task has been dropped.
        let total = dropped_at.map(|d| d.duration_since(created_at).unwrap());
        let poll_stats = pb.poll_stats.expect("task should have poll stats");
        let busy = poll_stats.busy_time.map(pb_duration).unwrap_or_default();
        let idle = total.map(|total| total.checked_sub(busy).unwrap_or_default());
        Self {
            total,
            idle,
            busy,
            last_poll_started: poll_stats.last_poll_started.map(|v| v.try_into().unwrap()),
            last_poll_ended: poll_stats.last_poll_ended.map(|v| v.try_into().unwrap()),
            polls: poll_stats.polls,
            created_at,
            dropped_at,
            wakes: pb.wakes,
            waker_clones: pb.waker_clones,
            waker_drops: pb.waker_drops,
            last_wake: pb.last_wake.map(|v| v.try_into().unwrap()),
            self_wakes: pb.self_wakes,
        }
    }
}
/// A key/value user field recorded on a task's span.
#[derive(Debug)]
pub struct Field {
    pub(crate) name: String,
    pub(crate) value: FieldValue,
}
/// The typed value of a [`Field`], mirroring the console wire format.
#[derive(Debug)]
pub enum FieldValue {
    Bool(bool),
    Str(String),
    U64(u64),
    I64(i64),
    /// A value recorded via its `Debug` representation.
    Debug(String),
}
impl Field {
    /// Field name of the spawn location; its value gets registry paths truncated.
    const SPAWN_LOCATION: &'static str = "spawn.location";
    /// Field name whose value is extracted into the task's `name`.
    const NAME: &'static str = "task.name";
    /// Converts a wire-format `Field` into an internal `Field` representation,
    /// using the provided `Metadata` for the task span that the field came
    /// from.
    ///
    /// If the field is invalid or it has a string value which is empty, this
    /// returns `None`.
    fn from_proto(
        console_api::Field {
            name,
            metadata_id,
            value,
        }: console_api::Field,
        meta: &Metadata,
    ) -> Option<Self> {
        use console_api::field::Name;
        // A field name arrives either inline or as an index into the
        // metadata's field-name table.
        let name = match name? {
            Name::StrName(n) => n,
            Name::NameIdx(idx) => {
                let meta_id = metadata_id.map(|m| m.id);
                if meta_id != Some(meta.id.0) {
                    tracing::warn!(
                        task.meta_id = meta.id.0,
                        field.meta.id = ?meta_id,
                        field.name_index = idx,
                        ?meta,
                        "skipping malformed field name (metadata id mismatch)"
                    );
                    debug_assert_eq!(
                        meta_id,
                        Some(meta.id.0),
                        "malformed field name: metadata ID mismatch! (name idx={idx}; metadata={meta:#?})",
                    );
                    return None;
                }
                match meta.field_names.get(idx as usize).cloned() {
                    Some(name) => name,
                    None => {
                        tracing::warn!(
                            task.meta_id = meta.id.0,
                            field.meta.id = ?meta_id,
                            field.name_index = idx,
                            ?meta,
                            "missing field name for index"
                        );
                        return None;
                    }
                }
            }
        };
        debug_assert!(
            value.is_some(),
            "missing field value for field `{name:?}` (metadata={meta:#?})",
        );
        let mut value = FieldValue::from(value?)
            // if the value is an empty string, just skip it.
            .ensure_nonempty()?;
        if &*name == Self::SPAWN_LOCATION {
            value = value.truncate_registry_path();
        }
        Some(Self { name, value })
    }
}
impl fmt::Display for FieldValue {
    /// Formats the inner value directly, whichever variant it is.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            FieldValue::Bool(v) => fmt::Display::fmt(v, f),
            FieldValue::Str(v) => fmt::Display::fmt(v, f),
            FieldValue::U64(v) => fmt::Display::fmt(v, f),
            FieldValue::I64(v) => fmt::Display::fmt(v, f),
            FieldValue::Debug(v) => fmt::Display::fmt(v, f),
        }
    }
}
impl FieldValue {
    /// Truncates paths including `.cargo/registry`.
    fn truncate_registry_path(self) -> Self {
        match self {
            Self::Str(s) | Self::Debug(s) => Self::Debug(truncate_registry_path(s)),
            other => other,
        }
    }
    /// If `self` is an empty string, returns `None`. Otherwise, returns `Some(self)`.
    fn ensure_nonempty(self) -> Option<Self> {
        let is_empty_str = matches!(&self, Self::Debug(s) | Self::Str(s) if s.is_empty());
        if is_empty_str { None } else { Some(self) }
    }
}
impl From<console_api::field::Value> for FieldValue {
    // Maps each wire-format value variant onto the matching local variant.
    fn from(pb: console_api::field::Value) -> Self {
        match pb {
            console_api::field::Value::BoolVal(v) => Self::Bool(v),
            console_api::field::Value::StrVal(v) => Self::Str(v),
            console_api::field::Value::I64Val(v) => Self::I64(v),
            console_api::field::Value::U64Val(v) => Self::U64(v),
            console_api::field::Value::DebugVal(v) => Self::Debug(v),
        }
    }
}
/// Lifecycle state of a task, derived from its stats.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum TaskState {
    Completed,
    Idle,
    Running,
}

impl TaskState {
    /// Returns the UTF-8 media-control glyph used to render this state.
    pub(crate) fn icon(self) -> &'static str {
        match self {
            Self::Running => "\u{25B6}",
            Self::Idle => "\u{23F8}",
            Self::Completed => "\u{23F9}",
        }
    }
}
/// Value-range metadata accompanying histogram chart data.
#[derive(Debug, Default)]
pub(crate) struct HistogramMetadata {
    /// The max recorded value in the histogram. This is the label for the bottom-right in the chart
    pub(crate) max_value: u64,
    /// The min recorded value in the histogram.
    pub(crate) min_value: u64,
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/main.rs | backend/src/main.rs | use std::net::SocketAddrV4;
use tracing_subscriber::{
filter::{LevelFilter, Targets},
prelude::*,
};
use grafana_tokio_console_datasource::ConsolePlugin;
/// Plugin entry point (the SDK's `main` macro generates the actual binary
/// entry): sets up tracing subscribers and returns the plugin instance.
#[grafana_plugin_sdk::main(
    services(data, diagnostics, resource, stream),
    init_subscriber = false,
    shutdown_handler = "0.0.0.0:10001"
)]
async fn plugin() -> ConsolePlugin {
    // Build the fmt-layer filter from RUST_LOG, falling back to WARN when the
    // variable is unset or unparsable (the parse error is printed to stderr).
    let fmt_filter = std::env::var("RUST_LOG")
        .ok()
        .and_then(|rust_log| match rust_log.parse::<Targets>() {
            Ok(targets) => Some(targets),
            Err(e) => {
                eprintln!("failed to parse `RUST_LOG={rust_log:?}`: {e}");
                None
            }
        })
        .unwrap_or_else(|| Targets::default().with_default(LevelFilter::WARN));
    // Expose this process's own tasks via a console server on 127.0.0.1:6668.
    let console_layer = console_subscriber::ConsoleLayer::builder()
        .server_addr("127.0.0.1:6668".parse::<SocketAddrV4>().unwrap())
        .spawn();
    tracing_subscriber::registry()
        .with(console_layer)
        .with(grafana_plugin_sdk::backend::layer().with_filter(fmt_filter))
        .init();
    ConsolePlugin::default()
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
/// Identifier of a metadata record, as assigned by the console server.
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct MetaId(pub u64);
/// Span metadata shared between tasks: declared field names and the tracing target.
#[derive(Debug)]
pub struct Metadata {
    pub id: MetaId,
    /// Names of the fields declared by this metadata, indexed by wire position.
    pub field_names: Vec<String>,
    pub target: String,
    //TODO: add more metadata as needed
}
impl Metadata {
    /// Builds a `Metadata` from its protobuf form, keyed by the server-assigned `id`.
    pub fn from_proto(pb: console_api::Metadata, id: u64) -> Self {
        Self {
            id: MetaId(id),
            field_names: pb.field_names,
            target: pb.target,
        }
    }
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/plugin/stream.rs | backend/src/plugin/stream.rs | /// The `grafana_plugin_sdk::backend::StreamService` implementation for the Console plugin.
use std::{
pin::Pin,
task::{Context, Poll},
time::Duration,
};
use futures::Stream;
use grafana_plugin_sdk::{backend, data};
use tokio::sync::{mpsc, oneshot};
use tracing::{debug, error, info, warn};
use crate::spawn_named;
use super::{ConnectMessage, ConsolePlugin, DatasourceUid, Error, Notification, Path};
/// Struct used to notify a receiver that a client has disconnected.
///
/// This wraps a returned `Stream` implementation (here, the one returned from
/// `backend::StreamService::run_stream`), and spawns a task which will notify a
/// channel when the returned struct is dropped.
// The `oneshot::Sender` is never sent on; dropping it (when this struct is
// dropped) closes the channel and wakes the watcher task spawned in `new`.
struct ClientDisconnect<T>(T, oneshot::Sender<()>);
impl<T> ClientDisconnect<T> {
    /// Wraps `stream` and spawns a watcher task that, once the wrapper is
    /// dropped, runs `on_disconnect` and sends a `Disconnected` notification
    /// on `notifier`.
    fn new<F: FnOnce() + Send + 'static>(
        stream: T,
        notifier: mpsc::Sender<Notification>,
        on_disconnect: F,
    ) -> Self {
        let (tx, rx) = oneshot::channel();
        spawn_named(
            "grafana_tokio_console_app::plugin::stream::ClientDisconnect",
            async move {
                // `rx` resolves (with an error) when `tx` is dropped.
                let _ = rx.await;
                on_disconnect();
                if notifier
                    .send(Notification {
                        message: ConnectMessage::Disconnected,
                    })
                    .await
                    .is_err()
                {
                    warn!("Could not send disconnect notification for datasource");
                };
            },
        );
        Self(stream, tx)
    }
}
impl<T, I> Stream for ClientDisconnect<T>
where
    T: Stream<Item = I> + std::marker::Unpin,
{
    type Item = I;
    // Transparent delegation to the wrapped stream; the disconnect
    // notification is purely drop-based.
    fn poll_next(mut self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        Pin::new(&mut self.0).poll_next(ctx)
    }
}
fn frame_to_initial_data(frame: data::Frame) -> Result<backend::InitialData, Error> {
let checked = frame.check()?;
let init = backend::InitialData::from_frame(checked, data::FrameInclude::All)?;
Ok(init)
}
#[backend::async_trait]
impl backend::StreamService for ConsolePlugin {
type JsonValue = ();
/// Subscribe to a stream of updates from a Console datasource instance.
///
/// This function will be called every time a user subscribes to a stream.
/// We have several Grafana streams for each datasource instance (tasks, resources, ...)
/// but only need a single Console connection. When a subscription request comes in,
/// we check whether a connection already exists for that datasource instance.
/// If so, we can return the existing in-memory state as `initial_data`.
/// If not, we need to create a new connection, load the initial state for the
/// console in question, and store those so that future subscription requests
/// reuse it.
///
/// After creating a connection we must also spawn a task which will stream updates
/// from the console to our in-memory state. This is due to the mismatch between
/// having 1 console stream and 3 Grafana streams.
///
/// TODO describe that better.
async fn subscribe_stream(
&self,
request: backend::SubscribeStreamRequest,
) -> Result<backend::SubscribeStreamResponse, Self::Error> {
let path = request.path()?;
let uid = request.datasource_uid()?;
let datasource_settings = request
.plugin_context
.datasource_instance_settings
.ok_or(Error::MissingDatasource)?;
let retain_for = datasource_settings
.json_data
.get("retainFor")
.and_then(|x| x.as_u64())
.map(Duration::from_secs);
// Check if we're already connected to this datasource instance and getting updates.
// If so, we should just return the current state as `initial_data`.
// If not, we should spawn a task to start populating the datasource instance's state.
let initial_data = match self.initial_data(&uid, &path, retain_for) {
Some(s) => s,
None => {
info!("No state found; connecting to console");
self.connect(datasource_settings).await.map_err(|e| {
error!(error = ?e, "error connecting to console");
e
})?;
self.state
.get(&uid)
// Invariant: self.connect will create the state for this uid.
.expect("state to be present")
.to_frame(&path, retain_for)
}
};
initial_data
.and_then(frame_to_initial_data)
.map(|x| backend::SubscribeStreamResponse::ok(Some(x)))
}
type Error = Error;
type Stream = backend::BoxRunStream<Self::Error>;
/// Begin streaming data for a given channel.
///
/// This method is called _once_ for a (datasource, path) combination and the output
/// is multiplexed to all clients by Grafana's backend. This is in contrast to the
/// `subscribe_stream` method which is called for every client that wishes to connect.
///
/// As such, this simply needs to stream updates of a specific type from a given datasource
/// instance's in-memory state, and inform the datasource's console connection when no clients
/// remain connected. This will allow the connection to disconnect, and the state to be cleared,
/// when there are no longer any streams running for it.
async fn run_stream(
    &self,
    request: backend::RunStreamRequest,
) -> Result<Self::Stream, Self::Error> {
    let path = request.path()?;
    let uid = request.datasource_uid()?;
    let datasource_settings = request
        .plugin_context
        .datasource_instance_settings
        .ok_or(Error::MissingDatasource)?;
    // Grab (or lazily create, by connecting) the notification channel for this
    // datasource so we can tell the connection-management task when streams
    // come and go.
    let sender = match self.state.get(&uid) {
        Some(s) => s.notification_tx.clone(),
        None => {
            self.connect(datasource_settings).await.map_err(|e| {
                error!(error = ?e, "error connecting to console");
                e
            })?;
            self.state
                .get(&uid)
                // Invariant: `self.connect` creates the state for this uid.
                .expect("state to be present")
                .notification_tx
                .clone()
        }
    };
    let stream = match path {
        Path::Tasks => self.stream_tasks(&uid).await?,
        Path::TaskDetails { task_id } => self.stream_task_details(&uid, task_id).await,
        Path::TaskHistogram { task_id } => self.stream_task_histogram(&uid, task_id).await,
        Path::Resources => self.stream_resources(&uid).await,
    };
    // Register this stream with the connection-management task.
    //
    // Fixes two issues in the previous version: it re-fetched the state with
    // `get_mut` and held the DashMap guard across the `.await` (a potential
    // deadlock), and its warning always logged `path = "tasks"` regardless of
    // the actual path. We already hold a clone of the same sender, so use it.
    if sender
        .send(Notification {
            message: ConnectMessage::Connected,
        })
        .await
        .is_err()
    {
        warn!(
            datasource = %uid.0,
            path = %path,
            "Could not send connect notification",
        );
    }
    // When the last client drops this stream, notify the connection task so it
    // can deregister and eventually disconnect.
    Ok(Box::pin(ClientDisconnect::new(stream, sender, move || {
        info!(
            datasource = %uid,
            path = %path,
            "Client disconnected for datasource",
        )
    })))
}
/// Handle data published to a stream by a client.
///
/// Client-to-server publishing is not supported by this plugin.
async fn publish_stream(
    &self,
    _request: backend::PublishStreamRequest,
) -> Result<backend::PublishStreamResponse, Self::Error> {
    debug!("Publishing to stream is not implemented");
    // NOTE(review): `unimplemented!` panics the plugin process if this is ever
    // routed here; consider returning an error response instead — confirm.
    unimplemented!()
}
}
/// Extension trait providing some convenience methods for getting the `path` and `datasource_uid`.
trait StreamRequestExt {
/// The path passed as part of the request, as a `&str`.
fn raw_path(&self) -> &str;
/// The datasource instance settings passed in the request.
fn datasource_instance_settings(&self) -> Option<&backend::DataSourceInstanceSettings>;
/// The parsed `Path`, or an `Error` if parsing failed.
fn path(&self) -> Result<Path, Error> {
let path = self.raw_path();
path.parse()
.map_err(|_| Error::UnknownPath(path.to_string()))
}
/// The datasource UID of the request, or an `Error` if the request didn't include
/// any datasource settings.
fn datasource_uid(&self) -> Result<DatasourceUid, Error> {
self.datasource_instance_settings()
.ok_or(Error::MissingDatasource)
.map(|x| DatasourceUid(x.uid.clone()))
}
}
/// Implement [`StreamRequestExt`] for a stream request type exposing a
/// `path: String` field and `plugin_context.datasource_instance_settings`.
macro_rules! impl_stream_request_ext {
    ($ty: path) => {
        impl StreamRequestExt for $ty {
            fn raw_path(&self) -> &str {
                &self.path
            }
            fn datasource_instance_settings(&self) -> Option<&backend::DataSourceInstanceSettings> {
                self.plugin_context.datasource_instance_settings.as_ref()
            }
        }
    };
}
// All three stream request types carry the same `path`/`plugin_context`
// fields, so they share one `StreamRequestExt` implementation.
impl_stream_request_ext!(backend::RunStreamRequest);
impl_stream_request_ext!(backend::SubscribeStreamRequest);
impl_stream_request_ext!(backend::PublishStreamRequest);
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/plugin/resource.rs | backend/src/plugin/resource.rs | use bytes::Bytes;
use futures::stream;
use grafana_plugin_sdk::backend;
use http::{Response, StatusCode};
use serde::Serialize;
use thiserror::Error;
use crate::ConsolePlugin;
use super::DatasourceUid;
/// Errors that can occur while serving a resource call.
#[derive(Debug, Error)]
pub enum ResourceError {
    /// The requested resource path does not exist.
    #[error("Path not found")]
    NotFound,
    /// An error bubbled up from the core plugin logic.
    #[error("Plugin error: {0}")]
    Plugin(#[from] super::Error),
    /// The request did not include datasource instance settings.
    #[error("Missing datasource settings")]
    MissingDatasourceSettings,
    /// No live console connection exists for the datasource yet.
    #[error("Console not connected; please load a dashboard connected to this console first.")]
    ConsoleNotConnected,
}
/// JSON body returned for error responses, e.g. `{"error": "..."}`.
#[derive(Debug, Serialize)]
pub struct JsonError {
    // Human-readable message (the `Display` form of the error).
    error: String,
}
impl backend::ErrIntoHttpResponse for ResourceError {
    /// Convert this error into an HTTP response with an appropriate status
    /// code and a JSON body of the form `{"error": "<message>"}`.
    fn into_http_response(self) -> Result<Response<Bytes>, Box<dyn std::error::Error>> {
        let status = match self {
            Self::NotFound => StatusCode::NOT_FOUND,
            Self::Plugin(_) => StatusCode::INTERNAL_SERVER_ERROR,
            Self::MissingDatasourceSettings | Self::ConsoleNotConnected => StatusCode::BAD_REQUEST,
        };
        let body = JsonError {
            error: self.to_string(),
        };
        // Serializing a struct with a single string field cannot fail.
        let bytes = serde_json::to_vec(&body).expect("valid JSON");
        let response = Response::builder().status(status).body(Bytes::from(bytes))?;
        Ok(response)
    }
}
#[backend::async_trait]
impl backend::ResourceService for ConsolePlugin {
    type Error = ResourceError;
    type InitialResponse = Response<Bytes>;
    type Stream = backend::BoxResourceStream<Self::Error>;
    /// Serve a resource call by returning the sorted list of known task IDs
    /// for the requesting datasource, connecting to the console first if
    /// needed.
    async fn call_resource(
        &self,
        request: backend::CallResourceRequest,
    ) -> Result<(Self::InitialResponse, Self::Stream), Self::Error> {
        let settings = request
            .plugin_context
            .and_then(|pc| pc.datasource_instance_settings)
            .ok_or(ResourceError::MissingDatasourceSettings)?;
        let uid = DatasourceUid(settings.uid.clone());
        // Lazily establish the console connection on first use; if the state
        // still isn't present afterwards, the connection never came up.
        if self.state.get(&uid).is_none() {
            self.connect(settings).await?;
            if self.state.get(&uid).is_none() {
                return Err(ResourceError::ConsoleNotConnected);
            }
        }
        let mut tasks = self.get_tasks(&uid).await.map_err(ResourceError::Plugin)?;
        tasks.sort_unstable();
        let body = Bytes::from(serde_json::to_vec(&tasks).expect("valid JSON"));
        // No follow-up data is streamed for resource calls.
        Ok((Response::new(body), Box::pin(stream::empty())))
    }
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/plugin/diagnostics.rs | backend/src/plugin/diagnostics.rs | use grafana_plugin_sdk::backend;
use crate::connection::Connection;
use super::{ConsolePlugin, Error};
#[backend::async_trait]
impl backend::DiagnosticsService for ConsolePlugin {
type CheckHealthError = Error;
async fn check_health(
&self,
request: backend::CheckHealthRequest,
) -> Result<backend::CheckHealthResponse, Self::CheckHealthError> {
let url = request
.plugin_context
.ok_or(Error::MissingDatasource)
.and_then(|pc| {
pc.datasource_instance_settings
.ok_or(Error::MissingDatasource)
.and_then(|x| {
x.url
.parse()
.map_err(|_| Error::InvalidDatasourceUrl(x.url.clone()))
})
})?;
Ok(Connection::try_connect(url)
.await
.map(|_| backend::CheckHealthResponse::ok("Connection successful".to_string()))
.unwrap_or_else(|e| backend::CheckHealthResponse::error(e.to_string())))
}
type CollectMetricsError = Error;
async fn collect_metrics(
&self,
_request: backend::CollectMetricsRequest,
) -> Result<backend::CollectMetricsResponse, Self::CollectMetricsError> {
todo!()
}
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/plugin/mod.rs | backend/src/plugin/mod.rs | use std::{
collections::{HashMap, HashSet},
fmt,
io::Cursor,
str::FromStr,
sync::Arc,
time::{Duration, SystemTime},
};
use chrono::prelude::*;
use console_api::{
instrument::Update,
tasks::{task_details::PollTimesHistogram, TaskDetails},
};
use dashmap::DashMap;
use futures::{StreamExt, TryStreamExt};
use grafana_plugin_sdk::{backend, data, prelude::*};
use humantime::format_duration;
use serde::Deserialize;
use tokio::{sync::mpsc, task::JoinHandle};
use tokio_stream::wrappers::ReceiverStream;
use tracing::{debug, error, info, trace, warn};
use crate::{
connection::Connection,
metadata::{MetaId, Metadata},
spawn_named,
task::{Task, TaskId},
};
#[path = "data.rs"]
mod data_service_impl;
mod diagnostics;
mod resource;
mod stream;
/// Uniquely identifies a configured datasource instance (Grafana datasource UID).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct DatasourceUid(String);
impl fmt::Display for DatasourceUid {
    /// Display the UID as its raw string value.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// Errors that can occur in the plugin's data, stream and diagnostics services.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// A tasks stream was requested while its receiver was already taken.
    #[error("stream already running")]
    StreamAlreadyRunning,
    /// A `task`/`taskHistogram` path was given without a task ID segment.
    #[error("missing task ID")]
    MissingTaskId,
    /// The task ID segment of a path could not be parsed.
    #[error("invalid task ID: {0}")]
    InvalidTaskId(String),
    /// The referenced task is not present in the datasource state.
    #[error("task with ID {} not found", (.0).0)]
    TaskNotFound(TaskId),
    /// The request path did not match any known route.
    #[error("unknown path: {0}. must be one of: tasks, resources/<id>")]
    UnknownPath(String),
    /// No in-memory state exists for the datasource instance.
    #[error("data not found for instance")]
    DatasourceInstanceNotFound,
    /// The configured datasource URL could not be parsed.
    #[error("invalid datasource URL: {0}")]
    InvalidDatasourceUrl(String),
    /// The request carried no datasource instance settings.
    #[error("Datasource ID not present on request")]
    MissingDatasource,
    /// Failure converting plugin data into the SDK's wire format.
    #[error("Error converting data: {0}")]
    ConvertTo(#[from] backend::ConvertToError),
    /// Failure converting an incoming SDK request.
    #[error("Error converting request: {0}")]
    ConvertFrom(#[from] backend::ConvertFromError),
    /// Failure constructing or validating a data frame.
    #[error("Error creating frame : {0}")]
    Data(#[from] data::Error),
}
/// The parsed form of a stream/query path, identifying what data is requested.
///
/// Deserialized from query JSON (tagged by `path`) and parsed from channel
/// path strings via `FromStr`.
#[derive(Clone, Debug, Deserialize, PartialEq, Eq)]
#[serde(tag = "path")]
enum Path {
    /// The table of all tasks.
    #[serde(rename = "tasks")]
    Tasks,
    /// Details for a single task.
    #[serde(rename = "task", rename_all = "camelCase")]
    TaskDetails { task_id: TaskId },
    /// Poll-time histogram for a single task.
    #[serde(rename = "taskHistogram", rename_all = "camelCase")]
    TaskHistogram { task_id: TaskId },
    /// The table of all resources (not implemented downstream yet).
    #[serde(rename = "resources")]
    Resources,
}
impl fmt::Display for Path {
    /// Render the path in its channel-string form (the inverse of `FromStr`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Tasks => f.write_str("tasks"),
            Self::TaskDetails { task_id } => write!(f, "task/{}", task_id),
            Self::TaskHistogram { task_id } => write!(f, "taskHistogram/{}", task_id),
            Self::Resources => f.write_str("resources"),
        }
    }
}
impl FromStr for Path {
    type Err = Error;
    /// Parse a channel path such as `tasks`, `task/<id>`, `taskHistogram/<id>`
    /// or `resources` into a [`Path`].
    ///
    /// # Errors
    ///
    /// Returns `Error::MissingTaskId` when a task path lacks an ID segment,
    /// `Error::InvalidTaskId` when the ID is not an integer, and
    /// `Error::UnknownPath` for unrecognized routes.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut iter = s.splitn(2, '/');
        match (iter.next(), iter.next()) {
            (Some("tasks"), _) => Ok(Self::Tasks),
            (Some("task"), None) => Err(Error::MissingTaskId),
            (Some("task"), Some(task_id)) => task_id
                .parse()
                .map(|id| Self::TaskDetails {
                    task_id: TaskId(id),
                })
                .map_err(|_| Error::InvalidTaskId(task_id.to_string())),
            (Some("taskHistogram"), None) => Err(Error::MissingTaskId),
            (Some("taskHistogram"), Some(task_id)) => task_id
                .parse()
                .map(|id| Self::TaskHistogram {
                    task_id: TaskId(id),
                })
                .map_err(|_| Error::InvalidTaskId(task_id.to_string())),
            // Previously `todo!()`, which made parsing panic even though
            // `Display` produces "resources" and the variant exists; accept it
            // so `Path` round-trips. (Resource streaming itself is still
            // unimplemented downstream.)
            (Some("resources"), _) => Ok(Self::Resources),
            _ => Err(Error::UnknownPath(s.to_string())),
        }
    }
}
/// Whether a client stream was registered or deregistered.
#[derive(Debug)]
enum ConnectMessage {
    Connected,
    Disconnected,
}
/// A message sent to the connection-management task about stream lifecycle.
struct Notification {
    // The lifecycle event being reported.
    message: ConnectMessage,
}
/// The state of a per-task details subscription held by a `ConsoleInstance`.
#[derive(Debug)]
enum TaskDetailsStream {
    /// The console reported an error when subscribing to this task.
    NotFound,
    /// A details stream is being forwarded by the contained task handle.
    Connected(JoinHandle<()>),
    /// The subscription was torn down (task completed or stream errored).
    Removed,
}
/// An instance of a Console datasource.
///
/// This is moved into a spawned task, and communicates back
/// to the plugin using channels.
#[derive(Debug)]
struct ConsoleInstance {
    // The connection to the console process.
    connection: Connection,
    // Per-task details subscriptions, keyed by task ID.
    task_detail_tasks: HashMap<TaskId, TaskDetailsStream>,
    // Stream connect/disconnect events coming from the plugin side.
    notifications: mpsc::Receiver<Notification>,
    // Number of currently registered client streams; the connection is torn
    // down when this drops to zero.
    stream_count: usize,
}
impl ConsoleInstance {
    /// Whether the details subscription for `task` should be dropped.
    ///
    /// True once the task has completed.
    fn should_unsubscribe(&self, task: &Task) -> bool {
        task.is_completed()
    }
}
/// In-memory state for a single connected datasource instance.
///
/// Holds the latest task/metadata snapshot received from the console plus the
/// channels used to fan updates out to active Grafana streams.
#[derive(Debug)]
struct DatasourceState {
    // UID of the datasource instance this state belongs to.
    uid: DatasourceUid,
    // Span/task metadata keyed by metadata ID, used to enrich task updates.
    metas: HashMap<MetaId, Metadata>,
    // Latest known state of every task, keyed by task ID.
    tasks: HashMap<TaskId, Task>,
    // resources: HashMap<u64, Resource>,
    // resource_stats: HashMap<u64, Stats>,
    // When the last update was applied; used for busy/idle duration columns.
    last_updated: Option<SystemTime>,
    /// The incoming stream of task updates, to be forwarded to subscribers as `Frame`s.
    ///
    /// This will be `None` if the stream has been taken by `stream_tasks`.
    tasks_frame_tx: Option<mpsc::Sender<Result<data::Frame, Error>>>,
    tasks_frame_rx: Option<mpsc::Receiver<Result<data::Frame, Error>>>,
    /// Map from task ID to sender of incoming task details.
    task_details_frame_txs: HashMap<TaskId, mpsc::Sender<Result<data::Frame, Error>>>,
    /// Map from task ID to sender of task histogram frames.
    task_details_histogram_frame_txs: HashMap<TaskId, mpsc::Sender<Result<data::Frame, Error>>>,
    // The incoming stream of resource updates, to be forwarded to subscribers
    // as `Frame`s (not yet implemented):
    // resources_stream_tx: Option<mpsc::Sender<Result<data::Frame, Error>>>,
    // resources_stream_rx: Option<mpsc::Receiver<Result<data::Frame, Error>>>,
    /// Sender used to notify the connection task of stream (dis)connects.
    notification_tx: mpsc::Sender<Notification>,
}
impl DatasourceState {
/// Create a new `DatasourceState` for a datasource from an initial update.
///
/// Returns the state plus the receiver half of the notification channel,
/// which the connection-management task listens on for stream lifecycle
/// events.
async fn new(
    datasource_uid: DatasourceUid,
    update: Update,
    retain_for: Option<Duration>,
) -> (Self, mpsc::Receiver<Notification>) {
    let (tasks_frame_tx, tasks_frame_rx) = mpsc::channel(128);
    let (notification_tx, notification_rx) = mpsc::channel(128);
    let mut s = DatasourceState {
        uid: datasource_uid,
        last_updated: None,
        metas: Default::default(),
        tasks: Default::default(),
        // resources: Default::default(),
        // resource_stats: Default::default(),
        tasks_frame_tx: Some(tasks_frame_tx),
        tasks_frame_rx: Some(tasks_frame_rx),
        task_details_frame_txs: Default::default(),
        task_details_histogram_frame_txs: Default::default(),
        // resources_stream_tx: Default::default(),
        // resources_stream_rx: Default::default(),
        notification_tx,
    };
    // Seed the state from the initial instrument update.
    s.update(update, retain_for).await;
    (s, notification_rx)
}
/// Process and update using a general `Update`, which contains information about new and updated
/// metadata and tasks.
#[tracing::instrument(level = "debug", skip(self, update))]
async fn update(&mut self, update: Update, retain_for: Option<Duration>) {
    self.last_updated = Some(SystemTime::now());
    // Merge any newly-registered metadata first; new tasks below may refer to it.
    if let Some(new_metadata) = update.new_metadata {
        let metas = new_metadata.metadata.into_iter().filter_map(|meta| {
            let id = meta.id?.id;
            let metadata = meta.metadata?;
            let metadata = Metadata::from_proto(metadata, id);
            Some((metadata.id, metadata))
        });
        self.metas.extend(metas);
    }
    if let Some(task_update) = update.task_update {
        let mut stats_update = task_update.stats_update;
        // IDs of tasks whose rows should be (re)sent downstream.
        let mut updated_ids = HashSet::with_capacity(task_update.new_tasks.len());
        debug!(new_tasks = task_update.new_tasks.len(), "Adding new tasks");
        for new_task in task_update.new_tasks {
            if let Some(task) = Task::from_proto(&self.metas, &mut stats_update, new_task) {
                updated_ids.insert(task.id);
                self.tasks.insert(task.id, task);
            }
        }
        debug!(updated_tasks = stats_update.len(), "Updating task stats");
        for (id, stats) in stats_update {
            if let Some(task) = self.tasks.get_mut(&TaskId(id)) {
                updated_ids.insert(task.id);
                task.stats = stats.into();
            }
        }
        // Add any tasks that are not yet done to the set of tasks to be sent.
        // This is required so the frontend doesn't remove them from the UI due to
        // apparent inactivity.
        for (id, task) in &self.tasks {
            if !task.is_completed() {
                updated_ids.insert(*id);
            }
        }
        // Send updates to the main 'tasks' stream.
        let tasks_frame = self.get_tasks_frame(Some(&updated_ids), retain_for);
        if let Some(tx) = &self.tasks_frame_tx {
            if let Err(e) = tx.send(tasks_frame).await {
                // Receiver gone (tasks stream ended); install a fresh channel
                // so a future subscriber can stream again.
                error!(datasource_uid = %self.uid.0, error = %e, "Error sending tasks frame; replacing channel");
                let (tx, rx) = mpsc::channel(128);
                self.tasks_frame_tx = Some(tx);
                self.tasks_frame_rx = Some(rx);
            }
        }
        // Send updates to any single-task streams.
        for task_id in updated_ids {
            if let Some(tx) = self.task_details_frame_txs.get(&task_id) {
                if tx.send(self.get_task_details_frame(task_id)).await.is_err() {
                    // Receiver gone: the client stream for this task closed.
                    info!(
                        datasource_uid = %self.uid.0,
                        task_id = %task_id.0,
                        "Dropping task details transmitter for task",
                    );
                    self.task_details_frame_txs.remove(&task_id);
                }
            }
        }
    }
}
/// Process a single task's `TaskDetails` update, which contains an updated
/// histogram of poll times.
#[tracing::instrument(level = "debug", skip(self, update))]
async fn update_details(&mut self, update: TaskDetails) {
    // Only updates carrying both a task ID and a histogram are useful here.
    if let TaskDetails {
        task_id: Some(id),
        poll_times_histogram: Some(histogram),
        ..
    } = update
    {
        let task_id = TaskId(id.id);
        // Use a bool to track this so we don't hold a mutable reference to
        // `self.tasks` after we're done with it.
        let mut should_send_histogram = false;
        if let Some(task) = self.tasks.get_mut(&task_id) {
            // Update our own state.
            trace!(task_id = %task_id, "Updating poll times histogram for task");
            task.histogram = match histogram {
                // Legacy variant: raw serialized HDR histogram bytes.
                PollTimesHistogram::LegacyHistogram(data) => {
                    hdrhistogram::serialization::Deserializer::new()
                        .deserialize(&mut Cursor::new(&data))
                        .ok()
                }
                // Current variant wraps the raw bytes in a message.
                PollTimesHistogram::Histogram(x) => {
                    hdrhistogram::serialization::Deserializer::new()
                        .deserialize(&mut Cursor::new(&x.raw_histogram))
                        .ok()
                }
            };
            should_send_histogram = true;
        }
        // Send updates to any downstream consumers.
        if should_send_histogram {
            if let Some(tx) = self.task_details_histogram_frame_txs.get(&task_id) {
                if tx
                    .send(self.get_task_histogram_frame(task_id))
                    .await
                    .is_err()
                {
                    // Receiver gone: the histogram stream for this task closed.
                    info!(
                        datasource_uid = %self.uid.0,
                        task_id = %task_id.0,
                        "Dropping task histogram transmitter for task",
                    );
                    self.task_details_histogram_frame_txs.remove(&task_id);
                }
            }
        }
    }
}
/// Get a `Frame` containing the latest task data, optionally only for a subset of tasks.
///
/// `updated_ids`, when given, restricts the frame to those task IDs.
/// `retain_for` filters out tasks dropped longer ago than the window.
/// Every row carries the same timestamp (the time this frame was built).
fn get_tasks_frame<'a, T>(
    &self,
    updated_ids: Option<T>,
    retain_for: Option<Duration>,
) -> Result<data::Frame, Error>
where
    T: IntoIterator<Item = &'a TaskId>,
    T::IntoIter: ExactSizeIterator,
{
    let now = SystemTime::now();
    let updated_ids = updated_ids.map(|x| x.into_iter());
    // Capacity hint: number of requested IDs, or all tasks.
    let len = updated_ids
        .as_ref()
        .map_or_else(|| self.tasks.len(), |x| x.len());
    // Select the requested tasks (or all of them)...
    let iter: Box<dyn Iterator<Item = &Task>> = match updated_ids {
        Some(ids) => Box::new(ids.filter_map(|id| self.tasks.get(id))),
        None => Box::new(self.tasks.values()),
    };
    // ...then drop tasks whose drop time is outside the retention window.
    let iter: Box<dyn Iterator<Item = &Task>> = match retain_for {
        None => iter,
        Some(retain_for) => Box::new(iter.filter(move |task| {
            task.stats
                .dropped_at
                .map(|d| {
                    let dropped_for = now.duration_since(d).unwrap_or_default();
                    retain_for > dropped_for
                })
                .unwrap_or(true)
        })),
    };
    // One column vector per frame field.
    let mut timestamps = Vec::with_capacity(len);
    let mut ids = Vec::with_capacity(len);
    let mut names = Vec::with_capacity(len);
    let mut targets = Vec::with_capacity(len);
    let mut fields = Vec::with_capacity(len);
    let mut states = Vec::with_capacity(len);
    let mut locations = Vec::with_capacity(len);
    let mut polls = Vec::with_capacity(len);
    let mut poll_times_histograms = Vec::with_capacity(len);
    let mut created_at = Vec::with_capacity(len);
    let mut dropped_at = Vec::with_capacity(len);
    let mut busy = Vec::with_capacity(len);
    let mut last_poll_started = Vec::with_capacity(len);
    let mut last_poll_ended = Vec::with_capacity(len);
    let mut idle = Vec::with_capacity(len);
    let mut total = Vec::with_capacity(len);
    let mut wakes = Vec::with_capacity(len);
    let mut waker_counts = Vec::with_capacity(len);
    let mut waker_clones = Vec::with_capacity(len);
    let mut waker_drops = Vec::with_capacity(len);
    let mut last_wakes = Vec::with_capacity(len);
    let mut since_wakes = Vec::with_capacity(len);
    let mut self_wakes = Vec::with_capacity(len);
    let mut self_wake_percents = Vec::with_capacity(len);
    for task in iter {
        timestamps.push(now);
        ids.push(task.id.0);
        names.push(task.name.clone());
        targets.push(task.target.clone());
        // Render key=value pairs space-separated, e.g. "a=1 b=2".
        fields.push(
            task.fields
                .iter()
                .map(|f| format!("{}={}", f.name, f.value))
                .collect::<Vec<_>>()
                .join(" "),
        );
        states.push(task.state().icon());
        locations.push(task.location.clone());
        polls.push(task.total_polls());
        // Histogram column is a JSON array string; "[]" when no data yet.
        poll_times_histograms.push(
            task.histogram
                .as_ref()
                .map(|_| serde_json::to_string(&task.make_chart_data(100).0).unwrap())
                .unwrap_or_else(|| "[]".to_string()),
        );
        created_at.push(to_datetime(task.stats.created_at));
        dropped_at.push(task.stats.dropped_at.map(to_datetime));
        // Durations are computed relative to the last update time.
        busy.push(self.last_updated.map(|x| task.busy(x)).map(as_nanos));
        last_poll_started.push(task.stats.last_poll_started.map(to_datetime));
        last_poll_ended.push(task.stats.last_poll_ended.map(to_datetime));
        idle.push(self.last_updated.map(|x| task.idle(x)).map(as_nanos));
        total.push(self.last_updated.map(|x| task.total(x)).map(as_nanos));
        wakes.push(task.wakes());
        waker_counts.push(task.waker_count());
        waker_clones.push(task.waker_clones());
        waker_drops.push(task.waker_drops());
        last_wakes.push(task.last_wake().map(to_datetime));
        since_wakes.push(
            self.last_updated
                .and_then(|x| task.since_wake(x))
                .map(as_nanos),
        );
        self_wakes.push(task.self_wakes());
        self_wake_percents.push(task.self_wake_percent());
    }
    let mut wake_percent_config = data::FieldConfig::default();
    wake_percent_config.description =
        Some("The percentage of this task's total wakeups that are self wakes.".to_string());
    let frame = data::Frame::new("tasks").with_fields([
        timestamps.into_field("Time"),
        ids.into_field("ID"),
        names.into_field("Name"),
        targets.into_field("Target"),
        fields.into_field("Fields"),
        states.into_field("State"),
        locations.into_field("Location"),
        polls.into_field("Polls"),
        poll_times_histograms.into_field("Poll times"),
        created_at.into_field("Created At"),
        dropped_at.into_opt_field("Dropped At"),
        busy.into_opt_field("Busy"),
        last_poll_started.into_opt_field("Last Poll Started"),
        last_poll_ended.into_opt_field("Last Poll Ended"),
        idle.into_opt_field("Idle"),
        total.into_opt_field("Total"),
        wakes.into_field("Wakes"),
        waker_counts.into_field("Waker count"),
        waker_clones.into_field("Waker clones"),
        waker_drops.into_field("Waker drops"),
        last_wakes.into_opt_field("Last wake"),
        since_wakes.into_opt_field("Since last wake"),
        self_wakes.into_field("Self wakes"),
        self_wake_percents
            .into_field("Self wake percent")
            .with_config(wake_percent_config),
    ]);
    Ok(frame)
}
/// Get a `Frame` for a single task.
///
/// Reuses the tasks-frame builder with a one-element ID filter and no
/// retention cutoff.
fn get_task_details_frame(&self, id: TaskId) -> Result<data::Frame, Error> {
    self.get_tasks_frame(Some(&[id]), None)
}
/// Get a `Frame` holding the buckets and counts of the poll times histogram for
/// a single task.
///
/// Returns `Error::TaskNotFound` if the task is not in the state.
fn get_task_histogram_frame(&self, id: TaskId) -> Result<data::Frame, Error> {
    let task = self.tasks.get(&id).ok_or(Error::TaskNotFound(id))?;
    // 101 buckets; `width` is the value range covered by each bucket.
    let (chart_data, chart_metadata) = task.make_chart_data(101);
    let width = (chart_metadata.max_value - chart_metadata.min_value) as f64 / 101.0;
    // Label every 5th bucket with a human-readable duration; leave the rest
    // blank so the axis stays readable.
    let x: Vec<_> = chart_data
        .iter()
        .enumerate()
        .map(|x| {
            (x.0 % 5 == 0)
                .then(|| {
                    let nanos = chart_metadata.min_value as f64 + (width * x.0 as f64);
                    format_duration(Duration::from_nanos(nanos as u64)).to_string()
                })
                .unwrap_or_default()
        })
        .collect();
    let fields = [x.into_field("x"), chart_data.into_field("y")];
    Ok(fields.into_frame(id.to_string()))
}
/// Convert this state into an owned `Frame` for the given path.
///
/// Used to produce the initial data snapshot when a client subscribes.
fn to_frame(&self, path: &Path, retain_for: Option<Duration>) -> Result<data::Frame, Error> {
    match path {
        Path::Tasks => self.get_tasks_frame::<&[TaskId]>(None, retain_for),
        Path::TaskDetails { task_id } => self.get_task_details_frame(*task_id),
        Path::TaskHistogram { task_id } => self.get_task_histogram_frame(*task_id),
        // Resource frames are not implemented yet.
        Path::Resources => todo!(),
    }
}
}
fn to_datetime(s: SystemTime) -> DateTime<Utc> {
DateTime::<Utc>::from(s)
}
fn as_nanos(d: Duration) -> Option<u64> {
d.as_nanos()
.try_into()
.map_err(|e| error!(error = ?e, "Error getting duration as nanos"))
.ok()
}
/// The top-level plugin, shared across all Grafana services it implements.
///
/// Cloning is cheap: all clones share the same per-datasource state map.
#[derive(Clone, Debug, Default)]
pub struct ConsolePlugin {
    // Per-datasource-instance state, keyed by datasource UID.
    state: Arc<DashMap<DatasourceUid, DatasourceState>>,
}
impl ConsolePlugin {
/// Connect to a console backend and begin streaming updates from the console.
///
/// This spawns a single task named 'manage connection' which takes ownership of the connection
/// and handles any new data received from it.
async fn connect(&self, datasource: backend::DataSourceInstanceSettings) -> Result<(), Error> {
    let datasource_uid = DatasourceUid(datasource.uid);
    let url = datasource
        .url
        .parse()
        .map_err(|_| Error::InvalidDatasourceUrl(datasource.url))?;
    // Optional retention window (seconds) for dropped tasks, from the
    // datasource's JSON config.
    let retain_for = datasource
        .json_data
        .get("retainFor")
        .and_then(|x| x.as_u64())
        .map(Duration::from_secs);
    info!(url = %url, retain_for = ?retain_for, "Connecting to console");
    let mut connection = Connection::new(url);
    // Get some initial state.
    let update = connection.next_update().await;
    let (instance_state, notification_rx) =
        DatasourceState::new(datasource_uid.clone(), update, retain_for).await;
    self.state.insert(datasource_uid.clone(), instance_state);
    // Spawn a task to continuously fetch updates from the console, and
    // update the datasource. Each update will also send messages to the
    // listeners associated with the state, if there are any.
    let state = Arc::clone(&self.state);
    let uid_clone = datasource_uid.clone();
    spawn_named("manage connection", async move {
        let mut instance = ConsoleInstance {
            connection,
            task_detail_tasks: Default::default(),
            notifications: notification_rx,
            stream_count: 0,
        };
        // Channel through which per-task details forwarder tasks report back.
        let (task_details_tx, mut task_details_rx) = mpsc::channel(256);
        loop {
            tokio::select! {
                // A new instrument update arrived from the console.
                instrument_update = instance.connection.next_update() => {
                    if let Some(mut s) = state.get_mut(&uid_clone) {
                        // NOTE(review): the DashMap guard `s` is held across the
                        // `.await`s below — confirm no other task touches this
                        // entry concurrently, or this can deadlock.
                        // First, update the state with the new and updated task data.
                        s.update(instrument_update, retain_for).await;
                        // Next check if we need to subscribe to any new task details streams,
                        // or unsubscribe from any finished tasks.
                        debug!(n_tasks = s.tasks.len(), "Checking for old or new tasks");
                        for (task_id, task) in &s.tasks {
                            let stream_running = s.task_details_frame_txs.contains_key(task_id) ||
                                s.task_details_histogram_frame_txs.contains_key(task_id);
                            let has_handle = instance.task_detail_tasks.contains_key(task_id);
                            let should_definitely_subscribe = stream_running && !has_handle;
                            // Remove any completed tasks.
                            if !stream_running && instance.should_unsubscribe(task) {
                                instance.task_detail_tasks.entry(*task_id).and_modify(|t| {
                                    if let TaskDetailsStream::Connected(handle) = t {
                                        debug!(task_id = %task_id, "Unsubscribing from completed task details");
                                        handle.abort();
                                    }
                                    *t = TaskDetailsStream::Removed;
                                });
                            } else if /* instance.should_subscribe(task) || */should_definitely_subscribe {
                                match instance.connection.watch_details(task_id.0).await {
                                    Ok(stream) => {
                                        let tid = *task_id;
                                        let task_details_tx = task_details_tx.clone();
                                        debug!(task_id = %tid, ?should_definitely_subscribe, "Subscribing to task details");
                                        // Forward every details update (or the error that
                                        // ended the stream) back over `task_details_tx`.
                                        let handle = spawn_named("manage task details", async move {
                                            let mut stream = stream.map_err(|e| (e, tid));
                                            loop {
                                                match stream.next().await {
                                                    Some(x) => if let Err(e) = task_details_tx.send(x).await {
                                                        warn!(task_id = %tid, error = %e, "Could not send task details; dropping");
                                                        return;
                                                    },
                                                    None => {
                                                        debug!(task_id = %tid, "Task details stream completed");
                                                        return
                                                    },
                                                }
                                            }
                                        });
                                        instance.task_detail_tasks.insert(*task_id, TaskDetailsStream::Connected(handle));
                                    },
                                    Err(e) => {
                                        warn!(task_id = %task_id, error = %e, "Error subscribing to task details stream");
                                        instance.task_detail_tasks.insert(*task_id, TaskDetailsStream::NotFound);
                                    }
                                }
                            }
                        }
                    }
                }
                // A details update (or stream error) from one of the forwarder tasks.
                Some(task_details_update) = task_details_rx.recv() => {
                    match task_details_update {
                        Ok(update) => {
                            if let Some(mut s) = state.get_mut(&uid_clone) {
                                s.update_details(update).await;
                            }
                        },
                        Err((_, task_id)) => {
                            // The details stream errored; tear down its forwarder.
                            instance.task_detail_tasks.entry(task_id).and_modify(|t| {
                                if let TaskDetailsStream::Connected(handle) = t {
                                    debug!(task_id = %task_id, "Shutting down task details stream");
                                    handle.abort();
                                }
                                *t = TaskDetailsStream::Removed;
                            });
                        }
                    }
                }
                // A client stream registered or deregistered.
                notification = instance.notifications.recv() => {
                    if let Some(n) = notification {
                        use ConnectMessage::{Connected, Disconnected};
                        match n.message {
                            Connected => {
                                instance.stream_count += 1;
                                info!(stream_count = instance.stream_count, "New stream registered");
                            },
                            Disconnected => {
                                instance.stream_count = instance.stream_count.saturating_sub(1);
                                info!(stream_count = instance.stream_count, "Stream deregistered");
                            },
                        };
                    } else {
                        // TODO: figure out why the sender would have dropped and how to handle it properly
                        warn!("Notifications channel dropped, stream may not be cleaned up");
                    }
                    // Drop connection and delete the initial state when we have no streams left.
                    if instance.stream_count == 0 {
                        instance.notifications.close();
                        state.remove(&uid_clone);
                        info!(url = %instance.connection.target(), "Disconnecting from console");
                        return
                    }
                }
            }
        }
    });
    Ok(())
}
async fn get_tasks(&self, datasource_uid: &DatasourceUid) -> Result<Vec<TaskId>, Error> {
self.state
.get(datasource_uid)
.map(|s| s.tasks.keys().copied().collect())
.ok_or(Error::DatasourceInstanceNotFound)
}
async fn stream_tasks(
&self,
datasource_uid: &DatasourceUid,
) -> Result<<Self as backend::StreamService>::Stream, <Self as backend::StreamService>::Error>
{
let state = self.state.get_mut(datasource_uid);
state
.and_then(|mut x| x.tasks_frame_rx.take())
.ok_or(Error::DatasourceInstanceNotFound)
.map(|x| {
Box::pin(ReceiverStream::new(x).map(|res| {
res.and_then(|frame| {
frame
.check()
.map_err(Error::Data)
.and_then(|f| Ok(backend::StreamPacket::from_frame(f)?))
})
})) as <Self as backend::StreamService>::Stream
})
}
/// Register a new details stream for `task_id` and return the receiving stream.
///
/// Frames for the task are forwarded by `DatasourceState::update` into the
/// channel registered here.
///
/// # Panics
///
/// Panics if no state exists for `datasource_uid`; callers must ensure the
/// datasource is connected first (as `run_stream` does).
async fn stream_task_details(
    &self,
    datasource_uid: &DatasourceUid,
    task_id: TaskId,
) -> <Self as backend::StreamService>::Stream {
    let (tx, rx) = mpsc::channel(128);
    // Previously this did `.ok_or(Error::DatasourceInstanceNotFound).expect(..)`,
    // a pointless round-trip through `Result`; expect directly on the `Option`.
    self.state
        .get_mut(datasource_uid)
        .expect("state should be present for datasource")
        .task_details_frame_txs
        .insert(task_id, tx);
    Box::pin(ReceiverStream::new(rx).map(|res| {
        res.and_then(|frame| {
            frame
                .check()
                .map_err(Error::Data)
                .and_then(|f| Ok(backend::StreamPacket::from_frame(f)?))
        })
    }))
}
/// Register a new poll-time-histogram stream for `task_id` and return the
/// receiving stream.
///
/// Frames are forwarded by `DatasourceState::update_details` into the channel
/// registered here.
///
/// # Panics
///
/// Panics if no state exists for `datasource_uid`; callers must ensure the
/// datasource is connected first (as `run_stream` does).
async fn stream_task_histogram(
    &self,
    datasource_uid: &DatasourceUid,
    task_id: TaskId,
) -> <Self as backend::StreamService>::Stream {
    let (tx, rx) = mpsc::channel(128);
    // Previously this did `.ok_or(Error::DatasourceInstanceNotFound).expect(..)`,
    // a pointless round-trip through `Result`; expect directly on the `Option`.
    self.state
        .get_mut(datasource_uid)
        .expect("state should be present for datasource")
        .task_details_histogram_frame_txs
        .insert(task_id, tx);
    info!(
        ?task_id,
        "Inserted tx into task_details_histogram_frame_txs"
    );
    Box::pin(ReceiverStream::new(rx).map(|res| {
        res.and_then(|frame| {
            frame
                .check()
                .map_err(Error::Data)
                .and_then(|f| Ok(backend::StreamPacket::from_frame(f)?))
        })
    }))
}
/// Begin streaming resource data for a datasource instance.
///
/// Not implemented yet.
async fn stream_resources(
    &self,
    _datasource_uid: &DatasourceUid,
) -> <Self as backend::StreamService>::Stream {
    todo!()
}
/// Fetch the initial data for a given datasource instance and path.
///
/// This will be used when a new subscriber is registered. Returns `None` when
/// no state exists for the datasource yet.
fn initial_data(
    &self,
    datasource_uid: &DatasourceUid,
    path: &Path,
    retain_for: Option<Duration>,
) -> Option<Result<data::Frame, Error>> {
    let state = self.state.get(datasource_uid)?;
    Some(state.to_frame(path, retain_for))
}
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/backend/src/plugin/data.rs | backend/src/plugin/data.rs | use futures::stream::FuturesOrdered;
use serde::Deserialize;
use grafana_plugin_sdk::{backend, data};
use super::{ConsolePlugin, Path};
/// Error returned for a single failed query, carrying the query's `ref_id`
/// so Grafana can attribute the failure to the right panel query.
#[derive(Debug, thiserror::Error)]
#[error("Error querying backend for {}", .ref_id)]
pub struct QueryError {
    // The `ref_id` of the query that failed.
    ref_id: String,
}
impl backend::DataQueryError for QueryError {
    /// The `ref_id` of the query this error belongs to.
    fn ref_id(self) -> String {
        self.ref_id
    }
}
/// The JSON model of a query from the frontend: a [`Path`] flattened into
/// the query object.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize)]
pub struct ConsoleQueryDataRequest {
    #[serde(flatten)]
    path: Path,
}
#[backend::async_trait]
impl backend::DataService for ConsolePlugin {
    type Query = ConsoleQueryDataRequest;
    type QueryError = QueryError;
    type Stream = backend::BoxDataResponseStream<Self::QueryError>;
    /// Answer each query with an empty frame whose channel points at the live
    /// stream for the query's path; the frontend then subscribes to that
    /// channel for actual data.
    async fn query_data(
        &self,
        request: backend::QueryDataRequest<Self::Query>,
    ) -> Self::Stream {
        Box::pin(
            request
                .queries
                .into_iter()
                .map(move |x| {
                    // Clone the uid for each query. The previous version used
                    // `take()`, which moved the settings out on the first query
                    // and made every subsequent query in a multi-query request
                    // fail with a missing-uid error.
                    let uid = request
                        .plugin_context
                        .datasource_instance_settings
                        .as_ref()
                        .map(|x| x.uid.clone());
                    async move {
                        let uid = uid.ok_or_else(|| QueryError {
                            ref_id: x.ref_id.clone(),
                        })?;
                        let mut frame = data::Frame::new("");
                        // NOTE(review): `.unwrap()` assumes the channel string is
                        // always valid for every uid/path — confirm.
                        frame.set_channel(format!("ds/{}/{}", uid, x.query.path).parse().unwrap());
                        Ok(backend::DataResponse::new(
                            x.ref_id,
                            vec![frame.check().unwrap()],
                        ))
                    }
                })
                .collect::<FuturesOrdered<_>>(),
        )
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::plugin::*;
    /// `Path` deserializes from the tagged JSON representation used by queries.
    #[test]
    fn deserialize_path() {
        assert_eq!(
            serde_json::from_str::<Path>(r#"{"path": "tasks"}"#).unwrap(),
            Path::Tasks
        );
        assert_eq!(
            serde_json::from_str::<Path>(r#"{"path": "task", "taskId": 1}"#).unwrap(),
            Path::TaskDetails { task_id: TaskId(1) }
        );
        assert_eq!(
            serde_json::from_str::<Path>(r#"{"path": "taskHistogram", "taskId": 1}"#).unwrap(),
            Path::TaskHistogram { task_id: TaskId(1) }
        );
        assert_eq!(
            serde_json::from_str::<Path>(r#"{"path": "resources"}"#).unwrap(),
            Path::Resources
        );
    }
    /// The full query model deserializes with the `Path` flattened in.
    #[test]
    fn deserialize_request() {
        assert_eq!(
            serde_json::from_str::<ConsoleQueryDataRequest>(r#"{"path": "tasks"}"#).unwrap(),
            ConsoleQueryDataRequest { path: Path::Tasks }
        );
        assert_eq!(
            serde_json::from_str::<ConsoleQueryDataRequest>(
                r#"{"path": "taskHistogram", "taskId": 1}"#
            )
            .unwrap(),
            ConsoleQueryDataRequest {
                path: Path::TaskHistogram { task_id: TaskId(1) }
            }
        );
        assert_eq!(
            serde_json::from_str::<ConsoleQueryDataRequest>(r#"{"path": "resources"}"#).unwrap(),
            ConsoleQueryDataRequest {
                path: Path::Resources
            }
        );
    }
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
sd2k/grafana-tokio-console-datasource | https://github.com/sd2k/grafana-tokio-console-datasource/blob/ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9/xtask/src/main.rs | xtask/src/main.rs | use std::{env, error::Error, process::Command};
/// Entry point for the xtask runner: dispatch on the first CLI argument.
fn main() -> Result<(), Box<dyn Error>> {
    let mut args = env::args().skip(1);
    match args.next().as_deref() {
        Some("watch") => watch(args)?,
        _ => print_help(),
    }
    Ok(())
}
fn print_help() {
eprintln!(
"Tasks:
watch [release] watch for changes, then compile plugin (optionally in release mode), replace in `dist` directory, and restart plugin process
"
)
}
fn go_target() -> Result<String, Box<dyn Error>> {
env::var("GOARCH").or_else(|_| {
let go_output = Command::new("go")
.arg("version")
.output()
.map_err(|_| "go must be installed to fetch target host and arch; alternatively set GOARCH env var to e.g. darwin_arm64 or linux_amd64")?;
Ok(String::from_utf8(go_output.stdout)?.trim().split(' ').nth(3).map(|s| s.replace('/', "_")).ok_or("unexpected output from `go version`")?)
})
}
fn watch(mut args: impl Iterator<Item = String>) -> Result<(), Box<dyn Error>> {
let go_target = go_target()?;
let (build_cmd, cargo_target) = if let Some("release") = args.next().as_deref() {
("build --release", "release")
} else {
("build", "debug")
};
let shell_cmd = format!(
"rm -rf ./dist/grafana-tokio-console-datasource_{go_target} && cp ./target/{cargo_target}/grafana-tokio-console-datasource ./dist/grafana-tokio-console-datasource_{go_target} && pkill -HUP grafana-tokio-console-datasource_{go_target}",
);
let mut handle = Command::new("cargo")
.arg("watch")
.arg("-w")
.arg("backend")
.arg("-x")
.arg("clippy")
.arg("-x")
.arg(build_cmd)
.arg("-s")
.arg(&shell_cmd)
.arg("-c")
.spawn()?;
Ok(handle.wait().map(|_| ())?)
}
| rust | Apache-2.0 | ceb221d09a1cbb1e8104fe6fcb5fcf8a904f1cc9 | 2026-01-04T20:20:24.329730Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/src/lib.rs | src/lib.rs | //! Create extensions for types you don't own with [extension traits] but without the boilerplate.
//!
//! Example:
//!
//! ```rust
//! use extend::ext;
//!
//! #[ext]
//! impl<T: Ord> Vec<T> {
//! fn sorted(mut self) -> Self {
//! self.sort();
//! self
//! }
//! }
//!
//! assert_eq!(
//! vec![1, 2, 3],
//! vec![2, 3, 1].sorted(),
//! );
//! ```
//!
//! # How does it work?
//!
//! Under the hood it generates a trait with methods in your `impl` and implements those for the
//! type you specify. The code shown above expands roughly to:
//!
//! ```rust
//! trait VecExt<T: Ord> {
//! fn sorted(self) -> Self;
//! }
//!
//! impl<T: Ord> VecExt<T> for Vec<T> {
//! fn sorted(mut self) -> Self {
//! self.sort();
//! self
//! }
//! }
//! ```
//!
//! # Supported items
//!
//! Extensions can contain methods or associated constants:
//!
//! ```rust
//! use extend::ext;
//!
//! #[ext]
//! impl String {
//! const CONSTANT: &'static str = "FOO";
//!
//! fn method() {
//! // ...
//! # todo!()
//! }
//! }
//! ```
//!
//! # Configuration
//!
//! You can configure:
//!
//! - The visibility of the trait. Use `pub impl ...` to generate `pub trait ...`. The default
//! visibility is private.
//! - The name of the generated extension trait. Example: `#[ext(name = MyExt)]`. By default we
//! generate a name based on what you extend.
//! - Which supertraits the generated extension trait should have. Default is no supertraits.
//! Example: `#[ext(supertraits = Default + Clone)]`.
//!
//! More examples:
//!
//! ```rust
//! use extend::ext;
//!
//! #[ext(name = SortedVecExt)]
//! impl<T: Ord> Vec<T> {
//! fn sorted(mut self) -> Self {
//! self.sort();
//! self
//! }
//! }
//!
//! #[ext]
//! pub(crate) impl i32 {
//! fn double(self) -> i32 {
//! self * 2
//! }
//! }
//!
//! #[ext(name = ResultSafeUnwrapExt)]
//! pub impl<T> Result<T, std::convert::Infallible> {
//! fn safe_unwrap(self) -> T {
//! match self {
//! Ok(t) => t,
//! Err(_) => unreachable!(),
//! }
//! }
//! }
//!
//! #[ext(supertraits = Default + Clone)]
//! impl String {
//! fn my_length(self) -> usize {
//! self.len()
//! }
//! }
//! ```
//!
//! For backwards compatibility you can also declare the visibility as the first argument to `#[ext]`:
//!
//! ```
//! use extend::ext;
//!
//! #[ext(pub)]
//! impl i32 {
//! fn double(self) -> i32 {
//! self * 2
//! }
//! }
//! ```
//!
//! # async-trait compatibility
//!
//! Async extensions are supported via [async-trait](https://crates.io/crates/async-trait).
//!
//! Be aware that you need to add `#[async_trait]` _below_ `#[ext]`. Otherwise the `ext` macro
//! cannot see the `#[async_trait]` attribute and pass it along in the generated code.
//!
//! Example:
//!
//! ```
//! use extend::ext;
//! use async_trait::async_trait;
//!
//! #[ext]
//! #[async_trait]
//! impl String {
//! async fn read_file() -> String {
//! // ...
//! # todo!()
//! }
//! }
//! ```
//!
//! # Other attributes
//!
//! Other attributes provided _below_ `#[ext]` will be passed along to both the generated trait and
//! the implementation. See [async-trait compatibility](#async-trait-compatibility) above for an
//! example.
//!
//! [extension traits]: https://dev.to/matsimitsu/extending-existing-functionality-in-rust-with-traits-in-rust-3622
#![allow(clippy::let_and_return)]
#![deny(unused_variables, dead_code, unused_must_use, unused_imports)]
use proc_macro2::TokenStream;
use quote::{format_ident, quote, ToTokens};
use std::convert::{TryFrom, TryInto};
use syn::{
parse::{self, Parse, ParseStream},
parse_macro_input, parse_quote,
punctuated::Punctuated,
spanned::Spanned,
token::{Plus, Semi},
Ident, ImplItem, ItemImpl, Result, Token, TraitItemConst, TraitItemFn, Type, TypeArray,
TypeBareFn, TypeGroup, TypeNever, TypeParamBound, TypeParen, TypePath, TypePtr, TypeReference,
TypeSlice, TypeTraitObject, TypeTuple, Visibility,
};
#[derive(Debug)]
struct Input {
item_impl: ItemImpl,
vis: Option<Visibility>,
}
impl Parse for Input {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut attributes = Vec::new();
if input.peek(syn::Token![#]) {
attributes.extend(syn::Attribute::parse_outer(input)?);
}
let vis = input
.parse::<Visibility>()
.ok()
.filter(|vis| vis != &Visibility::Inherited);
let mut item_impl = input.parse::<ItemImpl>()?;
item_impl.attrs.extend(attributes);
Ok(Self { item_impl, vis })
}
}
/// See crate docs for more info.
#[proc_macro_attribute]
#[allow(clippy::unneeded_field_pattern)]
pub fn ext(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let item = parse_macro_input!(item as Input);
let config = parse_macro_input!(attr as Config);
match go(item, config) {
Ok(tokens) => tokens,
Err(err) => err.into_compile_error().into(),
}
}
/// Like [`ext`](macro@crate::ext) but always add `Sized` as a supertrait.
///
/// This is provided as a convenience for generating extension traits that require `Self: Sized`
/// such as:
///
/// ```
/// use extend::ext_sized;
///
/// #[ext_sized]
/// impl i32 {
/// fn requires_sized(self) -> Option<Self> {
/// Some(self)
/// }
/// }
/// ```
#[proc_macro_attribute]
#[allow(clippy::unneeded_field_pattern)]
pub fn ext_sized(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let item = parse_macro_input!(item as Input);
let mut config: Config = parse_macro_input!(attr as Config);
config.supertraits = if let Some(supertraits) = config.supertraits.take() {
Some(parse_quote!(#supertraits + Sized))
} else {
Some(parse_quote!(Sized))
};
match go(item, config) {
Ok(tokens) => tokens,
Err(err) => err.into_compile_error().into(),
}
}
fn go(item: Input, mut config: Config) -> Result<proc_macro::TokenStream> {
if let Some(vis) = item.vis {
if config.visibility != Visibility::Inherited {
return Err(syn::Error::new(
config.visibility.span(),
"Cannot set visibility on `#[ext]` and `impl` block",
));
}
config.visibility = vis;
}
let ItemImpl {
attrs,
unsafety,
generics,
trait_,
self_ty,
items,
// What is defaultness?
defaultness: _,
impl_token: _,
brace_token: _,
} = item.item_impl;
if let Some((_, path, _)) = trait_ {
return Err(syn::Error::new(
path.span(),
"Trait impls cannot be used for #[ext]",
));
}
let self_ty = parse_self_ty(&self_ty)?;
let ext_trait_name = if let Some(ext_trait_name) = config.ext_trait_name {
ext_trait_name
} else {
ext_trait_name(&self_ty)?
};
let MethodsAndConsts {
trait_methods,
trait_consts,
} = extract_allowed_items(&items)?;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let visibility = &config.visibility;
let mut all_supertraits = Vec::<TypeParamBound>::new();
if let Some(supertraits_from_config) = config.supertraits {
all_supertraits.extend(supertraits_from_config);
}
let supertraits_quoted = if all_supertraits.is_empty() {
quote! {}
} else {
let supertraits_quoted = punctuated_from_iter::<_, _, Plus>(all_supertraits);
quote! { : #supertraits_quoted }
};
let code = (quote! {
#[allow(non_camel_case_types)]
#(#attrs)*
#visibility
#unsafety
trait #ext_trait_name #impl_generics #supertraits_quoted #where_clause {
#(
#trait_consts
)*
#(
#[allow(
patterns_in_fns_without_body,
clippy::inline_fn_without_body,
unused_attributes
)]
#trait_methods
)*
}
#(#attrs)*
impl #impl_generics #ext_trait_name #ty_generics for #self_ty #where_clause {
#(#items)*
}
})
.into();
Ok(code)
}
#[derive(Debug, Clone)]
enum ExtType<'a> {
Array(&'a TypeArray),
Group(&'a TypeGroup),
Never(&'a TypeNever),
Paren(&'a TypeParen),
Path(&'a TypePath),
Ptr(&'a TypePtr),
Reference(&'a TypeReference),
Slice(&'a TypeSlice),
Tuple(&'a TypeTuple),
BareFn(&'a TypeBareFn),
TraitObject(&'a TypeTraitObject),
}
#[allow(clippy::wildcard_in_or_patterns)]
fn parse_self_ty(self_ty: &Type) -> Result<ExtType> {
let ty = match self_ty {
Type::Array(inner) => ExtType::Array(inner),
Type::Group(inner) => ExtType::Group(inner),
Type::Never(inner) => ExtType::Never(inner),
Type::Paren(inner) => ExtType::Paren(inner),
Type::Path(inner) => ExtType::Path(inner),
Type::Ptr(inner) => ExtType::Ptr(inner),
Type::Reference(inner) => ExtType::Reference(inner),
Type::Slice(inner) => ExtType::Slice(inner),
Type::Tuple(inner) => ExtType::Tuple(inner),
Type::BareFn(inner) => ExtType::BareFn(inner),
Type::TraitObject(inner) => ExtType::TraitObject(inner),
Type::ImplTrait(_) | Type::Infer(_) | Type::Macro(_) | Type::Verbatim(_) | _ => {
return Err(syn::Error::new(
self_ty.span(),
"#[ext] is not supported for this kind of type",
))
}
};
Ok(ty)
}
impl<'a> TryFrom<&'a Type> for ExtType<'a> {
type Error = syn::Error;
fn try_from(inner: &'a Type) -> Result<ExtType<'a>> {
parse_self_ty(inner)
}
}
impl<'a> ToTokens for ExtType<'a> {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
ExtType::Array(inner) => inner.to_tokens(tokens),
ExtType::Group(inner) => inner.to_tokens(tokens),
ExtType::Never(inner) => inner.to_tokens(tokens),
ExtType::Paren(inner) => inner.to_tokens(tokens),
ExtType::Path(inner) => inner.to_tokens(tokens),
ExtType::Ptr(inner) => inner.to_tokens(tokens),
ExtType::Reference(inner) => inner.to_tokens(tokens),
ExtType::Slice(inner) => inner.to_tokens(tokens),
ExtType::Tuple(inner) => inner.to_tokens(tokens),
ExtType::BareFn(inner) => inner.to_tokens(tokens),
ExtType::TraitObject(inner) => inner.to_tokens(tokens),
}
}
}
fn ext_trait_name(self_ty: &ExtType) -> Result<Ident> {
fn inner_self_ty(self_ty: &ExtType) -> Result<Ident> {
match self_ty {
ExtType::Path(inner) => find_and_combine_idents(inner),
ExtType::Reference(inner) => {
let name = inner_self_ty(&(&*inner.elem).try_into()?)?;
if inner.mutability.is_some() {
Ok(format_ident!("RefMut{}", name))
} else {
Ok(format_ident!("Ref{}", name))
}
}
ExtType::Array(inner) => {
let name = inner_self_ty(&(&*inner.elem).try_into()?)?;
Ok(format_ident!("ListOf{}", name))
}
ExtType::Group(inner) => {
let name = inner_self_ty(&(&*inner.elem).try_into()?)?;
Ok(format_ident!("Group{}", name))
}
ExtType::Paren(inner) => {
let name = inner_self_ty(&(&*inner.elem).try_into()?)?;
Ok(format_ident!("Paren{}", name))
}
ExtType::Ptr(inner) => {
let name = inner_self_ty(&(&*inner.elem).try_into()?)?;
Ok(format_ident!("PointerTo{}", name))
}
ExtType::Slice(inner) => {
let name = inner_self_ty(&(&*inner.elem).try_into()?)?;
Ok(format_ident!("SliceOf{}", name))
}
ExtType::Tuple(inner) => {
let mut name = format_ident!("TupleOf");
for elem in &inner.elems {
name = format_ident!("{}{}", name, inner_self_ty(&elem.try_into()?)?);
}
Ok(name)
}
ExtType::Never(_) => Ok(format_ident!("Never")),
ExtType::BareFn(inner) => {
let mut name = format_ident!("BareFn");
for input in inner.inputs.iter() {
name = format_ident!("{}{}", name, inner_self_ty(&(&input.ty).try_into()?)?);
}
match &inner.output {
syn::ReturnType::Default => {
name = format_ident!("{}Unit", name);
}
syn::ReturnType::Type(_, ty) => {
name = format_ident!("{}{}", name, inner_self_ty(&(&**ty).try_into()?)?);
}
}
Ok(name)
}
ExtType::TraitObject(inner) => {
let mut name = format_ident!("TraitObject");
for bound in inner.bounds.iter() {
match bound {
TypeParamBound::Trait(bound) => {
for segment in bound.path.segments.iter() {
name = format_ident!("{}{}", name, segment.ident);
}
}
TypeParamBound::Lifetime(lifetime) => {
name = format_ident!("{}{}", name, lifetime.ident);
}
other => {
return Err(syn::Error::new(other.span(), "unsupported bound"));
}
}
}
Ok(name)
}
}
}
Ok(format_ident!("{}Ext", inner_self_ty(self_ty)?))
}
fn find_and_combine_idents(type_path: &TypePath) -> Result<Ident> {
use syn::visit::{self, Visit};
struct IdentVisitor<'a>(Vec<&'a Ident>);
impl<'a> Visit<'a> for IdentVisitor<'a> {
fn visit_ident(&mut self, i: &'a Ident) {
self.0.push(i);
}
}
let mut visitor = IdentVisitor(Vec::new());
visit::visit_type_path(&mut visitor, type_path);
let idents = visitor.0;
if idents.is_empty() {
Err(syn::Error::new(type_path.span(), "Empty type path"))
} else {
let start = &idents[0].span();
let combined_span = idents
.iter()
.map(|i| i.span())
.fold(*start, |a, b| a.join(b).unwrap_or(a));
let combined_name = idents.iter().map(|i| i.to_string()).collect::<String>();
Ok(Ident::new(&combined_name, combined_span))
}
}
#[derive(Debug, Default)]
struct MethodsAndConsts {
trait_methods: Vec<TraitItemFn>,
trait_consts: Vec<TraitItemConst>,
}
#[allow(clippy::wildcard_in_or_patterns)]
fn extract_allowed_items(items: &[ImplItem]) -> Result<MethodsAndConsts> {
let mut acc = MethodsAndConsts::default();
for item in items {
match item {
ImplItem::Fn(method) => acc.trait_methods.push(TraitItemFn {
attrs: method.attrs.clone(),
sig: {
let mut sig = method.sig.clone();
sig.inputs = sig
.inputs
.into_iter()
.map(|fn_arg| match fn_arg {
syn::FnArg::Receiver(recv) => syn::FnArg::Receiver(recv),
syn::FnArg::Typed(mut pat_type) => {
pat_type.pat = Box::new(match *pat_type.pat {
syn::Pat::Ident(pat_ident) => syn::Pat::Ident(pat_ident),
_ => {
parse_quote!(_)
}
});
syn::FnArg::Typed(pat_type)
}
})
.collect();
sig
},
default: None,
semi_token: Some(Semi::default()),
}),
ImplItem::Const(const_) => acc.trait_consts.push(TraitItemConst {
attrs: const_.attrs.clone(),
generics: const_.generics.clone(),
const_token: Default::default(),
ident: const_.ident.clone(),
colon_token: Default::default(),
ty: const_.ty.clone(),
default: None,
semi_token: Default::default(),
}),
ImplItem::Type(_) => {
return Err(syn::Error::new(
item.span(),
"Associated types are not allowed in #[ext] impls",
))
}
ImplItem::Macro(_) => {
return Err(syn::Error::new(
item.span(),
"Macros are not allowed in #[ext] impls",
))
}
ImplItem::Verbatim(_) | _ => {
return Err(syn::Error::new(item.span(), "Not allowed in #[ext] impls"))
}
}
}
Ok(acc)
}
#[derive(Debug)]
struct Config {
ext_trait_name: Option<Ident>,
visibility: Visibility,
supertraits: Option<Punctuated<TypeParamBound, Plus>>,
}
impl Parse for Config {
fn parse(input: ParseStream) -> parse::Result<Self> {
let mut config = Config::default();
if let Ok(visibility) = input.parse::<Visibility>() {
config.visibility = visibility;
}
input.parse::<Token![,]>().ok();
while !input.is_empty() {
let ident = input.parse::<Ident>()?;
input.parse::<Token![=]>()?;
match &*ident.to_string() {
"name" => {
config.ext_trait_name = Some(input.parse()?);
}
"supertraits" => {
config.supertraits =
Some(Punctuated::<TypeParamBound, Plus>::parse_terminated(input)?);
}
_ => return Err(syn::Error::new(ident.span(), "Unknown configuration name")),
}
input.parse::<Token![,]>().ok();
}
Ok(config)
}
}
impl Default for Config {
fn default() -> Self {
Self {
ext_trait_name: None,
visibility: Visibility::Inherited,
supertraits: None,
}
}
}
fn punctuated_from_iter<I, T, P>(i: I) -> Punctuated<T, P>
where
P: Default,
I: IntoIterator<Item = T>,
{
let mut iter = i.into_iter().peekable();
let mut acc = Punctuated::default();
while let Some(item) = iter.next() {
acc.push_value(item);
if iter.peek().is_some() {
acc.push_punct(P::default());
}
}
acc
}
#[cfg(test)]
mod test {
#[allow(unused_imports)]
use super::*;
#[test]
fn test_ui() {
let t = trybuild::TestCases::new();
t.pass("tests/compile_pass/*.rs");
t.compile_fail("tests/compile_fail/*.rs");
}
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_fail/double_vis.rs | tests/compile_fail/double_vis.rs | mod a {
use extend::ext;
#[ext(pub(super))]
pub impl i32 {
fn foo() -> Foo {
Foo
}
}
pub struct Foo;
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_fail/supertraits_are_actually_included.rs | tests/compile_fail/supertraits_are_actually_included.rs | use extend::ext;
trait MyTrait {}
#[ext(supertraits = MyTrait)]
impl String {
fn my_len(&self) -> usize {
self.len()
}
}
fn main() {
assert_eq!(String::new().my_len(), 0);
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/pub_impl.rs | tests/compile_pass/pub_impl.rs | mod a {
use extend::ext;
#[ext]
pub impl i32 {
fn foo() -> Foo { Foo }
}
pub struct Foo;
}
fn main() {
use a::i32Ext;
i32::foo();
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/complex_trait_name.rs | tests/compile_pass/complex_trait_name.rs | mod foo {
use extend::ext;
#[ext(pub)]
impl<T1, T2, T3> (T1, T2, T3) {
fn size(&self) -> usize {
3
}
}
}
fn main() {
use foo::TupleOfT1T2T3Ext;
assert_eq!(3, (0, 0, 0).size());
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/destructure.rs | tests/compile_pass/destructure.rs | #![allow(warnings)]
use extend::ext;
#[ext]
impl i32 {
fn foo(self, (a, b): (i32, i32)) {}
fn bar(self, [a, b]: [i32; 2]) {}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/ref_and_ref_mut.rs | tests/compile_pass/ref_and_ref_mut.rs | use extend::ext;
#[ext]
impl &i32 {
fn foo() {}
}
#[ext]
impl &mut i32 {
fn bar() {}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/double_ext_on_same_type.rs | tests/compile_pass/double_ext_on_same_type.rs | use extend::ext;
#[ext]
impl Option<usize> {
fn foo() -> usize {
1
}
}
#[ext]
impl Option<i32> {
fn bar() -> i32 {
1
}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/issue_2.rs | tests/compile_pass/issue_2.rs | #![allow(unused_variables)]
use extend::ext;
use std::iter::FromIterator;
#[ext]
impl<T, K, F, C> C
where
C: IntoIterator<Item = T>,
K: Eq,
F: Fn(&T) -> K,
{
fn group_by<Out>(self, f: F) -> Out
where
Out: FromIterator<(K, Vec<T>)>,
{
todo!()
}
fn group_by_and_map_values<Out, G, T2>(self, f: F, g: G) -> Out
where
G: Fn(T) -> T2 + Copy,
Out: FromIterator<(K, Vec<T2>)>,
{
todo!()
}
fn group_by_and_return_groups(self, f: F) -> Vec<Vec<T>> {
todo!()
}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/super_trait.rs | tests/compile_pass/super_trait.rs | use extend::ext;
trait MyTrait {}
impl MyTrait for String {}
#[ext(supertraits = Default + Clone + MyTrait)]
impl String {
fn my_len(&self) -> usize {
self.len()
}
}
fn main() {
assert_eq!(String::new().my_len(), 0);
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/changing_extension_trait_name.rs | tests/compile_pass/changing_extension_trait_name.rs | use extend::ext;
#[ext(name = Foo)]
impl i32 {
fn foo() {}
}
fn main() {
<i32 as Foo>::foo();
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/hello_world.rs | tests/compile_pass/hello_world.rs | use extend::ext;
#[ext]
impl i32 {
fn add_one(&self) -> Self {
self + 1
}
fn foo() -> MyType {
MyType
}
}
#[derive(Debug, Eq, PartialEq)]
struct MyType;
fn main() {
assert_eq!(i32::foo(), MyType);
assert_eq!(1.add_one(), 2);
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/multiple_config.rs | tests/compile_pass/multiple_config.rs | use extend::ext;
#[ext(pub(crate), name = Foo)]
impl i32 {
fn foo() {}
}
#[ext(pub, name = Bar)]
impl i64 {
fn foo() {}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/more_than_one_extension.rs | tests/compile_pass/more_than_one_extension.rs | use extend::ext;
#[ext]
impl i32 {
fn foo() {}
}
#[ext]
impl i64 {
fn bar() {}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/multiple_generic_params.rs | tests/compile_pass/multiple_generic_params.rs | use extend::ext;
use std::marker::PhantomData;
struct Foo<T>(PhantomData<T>);
#[ext]
impl<T, K> T {
fn some_method(&self, _: Foo<K>) {}
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/associated_constants.rs | tests/compile_pass/associated_constants.rs | use extend::ext;
#[ext]
impl Option<String> {
const FOO: usize = 1;
}
fn main() {
assert_eq!(Option::<String>::FOO, 1);
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/generics.rs | tests/compile_pass/generics.rs | use extend::ext;
#[ext]
impl<'a, T: Clone> Vec<&'a T>
where
T: 'a + Copy,
{
fn size(&self) -> usize {
self.len()
}
}
fn main() {
assert_eq!(3, vec![&1, &2, &3].size());
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/async_trait.rs | tests/compile_pass/async_trait.rs | use extend::ext;
use async_trait::async_trait;
#[ext]
#[async_trait]
impl String {
async fn foo() -> usize {
1
}
}
#[ext]
#[async_trait]
pub impl i32 {
async fn bar() -> usize {
1
}
}
async fn foo() {
let _: usize = String::foo().await;
let _: usize = i32::bar().await;
}
fn main() {}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/extension_on_complex_types.rs | tests/compile_pass/extension_on_complex_types.rs | #![allow(warnings)]
use extend::ext;
#[ext]
impl<'a> &'a str {
fn foo(self) {}
}
#[ext]
impl<T> [T; 3] {
fn foo(self) {}
}
#[ext]
impl *const i32 {
fn foo(self) {}
}
#[ext]
impl<T> [T] {
fn foo(&self) {}
}
#[ext]
impl<'a, T> &'a [T] {
fn foo(self) {}
}
#[ext]
impl (i32, i64) {
fn foo(self) {}
}
#[ext]
impl fn(i32) -> bool {
fn foo(self) {}
}
fn bare_fn(_: i32) -> bool {
false
}
#[ext]
impl dyn Send + Sync + 'static {}
fn main() {
"".foo();
[1, 2, 3].foo();
let ptr: *const i32 = &123;
ptr.foo();
&[1, 2, 3].foo();
(1i32, 1i64).foo();
(bare_fn as fn(i32) -> bool).foo();
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/visibility_config.rs | tests/compile_pass/visibility_config.rs | mod a {
use extend::ext;
#[ext(pub)]
impl i32 {
fn foo() -> Foo { Foo }
}
pub struct Foo;
}
fn main() {
use a::i32Ext;
i32::foo();
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
davidpdrsn/extend | https://github.com/davidpdrsn/extend/blob/954456f54d5e9e89233d58d0b630ca0361a3fc5a/tests/compile_pass/sized.rs | tests/compile_pass/sized.rs | use extend::ext_sized;
#[ext_sized(name = One)]
impl i32 {
fn requires_sized(self) -> Option<Self> {
Some(self)
}
}
#[ext_sized(name = Two, supertraits = Default)]
impl i32 {
fn with_another_supertrait(self) -> Option<Self> {
Some(self)
}
}
#[ext_sized(name = Three, supertraits = Default + Clone + Copy)]
impl i32 {
fn multiple_supertraits(self) -> Option<Self> {
Some(self)
}
}
#[ext_sized(name = Four, supertraits = Sized)]
impl i32 {
fn already_sized(self) -> Option<Self> {
Some(self)
}
}
fn main() {
1.requires_sized();
1.with_another_supertrait();
1.multiple_supertraits();
1.already_sized();
}
| rust | MIT | 954456f54d5e9e89233d58d0b630ca0361a3fc5a | 2026-01-04T20:20:27.405623Z | false |
sdleffler/collapse | https://github.com/sdleffler/collapse/blob/075f5449bc20ff4ecff9740c90e542f691a8c7f5/src/lib.rs | src/lib.rs | #[macro_use]
extern crate bitflags;
extern crate bit_set;
extern crate bit_vec;
extern crate image;
#[macro_use]
extern crate log;
#[macro_use]
extern crate ndarray;
extern crate pbr;
extern crate vosealias;
extern crate rand;
mod source;
mod wave;
pub use source::*;
pub use wave::Wave;
| rust | MIT | 075f5449bc20ff4ecff9740c90e542f691a8c7f5 | 2026-01-04T20:20:25.435457Z | false |
sdleffler/collapse | https://github.com/sdleffler/collapse/blob/075f5449bc20ff4ecff9740c90e542f691a8c7f5/src/wave.rs | src/wave.rs | use std::cell::RefCell;
use std::collections::HashSet;
use std::fmt;
use std::hash::Hash;
use std::io;
use bit_set::BitSet;
use ndarray::prelude::*;
use ndarray::NdIndex;
use rand::Rng;
use rand::StdRng;
use source::{Source, OverlappingSource2};
#[derive(Clone)]
pub struct State<P> {
pub pos: P,
pub entropy: f64,
pub cfg: BitSet,
}
pub struct Wave<'a, R, S: ?Sized>
where S: 'a + Source
{
source: &'a S,
states: Array<RefCell<State<S::Dims>>, S::Dims>,
unobserved: HashSet<S::Dims>,
dims: S::Dims,
periodic: S::Periodicity,
rng: R,
}
impl<'a, R, P: Eq + Hash + Copy> fmt::Debug for Wave<'a, R, OverlappingSource2<P>> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut grid = String::new();
for j in 0..self.states.dim().1 {
for i in 0..self.states.dim().0 {
grid.push_str(format!(" {:<4.2}", self.states[(i, j)].borrow().entropy).as_str());
}
grid.push('\n');
}
write!(f, "{}", grid)
}
}
impl<'a, S: 'a + Source> Wave<'a, StdRng, S>
where S::Dims: Copy + Eq + Hash + Dimension + NdIndex<Dim = S::Dims>,
S::Periodicity: Copy
{
pub fn new(dims: S::Dims,
periodic: S::Periodicity,
source: &'a S)
-> io::Result<Wave<'a, StdRng, S>> {
let mut unobserved = HashSet::new();
let states = Array::from_shape_fn(source.wave_dims(dims, periodic), |pos| {
unobserved.insert(pos);
RefCell::new(source.initial_state(pos))
});
let rng = try!(StdRng::new());
Ok(Wave {
source: source,
states: states,
unobserved: unobserved,
dims: dims,
periodic: periodic,
rng: rng,
})
}
}
impl<'a, R: Rng, S: 'a + Source> Wave<'a, R, S>
where S::Dims: Copy + Eq + Ord + Hash + Dimension + NdIndex<Dim = S::Dims>,
S::Periodicity: Copy
{
pub fn observe(self) -> Option<Self> {
let Wave { source, states, mut unobserved, dims, periodic, mut rng } = self;
let observed = {
let mut iter = unobserved.iter();
let first: S::Dims = match iter.next() {
Some(&p) => p,
None => return None,
};
iter.fold(first,
|f, &p| if states[p].borrow().entropy < states[f].borrow().entropy {
p
} else {
f
})
};
unobserved.remove(&observed);
let states = match source.observe(states, observed, periodic, &mut rng) {
Some(states) => states,
None => return None,
};
return Some(Wave {
source: source,
states: states,
unobserved: unobserved,
dims: dims,
periodic: periodic,
rng: rng,
});
}
pub fn collapse(mut self) -> Option<Array<S::Pixel, S::Dims>> {
while !self.is_collapsed() {
self = match self.observe() {
Some(wave) => wave,
None => return None,
};
}
Some(self.source.resolve(self.dims, self.states.view()))
}
pub fn resolve(&self) -> Array<S::Pixel, S::Dims> {
assert!(self.is_collapsed());
self.source.resolve(self.dims, self.states.view())
}
pub fn is_collapsed(&self) -> bool {
self.unobserved.is_empty()
}
pub fn len(&self) -> usize {
self.states.len()
}
pub fn uncollapsed(&self) -> usize {
self.unobserved.len()
}
pub fn constrain(self, pos: S::Dims, val: S::Pixel) -> Option<Self> {
let Wave { source, states, unobserved, dims, periodic, rng } = self;
let states = match source.constrain(states, pos, periodic, val) {
Some(states) => states,
None => return None,
};
return Some(Wave {
source: source,
states: states,
unobserved: unobserved,
dims: dims,
periodic: periodic,
rng: rng,
});
}
}
// #[cfg(test)]
// mod tests {
// extern crate env_logger;
// extern crate image;
//
// use super::*;
//
// use std::cell::RefCell;
// use std::collections::HashSet;
//
// use image::{GenericImage, ImageBuffer};
//
// use ndarray::prelude::*;
//
// use rand::{StdRng, SeedableRng};
//
// use source::*;
//
// #[test]
// fn should_collapse_rooms() {
// let _ = env_logger::init();
//
// let img = image::open("resources/Rooms.png").expect("Failed to open source image");
// let src = OverlappingSource2::from_image(&img, (3, 3), (true, true), Symmetry2::all());
//
// let wave = Wave::new((64, 64), (true, true), &src).expect("IO error");
//
// let pixels = wave.collapse().expect("Wave contradiction!");
//
// let buffer = ImageBuffer::from_fn(pixels.dim().0 as u32,
// pixels.dim().1 as u32,
// |x, y| pixels[(x as usize, y as usize)]);
// buffer.save("output/CollapseTestRooms.png").expect("Error saving buffer");
// }
//
//
// #[test]
// fn should_collapse_flowers() {
// let _ = env_logger::init();
//
// let img = image::open("resources/Flowers.png").expect("Failed to open source image");
// let sky = img.get_pixel(0, 0);
// let gnd = img.get_pixel(0, 21);
// let src = OverlappingSource2::from_image(&img, (3, 3), (true, false), S2_IDENTITY | S2_REFLECT_Y);
//
// let mut wave = Wave::new((128, 128), (true, false), &src).expect("IO error");
// for i in 0..128 {
// wave = wave.constrain((i, 127), gnd)
// .expect("Constraint error")
// .constrain((i, 0), sky)
// .expect("Constraint error");
// }
//
// let pixels = wave.collapse().expect("Wave contradiction!");
//
// let buffer = ImageBuffer::from_fn(pixels.dim().0 as u32,
// pixels.dim().1 as u32,
// |x, y| pixels[(x as usize, y as usize)]);
// buffer.save("output/CollapseTestFlowers.png").expect("Error saving buffer");
// }
//
//
// #[test]
// fn should_collapse_sword() {
// let _ = env_logger::init();
//
// let img = image::open("resources/DitheringSword.png").expect("Failed to open source image");
// let empty = img.get_pixel(0, 0);
// let src = OverlappingSource2::from_image(&img, (3, 3), (true, true), S2_IDENTITY);
//
// let mut wave = Wave::new((128, 128), (true, true), &src).expect("IO error");
// for i in 0..128 {
// wave = wave.constrain((i, 127), empty)
// .expect("Constraint error")
// .constrain((i, 0), empty)
// .expect("Constraint error");
// }
//
// let pixels = wave.collapse().expect("Wave contradiction!");
//
// let buffer = ImageBuffer::from_fn(pixels.dim().0 as u32,
// pixels.dim().1 as u32,
// |x, y| pixels[(x as usize, y as usize)]);
// buffer.save("output/CollapseTestSword.png").expect("Error saving buffer");
// }
// }
| rust | MIT | 075f5449bc20ff4ecff9740c90e542f691a8c7f5 | 2026-01-04T20:20:25.435457Z | false |
sdleffler/collapse | https://github.com/sdleffler/collapse/blob/075f5449bc20ff4ecff9740c90e542f691a8c7f5/src/main.rs | src/main.rs | #[macro_use]
extern crate clap;
extern crate collapse;
extern crate image;
#[macro_use]
extern crate lazy_static;
extern crate pbr;
extern crate regex;
use std::str::FromStr;
use image::{GenericImageView, ImageBuffer};
use clap::{App, AppSettings, Arg, ArgGroup, SubCommand};
use collapse::*;
use pbr::ProgressBar;
use regex::Regex;
lazy_static! {
    // Matches a signed-integer range such as "0..16" or "-3..-1".
    // The dots must be escaped: an unescaped `.` matches any character, so
    // the previous pattern accepted garbage like "1xy2" as a valid range.
    static ref MATCH_RANGE: Regex = Regex::new(r"^(-?\d+)\.\.(-?\d+)$").unwrap();
}
/// Clap validator for `--pixel-constraint` values.
///
/// A constraint has the form `in_x,in_y,out_x,out_y`, where `in_x`/`in_y`
/// are signed integers indexing the input image, and `out_x`/`out_y` are
/// each either a signed integer or a signed-integer range `a..b`.
fn validate_constraint(constraint: String) -> Result<(), String> {
    let split: Vec<_> = constraint.split(',').collect();
    // Exactly four comma-separated fields are required; bail early otherwise
    // (the `else` wrapper around the remaining checks was redundant).
    if split.len() != 4 {
        return Err(format!("Expected four values to the constraint - the x and y of the \
                            pixel in the input, and the x and y pixels/ranges in the \
                            output, but got {} values",
                           split.len()));
    }
    // The input coordinates must both be plain signed integers.
    match (split[0].parse::<i32>(), split[1].parse::<i32>()) {
        (Ok(_), Ok(_)) => (),
        _ => {
            return Err(String::from("Expected the input x and y coordinates to be signed \
                                     integers"));
        }
    }
    // Each output coordinate may be either a signed integer or a range; only
    // the fields that failed to parse as integers need to match the range
    // pattern.
    match (split[2].parse::<i32>(), split[3].parse::<i32>()) {
        (Err(_), Ok(_)) => {
            if !MATCH_RANGE.is_match(split[2]) {
                return Err(String::from("Expected the output x coordinate to either be a \
                                         signed integer or a range of signed integers"));
            }
        }
        (Ok(_), Err(_)) => {
            if !MATCH_RANGE.is_match(split[3]) {
                return Err(String::from("Expected the output y coordinate to either be a \
                                         signed integer or a range of signed integers"));
            }
        }
        (Err(_), Err(_)) => {
            if !MATCH_RANGE.is_match(split[2]) || !MATCH_RANGE.is_match(split[3]) {
                return Err(String::from("Expected the output x and y coordinates to either \
                                         be a signed integer or a range of signed integers"));
            }
        }
        _ => (),
    }
    Ok(())
}
/// Resolve one output-coordinate spec (`"7"`, `"-1"`, or a range `"a..b"`)
/// into the concrete list of indices along an axis of length `dim`.
///
/// Negative values count backwards from the end of the axis. The spec has
/// already been accepted by `validate_constraint`, so anything that is not a
/// plain integer must match `MATCH_RANGE`.
fn resolve_axis(spec: &str, dim: usize) -> Vec<usize> {
    // Wrap a possibly-negative coordinate. Do the arithmetic in i32: the old
    // code cast the negative i32 to usize *before* adding, which overflows
    // (and panics in debug builds).
    let wrap = |v: i32| if v < 0 { (v + dim as i32) as usize } else { v as usize };
    if let Ok(v) = spec.parse::<i32>() {
        vec![wrap(v)]
    } else if let Some(caps) = MATCH_RANGE.captures(spec) {
        let a = wrap(caps.at(1).and_then(|s| i32::from_str(s).ok()).unwrap());
        let b = wrap(caps.at(2).and_then(|s| i32::from_str(s).ok()).unwrap());
        if a < b { a..b } else { b..a }.collect()
    } else {
        // validate_constraint guarantees one of the branches above matched.
        unreachable!();
    }
}

fn main() {
    // --- CLI definition -----------------------------------------------------
    let app = App::new("collapse")
        .version(crate_version!())
        .author(crate_authors!())
        .about("A command-line interface for the wavefunction collapse texture synthesis \
                algorithm.")
        .subcommand(SubCommand::with_name("2d")
            .about("The 2D case of the wavefunction collapse algorithm. Samples an input image \
                    and produces an output image.")
            .arg(Arg::with_name("INPUT")
                .help("The input file to sample.")
                .required(true)
                .index(1))
            .arg(Arg::with_name("OUTPUT")
                .help("The output file to generate.")
                .required(true)
                .index(2))
            .arg(Arg::with_name("periodic-input")
                .help("The input periodicity; expects two booleans, corresponding to the x and \
                       y axes. If true, then the input will be processed as wrapping on that \
                       axis. Defaults to false for both axes.")
                .short("p")
                .long("periodic-input")
                .takes_value(true)
                .number_of_values(2)
                .require_delimiter(true)
                .value_name("x, y"))
            .arg(Arg::with_name("periodic-output")
                .help("The output periodicity; expects two booleans, corresponding to the x \
                       and y axes. If true, then the output will be processed as wrapping on \
                       that axis. Defaults to true for both axes.")
                .short("P")
                .long("periodic-output")
                .takes_value(true)
                .number_of_values(2)
                .require_delimiter(true)
                .value_name("x, y"))
            .arg(Arg::with_name("n")
                .help("The sample dimensions; expects two positive nonzero integers, \
                       corresponding to the width and height of the rectangle to be used for \
                       sampling the input. Defaults to 3 for both axes.")
                .short("n")
                .takes_value(true)
                .number_of_values(2)
                .require_delimiter(true)
                .value_name("x, y"))
            .arg(Arg::with_name("no-symmetry")
                .long("no-symmetry")
                .help("Do not augment the sample image with rotations/reflections."))
            .arg(Arg::with_name("all-symmetry")
                .long("all-symmetry")
                .help("Augment the sample image with rotations/reflections, using all members \
                       of the relevant symmetry group. This is the default symmetry setting."))
            .arg(Arg::with_name("identity")
                .long("identity")
                .help("The original image, since the identity transformation is a no-op. Don't \
                       forget this if you're building up a custom set of symmetries."))
            .arg(Arg::with_name("reflect-x").long("reflect-x").help("Reflect over the x axis."))
            .arg(Arg::with_name("reflect-y").long("reflect-y").help("Reflect over the y axis."))
            .arg(Arg::with_name("reflect-y-rot90")
                .long("reflect-y-rot90")
                .help("Reflect over the y axis, and then rotate by 90 degrees. This is \
                       equivalent to a reflection over the line y = -x."))
            .arg(Arg::with_name("reflect-x-rot90")
                .long("reflect-x-rot90")
                .help("Reflect the over the x axis, and then rotate by 90 degrees. This is \
                       equivalent to a reflection over the line y = x."))
            .arg(Arg::with_name("rot90")
                .long("rot90")
                .help("Rotate the image 90 degrees clockwise."))
            .arg(Arg::with_name("rot180")
                .long("rot180")
                .help("Rotate the image 180 degrees clockwise."))
            .arg(Arg::with_name("rot270")
                .long("rot270")
                .help("Rotate the image 270 degrees clockwise."))
            .group(ArgGroup::with_name("symmetry-simple").args(&["no-symmetry", "all-symmetry"]))
            .group(ArgGroup::with_name("symmetry-complex")
                .args(&["identity",
                        "reflect-x",
                        "reflect-y",
                        "reflect-x-rot90",
                        "reflect-y-rot90",
                        "rot90",
                        "rot180",
                        "rot270"])
                .multiple(true)
                .conflicts_with("symmetry-simple"))
            .arg(Arg::with_name("output-dims")
                .help("The dimensions of the output image.")
                .short("d")
                .takes_value(true)
                .number_of_values(2)
                .require_delimiter(true)
                .required(true)
                .value_name("x, y"))
            .arg(Arg::with_name("pixel-constraint")
                .help("source x, source y, output x (optionally a range \
                       x0..x1, x0 inclusive, x1 exclusive), output y (optionally a range \
                       y0..y1, y0 inclusive, y1 exclusive)")
                .short("c")
                .takes_value(true)
                .multiple(true)
                .value_name("source x, source y, output x/range, output y/range")
                .validator(validate_constraint)))
        .settings(&[AppSettings::SubcommandRequiredElseHelp]);
    let matches = app.get_matches();
    if let Some(matches) = matches.subcommand_matches("2d") {
        // --- Option parsing -------------------------------------------------
        let periodic_input = if matches.is_present("periodic-input") {
            let vec = values_t!(matches.values_of("periodic-input"), bool)
                .unwrap_or_else(|e| e.exit());
            (vec[0], vec[1])
        } else {
            (false, false)
        };
        let periodic_output = if matches.is_present("periodic-output") {
            let vec = values_t!(matches.values_of("periodic-output"), bool)
                .unwrap_or_else(|e| e.exit());
            (vec[0], vec[1])
        } else {
            (true, true)
        };
        let n = if matches.is_present("n") {
            let vec = values_t!(matches.values_of("n"), usize).unwrap_or_else(|e| e.exit());
            (vec[0], vec[1])
        } else {
            (3, 3)
        };
        let output_dims = {
            let vec = values_t!(matches.values_of("output-dims"), usize)
                .unwrap_or_else(|e| e.exit());
            (vec[0], vec[1])
        };
        let input = matches.value_of("INPUT").unwrap();
        let output = matches.value_of("OUTPUT").unwrap();
        let img = image::open(input).expect("Failed to open input image");
        // Build the symmetry set: --no-symmetry wins, then any explicit
        // combination of flags, and the full group is the default.
        let symmetries = if matches.is_present("no-symmetry") {
            S2_IDENTITY
        } else if matches.is_present("symmetry-complex") {
            let mut symmetries = Symmetry2::empty();
            if matches.is_present("identity") {
                symmetries |= S2_IDENTITY;
            }
            if matches.is_present("reflect-x") {
                symmetries |= S2_REFLECT_X;
            }
            if matches.is_present("reflect-y") {
                symmetries |= S2_REFLECT_Y;
            }
            if matches.is_present("reflect-x-rot90") {
                symmetries |= S2_REFLECT_X_ROT90;
            }
            if matches.is_present("reflect-y-rot90") {
                symmetries |= S2_REFLECT_Y_ROT90;
            }
            if matches.is_present("rot90") {
                symmetries |= S2_ROTATE_90;
            }
            if matches.is_present("rot180") {
                symmetries |= S2_ROTATE_180;
            }
            if matches.is_present("rot270") {
                symmetries |= S2_ROTATE_270;
            }
            symmetries
        } else {
            Symmetry2::all()
        };
        let input_dims = (img.width(), img.height());
        // --- Pixel constraints ----------------------------------------------
        // Each constraint pins a set of output positions to one input pixel.
        let constraints: Vec<((usize, usize), (u32, u32))> =
            if matches.is_present("pixel-constraint") {
                matches.values_of("pixel-constraint")
                    .unwrap()
                    .flat_map(|coords| {
                        let coords: Vec<_> = coords.split(',').collect();
                        let (input_x, input_y) = (coords[0].parse::<i32>().unwrap(),
                                                  coords[1].parse::<i32>().unwrap());
                        // Input coordinates wrap negatives against the input
                        // image's own dimensions.
                        let input_x = if input_x < 0 {
                            input_x + input_dims.0 as i32
                        } else {
                            input_x
                        } as u32;
                        let input_y = if input_y < 0 {
                            input_y + input_dims.1 as i32
                        } else {
                            input_y
                        } as u32;
                        // BUG FIX: the y axis previously wrapped negative
                        // single coordinates against output_dims.0 (the x
                        // dimension). Sharing resolve_axis removes the
                        // duplicated, copy-pasted range logic entirely.
                        let outputs_x = resolve_axis(coords[2], output_dims.0);
                        let outputs_y = resolve_axis(coords[3], output_dims.1);
                        let mut constrained = Vec::new();
                        for &output_x in outputs_x.iter() {
                            for &output_y in outputs_y.iter() {
                                constrained.push(((output_x, output_y), (input_x, input_y)));
                            }
                        }
                        constrained.into_iter()
                    })
                    .collect()
            } else {
                vec![]
            };
        // --- Wave construction, constraint propagation, and collapse --------
        let src = OverlappingSource2::from_image_cli(&img, n, periodic_input, symmetries);
        let mut pb = ProgressBar::new(1);
        pb.message("Building Wave object... ");
        pb.inc();
        let mut wave = Wave::new(output_dims, periodic_output, &src)
            .expect("Error constructing wave");
        let mut pb = ProgressBar::new(constraints.len() as u64);
        pb.message("Propagating individual pixel constraints... ");
        for (output_pos, (x, y)) in constraints {
            pb.inc();
            wave = match wave.constrain(output_pos, img.get_pixel(x, y)) {
                Some(wave) => wave,
                None => {
                    println!("Wave contradiction! The wave has failed to collapse due to a \
                              specified individual pixel constraint: output at {:?} == input at \
                              {:?}",
                             output_pos,
                             (x, y));
                    return;
                }
            };
        }
        let count = wave.len();
        let mut pb = ProgressBar::new(count as u64);
        pb.message("Collapsing wave... ");
        pb.tick();
        // Observe cells one at a time until every cell is collapsed or a
        // contradiction is reached.
        while !wave.is_collapsed() {
            wave = match wave.observe() {
                Some(wave) => wave,
                None => {
                    println!("Wave contradiction! The wave has failed to collapse, and no output \
                              was generated. Please try again - maybe another seed will be \
                              kinder to you.");
                    return;
                }
            };
            pb.inc();
        }
        println!("The wave has fully collapsed! Saving result...");
        let pixels = wave.resolve();
        let buffer = ImageBuffer::from_fn(pixels.dim().0 as u32,
                                          pixels.dim().1 as u32,
                                          |x, y| pixels[(x as usize, y as usize)]);
        buffer.save(output).expect("Error saving result");
    }
}
| rust | MIT | 075f5449bc20ff4ecff9740c90e542f691a8c7f5 | 2026-01-04T20:20:25.435457Z | false |
sdleffler/collapse | https://github.com/sdleffler/collapse/blob/075f5449bc20ff4ecff9740c90e542f691a8c7f5/src/source/source3d.rs | src/source/source3d.rs | use std::cell::RefCell;
use std::collections::{HashMap, HashSet, VecDeque};
use std::hash::Hash;
use std::ops::{Neg, Sub};
use std::vec::Vec;
use bit_set::BitSet;
use bit_vec::BitVec;
use image::{GenericImage, Pixel};
use ndarray::prelude::*;
use ndarray::{Array3, ArrayView3, Ix3};
use vosealias::AliasTable;
use rand::Rng;
use source::Source;
use wave::State;
type RcArray3<A> = RcArray<A, Ix3>;
// A position in the 3D output/sample space.
#[derive(Copy, Clone)]
pub struct Point3(u32, u32, u32);
// A palette index standing in for a concrete pixel value.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone, Debug)]
struct Pixel3(u32);
// A signed offset between two `Point3`s, one component per axis.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub struct Offset3(isize, isize, isize);
impl Sub for Point3 {
type Output = Offset3;
fn sub(self, rhs: Point3) -> Offset3 {
let x = self.0 as isize - rhs.0 as isize;
let y = self.1 as isize - rhs.1 as isize;
let z = self.2 as isize - rhs.2 as isize;
Offset3(x, y, z)
}
}
impl Neg for Offset3 {
type Output = Self;
fn neg(self) -> Self {
Offset3(-self.0, -self.1, -self.2)
}
}
// A sample source for the 3D overlapping-model wavefunction collapse
// algorithm, built from a stack of images interpreted as voxel slices.
pub struct OverlappingSource3<P> {
    // Palette index -> concrete pixel value.
    palette: Vec<P>,
    // Concrete pixel value -> palette index.
    inverse_palette: HashMap<P, Pixel3>,
    // Every distinct n-sized sample block extracted from the source.
    samples: Vec<Sample3<Pixel3>>,
    // (sample index, occurrence count) pairs used to weight observation.
    weights: Vec<(usize, f64)>,
    // For each overlap offset, one bitset per sample listing the samples
    // compatible with it at that offset.
    collide: HashMap<Offset3, Vec<BitSet>>,
    // Sample block dimensions.
    n: (usize, usize, usize),
}
pub mod symmetry {
bitflags! {
pub flags Symmetry3: u64 {
const S3_0 = 0b1 << 0,
const S3_1 = 0b1 << 1,
const S3_2 = 0b1 << 2,
const S3_3 = 0b1 << 3,
const S3_4 = 0b1 << 4,
const S3_5 = 0b1 << 5,
const S3_6 = 0b1 << 6,
const S3_7 = 0b1 << 7,
const S3_8 = 0b1 << 8,
const S3_9 = 0b1 << 9,
const S3_10 = 0b1 << 10,
const S3_11 = 0b1 << 11,
const S3_16 = 0b1 << 16,
const S3_17 = 0b1 << 17,
const S3_18 = 0b1 << 18,
const S3_19 = 0b1 << 19,
const S3_20 = 0b1 << 20,
const S3_21 = 0b1 << 21,
const S3_22 = 0b1 << 22,
const S3_23 = 0b1 << 23,
const S3_24 = 0b1 << 24,
const S3_25 = 0b1 << 25,
const S3_26 = 0b1 << 26,
const S3_27 = 0b1 << 27,
const S3_32 = 0b1 << 32,
const S3_33 = 0b1 << 33,
const S3_34 = 0b1 << 34,
const S3_35 = 0b1 << 35,
const S3_36 = 0b1 << 36,
const S3_37 = 0b1 << 37,
const S3_38 = 0b1 << 38,
const S3_39 = 0b1 << 39,
const S3_40 = 0b1 << 40,
const S3_41 = 0b1 << 41,
const S3_42 = 0b1 << 42,
const S3_43 = 0b1 << 43,
const S3_48 = 0b1 << 48,
const S3_49 = 0b1 << 49,
const S3_50 = 0b1 << 50,
const S3_51 = 0b1 << 51,
const S3_52 = 0b1 << 52,
const S3_53 = 0b1 << 53,
const S3_54 = 0b1 << 54,
const S3_55 = 0b1 << 55,
const S3_56 = 0b1 << 56,
const S3_57 = 0b1 << 57,
const S3_58 = 0b1 << 58,
const S3_59 = 0b1 << 59,
const S3_REFLECT_X = 0b1 << (JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_Y),
const S3_REFLECT_Y = 0b1 << (JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_X),
const S3_REFLECT_Z = 0b1 << (JS3_REFLECT | JS3_SWAP_TETRA),
}
}
pub const JS3_REFLECT: u64 = 0b100000;
pub const JS3_SWAP_TETRA: u64 = 0b10000;
pub const JS3_ROT120_ID: u64 = 0b0000;
pub const JS3_ROT120_YZX: u64 = 0b0100;
pub const JS3_ROT120_ZXY: u64 = 0b1000;
pub const JS3_ROT120_MASK: u64 = 0b1100;
pub const JS3_ROT180_ID: u64 = 0b00;
pub const JS3_ROT180_Z: u64 = 0b01;
pub const JS3_ROT180_X: u64 = 0b10;
pub const JS3_ROT180_Y: u64 = 0b11;
pub const JS3_ROT180_MASK: u64 = 0b11;
impl Symmetry3 {
#[inline]
pub fn find_symmetry_dimensions(symmetry: u64,
(mut x, mut y, mut z): (usize, usize, usize))
-> (usize, usize, usize) {
use std::mem::swap;
if symmetry & JS3_REFLECT != 0 {
swap(&mut x, &mut y);
}
if symmetry & JS3_SWAP_TETRA != 0 {
swap(&mut x, &mut y);
}
match symmetry & JS3_ROT120_MASK {
JS3_ROT120_ID => {}
JS3_ROT120_YZX => {
swap(&mut y, &mut z);
swap(&mut x, &mut z);
} // XYZ -> XZY -> YZX
JS3_ROT120_ZXY => {
swap(&mut x, &mut y);
swap(&mut x, &mut z);
} // XYZ -> YXZ -> ZXY
_ => panic!("Invalid symmetry: {:?}", symmetry),
}
(x, y, z)
}
#[inline]
pub fn apply_symmetry(symmetry: u64,
(bx, by, bz): (usize, usize, usize),
(mut x, mut y, mut z): (usize, usize, usize))
-> (usize, usize, usize) {
use std::mem::swap;
if symmetry & JS3_REFLECT != 0 {
swap(&mut x, &mut y);
}
if symmetry & JS3_SWAP_TETRA != 0 {
swap(&mut x, &mut y);
z = bz - z;
}
match symmetry & JS3_ROT120_MASK {
JS3_ROT120_ID => {}
JS3_ROT120_YZX => {
swap(&mut y, &mut z);
swap(&mut x, &mut z);
} // YZX -> YXZ -> XYZ
JS3_ROT120_ZXY => {
swap(&mut x, &mut y);
swap(&mut x, &mut z);
} // ZXY -> XZY -> XYZ
_ => panic!("Invalid symmetry: {:?}", symmetry),
}
match symmetry & JS3_ROT180_MASK {
JS3_ROT180_ID => {}
JS3_ROT180_Z => {
x = bx - x;
y = by - y;
}
JS3_ROT180_X => {
y = by - y;
z = bz - z;
}
JS3_ROT180_Y => {
x = bx - x;
z = bz - z;
}
_ => unreachable!(),
}
(x, y, z)
}
#[inline]
pub fn invert_symmetry(symmetry: u64,
(bx, by, bz): (usize, usize, usize),
(mut x, mut y, mut z): (usize, usize, usize))
-> (usize, usize, usize) {
use std::mem::swap;
match symmetry & JS3_ROT180_MASK {
JS3_ROT180_ID => {}
JS3_ROT180_Z => {
y = by - y;
x = bx - x;
}
JS3_ROT180_X => {
z = bz - z;
y = by - y;
}
JS3_ROT180_Y => {
z = bz - z;
x = bx - x;
}
_ => unreachable!(),
}
match symmetry & JS3_ROT120_MASK {
JS3_ROT120_ID => {}
JS3_ROT120_YZX => {
swap(&mut x, &mut z);
swap(&mut y, &mut z);
} // XYZ -> XZY -> YZX
JS3_ROT120_ZXY => {
swap(&mut x, &mut z);
swap(&mut x, &mut y);
} // XYZ -> YXZ -> ZXY
_ => panic!("Invalid symmetry: {:?}", symmetry),
}
if symmetry & JS3_SWAP_TETRA != 0 {
z = bz - z;
swap(&mut x, &mut y);
}
if symmetry & JS3_REFLECT != 0 {
swap(&mut x, &mut y);
}
(x, y, z)
}
}
}
use self::symmetry::*;
// An n-sized block of palette-indexed pixels cut from the (augmented) source.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Sample3<P>(RcArray3<P>);
impl<P> OverlappingSource3<P>
where P: Copy
{
pub fn from_image_stack<I: GenericImage<Pixel = P> + 'static>(imgs: Vec<I>,
n: (usize, usize, usize),
periodic: (bool, bool, bool),
symmetry: Symmetry3)
-> Self
where P: Pixel + Eq + Hash + 'static
{
debug!("Generating palette map...");
let palette_set: HashSet<P> =
imgs.iter().flat_map(|img| img.pixels()).map(|(_, _, p)| p).collect();
let palette_map: HashMap<P, Pixel3> = palette_set.into_iter()
.enumerate()
.map(|(i, p)| (p, Pixel3(i as u32)))
.collect();
debug!("Palette size: {}.", palette_map.len());
debug!("Stacking images into Array3...");
let symmetries: Vec<RcArray3<Pixel3>> = {
let pixels = RcArray3::from_shape_fn(n, |(x, y, z)| {
palette_map[&imgs[z].get_pixel(x as u32, y as u32)]
});
let mut symmetries = Vec::new();
let mut symm_bits = symmetry.bits();
let mut symm_n = 0;
while symm_bits > 0 {
if symm_bits & 0b1 != 0 {
let symm_dims = {
let (x, y, z) = Symmetry3::find_symmetry_dimensions(symm_n, n);
(if periodic.0 { x } else { x - (n.0 - 1) },
if periodic.1 { y } else { y - (n.1 - 1) },
if periodic.2 { z } else { z - (n.2 - 1) })
};
symmetries.push(RcArray3::from_shape_fn(symm_dims, |(x, y, z)| {
pixels[{
let (x, y, z) =
Symmetry3::invert_symmetry(symm_n, symm_dims, (x, y, z));
(x % n.0, y % n.1, z % n.2)
}]
}));
}
symm_bits >>= 1;
symm_n += 1;
}
symmetries
};
let (samples, weights) = {
let mut sample_set = HashMap::new();
for symmetry in symmetries {
for i in 0..symmetry.dim().0 - (n.0 - 1) {
for j in 0..symmetry.dim().1 - (n.1 - 1) {
for k in 0..symmetry.dim().2 - (n.2 - 1) {
let mut sample = symmetry.to_shared();
sample.islice(s![i as isize..(i + n.0) as isize,
j as isize..(j + n.1) as isize,
k as isize..(k + n.2) as isize]);
*sample_set.entry(Sample3(sample)).or_insert(0) += 1;
}
}
}
}
debug!("Converting intermediate sample type into full sample type.");
let (samples, weight_vec): (Vec<_>, Vec<_>) = sample_set.into_iter().unzip();
let weights: Vec<_> = weight_vec.into_iter()
.enumerate()
.map(|(i, x)| (i, x as f64))
.collect();
(samples, weights)
};
// for (s, &Sample2(ref sample)) in samples.iter().enumerate() {
// let mut string = String::new();
// for j in 0..sample.dim().1 {
// for i in 0..sample.dim().0 {
// string.push_str(format!("{} ", sample[(i, j)].0).as_str());
// }
// string.push('\n');
// }
//
// debug!("Sample {}, weighted {}: \n{}", s, weights[s].1, string);
// debug_assert_eq!(sample.dim(), (3, 3));
// }
debug!("Generating collision map. {} samples to collide.",
samples.len());
let collide = {
let mut collide = HashMap::new();
let n = (n.0 as isize, n.1 as isize, n.2 as isize);
let check_at_offset = |dx, dy, dz, lx, ly, lz, rx, ry, rz| {
let mut bitsets = Vec::new();
for &Sample3(ref l) in samples.iter() {
let mut bs = BitSet::with_capacity(samples.len());
'rcheck: for (s, &Sample3(ref r)) in samples.iter().enumerate() {
for i in 0..dx {
for j in 0..dy {
for k in 0..dz {
let p_l = l[((lx + i) as usize,
(ly + j) as usize,
(lz + k) as usize)];
let p_r = r[((rx + i) as usize,
(ry + j) as usize,
(rz + k) as usize)];
if p_l != p_r {
continue 'rcheck;
}
}
}
}
bs.insert(s);
}
bitsets.push(bs);
}
bitsets
};
for dx in 0..n.0 {
for dy in 0..n.1 {
for dz in 0..n.2 {
collide.insert(Offset3(dx, dy, dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
dx,
dy,
dz,
0,
0,
0));
collide.insert(Offset3(-dx, dy, dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
0,
dy,
dz,
dx,
0,
0));
collide.insert(Offset3(dx, -dy, dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
dx,
0,
dz,
0,
dy,
0));
collide.insert(Offset3(-dx, -dy, dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
0,
0,
dz,
dx,
dy,
0));
collide.insert(Offset3(dx, dy, -dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
dx,
dy,
0,
0,
0,
dz));
collide.insert(Offset3(-dx, dy, -dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
0,
dy,
0,
dx,
0,
dz));
collide.insert(Offset3(dx, -dy, -dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
dx,
0,
0,
0,
dy,
dz));
collide.insert(Offset3(-dx, -dy, -dz),
check_at_offset(n.0 - dx,
n.1 - dy,
n.2 - dz,
0,
0,
0,
dx,
dy,
dz));
}
}
}
collide
};
debug!("Done.");
OverlappingSource3 {
palette: {
let mut vec = palette_map.iter().map(|(&p, &px)| (p, px)).collect::<Vec<_>>();
vec.sort_by_key(|x| x.1);
vec.into_iter()
.map(|x| x.0)
.collect()
},
inverse_palette: palette_map,
samples: samples,
weights: weights,
collide: collide,
n: n,
}
}
/// Collapse `cfg` to a single sample, chosen at random with probability
/// proportional to each remaining sample's weight (Vose alias method).
fn pick_sample<R: Rng>(&self, cfg: &mut BitSet, rng: &mut R) {
    let table: AliasTable<_, _> = cfg.iter().map(|i| self.weights[i]).collect();
    let chosen = table.pick(rng);
    cfg.clear();
    cfg.insert(*chosen);
}
}
impl<P> Source for OverlappingSource3<P>
where P: Eq + Hash + Copy
{
type Dims = Ix3;
type Periodicity = (bool, bool, bool);
type Pixel = P;
/// Size of the wave needed to produce an output of the given dimensions.
///
/// On a periodic axis the wave covers every output cell; on a non-periodic
/// axis the last n-1 cells are implied by their neighbours' samples, so the
/// wave shrinks by n-1 along that axis.
fn wave_dims(&self,
             dims: (usize, usize, usize),
             periodic: (bool, bool, bool))
             -> (usize, usize, usize) {
    let shrink = |dim: usize, wraps: bool, n: usize| {
        if wraps { dim } else { dim - (n - 1) }
    };
    (shrink(dims.0, periodic.0, self.n.0),
     shrink(dims.1, periodic.1, self.n.1),
     shrink(dims.2, periodic.2, self.n.2))
}
/// The fully-unconstrained state for the wave cell at `pos`: every sample is
/// still possible, with entropy computed over the full weight distribution.
fn initial_state(&self, pos: Ix3) -> State<Ix3> {
    let cfg = BitSet::from_bit_vec(BitVec::from_elem(self.samples.len(), true));
    let entropy = self.entropy(&cfg);
    State {
        pos: pos,
        entropy: entropy,
        cfg: cfg,
    }
}
/// Restrict every wave cell whose sample block overlaps output position
/// `pos` to samples whose pixel at the corresponding in-sample offset equals
/// `val`, then propagate the consequences. Returns `None` on contradiction.
fn constrain(&self,
             states: Array3<RefCell<State<Ix3>>>,
             pos: Ix3,
             periodic: Self::Periodicity,
             val: P)
             -> Option<Array3<RefCell<State<Ix3>>>> {
    let n = (self.n.0 as isize, self.n.1 as isize, self.n.2 as isize);
    let pid = self.inverse_palette[&val];
    // Visit the n.0 x n.1 x n.2 cells whose sample covers `pos`; (i, j, k)
    // is the offset of `pos` inside that cell's sample.
    for i in 0..n.0 {
        // Adding (dim - i) then taking the modulus steps backwards by i
        // without underflowing the unsigned index.
        let dim_adj_x = (states.dim().0 as isize - i) as usize;
        for j in 0..n.1 {
            let dim_adj_y = (states.dim().1 as isize - j) as usize;
            for k in 0..n.2 {
                let dim_adj_z = (states.dim().2 as isize - k) as usize;
                let subject_pos = ((pos.0 + dim_adj_x) % states.dim().0,
                                   (pos.1 + dim_adj_y) % states.dim().1,
                                   (pos.2 + dim_adj_z) % states.dim().2);
                let mut subject = states[subject_pos].borrow_mut();
                // On non-periodic axes, skip cells that only reach `pos` by
                // wrapping around the edge of the wave.
                if !periodic.0 && (pos.0 as isize - subject.pos.0 as isize).abs() >= n.0 {
                    continue;
                }
                if !periodic.1 && (pos.1 as isize - subject.pos.1 as isize).abs() >= n.1 {
                    continue;
                }
                if !periodic.2 && (pos.2 as isize - subject.pos.2 as isize).abs() >= n.2 {
                    continue;
                }
                // Keep only samples whose pixel at (i, j, k) is the
                // requested palette entry.
                subject.cfg = subject.cfg
                    .iter()
                    .filter(|&idx| {
                        self.samples[idx].0[(i as usize, j as usize, k as usize)] == pid
                    })
                    .collect();
                subject.entropy = self.entropy(&subject.cfg);
                // entropy() returns NaN for an empty configuration, and NaN
                // fails every comparison, so this detects contradictions.
                if !(subject.entropy >= 0.0) {
                    debug!("Destroyed wave position {:?}'s hopes and dreams.",
                           subject_pos);
                }
            }
        }
    }
    self.propagate(states, pos, periodic)
}
fn propagate(&self,
states: Array3<RefCell<State<Ix3>>>,
observe: Ix3,
periodic: Self::Periodicity)
-> Option<Array3<RefCell<State<Ix3>>>> {
let n = (self.n.0 as isize, self.n.1 as isize, self.n.2 as isize);
let mut queue = VecDeque::new();
queue.push_back(observe);
while let Some(focus) = queue.pop_front() {
let mut focus = match states.get(focus) {
Some(state) => state,
None => continue,
}
.borrow_mut();
focus.entropy = self.entropy(&focus.cfg);
if !(focus.entropy >= 0.0) {
return None;
}
let mut focus_dirty = false;
{
for i in -n.0 + 1..n.0 {
let dim_adj_x = (states.dim().0 as isize + i) as usize;
for j in -n.1 + 1..n.1 {
let dim_adj_y = (states.dim().1 as isize + j) as usize;
for k in -n.2 + 1..n.2 {
if i == 0 && j == 0 && k == 0 {
continue;
}
let dim_adj_z = (states.dim().2 as isize + k) as usize;
let subject_pos = ((focus.pos.0 + dim_adj_x) % states.dim().0,
(focus.pos.1 + dim_adj_y) % states.dim().1,
(focus.pos.2 + dim_adj_z) % states.dim().2);
let mut subject = states[subject_pos].borrow_mut();
if !periodic.0 &&
(focus.pos.0 as isize - subject.pos.0 as isize).abs() >= n.0 {
continue;
}
if !periodic.1 &&
(focus.pos.1 as isize - subject.pos.1 as isize).abs() >= n.1 {
continue;
}
if !periodic.2 &&
(focus.pos.2 as isize - subject.pos.2 as isize).abs() >= n.2 {
continue;
}
let mut subject_dirty = false;
let mut focus_allowed = BitSet::new();
let mut subject_allowed = BitSet::new();
loop {
focus_allowed.clear();
subject_allowed.clear();
for focus_cfg in focus.cfg.iter() {
subject_allowed.union_with(&self.collide[&Offset3(i, j, k)][focus_cfg]);
}
for subject_cfg in subject.cfg.iter() {
focus_allowed.union_with(&self.collide[&Offset3(-i, -j, -k)][subject_cfg]);
}
let focus_len = focus.cfg.len();
let subject_len = subject.cfg.len();
focus.cfg.intersect_with(&focus_allowed);
subject.cfg.intersect_with(&subject_allowed);
let focus_modified = focus_len > focus.cfg.len();
let subject_modified = subject_len > subject.cfg.len();
if focus_modified {
focus_dirty = true;
}
if subject_modified {
subject_dirty = true;
}
if !(focus_modified || subject_modified) {
break;
}
}
if subject_dirty {
queue.push_back(subject.pos);
}
}
}
}
}
if focus_dirty {
queue.push_back(focus.pos);
}
}
return Some(states);
}
/// Collapse the cell at `observe` to a single weight-biased random sample,
/// then propagate the consequences across the wave. Returns `None` on
/// contradiction.
fn observe<R: Rng>(&self,
                   mut states: Array3<RefCell<State<Ix3>>>,
                   observe: Ix3,
                   periodic: Self::Periodicity,
                   rng: &mut R)
                   -> Option<Array3<RefCell<State<Ix3>>>> {
    self.pick_sample(&mut states[observe].borrow_mut().cfg, rng);
    self.propagate(states, observe, periodic)
}
/// Shannon entropy of the normalized weight distribution over the samples
/// still allowed by `cfg`.
///
/// An empty configuration is a contradiction; NaN marks it so that callers'
/// `!(entropy >= 0.0)` checks trip.
fn entropy(&self, cfg: &BitSet) -> f64 {
    use std::f64;
    if cfg.is_empty() {
        return f64::NAN;
    }
    let sum: f64 = cfg.iter().map(|i| self.weights[i].1).sum();
    cfg.iter()
        .map(|i| {
            let p = self.weights[i].1 / sum;
            -(p * p.ln())
        })
        .sum()
}
/// Read the collapsed wave back into a full `dim`-sized pixel volume.
///
/// Positions inside the wave's (possibly shrunken) extent take the first
/// pixel of that cell's chosen sample; positions past the wave's edge on a
/// non-periodic axis read deeper into the last cell's sample instead.
/// Assumes every cell's `cfg` holds at least one sample (wave collapsed).
fn resolve<'a>(&self, dim: Self::Dims, wave: ArrayView3<'a, RefCell<State<Ix3>>>) -> Array3<P> {
    Array::from_shape_fn(dim, |(x, y, z)| {
        // (wx, dx): which wave cell to read, and the offset into its sample.
        let (wx, dx) = if x < wave.dim().0 {
            (x, 0)
        } else {
            (wave.dim().0 - 1, x - (wave.dim().0 - 1))
        };
        let (wy, dy) = if y < wave.dim().1 {
            (y, 0)
        } else {
            (wave.dim().1 - 1, y - (wave.dim().1 - 1))
        };
        let (wz, dz) = if z < wave.dim().2 {
            (z, 0)
        } else {
            (wave.dim().2 - 1, z - (wave.dim().2 - 1))
        };
        // Map the cell's single remaining sample's pixel back through the
        // palette to a concrete pixel value.
        self.palette[self.samples[wave[(wx, wy, wz)]
            .borrow()
            .cfg
            .iter()
            .next()
            .unwrap()]
            .0[(dx, dy, dz)]
            .0 as usize]
    })
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::symmetry::*;
#[test]
fn schmidt_symmetries() {
// Ensure that black magic symmetry code *actually works.*
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_Y,
(1, 1, 1),
(0, 0, 0)),
(1, 0, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_X,
(1, 1, 1),
(0, 0, 0)),
(0, 1, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA, (1, 1, 1), (0, 0, 0)),
(0, 0, 1));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_Y,
(1, 1, 1),
(1, 0, 0)),
(0, 0, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_X,
(1, 1, 1),
(0, 1, 0)),
(0, 0, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA, (1, 1, 1), (0, 0, 1)),
(0, 0, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_Y,
(1, 1, 1),
(0, 0, 0)),
(1, 0, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA | JS3_ROT180_X,
(1, 1, 1),
(0, 0, 0)),
(0, 1, 0));
assert_eq!(Symmetry3::apply_symmetry(JS3_REFLECT | JS3_SWAP_TETRA, (1, 1, 1), (0, 0, 0)),
(0, 0, 1));
for x in 0..2 {
for y in 0..2 {
for z in 0..2 {
for &s in [0u64, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 26, 27, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
42, 43, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59]
.into_iter() {
assert_eq!(Symmetry3::invert_symmetry(s,
(1, 1, 1),
Symmetry3::apply_symmetry(s,
(1,
1,
1),
(x,
y,
z))),
(x, y, z));
}
}
}
}
}
}
| rust | MIT | 075f5449bc20ff4ecff9740c90e542f691a8c7f5 | 2026-01-04T20:20:25.435457Z | false |
sdleffler/collapse | https://github.com/sdleffler/collapse/blob/075f5449bc20ff4ecff9740c90e542f691a8c7f5/src/source/mod.rs | src/source/mod.rs | use std::cell::RefCell;
use bit_set::BitSet;
use ndarray::prelude::*;
use rand::Rng;
use wave::State;
mod source2d;
mod source3d;
pub use self::source2d::OverlappingSource2;
pub use self::source2d::symmetry::*;
/// A model that drives wavefunction collapse: it defines the per-cell state
/// space and implements constraint, observation, and propagation over a wave
/// of the given dimensionality.
pub trait Source {
    /// Index/dimension type of the wave (e.g. `Ix2`, `Ix3`).
    type Dims;
    /// Per-axis periodicity flags (e.g. `(bool, bool)`).
    type Periodicity;
    /// Concrete output pixel type.
    type Pixel;
    /// Size of the wave needed to produce an output of the given dimensions.
    fn wave_dims(&self, Self::Dims, Self::Periodicity) -> Self::Dims;
    /// The fully-unconstrained state for the cell at the given position.
    fn initial_state(&self, Self::Dims) -> State<Self::Dims>;
    /// Entropy of a cell configuration; NaN signals a contradiction.
    fn entropy(&self, &BitSet) -> f64;
    /// Pin one output position to a pixel value and propagate; `None` on
    /// contradiction.
    fn constrain(&self,
                 Array<RefCell<State<Self::Dims>>, Self::Dims>,
                 Self::Dims,
                 Self::Periodicity,
                 Self::Pixel)
                 -> Option<Array<RefCell<State<Self::Dims>>, Self::Dims>>;
    /// Collapse one cell to a randomly chosen sample and propagate; `None`
    /// on contradiction.
    fn observe<R: Rng>(&self,
                       Array<RefCell<State<Self::Dims>>, Self::Dims>,
                       Self::Dims,
                       Self::Periodicity,
                       &mut R)
                       -> Option<Array<RefCell<State<Self::Dims>>, Self::Dims>>;
    /// Propagate the consequences of a change at the given position through
    /// the wave; `None` on contradiction.
    fn propagate(&self,
                 Array<RefCell<State<Self::Dims>>, Self::Dims>,
                 Self::Dims,
                 Self::Periodicity)
                 -> Option<Array<RefCell<State<Self::Dims>>, Self::Dims>>;
    /// Read a collapsed wave back into a full-size pixel array.
    fn resolve<'a>(&self,
                   Self::Dims,
                   ArrayView<'a, RefCell<State<Self::Dims>>, Self::Dims>)
                   -> Array<Self::Pixel, Self::Dims>;
}
| rust | MIT | 075f5449bc20ff4ecff9740c90e542f691a8c7f5 | 2026-01-04T20:20:25.435457Z | false |
sdleffler/collapse | https://github.com/sdleffler/collapse/blob/075f5449bc20ff4ecff9740c90e542f691a8c7f5/src/source/source2d.rs | src/source/source2d.rs | use std::cell::RefCell;
use std::collections::{HashMap, HashSet, VecDeque};
use std::hash::Hash;
use std::ops::{Neg, Sub};
use std::vec::Vec;
use bit_set::BitSet;
use bit_vec::BitVec;
use image::{GenericImage, ImageBuffer, Pixel};
use image::imageops::{flip_horizontal, flip_vertical, rotate90, rotate180, rotate270};
use ndarray::prelude::*;
use ndarray::{Array2, ArrayView2, Ix2};
use pbr::{PbIter, ProgressBar};
use vosealias::AliasTable;
use rand::Rng;
use source::Source;
use wave::State;
type RcArray2<A> = RcArray<A, Ix2>;
// A position in the 2D output/sample space.
#[derive(Copy, Clone)]
pub struct Point2(u32, u32);
// A palette index standing in for a concrete pixel value.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone, Debug)]
struct Pixel2(u32);
// A signed offset between two `Point2`s.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub struct Offset2(isize, isize);
impl Sub for Point2 {
type Output = Offset2;
fn sub(self, rhs: Point2) -> Offset2 {
let x = self.0 as isize - rhs.0 as isize;
let y = self.1 as isize - rhs.1 as isize;
Offset2(x, y)
}
}
impl Neg for Offset2 {
type Output = Self;
fn neg(self) -> Self {
Offset2(-self.0, -self.1)
}
}
// A sample source for the 2D overlapping-model wavefunction collapse
// algorithm, built from a single source image.
pub struct OverlappingSource2<P> {
    // Palette index -> concrete pixel value.
    palette: Vec<P>,
    // Concrete pixel value -> palette index.
    inverse_palette: HashMap<P, Pixel2>,
    // Every distinct n-sized sample block extracted from the source.
    samples: Vec<Sample2<Pixel2>>,
    // (sample index, occurrence count) pairs used to weight observation.
    weights: Vec<(usize, f64)>,
    // For each overlap offset, one bitset per sample listing the samples
    // compatible with it at that offset.
    collide: HashMap<Offset2, Vec<BitSet>>,
    // Sample block dimensions.
    n: (usize, usize),
}
// The eight elements of the dihedral group D4, used to augment the sample
// image with rotated/reflected copies before extracting samples.
pub mod symmetry {
    bitflags! {
        pub flags Symmetry2: u8 {
            const S2_IDENTITY = 0b00000001,
            const S2_ROTATE_90 = 0b00000010,
            const S2_ROTATE_180 = 0b00000100,
            const S2_ROTATE_270 = 0b00001000,
            const S2_REFLECT_Y = 0b00010000,
            const S2_REFLECT_X = 0b00100000,
            const S2_REFLECT_Y_ROT90 = 0b01000000,
            const S2_REFLECT_X_ROT90 = 0b10000000,
        }
    }
}
use self::symmetry::*;
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Sample2<P>(RcArray2<P>);
impl<P> OverlappingSource2<P>
where P: Copy
{
pub fn from_image<I: GenericImage<Pixel = P> + 'static>(img: &I,
n: (usize, usize),
periodic: (bool, bool),
symmetry: Symmetry2)
-> Self
where P: Pixel + Eq + Hash + 'static
{
debug!("Generating palette map...");
let palette_set: HashSet<P> = img.pixels().map(|(_, _, p)| p).collect();
let palette_map: HashMap<P, Pixel2> = palette_set.into_iter()
.enumerate()
.map(|(i, p)| (p, Pixel2(i as u32)))
.collect();
debug!("Palette size: {}.", palette_map.len());
debug!("Allocating ImageBuffer and copying source image...");
let mut buf = ImageBuffer::<P, _>::new(img.width(), img.height());
buf.copy_from(img, 0, 0);
let sample_buf_size: usize = (img.width() * img.height()) as usize;
debug!("Generating symmetry buffers...");
let symmetries: Vec<RcArray2<Pixel2>> = {
let symm_bufs = {
let mut symm_bufs = Vec::new();
if symmetry.contains(S2_IDENTITY) {
symm_bufs.push(buf.clone())
}
if symmetry.contains(S2_ROTATE_90) {
symm_bufs.push(rotate90(&buf))
}
if symmetry.contains(S2_ROTATE_180) {
symm_bufs.push(rotate180(&buf))
}
if symmetry.contains(S2_ROTATE_270) {
symm_bufs.push(rotate270(&buf))
}
if symmetry.contains(S2_REFLECT_Y) {
symm_bufs.push(flip_horizontal(&buf))
}
if symmetry.contains(S2_REFLECT_X) {
symm_bufs.push(flip_vertical(&buf))
}
if symmetry.contains(S2_REFLECT_X_ROT90) {
symm_bufs.push(flip_vertical(&rotate90(&buf)))
}
if symmetry.contains(S2_REFLECT_Y_ROT90) {
symm_bufs.push(flip_horizontal(&rotate90(&buf)))
}
symm_bufs
};
debug!("Allocating and filling symmetry 2d-arrays...");
symm_bufs.into_iter()
.map(|symm| {
RcArray::from_shape_fn((symm.width() as usize +
if periodic.0 { n.0 - 1 } else { 0 },
symm.height() as usize +
if periodic.1 { n.1 - 1 } else { 0 }),
|(x, y)| {
palette_map[symm.get_pixel(x as u32 % symm.width(),
y as u32 % symm.height())]
})
})
.collect()
};
let mut symm0str = String::new();
for j in 0..symmetries[0].dim().1 {
for i in 0..symmetries[0].dim().0 {
symm0str.push_str(format!("{} ", symmetries[0][(i, j)].0).as_str());
}
symm0str.push('\n');
}
debug!("Symmetry 1: \n{}", symm0str);
debug!("Generating and deduplicating samples... 8 symmetries, {} samples per symmetry: \
{} samples undeduplicated.",
sample_buf_size,
8 * sample_buf_size);
let (samples, weights) = {
let mut sample_set = HashMap::new();
for (s, symmetry) in symmetries.iter().enumerate() {
debug!("Processing symmetry {}.", s);
for i in 0..symmetry.dim().0 - (n.0 - 1) {
for j in 0..symmetry.dim().1 - (n.1 - 1) {
let mut sample = symmetry.to_shared();
sample.islice(s![i as isize..(i + n.0 as usize) as isize,
j as isize..(j + n.1 as usize) as isize]);
*sample_set.entry(Sample2(sample)).or_insert(0) += 1;
}
}
}
debug!("Converting intermediate sample type into full sample type.");
let (sample_vec, weight_vec): (Vec<_>, Vec<_>) = sample_set.into_iter().unzip();
let weights: Vec<_> = weight_vec.into_iter()
.enumerate()
.map(|(i, x)| (i, x as f64))
.collect();
(sample_vec.clone(), weights)
};
// for (s, &Sample2(ref sample)) in samples.iter().enumerate() {
// let mut string = String::new();
// for j in 0..sample.dim().1 {
// for i in 0..sample.dim().0 {
// string.push_str(format!("{} ", sample[(i, j)].0).as_str());
// }
// string.push('\n');
// }
//
// debug!("Sample {}, weighted {}: \n{}", s, weights[s].1, string);
// debug_assert_eq!(sample.dim(), (3, 3));
// }
debug!("Generating collision map. {} samples to collide.",
samples.len());
let collide = {
let mut collide = HashMap::new();
let n = (n.0 as isize, n.1 as isize);
let check_at_offset = |dx, dy, lx, ly, rx, ry| {
let mut bitsets = Vec::new();
for &Sample2(ref l) in samples.iter() {
let mut bs = BitSet::with_capacity(samples.len());
'rcheck: for (s, &Sample2(ref r)) in samples.iter().enumerate() {
for i in 0..dx {
for j in 0..dy {
let p_l = l[((lx + i) as usize, (ly + j) as usize)];
let p_r = r[((rx + i) as usize, (ry + j) as usize)];
if p_l != p_r {
continue 'rcheck;
}
}
}
bs.insert(s);
}
bitsets.push(bs);
}
bitsets
};
for dx in 0..n.0 {
for dy in 0..n.1 {
collide.insert(Offset2(dx, dy),
check_at_offset(n.0 - dx, n.1 - dy, dx, dy, 0, 0));
collide.insert(Offset2(-dx, dy),
check_at_offset(n.0 - dx, n.1 - dy, 0, dy, dx, 0));
collide.insert(Offset2(dx, -dy),
check_at_offset(n.0 - dx, n.1 - dy, dx, 0, 0, dy));
collide.insert(Offset2(-dx, -dy),
check_at_offset(n.0 - dx, n.1 - dy, 0, 0, dx, dy));
}
}
collide
};
debug!("Done.");
OverlappingSource2 {
palette: {
let mut vec = palette_map.iter().map(|(&p, &px)| (p, px)).collect::<Vec<_>>();
vec.sort_by_key(|x| x.1);
vec.into_iter()
.map(|x| x.0)
.collect()
},
inverse_palette: palette_map,
samples: samples,
weights: weights,
collide: collide,
n: n,
}
}
pub fn from_image_cli<I: GenericImage<Pixel = P> + 'static>(img: &I,
n: (usize, usize),
periodic: (bool, bool),
symmetry: Symmetry2)
-> Self
where P: Pixel + Eq + Hash + 'static
{
let mut progress = ProgressBar::new(2);
progress.message("Deduplicating palette...");
progress.tick();
let palette_set: HashSet<P> = img.pixels().map(|(_, _, p)| p).collect();
progress.message("Building palette map...");
progress.inc();
let palette_map: HashMap<P, Pixel2> = PbIter::new(palette_set.into_iter())
.enumerate()
.map(|(i, p)| (p, Pixel2(i as u32)))
.collect();
progress.inc();
let mut progress = ProgressBar::new(symmetry.bits().count_ones() as u64 * 2 + 1);
progress.message("Copying image into buffer...");
progress.tick();
let mut buf = ImageBuffer::<P, _>::new(img.width(), img.height());
buf.copy_from(img, 0, 0);
progress.message("Processing symmetries...");
progress.inc();
let symmetries: Vec<RcArray2<Pixel2>> = {
let symm_bufs = {
let mut symm_bufs = Vec::new();
if symmetry.contains(S2_ROTATE_90) {
symm_bufs.push(rotate90(&buf));
progress.inc();
}
if symmetry.contains(S2_ROTATE_180) {
symm_bufs.push(rotate180(&buf));
progress.inc();
}
if symmetry.contains(S2_ROTATE_270) {
symm_bufs.push(rotate270(&buf));
progress.inc();
}
if symmetry.contains(S2_REFLECT_X) {
symm_bufs.push(flip_horizontal(&buf));
progress.inc();
}
if symmetry.contains(S2_REFLECT_Y) {
symm_bufs.push(flip_vertical(&buf));
progress.inc();
}
if symmetry.contains(S2_REFLECT_Y_ROT90) {
symm_bufs.push(flip_vertical(&rotate90(&buf)));
progress.inc();
}
if symmetry.contains(S2_REFLECT_X_ROT90) {
symm_bufs.push(flip_horizontal(&rotate90(&buf)));
progress.inc();
}
if symmetry.contains(S2_IDENTITY) {
symm_bufs.push(buf);
progress.inc();
}
symm_bufs
};
progress.message("Copying into array...");
progress.tick();
symm_bufs.into_iter()
.map(|symm| {
let array = RcArray::from_shape_fn((symm.width() as usize +
if periodic.0 { n.0 - 1 } else { 0 },
symm.height() as usize +
if periodic.1 { n.1 - 1 } else { 0 }),
|(x, y)| {
palette_map[symm.get_pixel(x as u32 % symm.width(),
y as u32 % symm.height())]
});
progress.inc();
array
})
.collect()
};
let mut progress = ProgressBar::new(symmetries.len() as u64);
let (samples, weights) = {
let mut sample_set = HashMap::new();
progress.message("Sampling...");
progress.tick();
for symmetry in symmetries.iter() {
for i in 0..symmetry.dim().0 - (n.0 - 1) {
for j in 0..symmetry.dim().1 - (n.1 - 1) {
let mut sample = symmetry.to_shared();
sample.islice(s![i as isize..(i + n.0 as usize) as isize,
j as isize..(j + n.1 as usize) as isize]);
*sample_set.entry(Sample2(sample)).or_insert(0) += 1;
}
}
progress.inc();
}
let (sample_vec, weight_vec): (Vec<_>, Vec<_>) = sample_set.into_iter().unzip();
let weights: Vec<_> = weight_vec.into_iter()
.enumerate()
.map(|(i, x)| (i, x as f64))
.collect();
(sample_vec, weights)
};
let mut progress = ProgressBar::new((n.0 * n.1 * 4) as u64);
progress.message(format!("Colliding {} samples...", samples.len()).as_str());
progress.tick();
let collide = {
let mut collide = HashMap::new();
let n = (n.0 as isize, n.1 as isize);
let mut check_at_offset = |dx, dy, lx, ly, rx, ry| {
let mut bitsets = Vec::new();
for &Sample2(ref l) in samples.iter() {
let mut bs = BitSet::with_capacity(samples.len());
'rcheck: for (s, &Sample2(ref r)) in samples.iter().enumerate() {
for i in 0..dx {
for j in 0..dy {
let p_l = l[((lx + i) as usize, (ly + j) as usize)];
let p_r = r[((rx + i) as usize, (ry + j) as usize)];
if p_l != p_r {
continue 'rcheck;
}
}
}
bs.insert(s);
}
bitsets.push(bs);
}
progress.inc();
bitsets
};
for dx in 0..n.0 {
for dy in 0..n.1 {
collide.insert(Offset2(dx, dy),
check_at_offset(n.0 - dx, n.1 - dy, dx, dy, 0, 0));
collide.insert(Offset2(-dx, dy),
check_at_offset(n.0 - dx, n.1 - dy, 0, dy, dx, 0));
collide.insert(Offset2(dx, -dy),
check_at_offset(n.0 - dx, n.1 - dy, dx, 0, 0, dy));
collide.insert(Offset2(-dx, -dy),
check_at_offset(n.0 - dx, n.1 - dy, 0, 0, dx, dy));
}
}
collide
};
OverlappingSource2 {
palette: {
let mut vec = palette_map.iter().map(|(&p, &px)| (p, px)).collect::<Vec<_>>();
vec.sort_by_key(|x| x.1);
vec.into_iter()
.map(|x| x.0)
.collect()
},
inverse_palette: palette_map,
samples: samples,
weights: weights,
collide: collide,
n: n,
}
}
fn pick_sample<R: Rng>(&self, cfg: &mut BitSet, rng: &mut R) {
let table: AliasTable<_, _> = cfg.iter().map(|i| self.weights[i]).collect();
let chosen = table.pick(rng);
cfg.clear();
cfg.insert(*chosen);
assert_eq!(cfg.len(), 1);
}
}
impl<P> Source for OverlappingSource2<P>
where P: Eq + Hash + Copy
{
type Dims = Ix2;
type Periodicity = (bool, bool);
type Pixel = P;
fn wave_dims(&self, dims: (usize, usize), periodic: (bool, bool)) -> (usize, usize) {
(if periodic.0 {
dims.0
} else {
dims.0 - (self.n.0 - 1)
},
if periodic.1 {
dims.1
} else {
dims.1 - (self.n.1 - 1)
})
}
fn initial_state(&self, pos: Ix2) -> State<Ix2> {
let cfg = BitSet::from_bit_vec(BitVec::from_elem(self.samples.len(), true));
let entropy = self.entropy(&cfg);
State {
pos: pos,
entropy: entropy,
cfg: cfg,
}
}
fn constrain(&self,
states: Array2<RefCell<State<Ix2>>>,
pos: Ix2,
periodic: Self::Periodicity,
val: P)
-> Option<Array2<RefCell<State<Ix2>>>> {
let n = (self.n.0 as isize, self.n.1 as isize);
let pid = self.inverse_palette[&val];
for i in 0..n.0 {
let dim_adj_x = (states.dim().0 as isize - i) as usize;
for j in 0..n.1 {
let dim_adj_y = (states.dim().1 as isize - j) as usize;
let subject_pos = ((pos.0 + dim_adj_x) % states.dim().0,
(pos.1 + dim_adj_y) % states.dim().1);
let mut subject = states[subject_pos].borrow_mut();
if !periodic.0 && (pos.0 as isize - subject.pos.0 as isize).abs() >= n.0 {
continue;
}
if !periodic.1 && (pos.1 as isize - subject.pos.1 as isize).abs() >= n.1 {
continue;
}
subject.cfg = subject.cfg
.iter()
.filter(|&idx| self.samples[idx].0[(i as usize, j as usize)] == pid)
.collect();
subject.entropy = self.entropy(&subject.cfg);
if !(subject.entropy >= 0.0) {
debug!("Destroyed wave position {:?}'s hopes and dreams.",
subject_pos);
}
}
}
self.propagate(states, pos, periodic)
}
fn propagate(&self,
states: Array2<RefCell<State<Ix2>>>,
observe: Ix2,
periodic: Self::Periodicity)
-> Option<Array2<RefCell<State<Ix2>>>> {
let n = (self.n.0 as isize, self.n.1 as isize);
let mut queue = VecDeque::new();
queue.push_back(observe);
while let Some(focus) = queue.pop_front() {
let mut focus = match states.get(focus) {
Some(state) => state,
None => continue,
}
.borrow_mut();
focus.entropy = self.entropy(&focus.cfg);
if !(focus.entropy >= 0.0) {
return None;
}
let mut focus_dirty = false;
{
for i in -n.0 + 1..n.0 {
let dim_adj_x = (states.dim().0 as isize + i) as usize;
for j in -n.1 + 1..n.1 {
if i == 0 && j == 0 {
continue;
}
let dim_adj_y = (states.dim().1 as isize + j) as usize;
let subject_pos = ((focus.pos.0 + dim_adj_x) % states.dim().0,
(focus.pos.1 + dim_adj_y) % states.dim().1);
let mut subject = states[subject_pos].borrow_mut();
if !periodic.0 &&
(focus.pos.0 as isize - subject.pos.0 as isize).abs() >= n.0 {
continue;
}
if !periodic.1 &&
(focus.pos.1 as isize - subject.pos.1 as isize).abs() >= n.1 {
continue;
}
let mut subject_dirty = false;
let mut focus_allowed = BitSet::new();
let mut subject_allowed = BitSet::new();
loop {
focus_allowed.clear();
subject_allowed.clear();
for focus_cfg in focus.cfg.iter() {
subject_allowed.union_with(&self.collide[&Offset2(i, j)][focus_cfg]);
}
for subject_cfg in subject.cfg.iter() {
focus_allowed.union_with(&self.collide[&Offset2(-i, -j)][subject_cfg]);
}
let focus_len = focus.cfg.len();
let subject_len = subject.cfg.len();
focus.cfg.intersect_with(&focus_allowed);
subject.cfg.intersect_with(&subject_allowed);
let focus_modified = focus_len > focus.cfg.len();
let subject_modified = subject_len > subject.cfg.len();
if focus_modified {
focus_dirty = true;
}
if subject_modified {
subject_dirty = true;
}
if !(focus_modified || subject_modified) {
break;
}
}
if subject_dirty {
queue.push_back(subject.pos);
}
}
}
}
if focus_dirty {
queue.push_back(focus.pos);
}
}
return Some(states);
}
fn observe<R: Rng>(&self,
mut states: Array2<RefCell<State<Ix2>>>,
observe: Ix2,
periodic: (bool, bool),
rng: &mut R)
-> Option<Array2<RefCell<State<Ix2>>>> {
self.pick_sample(&mut states[observe].borrow_mut().cfg, rng);
self.propagate(states, observe, periodic)
}
fn entropy(&self, cfg: &BitSet) -> f64 {
use std::f64;
if cfg.is_empty() {
return f64::NAN;
}
let weights: Vec<f64> = cfg.iter().map(|i| self.weights[i].1).collect();
let sum: f64 = weights.iter().sum();
weights.into_iter()
.map(|w| {
let p = w / sum;
-(p * p.ln())
})
.sum()
}
fn resolve(&self, dim: Self::Dims, wave: ArrayView2<RefCell<State<Ix2>>>) -> Array2<P> {
Array::from_shape_fn(dim, |(x, y)| {
let (wx, dx) = if x < wave.dim().0 {
(x, 0)
} else {
(wave.dim().0 - 1, x - (wave.dim().0 - 1))
};
let (wy, dy) = if y < wave.dim().1 {
(y, 0)
} else {
(wave.dim().1 - 1, y - (wave.dim().1 - 1))
};
self.palette[self.samples[wave[(wx, wy)]
.borrow()
.cfg
.iter()
.next()
.unwrap()]
.0[(dx, dy)]
.0 as usize]
})
}
}
| rust | MIT | 075f5449bc20ff4ecff9740c90e542f691a8c7f5 | 2026-01-04T20:20:25.435457Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/build.rs | build.rs | use cargo_lock::Lockfile;
use vergen::{BuildBuilder, Emitter, RustcBuilder};
fn main() -> anyhow::Result<()> {
// Proto
let mut config = prost_build::Config::new();
let proto_file = "proto/event.proto";
println!("cargo:rerun-if-changed={}", proto_file);
config.boxed(".blockdaemon.solana.accountsdb_plugin_kafka.types.MessageWrapper");
config.protoc_arg("--experimental_allow_proto3_optional");
config.compile_protos(&[proto_file], &["proto/"])?;
// Version metrics
let _ = Emitter::default()
.add_instructions(&BuildBuilder::all_build()?)?
.add_instructions(&RustcBuilder::all_rustc()?)?
.emit();
// vergen git version does not looks cool
println!(
"cargo:rustc-env=GIT_VERSION={}",
git_version::git_version!()
);
// Extract Solana version
let lockfile = Lockfile::load("./Cargo.lock")?;
println!(
"cargo:rustc-env=SOLANA_SDK_VERSION={}",
get_pkg_version(&lockfile, "solana-sdk")
);
Ok(())
}
fn get_pkg_version(lockfile: &Lockfile, pkg_name: &str) -> String {
lockfile
.packages
.iter()
.filter(|pkg| pkg.name.as_str() == pkg_name)
.map(|pkg| pkg.version.to_string())
.collect::<Vec<_>>()
.join(",")
}
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/prom.rs | src/prom.rs | use {
crate::version::VERSION as VERSION_INFO,
bytes::Bytes,
http::StatusCode,
http_body_util::Full,
hyper::{Request, Response, body::Incoming, service::service_fn},
hyper_util::rt::TokioIo,
log::*,
prometheus::{GaugeVec, IntCounterVec, Opts, Registry, TextEncoder},
rdkafka::{
client::ClientContext,
producer::{DeliveryResult, ProducerContext},
statistics::Statistics,
},
std::{io::Result as IoResult, net::SocketAddr, sync::Once, time::Duration},
tokio::net::TcpListener,
tokio::runtime::Runtime,
};
lazy_static::lazy_static! {
pub static ref REGISTRY: Registry = Registry::new();
static ref VERSION: IntCounterVec = IntCounterVec::new(
Opts::new("version", "Plugin version info"),
&["key", "value"]
).unwrap();
pub static ref UPLOAD_ACCOUNTS_TOTAL: IntCounterVec = IntCounterVec::new(
Opts::new("upload_accounts_total", "Status of uploaded accounts"),
&["status"]
).unwrap();
pub static ref UPLOAD_SLOTS_TOTAL: IntCounterVec = IntCounterVec::new(
Opts::new("upload_slots_total", "Status of uploaded slots"),
&["status"]
).unwrap();
pub static ref UPLOAD_TRANSACTIONS_TOTAL: IntCounterVec = IntCounterVec::new(
Opts::new("upload_transactions_total", "Status of uploaded transactions"),
&["status"]
).unwrap();
static ref KAFKA_STATS: GaugeVec = GaugeVec::new(
Opts::new("kafka_stats", "librdkafka metrics"),
&["broker", "metric"]
).unwrap();
}
#[derive(Debug)]
pub struct PrometheusService {
runtime: Runtime,
}
impl PrometheusService {
pub fn new(address: SocketAddr) -> IoResult<Self> {
static REGISTER: Once = Once::new();
REGISTER.call_once(|| {
macro_rules! register {
($collector:ident) => {
REGISTRY
.register(Box::new($collector.clone()))
.expect("collector can't be registered");
};
}
register!(VERSION);
register!(UPLOAD_ACCOUNTS_TOTAL);
register!(UPLOAD_SLOTS_TOTAL);
register!(UPLOAD_TRANSACTIONS_TOTAL);
register!(KAFKA_STATS);
for (key, value) in &[
("version", VERSION_INFO.version),
("solana", VERSION_INFO.solana),
("git", VERSION_INFO.git),
("rustc", VERSION_INFO.rustc),
("buildts", VERSION_INFO.buildts),
] {
VERSION
.with_label_values(&[key.to_string(), value.to_string()])
.inc();
}
});
let runtime = Runtime::new()?;
runtime.spawn(async move {
let listener = TcpListener::bind(address).await.unwrap();
loop {
let (stream, _) = match listener.accept().await {
Ok(conn) => conn,
Err(e) => {
error!("Failed to accept connection: {}", e);
continue;
}
};
let io = TokioIo::new(stream);
let service = service_fn(|req: Request<Incoming>| async move {
let response = match req.uri().path() {
"/metrics" => metrics_handler(),
_ => not_found_handler(),
};
Ok::<_, hyper::Error>(response)
});
tokio::task::spawn(async move {
if let Err(err) = hyper::server::conn::http1::Builder::new()
.serve_connection(io, service)
.await
{
error!("Error serving connection: {}", err);
}
});
}
});
Ok(PrometheusService { runtime })
}
pub fn shutdown(self) {
self.runtime.shutdown_timeout(Duration::from_secs(10));
}
}
fn metrics_handler() -> Response<Full<Bytes>> {
let metrics = TextEncoder::new()
.encode_to_string(®ISTRY.gather())
.unwrap_or_else(|error| {
error!("could not encode custom metrics: {}", error);
String::new()
});
Response::builder()
.body(Full::new(Bytes::from(metrics)))
.unwrap()
}
fn not_found_handler() -> Response<Full<Bytes>> {
Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Full::new(Bytes::from("")))
.unwrap()
}
#[derive(Debug, Default, Clone, Copy)]
pub struct StatsThreadedProducerContext;
impl ClientContext for StatsThreadedProducerContext {
fn stats(&self, statistics: Statistics) {
for (name, broker) in statistics.brokers {
macro_rules! set_value {
($name:expr, $value:expr) => {
KAFKA_STATS
.with_label_values(&[&name.to_string(), &$name.to_string()])
.set($value as f64);
};
}
set_value!("outbuf_cnt", broker.outbuf_cnt);
set_value!("outbuf_msg_cnt", broker.outbuf_msg_cnt);
set_value!("waitresp_cnt", broker.waitresp_cnt);
set_value!("waitresp_msg_cnt", broker.waitresp_msg_cnt);
set_value!("tx", broker.tx);
set_value!("txerrs", broker.txerrs);
set_value!("txretries", broker.txretries);
set_value!("req_timeouts", broker.req_timeouts);
if let Some(window) = broker.int_latency {
set_value!("int_latency.min", window.min);
set_value!("int_latency.max", window.max);
set_value!("int_latency.avg", window.avg);
set_value!("int_latency.sum", window.sum);
set_value!("int_latency.cnt", window.cnt);
set_value!("int_latency.stddev", window.stddev);
set_value!("int_latency.hdrsize", window.hdrsize);
set_value!("int_latency.p50", window.p50);
set_value!("int_latency.p75", window.p75);
set_value!("int_latency.p90", window.p90);
set_value!("int_latency.p95", window.p95);
set_value!("int_latency.p99", window.p99);
set_value!("int_latency.p99_99", window.p99_99);
set_value!("int_latency.outofrange", window.outofrange);
}
if let Some(window) = broker.outbuf_latency {
set_value!("outbuf_latency.min", window.min);
set_value!("outbuf_latency.max", window.max);
set_value!("outbuf_latency.avg", window.avg);
set_value!("outbuf_latency.sum", window.sum);
set_value!("outbuf_latency.cnt", window.cnt);
set_value!("outbuf_latency.stddev", window.stddev);
set_value!("outbuf_latency.hdrsize", window.hdrsize);
set_value!("outbuf_latency.p50", window.p50);
set_value!("outbuf_latency.p75", window.p75);
set_value!("outbuf_latency.p90", window.p90);
set_value!("outbuf_latency.p95", window.p95);
set_value!("outbuf_latency.p99", window.p99);
set_value!("outbuf_latency.p99_99", window.p99_99);
set_value!("outbuf_latency.outofrange", window.outofrange);
}
}
}
}
impl ProducerContext for StatsThreadedProducerContext {
type DeliveryOpaque = ();
fn delivery(&self, _: &DeliveryResult<'_>, _: Self::DeliveryOpaque) {}
}
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/config.rs | src/config.rs | // Copyright 2022 Blockdaemon Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use {
crate::{PrometheusService, prom::StatsThreadedProducerContext},
agave_geyser_plugin_interface::geyser_plugin_interface::{
GeyserPluginError, Result as PluginResult,
},
rdkafka::{
ClientConfig,
config::FromClientConfigAndContext,
error::KafkaResult,
producer::{DefaultProducerContext, ThreadedProducer},
},
serde::Deserialize,
std::{collections::HashMap, fs::File, io::Result as IoResult, net::SocketAddr, path::Path},
};
/// Plugin config.
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Config {
#[allow(dead_code)]
libpath: String,
/// Kafka config.
pub kafka: HashMap<String, String>,
/// Graceful shutdown timeout.
#[serde(default)]
pub shutdown_timeout_ms: u64,
/// Accounts, transactions filters
pub filters: Vec<ConfigFilter>,
/// Kafka topic to send block events to.
#[serde(default)]
pub block_events_topic: Option<BlockEventsConfig>,
/// Prometheus endpoint.
#[serde(default)]
pub prometheus: Option<SocketAddr>,
}
impl Default for Config {
fn default() -> Self {
Self {
libpath: "".to_owned(),
kafka: HashMap::new(),
shutdown_timeout_ms: 30_000,
filters: vec![],
prometheus: None,
block_events_topic: None,
}
}
}
impl Config {
/// Read plugin from JSON file.
pub fn read_from<P: AsRef<Path>>(config_path: P) -> PluginResult<Self> {
let file = File::open(config_path)?;
let mut this: Self = serde_json::from_reader(file)
.map_err(|e| GeyserPluginError::ConfigFileReadError { msg: e.to_string() })?;
this.fill_defaults();
Ok(this)
}
/// Create rdkafka::FutureProducer from config.
pub fn producer(&self) -> KafkaResult<ThreadedProducer<StatsThreadedProducerContext>> {
let mut config = ClientConfig::new();
for (k, v) in self.kafka.iter() {
config.set(k, v);
}
ThreadedProducer::from_config_and_context(&config, StatsThreadedProducerContext)
}
fn set_default(&mut self, k: &'static str, v: &'static str) {
if !self.kafka.contains_key(k) {
self.kafka.insert(k.to_owned(), v.to_owned());
}
}
fn fill_defaults(&mut self) {
self.set_default("request.required.acks", "1");
self.set_default("message.timeout.ms", "30000");
self.set_default("compression.type", "lz4");
self.set_default("partitioner", "murmur2_random");
}
pub fn create_prometheus(&self) -> IoResult<Option<PrometheusService>> {
self.prometheus.map(PrometheusService::new).transpose()
}
}
/// Plugin config.
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields, default)]
pub struct ConfigFilter {
/// Kafka topic to send account updates to.
pub update_account_topic: String,
/// Kafka topic to send slot status updates to.
pub slot_status_topic: String,
/// Kafka topic to send transaction to.
pub transaction_topic: String,
/// List of programs to ignore.
pub program_ignores: Vec<String>,
/// List of programs to include
pub program_filters: Vec<String>,
// List of accounts to include
pub account_filters: Vec<String>,
/// Publish all accounts on startup.
pub publish_all_accounts: bool,
/// Publish vote transactions.
pub include_vote_transactions: bool,
/// Publish failed transactions.
pub include_failed_transactions: bool,
/// Wrap all event message in a single message type.
pub wrap_messages: bool,
}
impl Default for ConfigFilter {
fn default() -> Self {
Self {
update_account_topic: "".to_owned(),
slot_status_topic: "".to_owned(),
transaction_topic: "".to_owned(),
program_ignores: Vec::new(),
program_filters: Vec::new(),
account_filters: Vec::new(),
publish_all_accounts: false,
include_vote_transactions: true,
include_failed_transactions: true,
wrap_messages: false,
}
}
}
#[derive(Debug, Deserialize, Clone)]
pub struct BlockEventsConfig {
pub topic: String,
pub wrap_messages: bool,
}
pub type Producer = ThreadedProducer<DefaultProducerContext>;
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/event.rs | src/event.rs | // Copyright 2022 Blockdaemon Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use agave_geyser_plugin_interface::geyser_plugin_interface::SlotStatus as PluginSlotStatus;
include!(concat!(
env!("OUT_DIR"),
"/blockdaemon.solana.accountsdb_plugin_kafka.types.rs"
));
impl From<PluginSlotStatus> for SlotStatus {
fn from(other: PluginSlotStatus) -> Self {
match other {
PluginSlotStatus::Processed => SlotStatus::Processed,
PluginSlotStatus::Rooted => SlotStatus::Rooted,
PluginSlotStatus::Confirmed => SlotStatus::Confirmed,
PluginSlotStatus::FirstShredReceived => SlotStatus::FirstShredReceived,
PluginSlotStatus::Completed => SlotStatus::Completed,
PluginSlotStatus::CreatedBank => SlotStatus::CreatedBank,
PluginSlotStatus::Dead(_) => SlotStatus::Dead,
}
}
}
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/lib.rs | src/lib.rs | // Copyright 2022 Blockdaemon Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use agave_geyser_plugin_interface::geyser_plugin_interface::GeyserPlugin;
mod config;
mod event;
mod filter;
mod plugin;
mod prom;
mod publisher;
mod version;
pub use {
config::{Config, ConfigFilter, Producer},
event::*,
filter::Filter,
plugin::KafkaPlugin,
prom::PrometheusService,
publisher::Publisher,
};
#[unsafe(no_mangle)]
#[allow(improper_ctypes_definitions)]
/// # Safety
///
/// This function returns a pointer to the Kafka Plugin box implementing trait GeyserPlugin.
///
/// The Solana validator and this plugin must be compiled with the same Rust compiler version and Solana core version.
/// Loading this plugin with mismatching versions is undefined behavior and will likely cause memory corruption.
pub unsafe extern "C" fn _create_plugin() -> *mut dyn GeyserPlugin {
let plugin = KafkaPlugin::new();
let plugin: Box<dyn GeyserPlugin> = Box::new(plugin);
Box::into_raw(plugin)
}
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/version.rs | src/version.rs | use {serde::Serialize, std::env};
#[derive(Debug, Serialize)]
pub struct Version {
pub version: &'static str,
pub solana: &'static str,
pub git: &'static str,
pub rustc: &'static str,
pub buildts: &'static str,
}
pub const VERSION: Version = Version {
version: env!("CARGO_PKG_VERSION"),
solana: env!("SOLANA_SDK_VERSION"),
git: env!("GIT_VERSION"),
rustc: env!("VERGEN_RUSTC_SEMVER"),
buildts: env!("VERGEN_BUILD_TIMESTAMP"),
};
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/filter.rs | src/filter.rs | // Copyright 2022 Blockdaemon Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use {
crate::ConfigFilter,
solana_pubkey::Pubkey,
std::{collections::HashSet, str::FromStr},
};
pub struct Filter {
pub publish_all_accounts: bool,
pub program_ignores: HashSet<[u8; 32]>,
pub program_filters: HashSet<[u8; 32]>,
pub account_filters: HashSet<[u8; 32]>,
pub include_vote_transactions: bool,
pub include_failed_transactions: bool,
pub update_account_topic: String,
pub slot_status_topic: String,
pub transaction_topic: String,
pub wrap_messages: bool,
}
impl Filter {
pub fn new(config: &ConfigFilter) -> Self {
Self {
publish_all_accounts: config.publish_all_accounts,
program_ignores: config
.program_ignores
.iter()
.flat_map(|p| Pubkey::from_str(p).ok().map(|p| p.to_bytes()))
.collect(),
program_filters: config
.program_filters
.iter()
.flat_map(|p| Pubkey::from_str(p).ok().map(|p| p.to_bytes()))
.collect(),
account_filters: config
.account_filters
.iter()
.flat_map(|p| Pubkey::from_str(p).ok().map(|p| p.to_bytes()))
.collect(),
include_vote_transactions: config.include_vote_transactions,
include_failed_transactions: config.include_failed_transactions,
update_account_topic: config.update_account_topic.clone(),
slot_status_topic: config.slot_status_topic.clone(),
transaction_topic: config.transaction_topic.clone(),
wrap_messages: config.wrap_messages,
}
}
pub fn wants_program(&self, program: &[u8]) -> bool {
match <&[u8; 32]>::try_from(program) {
Ok(key) => {
!self.program_ignores.contains(key)
&& (self.program_filters.is_empty() || self.program_filters.contains(key))
}
Err(_error) => true,
}
}
pub fn wants_account(&self, account: &[u8]) -> bool {
match <&[u8; 32]>::try_from(account) {
Ok(key) => self.account_filters.contains(key),
Err(_error) => true,
}
}
pub fn wants_vote_tx(&self) -> bool {
self.include_vote_transactions
}
pub fn wants_failed_tx(&self) -> bool {
self.include_failed_transactions
}
}
#[cfg(test)]
mod tests {
    use {
        crate::{ConfigFilter, Filter},
        solana_pubkey::Pubkey,
        std::str::FromStr,
    };

    /// Parse a base58 pubkey string into its raw 32-byte form.
    fn key_bytes(pubkey: &str) -> [u8; 32] {
        Pubkey::from_str(pubkey).unwrap().to_bytes()
    }

    #[test]
    fn test_filter() {
        let config = ConfigFilter {
            program_ignores: vec![
                "Sysvar1111111111111111111111111111111111111".to_owned(),
                "Vote111111111111111111111111111111111111111".to_owned(),
            ],
            program_filters: vec!["9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin".to_owned()],
            ..Default::default()
        };
        let filter = Filter::new(&config);
        assert_eq!(filter.program_ignores.len(), 2);
        // A program on the allow-list is wanted.
        assert!(filter.wants_program(&key_bytes("9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin")));
        // An ignored program is rejected.
        assert!(!filter.wants_program(&key_bytes("Vote111111111111111111111111111111111111111")));
    }

    #[test]
    fn test_owner_filter() {
        let config = ConfigFilter {
            program_ignores: vec![
                "Sysvar1111111111111111111111111111111111111".to_owned(),
                "Vote111111111111111111111111111111111111111".to_owned(),
            ],
            program_filters: vec!["9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin".to_owned()],
            ..Default::default()
        };
        let filter = Filter::new(&config);
        assert_eq!(filter.program_ignores.len(), 2);
        assert!(filter.wants_program(&key_bytes("9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin")));
        assert!(!filter.wants_program(&key_bytes("Vote111111111111111111111111111111111111111")));
        // Not ignored, but absent from a non-empty allow-list -> rejected.
        assert!(!filter.wants_program(&key_bytes("cndy3Z4yapfJBmL3ShUp5exZKqR3z33thTzeNMm2gRZ")));
    }

    #[test]
    fn test_account_filter() {
        let config = ConfigFilter {
            program_filters: vec!["9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin".to_owned()],
            account_filters: vec!["5KKsLVU6TcbVDK4BS6K1DGDxnh4Q9xjYJ8XaDCG5t8ht".to_owned()],
            ..Default::default()
        };
        let filter = Filter::new(&config);
        assert_eq!(filter.program_filters.len(), 1);
        assert_eq!(filter.account_filters.len(), 1);
        assert!(filter.wants_program(&key_bytes("9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin")));
        assert!(filter.wants_account(&key_bytes("5KKsLVU6TcbVDK4BS6K1DGDxnh4Q9xjYJ8XaDCG5t8ht")));
    }
}
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/publisher.rs | src/publisher.rs | // Copyright 2022 Blockdaemon Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use {
crate::{
BlockEvent, Config, MessageWrapper, SlotStatusEvent, TransactionEvent, UpdateAccountEvent,
message_wrapper::EventMessage::{self, Account, Slot, Transaction},
prom::{
StatsThreadedProducerContext, UPLOAD_ACCOUNTS_TOTAL, UPLOAD_SLOTS_TOTAL,
UPLOAD_TRANSACTIONS_TOTAL,
},
},
prost::Message,
rdkafka::{
error::KafkaError,
producer::{BaseRecord, Producer, ThreadedProducer},
},
std::time::Duration,
};
/// Thin wrapper around the threaded Kafka producer that knows how to encode
/// and key each event kind; flushes pending messages on drop.
pub struct Publisher {
    producer: ThreadedProducer<StatsThreadedProducerContext>,
    // How long `Drop` waits for queued messages to flush before giving up.
    shutdown_timeout: Duration,
}
impl Publisher {
    /// Wrap an already-configured Kafka producer; the shutdown timeout comes
    /// from the plugin config and bounds the flush performed on drop.
    pub fn new(producer: ThreadedProducer<StatsThreadedProducerContext>, config: &Config) -> Self {
        Self {
            producer,
            shutdown_timeout: Duration::from_millis(config.shutdown_timeout_ms),
        }
    }

    /// Publish an account update. Unwrapped messages are keyed by the pubkey
    /// prefixed with `b'A'` so consumers of a shared topic can distinguish
    /// event kinds; wrapped messages use the raw pubkey as the key.
    pub fn update_account(
        &self,
        ev: UpdateAccountEvent,
        wrap_messages: bool,
        topic: &str,
    ) -> Result<(), KafkaError> {
        let temp_key;
        let (key, buf) = if wrap_messages {
            // Clone the pubkey before `ev` is moved into the wrapper.
            temp_key = ev.pubkey.clone();
            (&temp_key, Self::encode_with_wrapper(Account(Box::new(ev))))
        } else {
            temp_key = self.copy_and_prepend(ev.pubkey.as_slice(), b'A');
            (&temp_key, ev.encode_to_vec())
        };
        let record = BaseRecord::<Vec<u8>, _>::to(topic).key(key).payload(&buf);
        let result = self.producer.send(record).map(|_| ()).map_err(|(e, _)| e);
        UPLOAD_ACCOUNTS_TOTAL
            .with_label_values(&[if result.is_ok() { "success" } else { "failed" }])
            .inc();
        result
    }

    /// Publish a slot status change, keyed by the little-endian slot number
    /// (prefixed with `b'S'` when unwrapped).
    pub fn update_slot_status(
        &self,
        ev: SlotStatusEvent,
        wrap_messages: bool,
        topic: &str,
    ) -> Result<(), KafkaError> {
        let temp_key;
        let (key, buf) = if wrap_messages {
            temp_key = ev.slot.to_le_bytes().to_vec();
            (&temp_key, Self::encode_with_wrapper(Slot(Box::new(ev))))
        } else {
            temp_key = self.copy_and_prepend(&ev.slot.to_le_bytes(), b'S');
            (&temp_key, ev.encode_to_vec())
        };
        let record = BaseRecord::<Vec<u8>, _>::to(topic).key(key).payload(&buf);
        let result = self.producer.send(record).map(|_| ()).map_err(|(e, _)| e);
        UPLOAD_SLOTS_TOTAL
            .with_label_values(&[if result.is_ok() { "success" } else { "failed" }])
            .inc();
        result
    }

    /// Publish a transaction, keyed by its signature (prefixed with `b'T'`
    /// when unwrapped).
    pub fn update_transaction(
        &self,
        ev: TransactionEvent,
        wrap_messages: bool,
        topic: &str,
    ) -> Result<(), KafkaError> {
        let temp_key;
        let (key, buf) = if wrap_messages {
            // Clone the signature before `ev` is moved into the wrapper.
            temp_key = ev.signature.clone();
            (
                &temp_key,
                Self::encode_with_wrapper(Transaction(Box::new(ev))),
            )
        } else {
            temp_key = self.copy_and_prepend(ev.signature.as_slice(), b'T');
            (&temp_key, ev.encode_to_vec())
        };
        let record = BaseRecord::<Vec<u8>, _>::to(topic).key(key).payload(&buf);
        let result = self.producer.send(record).map(|_| ()).map_err(|(e, _)| e);
        UPLOAD_TRANSACTIONS_TOTAL
            .with_label_values(&[if result.is_ok() { "success" } else { "failed" }])
            .inc();
        result
    }

    /// Publish block metadata, keyed by the blockhash (prefixed with `b'B'`
    /// when unwrapped). Note: unlike the other publishers, no Prometheus
    /// counter is incremented for block events.
    pub fn update_block(
        &self,
        ev: BlockEvent,
        wrap_messages: bool,
        topic: &str,
    ) -> Result<(), KafkaError> {
        let temp_key;
        let (key, buf) = if wrap_messages {
            temp_key = ev.blockhash.as_bytes().to_vec();
            (
                &temp_key,
                Self::encode_with_wrapper(EventMessage::Block(Box::new(ev))),
            )
        } else {
            temp_key = self.copy_and_prepend(ev.blockhash.as_bytes(), b'B');
            (&temp_key, ev.encode_to_vec())
        };
        let record = BaseRecord::<Vec<u8>, _>::to(topic).key(key).payload(&buf);
        self.producer.send(record).map(|_| ()).map_err(|(e, _)| e)
    }

    /// Encode an event inside the common `MessageWrapper` envelope.
    fn encode_with_wrapper(message: EventMessage) -> Vec<u8> {
        MessageWrapper {
            event_message: Some(message),
        }
        .encode_to_vec()
    }

    /// Return `prefix` followed by `data` in a freshly allocated buffer.
    fn copy_and_prepend(&self, data: &[u8], prefix: u8) -> Vec<u8> {
        let mut temp_key = Vec::with_capacity(data.len() + 1);
        temp_key.push(prefix);
        temp_key.extend_from_slice(data);
        temp_key
    }
}
impl Drop for Publisher {
    fn drop(&mut self) {
        // Best-effort flush of queued messages before shutdown; errors (e.g.
        // a flush timeout) are deliberately ignored since nothing can be done
        // about them during drop.
        let _ = self.producer.flush(self.shutdown_timeout);
    }
}
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | false |
Blockdaemon/solana-accountsdb-plugin-kafka | https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/blob/c96300ace4d1c5c2b1e2feb3f5583cca16906984/src/plugin.rs | src/plugin.rs | // Copyright 2022 Blockdaemon Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use {
crate::{
BlockEvent, CompiledInstruction, Config, Filter, InnerInstruction, InnerInstructions,
LegacyLoadedMessage, LegacyMessage, LoadedAddresses, MessageAddressTableLookup,
MessageHeader, PrometheusService, Publisher, Reward, RewardsAndNumPartitions,
SanitizedMessage, SanitizedTransaction, SlotStatus, SlotStatusEvent, TransactionEvent,
TransactionStatusMeta, TransactionTokenBalance, UiTokenAmount, UpdateAccountEvent,
V0LoadedMessage, V0Message, sanitized_message,
},
agave_geyser_plugin_interface::geyser_plugin_interface::{
GeyserPlugin, GeyserPluginError as PluginError, ReplicaAccountInfoV3,
ReplicaAccountInfoVersions, ReplicaBlockInfoV4, ReplicaBlockInfoVersions,
ReplicaTransactionInfoV3, ReplicaTransactionInfoVersions, Result as PluginResult,
SlotStatus as PluginSlotStatus,
},
log::{debug, error, info, log_enabled},
rdkafka::util::get_rdkafka_version,
solana_pubkey::{Pubkey, pubkey},
std::fmt::{Debug, Formatter},
};
/// Geyser plugin that streams account, slot, transaction, and block events
/// to Kafka topics according to the configured filters.
#[derive(Default)]
pub struct KafkaPlugin {
    // All fields are `None` until `on_load` succeeds and back to `None`
    // after `on_unload`.
    publisher: Option<Publisher>,
    filter: Option<Vec<Filter>>,
    // `(topic, wrap_messages)` for block metadata events, if configured.
    block_events_topic: Option<(String, bool)>,
    prometheus: Option<PrometheusService>,
}
impl Debug for KafkaPlugin {
    /// Minimal manual `Debug`: the inner producer/filter types do not
    /// implement `Debug`, so emit just the type name instead of nothing.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("KafkaPlugin")
    }
}
impl GeyserPlugin for KafkaPlugin {
    fn name(&self) -> &'static str {
        "KafkaPlugin"
    }

    /// Read the config file, create the Kafka producer and the optional
    /// Prometheus endpoint, and compile the configured filters.
    fn on_load(&mut self, config_file: &str, _: bool) -> PluginResult<()> {
        if self.publisher.is_some() {
            return Err(PluginError::Custom("plugin already loaded".into()));
        }
        solana_logger::setup_with_default("info");
        info!(
            "Loading plugin {:?} from config_file {:?}",
            self.name(),
            config_file
        );
        let config = Config::read_from(config_file)?;
        let (version_n, version_s) = get_rdkafka_version();
        info!("rd_kafka_version: {:#08x}, {}", version_n, version_s);
        let producer = config.producer().map_err(|error| {
            error!("Failed to create kafka producer: {error:?}");
            PluginError::Custom(Box::new(error))
        })?;
        info!("Created rdkafka::FutureProducer");
        let publisher = Publisher::new(producer, &config);
        let prometheus = config
            .create_prometheus()
            .map_err(|error| PluginError::Custom(Box::new(error)))?;
        self.publisher = Some(publisher);
        self.filter = Some(config.filters.iter().map(Filter::new).collect());
        self.prometheus = prometheus;
        self.block_events_topic = config
            .block_events_topic
            .map(|b| (b.topic, b.wrap_messages));
        info!("Spawned producer");
        Ok(())
    }

    fn on_unload(&mut self) {
        self.publisher = None;
        self.filter = None;
        if let Some(prometheus) = self.prometheus.take() {
            prometheus.shutdown();
        }
    }

    /// Publish one account update per filter whose topic is configured and
    /// whose program/account sets accept it.
    fn update_account(
        &self,
        account: ReplicaAccountInfoVersions,
        slot: u64,
        is_startup: bool,
    ) -> PluginResult<()> {
        let filters = self.unwrap_filters();
        // Skip snapshot-replay updates unless some filter opted in.
        if is_startup && filters.iter().all(|filter| !filter.publish_all_accounts) {
            return Ok(());
        }
        let info = Self::unwrap_update_account(account);
        let publisher = self.unwrap_publisher();
        for filter in filters {
            if !filter.update_account_topic.is_empty() {
                if !filter.wants_program(info.owner) && !filter.wants_account(info.pubkey) {
                    Self::log_ignore_account_update(info);
                    continue;
                }
                let event = UpdateAccountEvent {
                    slot,
                    pubkey: info.pubkey.to_vec(),
                    lamports: info.lamports,
                    owner: info.owner.to_vec(),
                    executable: info.executable,
                    rent_epoch: info.rent_epoch,
                    data: info.data.to_vec(),
                    write_version: info.write_version,
                    txn_signature: info.txn.map(|v| v.signature().as_ref().to_owned()),
                    data_version: info.write_version as u32, // Use write_version as data version
                    is_startup, // Use the is_startup parameter
                    account_age: slot.saturating_sub(info.rent_epoch), // Approximate age from rent epoch
                };
                publisher
                    .update_account(event, filter.wrap_messages, &filter.update_account_topic)
                    .map_err(|e| PluginError::AccountsUpdateError { msg: e.to_string() })?;
            }
        }
        Ok(())
    }

    /// Publish a slot status change to every filter with a configured topic.
    fn update_slot_status(
        &self,
        slot: u64,
        parent: Option<u64>,
        status: &PluginSlotStatus,
    ) -> PluginResult<()> {
        let publisher = self.unwrap_publisher();
        let value = SlotStatus::from(status.clone());
        for filter in self.unwrap_filters() {
            if !filter.slot_status_topic.is_empty() {
                let event = SlotStatusEvent {
                    slot,
                    parent: parent.unwrap_or(0),
                    status: value.into(),
                    is_confirmed: Self::is_slot_confirmed(&value), // Derived from status
                    confirmation_count: Self::calculate_confirmation_count(&value), // Calculate from status
                    status_description: Self::get_slot_status_description(&value), // Get human-readable status
                };
                publisher
                    .update_slot_status(event, filter.wrap_messages, &filter.slot_status_topic)
                    .map_err(|e| PluginError::AccountsUpdateError { msg: e.to_string() })?;
            }
        }
        Ok(())
    }

    /// Publish a transaction unless a filter excludes it (vote/failed
    /// filtering, or no static account key matches the filter sets).
    fn notify_transaction(
        &self,
        transaction: ReplicaTransactionInfoVersions,
        slot: u64,
    ) -> PluginResult<()> {
        let info = Self::unwrap_transaction(transaction);
        let publisher = self.unwrap_publisher();
        for filter in self.unwrap_filters() {
            if !filter.transaction_topic.is_empty() {
                let is_failed = info.transaction_status_meta.status.is_err();
                if (!filter.wants_vote_tx() && info.is_vote)
                    || (!filter.wants_failed_tx() && is_failed)
                {
                    debug!("Ignoring vote/failed transaction");
                    continue;
                }
                // Only static account keys are inspected here; addresses
                // loaded from lookup tables do not participate in filtering.
                if !info
                    .transaction
                    .message
                    .static_account_keys()
                    .iter()
                    .any(|pubkey| {
                        filter.wants_program(pubkey.as_ref())
                            || filter.wants_account(pubkey.as_ref())
                    })
                {
                    debug!("Ignoring transaction {:?}", info.signature);
                    continue;
                }
                let event = Self::build_transaction_event(slot, info);
                publisher
                    .update_transaction(event, filter.wrap_messages, &filter.transaction_topic)
                    .map_err(|e| PluginError::TransactionUpdateError { msg: e.to_string() })?;
            }
        }
        Ok(())
    }

    /// Publish block metadata when a block events topic is configured.
    fn notify_block_metadata(&self, blockinfo: ReplicaBlockInfoVersions) -> PluginResult<()> {
        let Some((topic, wrap_messages)) = &self.block_events_topic else {
            return Ok(());
        };
        let info = Self::unwrap_block_metadata(blockinfo);
        let publisher = self.unwrap_publisher();
        let event = Self::build_block_event(info.clone());
        // Propagate publish failures instead of panicking the validator.
        publisher
            .update_block(event, *wrap_messages, topic)
            .map_err(|e| PluginError::Custom(Box::new(e)))?;
        Ok(())
    }

    fn account_data_notifications_enabled(&self) -> bool {
        let filters = self.unwrap_filters();
        filters
            .iter()
            .any(|filter| !filter.update_account_topic.is_empty())
    }

    fn transaction_notifications_enabled(&self) -> bool {
        let filters = self.unwrap_filters();
        filters
            .iter()
            .any(|filter| !filter.transaction_topic.is_empty())
    }
}
impl KafkaPlugin {
    /// Compute Budget program ID, used to detect `SetComputeUnitPrice`
    /// instructions when extracting the compute unit price.
    const COMPUTE_BUDGET_PROGRAM_ID: Pubkey =
        pubkey!("ComputeBudget111111111111111111111111111111");
pub fn new() -> Self {
Default::default()
}
    /// The active publisher; panics if called before `on_load` succeeded.
    fn unwrap_publisher(&self) -> &Publisher {
        self.publisher.as_ref().expect("publisher is unavailable")
    }
    /// The compiled filters; panics if called before `on_load` succeeded.
    fn unwrap_filters(&self) -> &Vec<Filter> {
        self.filter.as_ref().expect("filter is unavailable")
    }
    /// Extract the V3 account payload; older wire formats are rejected with
    /// a panic since this plugin only supports current node versions.
    fn unwrap_update_account(account: ReplicaAccountInfoVersions<'_>) -> &ReplicaAccountInfoV3<'_> {
        match account {
            ReplicaAccountInfoVersions::V0_0_1(_info) => {
                panic!(
                    "ReplicaAccountInfoVersions::V0_0_1 unsupported, please upgrade your Solana node."
                );
            }
            ReplicaAccountInfoVersions::V0_0_2(_info) => {
                panic!(
                    "ReplicaAccountInfoVersions::V0_0_2 unsupported, please upgrade your Solana node."
                );
            }
            ReplicaAccountInfoVersions::V0_0_3(info) => info,
        }
    }
fn unwrap_transaction(
transaction: ReplicaTransactionInfoVersions<'_>,
) -> &ReplicaTransactionInfoV3<'_> {
match transaction {
ReplicaTransactionInfoVersions::V0_0_1(_info) => {
panic!(
"ReplicaTransactionInfoVersions::V0_0_1 unsupported, please upgrade your Solana node."
);
}
ReplicaTransactionInfoVersions::V0_0_2(_info) => {
panic!(
"ReplicaAccountInfoVersions::V0_0_2 unsupported, please upgrade your Solana node."
);
}
ReplicaTransactionInfoVersions::V0_0_3(info) => info,
}
}
    /// Extract the V4 block metadata payload; older wire formats are
    /// rejected with a panic since this plugin only supports current nodes.
    fn unwrap_block_metadata(block: ReplicaBlockInfoVersions<'_>) -> &ReplicaBlockInfoV4<'_> {
        match block {
            ReplicaBlockInfoVersions::V0_0_1(_info) => {
                panic!(
                    "ReplicaBlockInfoVersions::V0_0_1 unsupported, please upgrade your Solana node."
                );
            }
            ReplicaBlockInfoVersions::V0_0_2(_info) => {
                panic!(
                    "ReplicaBlockInfoVersions::V0_0_2 unsupported, please upgrade your Solana node."
                );
            }
            ReplicaBlockInfoVersions::V0_0_3(_info) => {
                panic!(
                    "ReplicaBlockInfoVersions::V0_0_3 unsupported, please upgrade your Solana node."
                );
            }
            ReplicaBlockInfoVersions::V0_0_4(info) => info,
        }
    }
fn build_compiled_instruction(
ix: &solana_message::compiled_instruction::CompiledInstruction,
) -> CompiledInstruction {
CompiledInstruction {
program_id_index: ix.program_id_index as u32,
accounts: ix.clone().accounts.into_iter().map(|v| v as u32).collect(),
data: ix.data.clone(),
}
}
    /// Convert an inner (CPI) instruction into its protobuf representation.
    fn build_inner_instruction(
        ix: &solana_transaction_status::InnerInstruction,
    ) -> InnerInstruction {
        InnerInstruction {
            instruction: Some(Self::build_compiled_instruction(&ix.instruction)),
            stack_height: ix.stack_height,
        }
    }
    /// Convert a message header into its protobuf representation (u8 counts
    /// widened to u32).
    fn build_message_header(header: &solana_message::MessageHeader) -> MessageHeader {
        MessageHeader {
            num_required_signatures: header.num_required_signatures as u32,
            num_readonly_signed_accounts: header.num_readonly_signed_accounts as u32,
            num_readonly_unsigned_accounts: header.num_readonly_unsigned_accounts as u32,
        }
    }
fn build_transaction_token_balance(
transaction_token_account_balance: solana_transaction_status::TransactionTokenBalance,
) -> TransactionTokenBalance {
TransactionTokenBalance {
account_index: transaction_token_account_balance.account_index as u32,
ui_token_account: Some(UiTokenAmount {
ui_amount: transaction_token_account_balance.ui_token_amount.ui_amount,
decimals: transaction_token_account_balance.ui_token_amount.decimals as u32,
amount: transaction_token_account_balance.ui_token_amount.amount,
ui_amount_string: transaction_token_account_balance
.ui_token_amount
.ui_amount_string,
}),
mint: transaction_token_account_balance.mint,
owner: transaction_token_account_balance.owner,
}
}
fn build_transaction_event(
slot: u64,
ReplicaTransactionInfoV3 {
signature,
is_vote,
transaction,
transaction_status_meta,
index,
message_hash,
}: &ReplicaTransactionInfoV3,
) -> TransactionEvent {
TransactionEvent {
is_vote: *is_vote,
slot,
index: *index as u64,
signature: signature.as_ref().into(),
transaction_status_meta: Some(TransactionStatusMeta {
is_status_err: transaction_status_meta.status.is_err(),
error_info: match &transaction_status_meta.status {
Err(e) => e.to_string(),
Ok(_) => "".to_owned(),
},
rewards: transaction_status_meta
.rewards
.clone()
.unwrap()
.into_iter()
.map(|x| Reward {
pubkey: x.pubkey,
lamports: x.lamports,
post_balance: x.post_balance,
reward_type: match x.reward_type {
Some(r) => r as i32,
None => 0,
},
commission: match x.commission {
Some(v) => v as u32,
None => 0,
},
})
.collect(),
fee: transaction_status_meta.fee,
log_messages: match &transaction_status_meta.log_messages {
Some(v) => v.to_owned(),
None => vec![],
},
inner_instructions: match &transaction_status_meta.inner_instructions {
Some(inners) => inners
.clone()
.into_iter()
.map(|inner| InnerInstructions {
index: inner.index as u32,
instructions: inner
.instructions
.iter()
.map(Self::build_inner_instruction)
.collect(),
})
.collect(),
None => vec![],
},
pre_balances: transaction_status_meta.pre_balances.clone(),
post_balances: transaction_status_meta.post_balances.clone(),
pre_token_balances: match &transaction_status_meta.pre_token_balances {
Some(v) => v
.clone()
.into_iter()
.map(Self::build_transaction_token_balance)
.collect(),
None => vec![],
},
post_token_balances: match &transaction_status_meta.post_token_balances {
Some(v) => v
.clone()
.into_iter()
.map(Self::build_transaction_token_balance)
.collect(),
None => vec![],
},
compute_units_consumed: Self::extract_compute_units_from_metadata(
transaction_status_meta,
),
compute_units_price: Self::extract_compute_price_from_transaction(
&transaction.message,
),
error_logs: Self::extract_error_logs_from_status(&transaction_status_meta.status),
is_successful: transaction_status_meta.status.is_ok(), // Derived from status
}),
transaction: Some(SanitizedTransaction {
message_hash: message_hash.to_bytes().into(),
is_simple_vote_transaction: *is_vote,
message: Some(SanitizedMessage {
message_payload: Some(match &transaction.message {
solana_message::VersionedMessage::Legacy(lv) => {
// Use LegacyLoadedMessage for Legacy messages
sanitized_message::MessagePayload::Legacy(LegacyLoadedMessage {
message: Some(LegacyMessage {
header: Some(Self::build_message_header(&lv.header)),
account_keys: lv
.account_keys
.iter()
.map(|k| k.as_ref().into())
.collect(),
recent_block_hash: lv.recent_blockhash.as_ref().into(),
instructions: lv
.instructions
.iter()
.map(Self::build_compiled_instruction)
.collect(),
}),
is_writable_account_cache: {
// Derive writable status from message header and account positions
let num_required = lv.header.num_required_signatures as usize;
let num_readonly_signed =
lv.header.num_readonly_signed_accounts as usize;
(0..lv.account_keys.len())
.map(|i| {
if i < num_required {
true // Required signers are always writable
} else if i < num_required + num_readonly_signed {
false // Readonly signed accounts
} else {
true // Remaining accounts are writable
}
})
.collect()
},
})
}
solana_message::VersionedMessage::V0(v0) => {
// Use V0LoadedMessage for V0 messages
sanitized_message::MessagePayload::V0(V0LoadedMessage {
message: Some(V0Message {
header: Some(Self::build_message_header(&v0.header)),
account_keys: v0
.account_keys
.iter()
.map(|k| k.as_ref().into())
.collect(),
recent_block_hash: v0.recent_blockhash.as_ref().into(),
instructions: v0
.instructions
.iter()
.map(Self::build_compiled_instruction)
.collect(),
address_table_lookup: v0
.address_table_lookups
.iter()
.map(|vf| MessageAddressTableLookup {
account_key: vf.account_key.as_ref().into(),
writable_indexes: vf
.writable_indexes
.iter()
.map(|x| *x as u32)
.collect(),
readonly_indexes: vf
.readonly_indexes
.iter()
.map(|x| *x as u32)
.collect(),
})
.collect(),
}),
loaded_adresses: Some(LoadedAddresses {
writable: v0
.address_table_lookups
.iter()
.flat_map(|lookup| {
lookup.writable_indexes.iter().map(|&_idx| {
vec![0u8; 32] // Placeholder - actual keys not available
})
})
.collect(),
readonly: v0
.address_table_lookups
.iter()
.flat_map(|lookup| {
lookup.readonly_indexes.iter().map(|&_idx| {
vec![0u8; 32] // Placeholder - actual keys not available
})
})
.collect(),
writable_info: Self::build_loaded_address_info(
&v0.address_table_lookups,
&v0.account_keys,
true,
),
readonly_info: Self::build_loaded_address_info(
&v0.address_table_lookups,
&v0.account_keys,
false,
),
}),
is_writable_account_cache: {
// Derive writable status from message header and account positions
let num_required = v0.header.num_required_signatures as usize;
let num_readonly_signed =
v0.header.num_readonly_signed_accounts as usize;
(0..v0.account_keys.len())
.map(|i| {
if i < num_required {
true // Required signers are always writable
} else if i < num_required + num_readonly_signed {
false // Readonly signed accounts
} else {
true // Remaining accounts are writable
}
})
.collect()
},
})
}
}),
}),
signatures: transaction
.signatures
.iter()
.copied()
.map(|x| x.as_ref().into())
.collect(),
}),
compute_units_consumed: Self::extract_compute_units_from_metadata(
transaction_status_meta,
),
compute_units_price: Self::extract_compute_price_from_transaction(&transaction.message),
total_cost: transaction_status_meta.fee
+ Self::extract_compute_price_from_transaction(&transaction.message),
instruction_count: transaction.message.instructions().len() as u32,
account_count: Self::get_account_keys_from_message(&transaction.message)
.map(|keys| keys.len() as u32)
.unwrap_or(0),
execution_time_ns: 0, // Not available in current API
is_successful: transaction_status_meta.status.is_ok(),
execution_logs: transaction_status_meta
.log_messages
.clone()
.unwrap_or_default(),
error_details: Self::extract_error_logs_from_status(&transaction_status_meta.status),
confirmation_count: 0, // Will be populated from slot status when available
}
}
    /// Debug-log that an account update was filtered out; the pretty-printed
    /// pubkey is only computed when debug logging is actually enabled.
    fn log_ignore_account_update(info: &ReplicaAccountInfoV3) {
        if log_enabled!(::log::Level::Debug) {
            match <&[u8; 32]>::try_from(info.owner) {
                Ok(key) => debug!(
                    "Ignoring update for account key: {:?}",
                    Pubkey::new_from_array(*key)
                ),
                // Err should never happen because the filters only reject keys
                // that are exactly 32 bytes long.
                Err(_err) => debug!("Ignoring update for account key: {:?}", info.owner),
            };
        }
    }
/// Extract compute units consumed from transaction metadata
fn extract_compute_units_from_metadata(
transaction_status_meta: &solana_transaction_status::TransactionStatusMeta,
) -> u32 {
// Check if compute units are available in the metadata
if let Some(compute_units) = transaction_status_meta.compute_units_consumed {
compute_units as u32
} else {
// If not available in metadata, return 0
// We avoid log parsing as it's unreliable
0
}
}
/// Extract compute unit price from transaction message
fn extract_compute_price_from_transaction(message: &solana_message::VersionedMessage) -> u64 {
// Look for compute budget instructions in the transaction
let instructions = message.instructions();
for instruction in instructions {
// Check if this is a compute budget instruction
let program_id_index = instruction.program_id_index as usize;
if let Some(account_keys) = Self::get_account_keys_from_message(message)
.filter(|keys| program_id_index < keys.len())
{
let program_id = &account_keys[program_id_index];
if *program_id == Self::COMPUTE_BUDGET_PROGRAM_ID {
// Parse compute budget instruction data to extract price
let data = &instruction.data;
if data.len() >= 9 && data[0] == 3 {
// SetComputeUnitPrice instruction (discriminator 3)
let price = u64::from_le_bytes([
data[1], data[2], data[3], data[4], data[5], data[6], data[7], data[8],
]);
return price;
}
}
}
}
0 // Default price if not found
}
    /// Extract the static account keys from a versioned message. Always
    /// returns `Some` for the current message variants; addresses loaded via
    /// lookup tables are presumably not included (only `account_keys` is
    /// read) — confirm if lookup-table addresses are ever needed here.
    fn get_account_keys_from_message(
        message: &solana_message::VersionedMessage,
    ) -> Option<&[solana_pubkey::Pubkey]> {
        match message {
            solana_message::VersionedMessage::Legacy(lv) => Some(&lv.account_keys),
            solana_message::VersionedMessage::V0(v0) => Some(&v0.account_keys),
        }
    }
/// Extract error information from transaction status (more reliable than log parsing)
fn extract_error_logs_from_status<T: std::fmt::Display>(status: &Result<(), T>) -> Vec<String> {
match status {
Ok(_) => vec![], // No errors
Err(error) => {
// Convert the actual error to a string representation
vec![error.to_string()]
}
}
}
/// Determine if slot is confirmed based on status
fn is_slot_confirmed(status: &SlotStatus) -> bool {
matches!(status, SlotStatus::Confirmed | SlotStatus::Rooted)
}
/// Get human-readable slot status description
fn get_slot_status_description(status: &SlotStatus) -> String {
match status {
SlotStatus::Processed => "Processed - highest slot of heaviest fork".to_string(),
SlotStatus::Rooted => {
"Rooted - highest slot having reached max vote lockout".to_string()
}
SlotStatus::Confirmed => "Confirmed - voted on by supermajority of cluster".to_string(),
SlotStatus::FirstShredReceived => "First shred received".to_string(),
SlotStatus::Completed => "Completed".to_string(),
SlotStatus::CreatedBank => "Created bank".to_string(),
SlotStatus::Dead => "Dead - fork has been abandoned".to_string(),
}
}
/// Build detailed loaded address information
fn build_loaded_address_info(
_address_table_lookups: &[solana_message::v0::MessageAddressTableLookup],
_account_keys: &[solana_pubkey::Pubkey],
is_writable: bool,
) -> Vec<crate::LoadedAddressInfo> {
let mut address_info = Vec::new();
for lookup in _address_table_lookups.iter() {
let indexes = if is_writable {
&lookup.writable_indexes
} else {
&lookup.readonly_indexes
};
for &index in indexes.iter() {
// Create LoadedAddressInfo with available data
let info = crate::LoadedAddressInfo {
address: lookup.account_key.as_ref().into(),
index: index as u32,
is_writable,
};
address_info.push(info);
}
}
address_info
}
/// Calculate confirmation count based on slot status
fn calculate_confirmation_count(status: &SlotStatus) -> u32 {
match status {
SlotStatus::Processed => 0, // Not confirmed yet
SlotStatus::Rooted => 2, // Fully confirmed (rooted)
SlotStatus::Confirmed => 1, // Confirmed by supermajority
SlotStatus::FirstShredReceived => 0, // Early stage
SlotStatus::Completed => 1, // Considered confirmed
SlotStatus::CreatedBank => 0, // Early stage
SlotStatus::Dead => 0, // Abandoned fork
}
}
fn build_block_event(block: ReplicaBlockInfoV4) -> BlockEvent {
let rewards = block
.rewards
.rewards
.iter()
.map(|x| Reward {
pubkey: x.pubkey.clone(),
lamports: x.lamports,
post_balance: x.post_balance,
reward_type: match x.reward_type {
Some(r) => r as i32,
None => 0,
},
| rust | Apache-2.0 | c96300ace4d1c5c2b1e2feb3f5583cca16906984 | 2026-01-04T20:20:13.613825Z | true |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/build.rs | build.rs | #[cfg(target_arch = "x86")]
use std::arch::x86::__cpuid;
use rustc_version::Channel;
#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::__cpuid;
/// Performance of PDEP/PEXT relative to arithmetic/bit operations
// Discriminants are ordered so the derived `PartialOrd` reflects speed.
#[derive(PartialOrd, PartialEq)]
enum PdepPerf {
    VeryFast = 20,
    Fast = 10,
    Slow = 0,
}
#[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
fn pdep_speed() -> PdepPerf {
    // CPUID leaf 0: the 12-byte vendor string is spelled by EBX, EDX, ECX
    // (in that order), little-endian within each register.
    let leaf0 = unsafe { __cpuid(0) };
    let mut buf = Vec::with_capacity(12);
    buf.extend_from_slice(&leaf0.ebx.to_le_bytes());
    buf.extend_from_slice(&leaf0.edx.to_le_bytes());
    buf.extend_from_slice(&leaf0.ecx.to_le_bytes());
    println!(
        "Detected CPU manufacturer {}",
        String::from_utf8_lossy(&buf)
    );
    if buf.as_slice() == b"AuthenticAMD" || buf.as_slice() == b"HygonGenuine" {
        // CPUID leaf 1: EAX carries the base family (bits 8..12) and the
        // extended family (bits 20..28).
        let leaf1 = unsafe { __cpuid(1) };
        let family = (leaf1.eax >> 8) & 0b1111;
        println!("family {}", family);
        let extended_family = (leaf1.eax >> 20) & 0b11111111;
        println!("extended_family {}", extended_family);
        // Zen, Zen+, and Zen 2 CPUs have very poor PDEP/PEXT performance
        if family == 0xF && (extended_family == 0x8 || extended_family == 0x9) {
            println!("Detected Zen CPU");
            return PdepPerf::Slow;
        }
        if family == 0xF && extended_family == 0xA {
            println!("Detected Zen 3 CPU");
            return PdepPerf::Fast;
        }
    }
    // Intel (and unrecognized AMD families) are assumed to have fast PDEP.
    PdepPerf::VeryFast
}
#[cfg(not(any(target_arch = "x86_64", target_arch = "x86")))]
fn pdep_speed() -> PdepPerf {
    // Non-x86 targets: report slow so the fast-path cfgs are never enabled.
    PdepPerf::Slow
}
fn main() {
    // Declare the custom cfgs so rustc's check-cfg lint accepts them.
    println!("cargo:rustc-check-cfg=cfg(fast_pdep)");
    println!("cargo:rustc-check-cfg=cfg(very_fast_pdep)");
    println!("cargo:rustc-check-cfg=cfg(rustc_nightly)");
    // Explicit opt-in feature forces both PDEP fast paths on regardless of
    // the build machine's CPU.
    if std::env::var(
        "CARGO_FEATURE_DANGEROUSLY_FORCE_ENABLE_PDEP_SINCE_I_REALLY_KNOW_WHAT_IM_DOING",
    )
    .is_ok()
    {
        println!("cargo:rustc-cfg=fast_pdep");
        println!("cargo:rustc-cfg=very_fast_pdep");
    } else if std::env::var("CARGO_FEATURE_NATIVE_OPTIMIZATIONS").is_ok() {
        // Note: this probes the CPU of the build machine, which only matches
        // the target when building natively.
        println!("Compiling with native optimizations");
        let speed = pdep_speed();
        if speed >= PdepPerf::Fast {
            println!("cargo:rustc-cfg=fast_pdep");
        }
        if speed >= PdepPerf::VeryFast {
            println!("cargo:rustc-cfg=very_fast_pdep");
        }
    }
    // Expose whether we are on a nightly toolchain (used for doc_cfg).
    if rustc_version::version_meta().unwrap().channel == Channel::Nightly {
        println!("cargo:rustc-cfg=rustc_nightly");
    }
}
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/src/lib.rs | src/lib.rs | /*!
`varint_simd` is a fast SIMD-accelerated [variable-length integer](https://developers.google.com/protocol-buffers/docs/encoding)
encoder and decoder written in Rust.
**For more information, please see the [README](https://github.com/as-com/varint-simd#readme).**
*/
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(rustc_nightly, feature(doc_cfg))]
#[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
use core::fmt::Debug;
pub mod decode;
pub mod encode;
pub mod num;
#[doc(inline)]
pub use decode::*;
#[doc(inline)]
pub use encode::*;
pub use num::*;
// Functions to help with debugging
#[allow(dead_code)]
// Debug helper: view a 128-bit SIMD register as its 16 raw bytes.
fn slice_m128i(n: __m128i) -> [u8; 16] {
    // SAFETY: __m128i and [u8; 16] have identical size and u8 has no
    // invalid bit patterns.
    unsafe { core::mem::transmute(n) }
}
#[allow(dead_code)]
// Debug helper: view a 256-bit SIMD register as 32 lanes.
// NOTE(review): returns i8 lanes while slice_m128i returns u8 — presumably
// intentional for signed inspection, but confirm the asymmetry is wanted.
fn slice_m256i(n: __m256i) -> [i8; 32] {
    // SAFETY: __m256i and [i8; 32] have identical size and i8 has no
    // invalid bit patterns.
    unsafe { core::mem::transmute(n) }
}
/// Error returned when decoding a varint fails.
#[derive(Debug, PartialEq, Eq)]
pub enum VarIntDecodeError {
    /// The decoded value does not fit in the requested target type.
    Overflow,
    /// The input ended before the varint terminated.
    NotEnoughBytes,
}

impl core::fmt::Display for VarIntDecodeError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Display mirrors the derived Debug representation.
        write!(f, "{:?}", self)
    }
}

#[cfg(feature = "std")]
impl std::error::Error for VarIntDecodeError {}
#[cfg(test)]
mod tests {
#[cfg(target_feature = "avx2")]
use crate::decode_two_wide_unsafe;
use crate::{
decode, decode_eight_u8_unsafe, decode_four_unsafe, decode_len, decode_two_unsafe, encode,
encode_to_slice, VarIntDecodeError, VarIntTarget,
};
use lazy_static::lazy_static;
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
    /// Round-trip helper: asserts that `value` encodes to exactly `encoded`
    /// and that decoding yields `value` again with the same byte length.
    fn check<T: VarIntTarget>(value: T, encoded: &[u8]) {
        // Pad the expected encoding out to 16 bytes, since `encode` always
        // returns a full 16-byte buffer with the varint at the front.
        let mut expected = [0u8; 16];
        expected[..encoded.len()].copy_from_slice(encoded);
        let a = encode(value);
        assert_eq!(a.0, expected);
        assert_eq!(a.1 as usize, encoded.len());
        let roundtrip: (T, usize) = decode(&expected).unwrap();
        assert_eq!(roundtrip.0, value);
        assert_eq!(roundtrip.1 as usize, encoded.len());
        // `decode_len` must agree with the full decode on the varint length.
        let len = decode_len::<T>(&expected).unwrap();
        assert_eq!(len, encoded.len());
    }
// Test cases borrowed from prost
#[test]
fn roundtrip_u8() {
check(2u8.pow(0) - 1, &[0x00]);
check(2u8.pow(0), &[0x01]);
check(2u8.pow(7) - 1, &[0x7F]);
check(2u8.pow(7), &[0x80, 0x01]);
}
#[test]
fn roundtrip_u16() {
check(2u16.pow(0) - 1, &[0x00]);
check(2u16.pow(0), &[0x01]);
check(2u16.pow(7) - 1, &[0x7F]);
check(2u16.pow(7), &[0x80, 0x01]);
check(300u16, &[0xAC, 0x02]);
check(2u16.pow(14) - 1, &[0xFF, 0x7F]);
check(2u16.pow(14), &[0x80, 0x80, 0x01]);
}
#[test]
fn roundtrip_u32() {
check(2u32.pow(0) - 1, &[0x00]);
check(2u32.pow(0), &[0x01]);
check(2u32.pow(7) - 1, &[0x7F]);
check(2u32.pow(7), &[0x80, 0x01]);
check(300u32, &[0xAC, 0x02]);
check(2u32.pow(14) - 1, &[0xFF, 0x7F]);
check(2u32.pow(14), &[0x80, 0x80, 0x01]);
check(2u32.pow(21) - 1, &[0xFF, 0xFF, 0x7F]);
check(2u32.pow(21), &[0x80, 0x80, 0x80, 0x01]);
check(2u32.pow(28) - 1, &[0xFF, 0xFF, 0xFF, 0x7F]);
check(2u32.pow(28), &[0x80, 0x80, 0x80, 0x80, 0x01]);
}
#[test]
fn roundtrip_u64() {
check(2u64.pow(0) - 1, &[0x00]);
check(2u64.pow(0), &[0x01]);
check(2u64.pow(7) - 1, &[0x7F]);
check(2u64.pow(7), &[0x80, 0x01]);
check(300u64, &[0xAC, 0x02]);
check(2u64.pow(14) - 1, &[0xFF, 0x7F]);
check(2u64.pow(14), &[0x80, 0x80, 0x01]);
check(2u64.pow(21) - 1, &[0xFF, 0xFF, 0x7F]);
check(2u64.pow(21), &[0x80, 0x80, 0x80, 0x01]);
check(2u64.pow(28) - 1, &[0xFF, 0xFF, 0xFF, 0x7F]);
check(2u64.pow(28), &[0x80, 0x80, 0x80, 0x80, 0x01]);
check(2u64.pow(35) - 1, &[0xFF, 0xFF, 0xFF, 0xFF, 0x7F]);
check(2u64.pow(35), &[0x80, 0x80, 0x80, 0x80, 0x80, 0x01]);
check(2u64.pow(42) - 1, &[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F]);
check(2u64.pow(42), &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01]);
check(
2u64.pow(49) - 1,
&[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F],
);
check(
2u64.pow(49),
&[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01],
);
check(
2u64.pow(56) - 1,
&[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F],
);
check(
2u64.pow(56),
&[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01],
);
check(
2u64.pow(63) - 1,
&[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F],
);
check(
2u64.pow(63),
&[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01],
);
check(
u64::MAX,
&[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01],
);
}
#[test]
fn overflow_u8() {
let encoded = encode(u8::MAX as u16 + 1);
decode::<u8>(&encoded.0).expect_err("should overflow");
}
#[test]
fn overflow_u16() {
let encoded = encode(u16::MAX as u32 + 1);
decode::<u16>(&encoded.0).expect_err("should overflow");
}
#[test]
fn overflow_u32() {
let encoded = encode(u32::MAX as u64 + 1);
decode::<u32>(&encoded.0).expect_err("should overflow");
}
#[test]
fn overflow_u64() {
decode::<u8>(&[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x02])
.expect_err("should overflow");
}
#[test]
fn truncated() {
for i in 1..10 {
let encoded = encode(1u64 << 7 * i);
for j in 0..=i {
assert_eq!(
decode::<u64>(&encoded.0[..j]),
Err(VarIntDecodeError::NotEnoughBytes)
);
}
}
}
    /// Exhaustively checks `decode_two_unsafe` over the cross product of two
    /// test-value slices: both values are encoded back-to-back, and the pair
    /// decode must return both values and both encoded lengths.
    fn check_decode_2x<T: VarIntTarget, U: VarIntTarget>(a: &[T], b: &[U]) {
        for i in a {
            for j in b {
                // 16-byte scratch buffer; callers must pick T/U so both
                // encodings fit (u64 + u64 can need 20 bytes and is excluded
                // by the call sites below).
                let mut enc = [0u8; 16];
                let first_len = encode_to_slice(*i, &mut enc);
                let second_len = encode_to_slice(*j, &mut enc[first_len as usize..]);
                let decoded = unsafe { decode_two_unsafe::<T, U>(enc.as_ptr()) };
                assert_eq!(decoded.0, *i);
                assert_eq!(decoded.1, *j);
                assert_eq!(decoded.2, first_len);
                assert_eq!(decoded.3, second_len);
            }
        }
    }
#[cfg(target_feature = "avx2")]
fn check_decode_wide_2x<T: VarIntTarget, U: VarIntTarget>(a: &[T], b: &[U]) {
for i in a {
for j in b {
let mut enc = [0u8; 32];
let first_len = encode_to_slice(*i, &mut enc);
let second_len = encode_to_slice(*j, &mut enc[first_len as usize..]);
let decoded = unsafe { decode_two_wide_unsafe::<T, U>(enc.as_ptr()) };
assert_eq!(decoded.0, *i);
assert_eq!(decoded.1, *j);
assert_eq!(decoded.2, first_len);
assert_eq!(decoded.3, second_len);
}
}
}
    /// Exhaustively checks `decode_four_unsafe` over the 4-way cross product:
    /// all four values are encoded back-to-back, and the quad decode must
    /// return every value, every length, and report no invalidity (flag .8).
    fn check_decode_4x<T: VarIntTarget, U: VarIntTarget, V: VarIntTarget, W: VarIntTarget>(
        a: &[T],
        b: &[U],
        c: &[V],
        d: &[W],
    ) {
        for i in a {
            for j in b {
                for k in c {
                    for l in d {
                        // 16-byte scratch buffer; call sites choose type
                        // combinations whose encodings fit within it.
                        let mut enc = [0u8; 16];
                        let first_len = encode_to_slice(*i, &mut enc);
                        let second_len = encode_to_slice(*j, &mut enc[first_len as usize..]);
                        let third_len =
                            encode_to_slice(*k, &mut enc[(first_len + second_len) as usize..]);
                        let fourth_len = encode_to_slice(
                            *l,
                            &mut enc[(first_len + second_len + third_len) as usize..],
                        );
                        let decoded = unsafe { decode_four_unsafe::<T, U, V, W>(enc.as_ptr()) };
                        assert_eq!(decoded.0, *i);
                        assert_eq!(decoded.1, *j);
                        assert_eq!(decoded.2, *k);
                        assert_eq!(decoded.3, *l);
                        assert_eq!(decoded.4, first_len);
                        assert_eq!(decoded.5, second_len);
                        assert_eq!(decoded.6, third_len);
                        assert_eq!(decoded.7, fourth_len);
                        assert!(!decoded.8);
                    }
                }
            }
        }
    }
lazy_static! {
static ref NUMS_U8: [u8; 5] = [
2u8.pow(0) - 1,
2u8.pow(0),
2u8.pow(7) - 1,
2u8.pow(7),
u8::MAX
];
static ref NUMS_U16: [u16; 8] = [
2u16.pow(0) - 1,
2u16.pow(0),
2u16.pow(7) - 1,
2u16.pow(7),
300,
2u16.pow(14) - 1,
2u16.pow(14),
u16::MAX
];
static ref NUMS_U32: [u32; 12] = [
2u32.pow(0) - 1,
2u32.pow(0),
2u32.pow(7) - 1,
2u32.pow(7),
300,
2u32.pow(14) - 1,
2u32.pow(14),
2u32.pow(21) - 1,
2u32.pow(21),
2u32.pow(28) - 1,
2u32.pow(28),
u32::MAX
];
static ref NUMS_U64: [u64; 22] = [
2u64.pow(0) - 1,
2u64.pow(0),
2u64.pow(7) - 1,
2u64.pow(7),
300,
2u64.pow(14) - 1,
2u64.pow(14),
2u64.pow(21) - 1,
2u64.pow(21),
2u64.pow(28) - 1,
2u64.pow(28),
2u64.pow(35) - 1,
2u64.pow(35),
2u64.pow(42) - 1,
2u64.pow(42),
2u64.pow(49) - 1,
2u64.pow(49),
2u64.pow(56) - 1,
2u64.pow(56),
2u64.pow(63) - 1,
2u64.pow(63),
u64::MAX
];
}
#[test]
fn test_decode_2x_u8_x() {
check_decode_2x::<u8, u8>(&NUMS_U8[..], &NUMS_U8[..]);
check_decode_2x::<u8, u16>(&NUMS_U8[..], &NUMS_U16[..]);
check_decode_2x::<u8, u32>(&NUMS_U8[..], &NUMS_U32[..]);
check_decode_2x::<u8, u64>(&NUMS_U8[..], &NUMS_U64[..]);
}
#[test]
#[cfg(target_feature = "avx2")]
fn test_decode_2x_wide_u8_x() {
check_decode_wide_2x::<u8, u8>(&NUMS_U8[..], &NUMS_U8[..]);
check_decode_wide_2x::<u8, u16>(&NUMS_U8[..], &NUMS_U16[..]);
check_decode_wide_2x::<u8, u32>(&NUMS_U8[..], &NUMS_U32[..]);
check_decode_wide_2x::<u8, u64>(&NUMS_U8[..], &NUMS_U64[..]);
}
#[test]
fn test_decode_2x_u16_x() {
check_decode_2x::<u16, u8>(&NUMS_U16[..], &NUMS_U8[..]);
check_decode_2x::<u16, u16>(&NUMS_U16[..], &NUMS_U16[..]);
check_decode_2x::<u16, u32>(&NUMS_U16[..], &NUMS_U32[..]);
check_decode_2x::<u16, u64>(&NUMS_U16[..], &NUMS_U64[..]);
}
#[test]
#[cfg(target_feature = "avx2")]
fn test_decode_2x_wide_u16_x() {
check_decode_wide_2x::<u16, u8>(&NUMS_U16[..], &NUMS_U8[..]);
check_decode_wide_2x::<u16, u16>(&NUMS_U16[..], &NUMS_U16[..]);
check_decode_wide_2x::<u16, u32>(&NUMS_U16[..], &NUMS_U32[..]);
check_decode_wide_2x::<u16, u64>(&NUMS_U16[..], &NUMS_U64[..]);
}
#[test]
fn test_decode_2x_u32_x() {
check_decode_2x::<u32, u8>(&NUMS_U32[..], &NUMS_U8[..]);
check_decode_2x::<u32, u16>(&NUMS_U32[..], &NUMS_U16[..]);
check_decode_2x::<u32, u32>(&NUMS_U32[..], &NUMS_U32[..]);
check_decode_2x::<u32, u64>(&NUMS_U32[..], &NUMS_U64[..]);
}
#[test]
#[cfg(target_feature = "avx2")]
fn test_decode_2x_wide_u32_x() {
check_decode_wide_2x::<u32, u8>(&NUMS_U32[..], &NUMS_U8[..]);
check_decode_wide_2x::<u32, u16>(&NUMS_U32[..], &NUMS_U16[..]);
check_decode_wide_2x::<u32, u32>(&NUMS_U32[..], &NUMS_U32[..]);
check_decode_wide_2x::<u32, u64>(&NUMS_U32[..], &NUMS_U64[..]);
}
#[test]
fn test_decode_2x_u64_x() {
check_decode_2x::<u64, u8>(&NUMS_U64[..], &NUMS_U8[..]);
check_decode_2x::<u64, u16>(&NUMS_U64[..], &NUMS_U16[..]);
check_decode_2x::<u64, u32>(&NUMS_U64[..], &NUMS_U32[..]);
// check_decode_2x::<u64, u64>(&NUMS_U64[..], &NUMS_U64[..]);
}
#[test]
#[cfg(target_feature = "avx2")]
fn test_decode_2x_wide_u64_x() {
check_decode_wide_2x::<u64, u8>(&NUMS_U64[..], &NUMS_U8[..]);
check_decode_wide_2x::<u64, u16>(&NUMS_U64[..], &NUMS_U16[..]);
check_decode_wide_2x::<u64, u32>(&NUMS_U64[..], &NUMS_U32[..]);
check_decode_wide_2x::<u64, u64>(&NUMS_U64[..], &NUMS_U64[..]);
}
#[test]
fn test_decode_4x_u8_u8_x_x() {
check_decode_4x::<u8, u8, u8, u8>(&NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..]);
check_decode_4x::<u8, u8, u8, u16>(&NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U16[..]);
check_decode_4x::<u8, u8, u8, u32>(&NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U32[..]);
check_decode_4x::<u8, u8, u8, u64>(&NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U64[..]);
check_decode_4x::<u8, u8, u16, u8>(&NUMS_U8[..], &NUMS_U8[..], &NUMS_U16[..], &NUMS_U8[..]);
check_decode_4x::<u8, u8, u16, u16>(
&NUMS_U8[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u8, u8, u16, u32>(
&NUMS_U8[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u8_u16_x_x() {
check_decode_4x::<u8, u16, u8, u8>(&NUMS_U8[..], &NUMS_U16[..], &NUMS_U8[..], &NUMS_U8[..]);
check_decode_4x::<u8, u16, u8, u16>(
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u8, u16, u8, u32>(
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u8, u16, u16, u8>(
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u8, u16, u16, u16>(
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u8, u16, u16, u32>(
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u8_u32_x_x() {
check_decode_4x::<u8, u32, u8, u8>(&NUMS_U8[..], &NUMS_U32[..], &NUMS_U8[..], &NUMS_U8[..]);
check_decode_4x::<u8, u32, u8, u16>(
&NUMS_U8[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u8, u32, u8, u32>(
&NUMS_U8[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u8, u32, u16, u8>(
&NUMS_U8[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u8, u32, u16, u16>(
&NUMS_U8[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u8, u32, u16, u32>(
&NUMS_U8[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u8_u64_x_x() {
check_decode_4x::<u8, u64, u8, u8>(&NUMS_U8[..], &NUMS_U64[..], &NUMS_U8[..], &NUMS_U8[..]);
}
#[test]
fn test_decode_4x_u16_u8_x_x() {
check_decode_4x::<u16, u8, u8, u8>(&NUMS_U16[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..]);
check_decode_4x::<u16, u8, u8, u16>(
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u16, u8, u8, u32>(
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u16, u8, u16, u8>(
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u16, u8, u16, u16>(
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u16, u8, u16, u32>(
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u16_u16_x_x() {
check_decode_4x::<u16, u16, u8, u8>(
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U8[..],
);
check_decode_4x::<u16, u16, u8, u16>(
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u16, u16, u8, u32>(
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u16, u16, u16, u8>(
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u16, u16, u16, u16>(
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u16, u16, u16, u32>(
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u16_u32_x_x() {
check_decode_4x::<u16, u32, u8, u8>(
&NUMS_U16[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U8[..],
);
check_decode_4x::<u16, u32, u8, u16>(
&NUMS_U16[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u16, u32, u8, u32>(
&NUMS_U16[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u16, u32, u16, u8>(
&NUMS_U16[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u16, u32, u16, u16>(
&NUMS_U16[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u16, u32, u16, u32>(
&NUMS_U16[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u32_u8_x_x() {
check_decode_4x::<u32, u8, u8, u8>(&NUMS_U32[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..]);
check_decode_4x::<u32, u8, u8, u16>(
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u32, u8, u8, u32>(
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u32, u8, u16, u8>(
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u32, u8, u16, u16>(
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u32, u8, u16, u32>(
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u32_u16_x_x() {
check_decode_4x::<u32, u16, u8, u8>(
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U8[..],
);
check_decode_4x::<u32, u16, u8, u16>(
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u32, u16, u8, u32>(
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U8[..],
&NUMS_U32[..],
);
check_decode_4x::<u32, u16, u16, u8>(
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u32, u16, u16, u16>(
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
check_decode_4x::<u32, u16, u16, u32>(
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U16[..],
&NUMS_U32[..],
);
}
#[test]
fn test_decode_4x_u32_u32_x_x() {
check_decode_4x::<u32, u32, u8, u8>(
&NUMS_U32[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U8[..],
);
check_decode_4x::<u32, u32, u8, u16>(
&NUMS_U32[..],
&NUMS_U32[..],
&NUMS_U8[..],
&NUMS_U16[..],
);
check_decode_4x::<u32, u32, u16, u8>(
&NUMS_U32[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U8[..],
);
check_decode_4x::<u32, u32, u16, u16>(
&NUMS_U32[..],
&NUMS_U32[..],
&NUMS_U16[..],
&NUMS_U16[..],
);
}
#[test]
fn test_decode_4x_u64_u8_x_x() {
check_decode_4x::<u64, u8, u8, u8>(&NUMS_U64[..], &NUMS_U8[..], &NUMS_U8[..], &NUMS_U8[..]);
}
fn check_decode_8x_u8(a: &[u8]) {
for i in a {
for j in a {
for k in a {
for l in a {
for m in a {
for n in a {
for o in a {
for p in a {
let mut enc = [0u8; 16];
let first_len = encode_to_slice(*i, &mut enc);
let second_len =
encode_to_slice(*j, &mut enc[first_len as usize..]);
let third_len = encode_to_slice(
*k,
&mut enc[(first_len + second_len) as usize..],
);
let fourth_len = encode_to_slice(
*l,
&mut enc
[(first_len + second_len + third_len) as usize..],
);
let fifth_len = encode_to_slice(
*m,
&mut enc[(first_len
+ second_len
+ third_len
+ fourth_len)
as usize..],
);
let sixth_len = encode_to_slice(
*n,
&mut enc[(first_len
+ second_len
+ third_len
+ fourth_len
+ fifth_len)
as usize..],
);
let seventh_len = encode_to_slice(
*o,
&mut enc[(first_len
+ second_len
+ third_len
+ fourth_len
+ fifth_len
+ sixth_len)
as usize..],
);
let eighth_len = encode_to_slice(
*p,
&mut enc[(first_len
+ second_len
+ third_len
+ fourth_len
+ fifth_len
+ sixth_len
+ seventh_len)
as usize..],
);
let decoded =
unsafe { decode_eight_u8_unsafe(enc.as_ptr()) };
assert_eq!(decoded.0, [*i, *j, *k, *l, *m, *n, *o, *p]);
assert_eq!(
decoded.1,
first_len
+ second_len
+ third_len
+ fourth_len
+ fifth_len
+ sixth_len
+ seventh_len
+ eighth_len
);
}
}
}
}
}
}
}
}
}
#[test]
fn test_decode_8x_u8() {
check_decode_8x_u8(&NUMS_U8[..]);
}
// #[test]
// fn test_two() {
// // let result = unsafe { decode_two_unsafe::<u32, u32>([0x80, 0x80, 0x80, 0x80, 0x01, 0x80, 0x80, 0x80, 0x80, 0x01, 0, 0, 0, 0, 0, 0].as_ptr()) };
// let result = unsafe {
// decode_two_wide_unsafe::<u8, u8>(
// [
// 0x80, 0x01, 0x70, 0x01, 0x01, 0x80, 0x80, 0x80, 0x80, 0x01, 0, 0, 0, 0, 0, 0,
// ]
// .as_ptr(),
// )
// };
// println!("{:?}", result);
// }
//
// #[test]
// fn test_four() {
// let result = unsafe {
// decode_four_unsafe::<u16, u16, u16, u16>(
// [
// 0x01, 0x82, 0x01, 0x83, 0x80, 0x01, 0x84, 0x80, 0x01, 0, 0, 0, 0, 0, 0,
// ]
// .as_ptr(),
// )
// };
//
// println!("{:?}", result);
// }
//
// #[test]
// fn test_eight() {
// let result = unsafe {
// decode_eight_u8_unsafe(
// [
// 0x80, 0x01, 0x80, 0x01, 0x01, 0x90, 0x01, 0x01, 0x01, 0x02, 0x90, 0x01, 0, 0,
// 0, 0, 0, 0, 0, 0,
// ]
// .as_ptr(),
// )
// };
//
// println!("{:?}", result);
// }
}
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/src/num.rs | src/num.rs | #[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
use core::fmt::Debug;
/// Represents an unsigned scalar value that can be encoded to and decoded from a varint.
pub trait VarIntTarget: Debug + Eq + PartialEq + PartialOrd + Sized + Copy {
    /// The signed version of this type
    type Signed: SignedVarIntTarget;
    /// The maximum length of varint that is necessary to represent this number
    const MAX_VARINT_BYTES: u8;
    /// The maximum value of the last byte if the varint is MAX_VARINT_BYTES long such that the
    /// varint would not overflow the target
    const MAX_LAST_VARINT_BYTE: u8;
    /// Converts a 128-bit vector to this number
    ///
    /// Note: Despite operating on 128-bit SIMD vectors, these functions accept and return static
    /// arrays due to a lack of optimization capability by the compiler when passing or returning
    /// intrinsic vectors.
    fn vector_to_num(res: [u8; 16]) -> Self;
    /// Converts raw varint bytes packed into a single `u64` to this number.
    /// Unimplemented for `u64` itself, whose encoding can exceed 8 bytes.
    fn scalar_to_num(x: u64) -> Self;
    /// Cast from u32 to self
    fn cast_u32(num: u32) -> Self;
    /// Cast from u64 to self
    fn cast_u64(num: u64) -> Self;
    /// Splits this number into 7-bit segments for encoding
    fn num_to_scalar_stage1(self) -> u64;
    /// Splits this number into 7-bit segments for encoding
    fn num_to_vector_stage1(self) -> [u8; 16];
    /// ZigZag encodes this value
    fn zigzag(from: Self::Signed) -> Self;
    /// ZigZag decodes this value
    fn unzigzag(self) -> Self::Signed;
}
impl VarIntTarget for u8 {
    type Signed = i8;
    const MAX_VARINT_BYTES: u8 = 2;
    // A 2-byte varint carries 14 payload bits; only the low bit of the second
    // byte may be set without overflowing a u8 (7 + 1 = 8 bits).
    const MAX_LAST_VARINT_BYTE: u8 = 0b00000001;

    #[inline(always)]
    fn vector_to_num(res: [u8; 16]) -> Self {
        // Only the low 8 bytes can hold a u8 varint (at most 2 bytes long).
        let res: [u64; 2] = unsafe { core::mem::transmute(res) };
        let x = res[0];
        Self::scalar_to_num(x)
    }

    #[inline(always)]
    #[cfg(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep))]
    fn scalar_to_num(x: u64) -> Self {
        // Extract the 7 payload bits of byte 0 and bit 0 of byte 1.
        unsafe { _pext_u64(x, 0x000000000000017f) as u8 }
    }

    #[inline(always)]
    #[cfg(not(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep)))]
    fn scalar_to_num(x: u64) -> Self {
        ((x & 0x000000000000007f) | ((x & 0x0000000000000100) >> 1)) as u8
    }

    #[inline(always)]
    fn cast_u32(num: u32) -> Self {
        num as u8
    }

    #[inline(always)]
    fn cast_u64(num: u64) -> Self {
        num as u8
    }

    #[inline(always)]
    #[cfg(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep))]
    fn num_to_scalar_stage1(self) -> u64 {
        let x = self as u64;
        unsafe { _pdep_u64(x, 0x000000000000017f) }
    }

    #[inline(always)]
    #[cfg(not(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep)))]
    fn num_to_scalar_stage1(self) -> u64 {
        let x = self as u64;
        (x & 0x000000000000007f) | ((x & 0x0000000000000080) << 1)
    }

    // The two cfg variants of this function were identical except for an
    // unused `let x = self as u64;` binding in the pdep path, so they are
    // collapsed into a single unconditional implementation.
    #[inline(always)]
    fn num_to_vector_stage1(self) -> [u8; 16] {
        let mut res = [0u64; 2];
        res[0] = self.num_to_scalar_stage1();
        unsafe { core::mem::transmute(res) }
    }

    #[inline(always)]
    fn zigzag(from: Self::Signed) -> Self {
        ((from << 1) ^ (from >> 7)) as Self
    }

    #[inline(always)]
    fn unzigzag(self) -> Self::Signed {
        ((self >> 1) ^ (-((self & 1) as i8)) as u8) as i8
    }
}
impl VarIntTarget for u16 {
    type Signed = i16;
    const MAX_VARINT_BYTES: u8 = 3;
    // 3 varint bytes carry 21 payload bits; only the low 2 bits of the third
    // byte may be set without overflowing a u16 (7 + 7 + 2 = 16 bits).
    const MAX_LAST_VARINT_BYTE: u8 = 0b00000011;

    #[inline(always)]
    fn vector_to_num(res: [u8; 16]) -> Self {
        // Only the low 8 bytes can hold a u16 varint (at most 3 bytes long).
        let arr: [u64; 2] = unsafe { core::mem::transmute(res) };
        let x = arr[0];
        Self::scalar_to_num(x)
    }

    #[inline(always)]
    #[cfg(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep))]
    fn scalar_to_num(x: u64) -> Self {
        unsafe { _pext_u64(x, 0x0000000000037f7f) as u16 }
    }

    #[inline(always)]
    #[cfg(not(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep)))]
    fn scalar_to_num(x: u64) -> Self {
        ((x & 0x000000000000007f)
            | ((x & 0x0000000000030000) >> 2)
            | ((x & 0x0000000000007f00) >> 1)) as u16
    }

    #[inline(always)]
    fn cast_u32(num: u32) -> Self {
        num as u16
    }

    #[inline(always)]
    fn cast_u64(num: u64) -> Self {
        num as u16
    }

    #[inline(always)]
    #[cfg(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep))]
    fn num_to_scalar_stage1(self) -> u64 {
        let x = self as u64;
        unsafe { _pdep_u64(x, 0x0000000000037f7f) }
    }

    #[inline(always)]
    #[cfg(not(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep)))]
    fn num_to_scalar_stage1(self) -> u64 {
        let x = self as u64;
        (x & 0x000000000000007f) | ((x & 0x0000000000003f80) << 1) | ((x & 0x000000000000c000) << 2)
    }

    // The two cfg variants of this function were identical except for an
    // unused `let x = self as u64;` binding in the pdep path, so they are
    // collapsed into a single unconditional implementation.
    #[inline(always)]
    fn num_to_vector_stage1(self) -> [u8; 16] {
        let mut res = [0u64; 2];
        res[0] = self.num_to_scalar_stage1();
        unsafe { core::mem::transmute(res) }
    }

    #[inline(always)]
    fn zigzag(from: Self::Signed) -> Self {
        ((from << 1) ^ (from >> 15)) as Self
    }

    #[inline(always)]
    fn unzigzag(self) -> Self::Signed {
        ((self >> 1) ^ (-((self & 1) as i16)) as u16) as i16
    }
}
impl VarIntTarget for u32 {
    type Signed = i32;
    const MAX_VARINT_BYTES: u8 = 5;
    // 5 varint bytes carry 35 payload bits; only the low 4 bits of the fifth
    // byte may be set without overflowing a u32 (4 * 7 + 4 = 32 bits).
    const MAX_LAST_VARINT_BYTE: u8 = 0b00001111;

    #[inline(always)]
    fn vector_to_num(res: [u8; 16]) -> Self {
        // Only the low 8 bytes can hold a u32 varint (at most 5 bytes long).
        let arr: [u64; 2] = unsafe { core::mem::transmute(res) };
        let x = arr[0];
        Self::scalar_to_num(x)
    }

    #[inline(always)]
    #[cfg(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep))]
    fn scalar_to_num(x: u64) -> Self {
        unsafe { _pext_u64(x, 0x0000000f7f7f7f7f) as u32 }
    }

    #[inline(always)]
    #[cfg(not(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep)))]
    fn scalar_to_num(x: u64) -> Self {
        ((x & 0x000000000000007f)
            | ((x & 0x0000000f00000000) >> 4)
            | ((x & 0x000000007f000000) >> 3)
            | ((x & 0x00000000007f0000) >> 2)
            | ((x & 0x0000000000007f00) >> 1)) as u32
    }

    #[inline(always)]
    fn cast_u32(num: u32) -> Self {
        num
    }

    #[inline(always)]
    fn cast_u64(num: u64) -> Self {
        num as u32
    }

    #[inline(always)]
    #[cfg(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep))]
    fn num_to_scalar_stage1(self) -> u64 {
        let x = self as u64;
        unsafe { _pdep_u64(x, 0x0000000f7f7f7f7f) }
    }

    #[inline(always)]
    #[cfg(not(all(target_arch = "x86_64", target_feature = "bmi2", fast_pdep)))]
    fn num_to_scalar_stage1(self) -> u64 {
        let x = self as u64;
        (x & 0x000000000000007f)
            | ((x & 0x0000000000003f80) << 1)
            | ((x & 0x00000000001fc000) << 2)
            | ((x & 0x000000000fe00000) << 3)
            | ((x & 0x00000000f0000000) << 4)
    }

    // The two cfg variants of this function were identical except for an
    // unused `let x = self as u64;` binding in the pdep path, so they are
    // collapsed into a single unconditional implementation.
    #[inline(always)]
    fn num_to_vector_stage1(self) -> [u8; 16] {
        let mut res = [0u64; 2];
        res[0] = self.num_to_scalar_stage1();
        unsafe { core::mem::transmute(res) }
    }

    #[inline(always)]
    fn zigzag(from: Self::Signed) -> Self {
        ((from << 1) ^ (from >> 31)) as Self
    }

    #[inline(always)]
    fn unzigzag(self) -> Self::Signed {
        ((self >> 1) ^ (-((self & 1) as i32)) as u32) as i32
    }
}
// NOTE(review): unlike the u8/u16/u32 impls, the cfg gates in this impl omit
// `target_arch = "x86_64"`. `_pext_u64`/`_pdep_u64` only exist on x86_64, so a
// 32-bit x86 build with `bmi2` + `fast_pdep` would presumably fail to compile
// — confirm whether the narrower gate is intentional.
impl VarIntTarget for u64 {
    type Signed = i64;
    const MAX_VARINT_BYTES: u8 = 10;
    const MAX_LAST_VARINT_BYTE: u8 = 0b00000001;
    // A u64 varint can span 10 bytes, which never fits in a single 64-bit
    // scalar of encoded bytes, so the scalar paths are unreachable by design.
    fn scalar_to_num(_x: u64) -> Self {
        unimplemented!("destination too wide")
    }
    #[inline(always)]
    #[cfg(all(target_feature = "bmi2", fast_pdep))]
    fn vector_to_num(res: [u8; 16]) -> Self {
        let arr: [u64; 2] = unsafe { core::mem::transmute(res) };
        let x = arr[0];
        let y = arr[1];
        // Low 8 bytes contribute 7 payload bits each; bytes 8-9 contribute
        // the remaining top 8 bits (7 + 1).
        let res = unsafe { _pext_u64(x, 0x7f7f7f7f7f7f7f7f) }
            | (unsafe { _pext_u64(y, 0x000000000000017f) } << 56);
        res
    }
    #[inline(always)]
    #[cfg(all(target_feature = "bmi2", fast_pdep))]
    fn num_to_vector_stage1(self) -> [u8; 16] {
        let mut res = [0u64; 2];
        let x = self;
        res[0] = unsafe { _pdep_u64(x, 0x7f7f7f7f7f7f7f7f) };
        res[1] = unsafe { _pdep_u64(x >> 56, 0x000000000000017f) };
        unsafe { core::mem::transmute(res) }
    }
    #[inline(always)]
    #[cfg(all(target_feature = "avx2", not(all(target_feature = "bmi2", fast_pdep))))]
    fn vector_to_num(res: [u8; 16]) -> Self {
        // SIMD emulation of the pext path: mask each 7-bit group and shift it
        // into place, processing two 64-bit lanes at a time.
        let pt1 = unsafe {
            let b = core::mem::transmute::<[u8; 16], __m128i>(res);
            let c = _mm_broadcastq_epi64(b);
            let d = _mm_or_si128(
                _mm_or_si128(
                    _mm_srlv_epi64(
                        _mm_and_si128(c, _mm_set_epi64x(0x000000000000007f, 0x7f00000000000000)),
                        _mm_set_epi64x(0, 7),
                    ),
                    _mm_srlv_epi64(
                        _mm_and_si128(c, _mm_set_epi64x(0x007f000000000000, 0x00007f0000000000)),
                        _mm_set_epi64x(6, 5),
                    ),
                ),
                _mm_or_si128(
                    _mm_srlv_epi64(
                        _mm_and_si128(c, _mm_set_epi64x(0x0000007f00000000, 0x000000007f000000)),
                        _mm_set_epi64x(4, 3),
                    ),
                    _mm_srlv_epi64(
                        _mm_and_si128(c, _mm_set_epi64x(0x00000000007f0000, 0x0000000000007f00)),
                        _mm_set_epi64x(2, 1),
                    ),
                ),
            );
            let e = _mm_or_si128(d, _mm_bsrli_si128(d, 8));
            _mm_extract_epi64(e, 0) as u64
        };
        let arr: [u64; 2] = unsafe { core::mem::transmute(res) };
        let y = arr[1];
        // This incantation was generated with calcperm
        pt1
        // don't forget about bytes spilling to the other word
        | ((y & 0x0000000000000100) << 55)
        | ((y & 0x000000000000007f) << 56)
    }
    fn num_to_scalar_stage1(self) -> u64 {
        panic!("source too wide")
    }
    #[inline(always)]
    #[cfg(all(target_feature = "avx2", not(all(target_feature = "bmi2", fast_pdep))))]
    fn num_to_vector_stage1(self) -> [u8; 16] {
        let mut res = [0u64; 2];
        let x = self;
        // SIMD emulation of the pdep path, mirroring the masks/shifts above.
        let b = unsafe { _mm_set1_epi64x(self as i64) };
        let c = unsafe {
            _mm_or_si128(
                _mm_or_si128(
                    _mm_sllv_epi64(
                        _mm_and_si128(b, _mm_set_epi64x(0x00000007f0000000, 0x000003f800000000)),
                        _mm_set_epi64x(4, 5),
                    ),
                    _mm_sllv_epi64(
                        _mm_and_si128(b, _mm_set_epi64x(0x0001fc0000000000, 0x00fe000000000000)),
                        _mm_set_epi64x(6, 7),
                    ),
                ),
                _mm_or_si128(
                    _mm_sllv_epi64(
                        _mm_and_si128(b, _mm_set_epi64x(0x000000000000007f, 0x0000000000003f80)),
                        _mm_set_epi64x(0, 1),
                    ),
                    _mm_sllv_epi64(
                        _mm_and_si128(b, _mm_set_epi64x(0x00000000001fc000, 0x000000000fe00000)),
                        _mm_set_epi64x(2, 3),
                    ),
                ),
            )
        };
        let d = unsafe { _mm_or_si128(c, _mm_bsrli_si128(c, 8)) };
        res[0] = unsafe { _mm_extract_epi64(d, 0) as u64 };
        res[1] = ((x & 0x7f00000000000000) >> 56) | ((x & 0x8000000000000000) >> 55);
        unsafe { core::mem::transmute(res) }
    }
    #[inline(always)]
    #[cfg(not(any(target_feature = "avx2", all(target_feature = "bmi2", fast_pdep))))]
    fn vector_to_num(res: [u8; 16]) -> Self {
        let arr: [u64; 2] = unsafe { core::mem::transmute(res) };
        let x = arr[0];
        let y = arr[1];
        // This incantation was generated with calcperm
        (x & 0x000000000000007f)
            | ((x & 0x7f00000000000000) >> 7)
            | ((x & 0x007f000000000000) >> 6)
            | ((x & 0x00007f0000000000) >> 5)
            | ((x & 0x0000007f00000000) >> 4)
            | ((x & 0x000000007f000000) >> 3)
            | ((x & 0x00000000007f0000) >> 2)
            | ((x & 0x0000000000007f00) >> 1)
            // don't forget about bytes spilling to the other word
            | ((y & 0x0000000000000100) << 55)
            | ((y & 0x000000000000007f) << 56)
    }
    #[inline(always)]
    #[cfg(not(any(target_feature = "avx2", all(target_feature = "bmi2", fast_pdep))))]
    fn num_to_vector_stage1(self) -> [u8; 16] {
        let mut res = [0u64; 2];
        let x = self;
        res[0] = (x & 0x000000000000007f)
            | ((x & 0x0000000000003f80) << 1)
            | ((x & 0x00000000001fc000) << 2)
            | ((x & 0x000000000fe00000) << 3)
            | ((x & 0x00000007f0000000) << 4)
            | ((x & 0x000003f800000000) << 5)
            | ((x & 0x0001fc0000000000) << 6)
            | ((x & 0x00fe000000000000) << 7);
        res[1] = ((x & 0x7f00000000000000) >> 56) | ((x & 0x8000000000000000) >> 55);
        unsafe { core::mem::transmute(res) }
    }
    #[inline(always)]
    fn cast_u32(num: u32) -> Self {
        num as u64
    }
    #[inline(always)]
    fn cast_u64(num: u64) -> Self {
        num
    }
    #[inline(always)]
    fn zigzag(from: Self::Signed) -> Self {
        ((from << 1) ^ (from >> 63)) as Self
    }
    #[inline(always)]
    fn unzigzag(self) -> Self::Signed {
        ((self >> 1) ^ (-((self & 1) as i64)) as u64) as i64
    }
}
/// Represents a signed scalar value that can be encoded to and decoded from a varint in ZigZag
/// format.
pub trait SignedVarIntTarget: Debug + Eq + PartialEq + Sized + Copy {
    /// The unsigned counterpart used for the actual varint wire format.
    type Unsigned: VarIntTarget<Signed = Self>;
    /// ZigZag encodes this value
    #[inline(always)]
    fn zigzag(from: Self) -> Self::Unsigned {
        Self::Unsigned::zigzag(from)
    }
    /// ZigZag decodes this value
    #[inline(always)]
    fn unzigzag(from: Self::Unsigned) -> Self {
        Self::Unsigned::unzigzag(from)
    }
}
// Wire each signed primitive to its same-width unsigned wire type.
impl SignedVarIntTarget for i8 {
    type Unsigned = u8;
}
impl SignedVarIntTarget for i16 {
    type Unsigned = u16;
}
impl SignedVarIntTarget for i32 {
    type Unsigned = u32;
}
impl SignedVarIntTarget for i64 {
    type Unsigned = u64;
}
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/src/decode/lookup.rs | src/decode/lookup.rs | #[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
// Forces 16-byte alignment so the byte table below can be transmuted into
// `__m128i` values, which require 16-byte alignment.
#[repr(align(16), C)]
struct Align128<T>(T);
/// Shuffle-control table for extracting two back-to-back varints from a
/// single 16-byte load.
///
/// Each entry is a 16-lane byte-shuffle mask, presumably consumed by
/// `_mm_shuffle_epi8` — TODO confirm against the decoder. Lanes 0..8 gather
/// the bytes of the first varint and lanes 8..16 gather the bytes of the
/// second. `255` has its high bit set, which zeroes the output lane under
/// `pshufb`.
///
/// Rows are ordered by the pair (len1, len2) annotated on each line; lengths
/// of 9 or 10 still gather at most 8 bytes per half (the overflow bytes are
/// handled elsewhere).
// SAFETY (of the transmute): `Align128` forces 16-byte alignment and `C`
// layout, so the 1440-byte payload is suitably aligned and exactly
// 90 * 16 bytes — the same size and layout as `[__m128i; 90]`.
pub static LOOKUP_DOUBLE_VEC: [__m128i; 90] = unsafe {
    core::mem::transmute(Align128::<[u8; 1440]>([
        0, 255, 255, 255, 255, 255, 255, 255, 1, 255, 255, 255, 255, 255, 255, 255, // 1, 1
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 255, 255, 255, 255, 255, 255, // 1, 2
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 255, 255, 255, 255, 255, // 1, 3
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 255, 255, 255, 255, // 1, 4
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 5, 255, 255, 255, // 1, 5
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 5, 6, 255, 255, // 1, 6
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 5, 6, 7, 255, // 1, 7
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 5, 6, 7, 8, // 1, 8
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 5, 6, 7, 8, // 1, 9
        0, 255, 255, 255, 255, 255, 255, 255, 1, 2, 3, 4, 5, 6, 7, 8, // 1, 10
        0, 1, 255, 255, 255, 255, 255, 255, 2, 255, 255, 255, 255, 255, 255, 255, // 2, 1
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 255, 255, 255, 255, 255, 255, // 2, 2
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 255, 255, 255, 255, 255, // 2, 3
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 255, 255, 255, 255, // 2, 4
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, 255, 255, 255, // 2, 5
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, 7, 255, 255, // 2, 6
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, 7, 8, 255, // 2, 7
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, 7, 8, 9, // 2, 8
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, 7, 8, 9, // 2, 9
        0, 1, 255, 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, 7, 8, 9, // 2, 10
        0, 1, 2, 255, 255, 255, 255, 255, 3, 255, 255, 255, 255, 255, 255, 255, // 3, 1
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 255, 255, 255, 255, 255, 255, // 3, 2
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 255, 255, 255, 255, 255, // 3, 3
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 255, 255, 255, 255, // 3, 4
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 7, 255, 255, 255, // 3, 5
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 7, 8, 255, 255, // 3, 6
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 7, 8, 9, 255, // 3, 7
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 7, 8, 9, 10, // 3, 8
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 7, 8, 9, 10, // 3, 9
        0, 1, 2, 255, 255, 255, 255, 255, 3, 4, 5, 6, 7, 8, 9, 10, // 3, 10
        0, 1, 2, 3, 255, 255, 255, 255, 4, 255, 255, 255, 255, 255, 255, 255, // 4, 1
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 255, 255, 255, 255, 255, 255, // 4, 2
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 255, 255, 255, 255, 255, // 4, 3
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 255, 255, 255, 255, // 4, 4
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 8, 255, 255, 255, // 4, 5
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 8, 9, 255, 255, // 4, 6
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 8, 9, 10, 255, // 4, 7
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 8, 9, 10, 11, // 4, 8
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 8, 9, 10, 11, // 4, 9
        0, 1, 2, 3, 255, 255, 255, 255, 4, 5, 6, 7, 8, 9, 10, 11, // 4, 10
        0, 1, 2, 3, 4, 255, 255, 255, 5, 255, 255, 255, 255, 255, 255, 255, // 5, 1
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 255, 255, 255, 255, 255, 255, // 5, 2
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 255, 255, 255, 255, 255, // 5, 3
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 255, 255, 255, 255, // 5, 4
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 9, 255, 255, 255, // 5, 5
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 9, 10, 255, 255, // 5, 6
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 9, 10, 11, 255, // 5, 7
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 9, 10, 11, 12, // 5, 8
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 9, 10, 11, 12, // 5, 9
        0, 1, 2, 3, 4, 255, 255, 255, 5, 6, 7, 8, 9, 10, 11, 12, // 5, 10
        0, 1, 2, 3, 4, 5, 255, 255, 6, 255, 255, 255, 255, 255, 255, 255, // 6, 1
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 255, 255, 255, 255, 255, 255, // 6, 2
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 255, 255, 255, 255, 255, // 6, 3
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 255, 255, 255, 255, // 6, 4
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 10, 255, 255, 255, // 6, 5
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 10, 11, 255, 255, // 6, 6
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 10, 11, 12, 255, // 6, 7
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 10, 11, 12, 13, // 6, 8
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 10, 11, 12, 13, // 6, 9
        0, 1, 2, 3, 4, 5, 255, 255, 6, 7, 8, 9, 10, 11, 12, 13, // 6, 10
        0, 1, 2, 3, 4, 5, 6, 255, 7, 255, 255, 255, 255, 255, 255, 255, // 7, 1
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 255, 255, 255, 255, 255, 255, // 7, 2
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 255, 255, 255, 255, 255, // 7, 3
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 10, 255, 255, 255, 255, // 7, 4
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 10, 11, 255, 255, 255, // 7, 5
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 10, 11, 12, 255, 255, // 7, 6
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 10, 11, 12, 13, 255, // 7, 7
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 10, 11, 12, 13, 14, // 7, 8
        0, 1, 2, 3, 4, 5, 6, 255, 7, 8, 9, 10, 11, 12, 13, 14, // 7, 9
        0, 1, 2, 3, 4, 5, 6, 7, 8, 255, 255, 255, 255, 255, 255, 255, // 8, 1
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 255, 255, 255, 255, 255, 255, // 8, 2
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 255, 255, 255, 255, 255, // 8, 3
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 255, 255, 255, 255, // 8, 4
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 255, 255, 255, // 8, 5
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 255, 255, // 8, 6
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 255, // 8, 7
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, // 8, 8
        0, 1, 2, 3, 4, 5, 6, 7, 9, 255, 255, 255, 255, 255, 255, 255, // 9, 1
        0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 255, 255, 255, 255, 255, 255, // 9, 2
        0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 255, 255, 255, 255, 255, // 9, 3
        0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 255, 255, 255, 255, // 9, 4
        0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 255, 255, 255, // 9, 5
        0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 255, 255, // 9, 6
        0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 255, // 9, 7
        0, 1, 2, 3, 4, 5, 6, 7, 10, 255, 255, 255, 255, 255, 255, 255, // 10, 1
        0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 255, 255, 255, 255, 255, 255, // 10, 2
        0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 255, 255, 255, 255, 255, // 10, 3
        0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 255, 255, 255, 255, // 10, 4
        0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 255, 255, 255, // 10, 5
        0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 255, 255, // 10, 6
    ]))
};
pub static LOOKUP_DOUBLE_STEP1: [(u8, u8, u8); 1024] = [
(0, 1, 1), // 0b0000000000000000
(10, 2, 1), // 0b0000000000000001
(1, 1, 2), // 0b0000000000000010
(20, 3, 1), // 0b0000000000000011
(0, 1, 1), // 0b0000000000000100
(11, 2, 2), // 0b0000000000000101
(2, 1, 3), // 0b0000000000000110
(30, 4, 1), // 0b0000000000000111
(0, 1, 1), // 0b0000000000001000
(10, 2, 1), // 0b0000000000001001
(1, 1, 2), // 0b0000000000001010
(21, 3, 2), // 0b0000000000001011
(0, 1, 1), // 0b0000000000001100
(12, 2, 3), // 0b0000000000001101
(3, 1, 4), // 0b0000000000001110
(40, 5, 1), // 0b0000000000001111
(0, 1, 1), // 0b0000000000010000
(10, 2, 1), // 0b0000000000010001
(1, 1, 2), // 0b0000000000010010
(20, 3, 1), // 0b0000000000010011
(0, 1, 1), // 0b0000000000010100
(11, 2, 2), // 0b0000000000010101
(2, 1, 3), // 0b0000000000010110
(31, 4, 2), // 0b0000000000010111
(0, 1, 1), // 0b0000000000011000
(10, 2, 1), // 0b0000000000011001
(1, 1, 2), // 0b0000000000011010
(22, 3, 3), // 0b0000000000011011
(0, 1, 1), // 0b0000000000011100
(13, 2, 4), // 0b0000000000011101
(4, 1, 5), // 0b0000000000011110
(50, 6, 1), // 0b0000000000011111
(0, 1, 1), // 0b0000000000100000
(10, 2, 1), // 0b0000000000100001
(1, 1, 2), // 0b0000000000100010
(20, 3, 1), // 0b0000000000100011
(0, 1, 1), // 0b0000000000100100
(11, 2, 2), // 0b0000000000100101
(2, 1, 3), // 0b0000000000100110
(30, 4, 1), // 0b0000000000100111
(0, 1, 1), // 0b0000000000101000
(10, 2, 1), // 0b0000000000101001
(1, 1, 2), // 0b0000000000101010
(21, 3, 2), // 0b0000000000101011
(0, 1, 1), // 0b0000000000101100
(12, 2, 3), // 0b0000000000101101
(3, 1, 4), // 0b0000000000101110
(41, 5, 2), // 0b0000000000101111
(0, 1, 1), // 0b0000000000110000
(10, 2, 1), // 0b0000000000110001
(1, 1, 2), // 0b0000000000110010
(20, 3, 1), // 0b0000000000110011
(0, 1, 1), // 0b0000000000110100
(11, 2, 2), // 0b0000000000110101
(2, 1, 3), // 0b0000000000110110
(32, 4, 3), // 0b0000000000110111
(0, 1, 1), // 0b0000000000111000
(10, 2, 1), // 0b0000000000111001
(1, 1, 2), // 0b0000000000111010
(23, 3, 4), // 0b0000000000111011
(0, 1, 1), // 0b0000000000111100
(14, 2, 5), // 0b0000000000111101
(5, 1, 6), // 0b0000000000111110
(60, 7, 1), // 0b0000000000111111
(0, 1, 1), // 0b0000000001000000
(10, 2, 1), // 0b0000000001000001
(1, 1, 2), // 0b0000000001000010
(20, 3, 1), // 0b0000000001000011
(0, 1, 1), // 0b0000000001000100
(11, 2, 2), // 0b0000000001000101
(2, 1, 3), // 0b0000000001000110
(30, 4, 1), // 0b0000000001000111
(0, 1, 1), // 0b0000000001001000
(10, 2, 1), // 0b0000000001001001
(1, 1, 2), // 0b0000000001001010
(21, 3, 2), // 0b0000000001001011
(0, 1, 1), // 0b0000000001001100
(12, 2, 3), // 0b0000000001001101
(3, 1, 4), // 0b0000000001001110
(40, 5, 1), // 0b0000000001001111
(0, 1, 1), // 0b0000000001010000
(10, 2, 1), // 0b0000000001010001
(1, 1, 2), // 0b0000000001010010
(20, 3, 1), // 0b0000000001010011
(0, 1, 1), // 0b0000000001010100
(11, 2, 2), // 0b0000000001010101
(2, 1, 3), // 0b0000000001010110
(31, 4, 2), // 0b0000000001010111
(0, 1, 1), // 0b0000000001011000
(10, 2, 1), // 0b0000000001011001
(1, 1, 2), // 0b0000000001011010
(22, 3, 3), // 0b0000000001011011
(0, 1, 1), // 0b0000000001011100
(13, 2, 4), // 0b0000000001011101
(4, 1, 5), // 0b0000000001011110
(51, 6, 2), // 0b0000000001011111
(0, 1, 1), // 0b0000000001100000
(10, 2, 1), // 0b0000000001100001
(1, 1, 2), // 0b0000000001100010
(20, 3, 1), // 0b0000000001100011
(0, 1, 1), // 0b0000000001100100
(11, 2, 2), // 0b0000000001100101
(2, 1, 3), // 0b0000000001100110
(30, 4, 1), // 0b0000000001100111
(0, 1, 1), // 0b0000000001101000
(10, 2, 1), // 0b0000000001101001
(1, 1, 2), // 0b0000000001101010
(21, 3, 2), // 0b0000000001101011
(0, 1, 1), // 0b0000000001101100
(12, 2, 3), // 0b0000000001101101
(3, 1, 4), // 0b0000000001101110
(42, 5, 3), // 0b0000000001101111
(0, 1, 1), // 0b0000000001110000
(10, 2, 1), // 0b0000000001110001
(1, 1, 2), // 0b0000000001110010
(20, 3, 1), // 0b0000000001110011
(0, 1, 1), // 0b0000000001110100
(11, 2, 2), // 0b0000000001110101
(2, 1, 3), // 0b0000000001110110
(33, 4, 4), // 0b0000000001110111
(0, 1, 1), // 0b0000000001111000
(10, 2, 1), // 0b0000000001111001
(1, 1, 2), // 0b0000000001111010
(24, 3, 5), // 0b0000000001111011
(0, 1, 1), // 0b0000000001111100
(15, 2, 6), // 0b0000000001111101
(6, 1, 7), // 0b0000000001111110
(69, 8, 1), // 0b0000000001111111
(0, 1, 1), // 0b0000000010000000
(10, 2, 1), // 0b0000000010000001
(1, 1, 2), // 0b0000000010000010
(20, 3, 1), // 0b0000000010000011
(0, 1, 1), // 0b0000000010000100
(11, 2, 2), // 0b0000000010000101
(2, 1, 3), // 0b0000000010000110
(30, 4, 1), // 0b0000000010000111
(0, 1, 1), // 0b0000000010001000
(10, 2, 1), // 0b0000000010001001
(1, 1, 2), // 0b0000000010001010
(21, 3, 2), // 0b0000000010001011
(0, 1, 1), // 0b0000000010001100
(12, 2, 3), // 0b0000000010001101
(3, 1, 4), // 0b0000000010001110
(40, 5, 1), // 0b0000000010001111
(0, 1, 1), // 0b0000000010010000
(10, 2, 1), // 0b0000000010010001
(1, 1, 2), // 0b0000000010010010
(20, 3, 1), // 0b0000000010010011
(0, 1, 1), // 0b0000000010010100
(11, 2, 2), // 0b0000000010010101
(2, 1, 3), // 0b0000000010010110
(31, 4, 2), // 0b0000000010010111
(0, 1, 1), // 0b0000000010011000
(10, 2, 1), // 0b0000000010011001
(1, 1, 2), // 0b0000000010011010
(22, 3, 3), // 0b0000000010011011
(0, 1, 1), // 0b0000000010011100
(13, 2, 4), // 0b0000000010011101
(4, 1, 5), // 0b0000000010011110
(50, 6, 1), // 0b0000000010011111
(0, 1, 1), // 0b0000000010100000
(10, 2, 1), // 0b0000000010100001
(1, 1, 2), // 0b0000000010100010
(20, 3, 1), // 0b0000000010100011
(0, 1, 1), // 0b0000000010100100
(11, 2, 2), // 0b0000000010100101
(2, 1, 3), // 0b0000000010100110
(30, 4, 1), // 0b0000000010100111
(0, 1, 1), // 0b0000000010101000
(10, 2, 1), // 0b0000000010101001
(1, 1, 2), // 0b0000000010101010
(21, 3, 2), // 0b0000000010101011
(0, 1, 1), // 0b0000000010101100
(12, 2, 3), // 0b0000000010101101
(3, 1, 4), // 0b0000000010101110
(41, 5, 2), // 0b0000000010101111
(0, 1, 1), // 0b0000000010110000
(10, 2, 1), // 0b0000000010110001
(1, 1, 2), // 0b0000000010110010
(20, 3, 1), // 0b0000000010110011
(0, 1, 1), // 0b0000000010110100
(11, 2, 2), // 0b0000000010110101
(2, 1, 3), // 0b0000000010110110
(32, 4, 3), // 0b0000000010110111
(0, 1, 1), // 0b0000000010111000
(10, 2, 1), // 0b0000000010111001
(1, 1, 2), // 0b0000000010111010
(23, 3, 4), // 0b0000000010111011
(0, 1, 1), // 0b0000000010111100
(14, 2, 5), // 0b0000000010111101
(5, 1, 6), // 0b0000000010111110
(61, 7, 2), // 0b0000000010111111
(0, 1, 1), // 0b0000000011000000
(10, 2, 1), // 0b0000000011000001
(1, 1, 2), // 0b0000000011000010
(20, 3, 1), // 0b0000000011000011
(0, 1, 1), // 0b0000000011000100
(11, 2, 2), // 0b0000000011000101
(2, 1, 3), // 0b0000000011000110
(30, 4, 1), // 0b0000000011000111
(0, 1, 1), // 0b0000000011001000
(10, 2, 1), // 0b0000000011001001
(1, 1, 2), // 0b0000000011001010
(21, 3, 2), // 0b0000000011001011
(0, 1, 1), // 0b0000000011001100
(12, 2, 3), // 0b0000000011001101
(3, 1, 4), // 0b0000000011001110
(40, 5, 1), // 0b0000000011001111
(0, 1, 1), // 0b0000000011010000
(10, 2, 1), // 0b0000000011010001
(1, 1, 2), // 0b0000000011010010
(20, 3, 1), // 0b0000000011010011
(0, 1, 1), // 0b0000000011010100
(11, 2, 2), // 0b0000000011010101
(2, 1, 3), // 0b0000000011010110
(31, 4, 2), // 0b0000000011010111
(0, 1, 1), // 0b0000000011011000
(10, 2, 1), // 0b0000000011011001
(1, 1, 2), // 0b0000000011011010
(22, 3, 3), // 0b0000000011011011
(0, 1, 1), // 0b0000000011011100
(13, 2, 4), // 0b0000000011011101
(4, 1, 5), // 0b0000000011011110
(52, 6, 3), // 0b0000000011011111
(0, 1, 1), // 0b0000000011100000
(10, 2, 1), // 0b0000000011100001
(1, 1, 2), // 0b0000000011100010
(20, 3, 1), // 0b0000000011100011
(0, 1, 1), // 0b0000000011100100
(11, 2, 2), // 0b0000000011100101
(2, 1, 3), // 0b0000000011100110
(30, 4, 1), // 0b0000000011100111
(0, 1, 1), // 0b0000000011101000
(10, 2, 1), // 0b0000000011101001
(1, 1, 2), // 0b0000000011101010
(21, 3, 2), // 0b0000000011101011
(0, 1, 1), // 0b0000000011101100
(12, 2, 3), // 0b0000000011101101
(3, 1, 4), // 0b0000000011101110
(43, 5, 4), // 0b0000000011101111
(0, 1, 1), // 0b0000000011110000
(10, 2, 1), // 0b0000000011110001
(1, 1, 2), // 0b0000000011110010
(20, 3, 1), // 0b0000000011110011
(0, 1, 1), // 0b0000000011110100
(11, 2, 2), // 0b0000000011110101
(2, 1, 3), // 0b0000000011110110
(34, 4, 5), // 0b0000000011110111
(0, 1, 1), // 0b0000000011111000
(10, 2, 1), // 0b0000000011111001
(1, 1, 2), // 0b0000000011111010
(25, 3, 6), // 0b0000000011111011
(0, 1, 1), // 0b0000000011111100
(16, 2, 7), // 0b0000000011111101
(7, 1, 8), // 0b0000000011111110
(77, 9, 1), // 0b0000000011111111
(0, 1, 1), // 0b0000000100000000
(10, 2, 1), // 0b0000000100000001
(1, 1, 2), // 0b0000000100000010
(20, 3, 1), // 0b0000000100000011
(0, 1, 1), // 0b0000000100000100
(11, 2, 2), // 0b0000000100000101
(2, 1, 3), // 0b0000000100000110
(30, 4, 1), // 0b0000000100000111
(0, 1, 1), // 0b0000000100001000
(10, 2, 1), // 0b0000000100001001
(1, 1, 2), // 0b0000000100001010
(21, 3, 2), // 0b0000000100001011
(0, 1, 1), // 0b0000000100001100
(12, 2, 3), // 0b0000000100001101
(3, 1, 4), // 0b0000000100001110
(40, 5, 1), // 0b0000000100001111
(0, 1, 1), // 0b0000000100010000
(10, 2, 1), // 0b0000000100010001
(1, 1, 2), // 0b0000000100010010
(20, 3, 1), // 0b0000000100010011
(0, 1, 1), // 0b0000000100010100
(11, 2, 2), // 0b0000000100010101
(2, 1, 3), // 0b0000000100010110
(31, 4, 2), // 0b0000000100010111
(0, 1, 1), // 0b0000000100011000
(10, 2, 1), // 0b0000000100011001
(1, 1, 2), // 0b0000000100011010
(22, 3, 3), // 0b0000000100011011
(0, 1, 1), // 0b0000000100011100
(13, 2, 4), // 0b0000000100011101
(4, 1, 5), // 0b0000000100011110
(50, 6, 1), // 0b0000000100011111
(0, 1, 1), // 0b0000000100100000
(10, 2, 1), // 0b0000000100100001
(1, 1, 2), // 0b0000000100100010
(20, 3, 1), // 0b0000000100100011
(0, 1, 1), // 0b0000000100100100
(11, 2, 2), // 0b0000000100100101
(2, 1, 3), // 0b0000000100100110
(30, 4, 1), // 0b0000000100100111
(0, 1, 1), // 0b0000000100101000
(10, 2, 1), // 0b0000000100101001
(1, 1, 2), // 0b0000000100101010
(21, 3, 2), // 0b0000000100101011
(0, 1, 1), // 0b0000000100101100
(12, 2, 3), // 0b0000000100101101
(3, 1, 4), // 0b0000000100101110
(41, 5, 2), // 0b0000000100101111
(0, 1, 1), // 0b0000000100110000
(10, 2, 1), // 0b0000000100110001
(1, 1, 2), // 0b0000000100110010
(20, 3, 1), // 0b0000000100110011
(0, 1, 1), // 0b0000000100110100
(11, 2, 2), // 0b0000000100110101
(2, 1, 3), // 0b0000000100110110
(32, 4, 3), // 0b0000000100110111
(0, 1, 1), // 0b0000000100111000
(10, 2, 1), // 0b0000000100111001
(1, 1, 2), // 0b0000000100111010
(23, 3, 4), // 0b0000000100111011
(0, 1, 1), // 0b0000000100111100
(14, 2, 5), // 0b0000000100111101
(5, 1, 6), // 0b0000000100111110
(60, 7, 1), // 0b0000000100111111
(0, 1, 1), // 0b0000000101000000
(10, 2, 1), // 0b0000000101000001
(1, 1, 2), // 0b0000000101000010
(20, 3, 1), // 0b0000000101000011
(0, 1, 1), // 0b0000000101000100
(11, 2, 2), // 0b0000000101000101
(2, 1, 3), // 0b0000000101000110
(30, 4, 1), // 0b0000000101000111
(0, 1, 1), // 0b0000000101001000
(10, 2, 1), // 0b0000000101001001
(1, 1, 2), // 0b0000000101001010
(21, 3, 2), // 0b0000000101001011
(0, 1, 1), // 0b0000000101001100
(12, 2, 3), // 0b0000000101001101
(3, 1, 4), // 0b0000000101001110
(40, 5, 1), // 0b0000000101001111
(0, 1, 1), // 0b0000000101010000
(10, 2, 1), // 0b0000000101010001
(1, 1, 2), // 0b0000000101010010
(20, 3, 1), // 0b0000000101010011
(0, 1, 1), // 0b0000000101010100
(11, 2, 2), // 0b0000000101010101
(2, 1, 3), // 0b0000000101010110
(31, 4, 2), // 0b0000000101010111
(0, 1, 1), // 0b0000000101011000
(10, 2, 1), // 0b0000000101011001
(1, 1, 2), // 0b0000000101011010
(22, 3, 3), // 0b0000000101011011
(0, 1, 1), // 0b0000000101011100
(13, 2, 4), // 0b0000000101011101
(4, 1, 5), // 0b0000000101011110
(51, 6, 2), // 0b0000000101011111
(0, 1, 1), // 0b0000000101100000
(10, 2, 1), // 0b0000000101100001
(1, 1, 2), // 0b0000000101100010
(20, 3, 1), // 0b0000000101100011
(0, 1, 1), // 0b0000000101100100
(11, 2, 2), // 0b0000000101100101
(2, 1, 3), // 0b0000000101100110
(30, 4, 1), // 0b0000000101100111
(0, 1, 1), // 0b0000000101101000
(10, 2, 1), // 0b0000000101101001
(1, 1, 2), // 0b0000000101101010
(21, 3, 2), // 0b0000000101101011
(0, 1, 1), // 0b0000000101101100
(12, 2, 3), // 0b0000000101101101
(3, 1, 4), // 0b0000000101101110
(42, 5, 3), // 0b0000000101101111
(0, 1, 1), // 0b0000000101110000
(10, 2, 1), // 0b0000000101110001
(1, 1, 2), // 0b0000000101110010
(20, 3, 1), // 0b0000000101110011
(0, 1, 1), // 0b0000000101110100
(11, 2, 2), // 0b0000000101110101
(2, 1, 3), // 0b0000000101110110
(33, 4, 4), // 0b0000000101110111
(0, 1, 1), // 0b0000000101111000
(10, 2, 1), // 0b0000000101111001
(1, 1, 2), // 0b0000000101111010
(24, 3, 5), // 0b0000000101111011
(0, 1, 1), // 0b0000000101111100
(15, 2, 6), // 0b0000000101111101
(6, 1, 7), // 0b0000000101111110
(70, 8, 2), // 0b0000000101111111
(0, 1, 1), // 0b0000000110000000
(10, 2, 1), // 0b0000000110000001
(1, 1, 2), // 0b0000000110000010
(20, 3, 1), // 0b0000000110000011
(0, 1, 1), // 0b0000000110000100
(11, 2, 2), // 0b0000000110000101
(2, 1, 3), // 0b0000000110000110
(30, 4, 1), // 0b0000000110000111
(0, 1, 1), // 0b0000000110001000
(10, 2, 1), // 0b0000000110001001
(1, 1, 2), // 0b0000000110001010
(21, 3, 2), // 0b0000000110001011
(0, 1, 1), // 0b0000000110001100
(12, 2, 3), // 0b0000000110001101
(3, 1, 4), // 0b0000000110001110
(40, 5, 1), // 0b0000000110001111
(0, 1, 1), // 0b0000000110010000
(10, 2, 1), // 0b0000000110010001
(1, 1, 2), // 0b0000000110010010
(20, 3, 1), // 0b0000000110010011
(0, 1, 1), // 0b0000000110010100
(11, 2, 2), // 0b0000000110010101
(2, 1, 3), // 0b0000000110010110
(31, 4, 2), // 0b0000000110010111
(0, 1, 1), // 0b0000000110011000
(10, 2, 1), // 0b0000000110011001
(1, 1, 2), // 0b0000000110011010
(22, 3, 3), // 0b0000000110011011
(0, 1, 1), // 0b0000000110011100
(13, 2, 4), // 0b0000000110011101
(4, 1, 5), // 0b0000000110011110
(50, 6, 1), // 0b0000000110011111
(0, 1, 1), // 0b0000000110100000
(10, 2, 1), // 0b0000000110100001
(1, 1, 2), // 0b0000000110100010
(20, 3, 1), // 0b0000000110100011
(0, 1, 1), // 0b0000000110100100
(11, 2, 2), // 0b0000000110100101
(2, 1, 3), // 0b0000000110100110
(30, 4, 1), // 0b0000000110100111
(0, 1, 1), // 0b0000000110101000
(10, 2, 1), // 0b0000000110101001
(1, 1, 2), // 0b0000000110101010
(21, 3, 2), // 0b0000000110101011
(0, 1, 1), // 0b0000000110101100
(12, 2, 3), // 0b0000000110101101
(3, 1, 4), // 0b0000000110101110
(41, 5, 2), // 0b0000000110101111
(0, 1, 1), // 0b0000000110110000
(10, 2, 1), // 0b0000000110110001
(1, 1, 2), // 0b0000000110110010
(20, 3, 1), // 0b0000000110110011
(0, 1, 1), // 0b0000000110110100
(11, 2, 2), // 0b0000000110110101
(2, 1, 3), // 0b0000000110110110
(32, 4, 3), // 0b0000000110110111
(0, 1, 1), // 0b0000000110111000
(10, 2, 1), // 0b0000000110111001
(1, 1, 2), // 0b0000000110111010
(23, 3, 4), // 0b0000000110111011
(0, 1, 1), // 0b0000000110111100
(14, 2, 5), // 0b0000000110111101
(5, 1, 6), // 0b0000000110111110
(62, 7, 3), // 0b0000000110111111
(0, 1, 1), // 0b0000000111000000
(10, 2, 1), // 0b0000000111000001
(1, 1, 2), // 0b0000000111000010
(20, 3, 1), // 0b0000000111000011
(0, 1, 1), // 0b0000000111000100
(11, 2, 2), // 0b0000000111000101
(2, 1, 3), // 0b0000000111000110
(30, 4, 1), // 0b0000000111000111
(0, 1, 1), // 0b0000000111001000
(10, 2, 1), // 0b0000000111001001
(1, 1, 2), // 0b0000000111001010
(21, 3, 2), // 0b0000000111001011
(0, 1, 1), // 0b0000000111001100
(12, 2, 3), // 0b0000000111001101
(3, 1, 4), // 0b0000000111001110
(40, 5, 1), // 0b0000000111001111
(0, 1, 1), // 0b0000000111010000
(10, 2, 1), // 0b0000000111010001
(1, 1, 2), // 0b0000000111010010
(20, 3, 1), // 0b0000000111010011
(0, 1, 1), // 0b0000000111010100
(11, 2, 2), // 0b0000000111010101
(2, 1, 3), // 0b0000000111010110
(31, 4, 2), // 0b0000000111010111
(0, 1, 1), // 0b0000000111011000
(10, 2, 1), // 0b0000000111011001
(1, 1, 2), // 0b0000000111011010
(22, 3, 3), // 0b0000000111011011
(0, 1, 1), // 0b0000000111011100
(13, 2, 4), // 0b0000000111011101
(4, 1, 5), // 0b0000000111011110
(53, 6, 4), // 0b0000000111011111
(0, 1, 1), // 0b0000000111100000
(10, 2, 1), // 0b0000000111100001
(1, 1, 2), // 0b0000000111100010
(20, 3, 1), // 0b0000000111100011
(0, 1, 1), // 0b0000000111100100
(11, 2, 2), // 0b0000000111100101
(2, 1, 3), // 0b0000000111100110
(30, 4, 1), // 0b0000000111100111
(0, 1, 1), // 0b0000000111101000
(10, 2, 1), // 0b0000000111101001
(1, 1, 2), // 0b0000000111101010
(21, 3, 2), // 0b0000000111101011
(0, 1, 1), // 0b0000000111101100
(12, 2, 3), // 0b0000000111101101
(3, 1, 4), // 0b0000000111101110
(44, 5, 5), // 0b0000000111101111
(0, 1, 1), // 0b0000000111110000
(10, 2, 1), // 0b0000000111110001
(1, 1, 2), // 0b0000000111110010
(20, 3, 1), // 0b0000000111110011
(0, 1, 1), // 0b0000000111110100
(11, 2, 2), // 0b0000000111110101
(2, 1, 3), // 0b0000000111110110
(35, 4, 6), // 0b0000000111110111
(0, 1, 1), // 0b0000000111111000
(10, 2, 1), // 0b0000000111111001
(1, 1, 2), // 0b0000000111111010
(26, 3, 7), // 0b0000000111111011
(0, 1, 1), // 0b0000000111111100
(17, 2, 8), // 0b0000000111111101
(8, 1, 9), // 0b0000000111111110
(84, 10, 1), // 0b0000000111111111
(0, 1, 1), // 0b0000001000000000
(10, 2, 1), // 0b0000001000000001
(1, 1, 2), // 0b0000001000000010
(20, 3, 1), // 0b0000001000000011
(0, 1, 1), // 0b0000001000000100
(11, 2, 2), // 0b0000001000000101
(2, 1, 3), // 0b0000001000000110
(30, 4, 1), // 0b0000001000000111
(0, 1, 1), // 0b0000001000001000
(10, 2, 1), // 0b0000001000001001
(1, 1, 2), // 0b0000001000001010
(21, 3, 2), // 0b0000001000001011
(0, 1, 1), // 0b0000001000001100
(12, 2, 3), // 0b0000001000001101
(3, 1, 4), // 0b0000001000001110
(40, 5, 1), // 0b0000001000001111
(0, 1, 1), // 0b0000001000010000
(10, 2, 1), // 0b0000001000010001
(1, 1, 2), // 0b0000001000010010
(20, 3, 1), // 0b0000001000010011
(0, 1, 1), // 0b0000001000010100
(11, 2, 2), // 0b0000001000010101
(2, 1, 3), // 0b0000001000010110
(31, 4, 2), // 0b0000001000010111
(0, 1, 1), // 0b0000001000011000
(10, 2, 1), // 0b0000001000011001
(1, 1, 2), // 0b0000001000011010
(22, 3, 3), // 0b0000001000011011
(0, 1, 1), // 0b0000001000011100
(13, 2, 4), // 0b0000001000011101
(4, 1, 5), // 0b0000001000011110
(50, 6, 1), // 0b0000001000011111
(0, 1, 1), // 0b0000001000100000
(10, 2, 1), // 0b0000001000100001
(1, 1, 2), // 0b0000001000100010
(20, 3, 1), // 0b0000001000100011
(0, 1, 1), // 0b0000001000100100
(11, 2, 2), // 0b0000001000100101
(2, 1, 3), // 0b0000001000100110
(30, 4, 1), // 0b0000001000100111
(0, 1, 1), // 0b0000001000101000
(10, 2, 1), // 0b0000001000101001
(1, 1, 2), // 0b0000001000101010
(21, 3, 2), // 0b0000001000101011
(0, 1, 1), // 0b0000001000101100
(12, 2, 3), // 0b0000001000101101
(3, 1, 4), // 0b0000001000101110
(41, 5, 2), // 0b0000001000101111
(0, 1, 1), // 0b0000001000110000
(10, 2, 1), // 0b0000001000110001
(1, 1, 2), // 0b0000001000110010
(20, 3, 1), // 0b0000001000110011
(0, 1, 1), // 0b0000001000110100
(11, 2, 2), // 0b0000001000110101
(2, 1, 3), // 0b0000001000110110
(32, 4, 3), // 0b0000001000110111
(0, 1, 1), // 0b0000001000111000
(10, 2, 1), // 0b0000001000111001
(1, 1, 2), // 0b0000001000111010
(23, 3, 4), // 0b0000001000111011
(0, 1, 1), // 0b0000001000111100
(14, 2, 5), // 0b0000001000111101
(5, 1, 6), // 0b0000001000111110
(60, 7, 1), // 0b0000001000111111
(0, 1, 1), // 0b0000001001000000
(10, 2, 1), // 0b0000001001000001
(1, 1, 2), // 0b0000001001000010
(20, 3, 1), // 0b0000001001000011
(0, 1, 1), // 0b0000001001000100
(11, 2, 2), // 0b0000001001000101
(2, 1, 3), // 0b0000001001000110
(30, 4, 1), // 0b0000001001000111
(0, 1, 1), // 0b0000001001001000
(10, 2, 1), // 0b0000001001001001
(1, 1, 2), // 0b0000001001001010
(21, 3, 2), // 0b0000001001001011
(0, 1, 1), // 0b0000001001001100
(12, 2, 3), // 0b0000001001001101
(3, 1, 4), // 0b0000001001001110
(40, 5, 1), // 0b0000001001001111
(0, 1, 1), // 0b0000001001010000
(10, 2, 1), // 0b0000001001010001
(1, 1, 2), // 0b0000001001010010
(20, 3, 1), // 0b0000001001010011
(0, 1, 1), // 0b0000001001010100
(11, 2, 2), // 0b0000001001010101
(2, 1, 3), // 0b0000001001010110
(31, 4, 2), // 0b0000001001010111
(0, 1, 1), // 0b0000001001011000
(10, 2, 1), // 0b0000001001011001
(1, 1, 2), // 0b0000001001011010
(22, 3, 3), // 0b0000001001011011
(0, 1, 1), // 0b0000001001011100
(13, 2, 4), // 0b0000001001011101
(4, 1, 5), // 0b0000001001011110
(51, 6, 2), // 0b0000001001011111
(0, 1, 1), // 0b0000001001100000
(10, 2, 1), // 0b0000001001100001
(1, 1, 2), // 0b0000001001100010
(20, 3, 1), // 0b0000001001100011
(0, 1, 1), // 0b0000001001100100
(11, 2, 2), // 0b0000001001100101
(2, 1, 3), // 0b0000001001100110
(30, 4, 1), // 0b0000001001100111
(0, 1, 1), // 0b0000001001101000
(10, 2, 1), // 0b0000001001101001
(1, 1, 2), // 0b0000001001101010
(21, 3, 2), // 0b0000001001101011
(0, 1, 1), // 0b0000001001101100
(12, 2, 3), // 0b0000001001101101
(3, 1, 4), // 0b0000001001101110
(42, 5, 3), // 0b0000001001101111
(0, 1, 1), // 0b0000001001110000
(10, 2, 1), // 0b0000001001110001
(1, 1, 2), // 0b0000001001110010
(20, 3, 1), // 0b0000001001110011
(0, 1, 1), // 0b0000001001110100
(11, 2, 2), // 0b0000001001110101
(2, 1, 3), // 0b0000001001110110
(33, 4, 4), // 0b0000001001110111
(0, 1, 1), // 0b0000001001111000
(10, 2, 1), // 0b0000001001111001
(1, 1, 2), // 0b0000001001111010
(24, 3, 5), // 0b0000001001111011
(0, 1, 1), // 0b0000001001111100
(15, 2, 6), // 0b0000001001111101
(6, 1, 7), // 0b0000001001111110
(69, 8, 1), // 0b0000001001111111
(0, 1, 1), // 0b0000001010000000
(10, 2, 1), // 0b0000001010000001
(1, 1, 2), // 0b0000001010000010
(20, 3, 1), // 0b0000001010000011
(0, 1, 1), // 0b0000001010000100
(11, 2, 2), // 0b0000001010000101
(2, 1, 3), // 0b0000001010000110
(30, 4, 1), // 0b0000001010000111
(0, 1, 1), // 0b0000001010001000
(10, 2, 1), // 0b0000001010001001
(1, 1, 2), // 0b0000001010001010
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | true |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/src/decode/mod.rs | src/decode/mod.rs | #[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
use core::cmp::min;
use crate::num::{SignedVarIntTarget, VarIntTarget};
use crate::VarIntDecodeError;
mod lookup;
/// Decodes a single varint from the input slice.
///
/// Produces a tuple containing the decoded number and the number of bytes read. For best
/// performance, provide a slice at least 16 bytes in length, or use the unsafe version directly.
///
/// # Examples
/// ```
/// use varint_simd::{decode, VarIntDecodeError};
///
/// fn main() -> Result<(), VarIntDecodeError> {
///     let decoded = decode::<u32>(&[185, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])?;
///     assert_eq!(decoded, (1337, 2));
///     Ok(())
/// }
/// ```
#[inline]
pub fn decode<T: VarIntTarget>(bytes: &[u8]) -> Result<(T, usize), VarIntDecodeError> {
    let result = if bytes.len() >= 16 {
        // Fast path: the unchecked decoder is allowed to read a full 16 bytes.
        unsafe { decode_unsafe(bytes.as_ptr()) }
    } else if !bytes.is_empty() {
        // Short input: stage it in a zero-padded 16-byte buffer so the
        // unchecked decoder never reads past valid memory.
        let mut data = [0u8; 16];
        let len = min(16, bytes.len());
        data[..len].copy_from_slice(&bytes[..len]);
        unsafe { decode_unsafe(data.as_ptr()) }
    } else {
        return Err(VarIntDecodeError::NotEnoughBytes);
    };
    // Overflow check. Precedence note: `&&` binds tighter than `||`, so this
    // reads as (max-length varint AND its last byte exceeds the allowed
    // maximum) OR (decoded length exceeds the maximum for T).
    // The ordering of conditions here is weird because of a performance regression (?) in rustc 1.49
    if bytes.len() >= T::MAX_VARINT_BYTES as usize
        // we perform a signed comparison here because a valid last byte is always positive
        // NOTE(review): at the source level this is an unsigned `u8` compare —
        // the "signed" wording likely refers to the generated machine code;
        // confirm against MAX_LAST_VARINT_BYTE's definition.
        && unsafe { *bytes.get_unchecked((T::MAX_VARINT_BYTES - 1) as usize) } > T::MAX_LAST_VARINT_BYTE
        && result.1 == T::MAX_VARINT_BYTES as usize
        || result.1 > T::MAX_VARINT_BYTES as usize
    {
        Err(VarIntDecodeError::Overflow)
    } else if result.1 > bytes.len() {
        // The decoder consumed zero-padding, i.e. the varint was truncated.
        Err(VarIntDecodeError::NotEnoughBytes)
    } else {
        Ok(result)
    }
}
/// Decodes only the length of a single varint from the input slice.
///
/// # Examples
/// ```
/// use varint_simd::{decode_len, VarIntDecodeError};
///
/// fn main() -> Result<(), VarIntDecodeError> {
///     let decoded = decode_len::<u32>(&[185, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])?;
///     assert_eq!(decoded, 2);
///     Ok(())
/// }
/// ```
#[inline]
pub fn decode_len<T: VarIntTarget>(bytes: &[u8]) -> Result<usize, VarIntDecodeError> {
    if bytes.is_empty() {
        return Err(VarIntDecodeError::NotEnoughBytes);
    }

    if bytes.len() >= 16 {
        // Enough bytes for the unchecked reader to dereference directly.
        return Ok(unsafe { decode_len_unsafe::<T>(bytes.as_ptr()) });
    }

    // Short input: stage it in a zero-padded scratch buffer so the unchecked
    // reader can always touch 16 bytes of valid memory.
    let mut scratch = [0u8; 16];
    scratch[..bytes.len()].copy_from_slice(bytes);
    Ok(unsafe { decode_len_unsafe::<T>(scratch.as_ptr()) })
}
/// Convenience function for decoding a single varint in ZigZag format from the input slice.
/// See also: [`decode`]
///
/// # Examples
/// ```
/// use varint_simd::{decode_zigzag, VarIntDecodeError};
///
/// fn main() -> Result<(), VarIntDecodeError> {
///     let decoded = decode_zigzag::<i32>(&[39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])?;
///     assert_eq!(decoded, (-20, 1));
///     Ok(())
/// }
/// ```
#[inline]
pub fn decode_zigzag<T: SignedVarIntTarget>(bytes: &[u8]) -> Result<(T, usize), VarIntDecodeError> {
    // Decode the unsigned wire value, then undo the ZigZag mapping.
    let (raw, len) = decode::<T::Unsigned>(bytes)?;
    Ok((raw.unzigzag(), len))
}
/// Decodes the length of the next integer
///
/// # Safety
/// Same as `decode_unsafe`
#[inline]
pub unsafe fn decode_len_unsafe<T: VarIntTarget>(bytes: *const u8) -> usize {
    if T::MAX_VARINT_BYTES <= 5 {
        // The varint fits within 8 bytes, so one unaligned u64 load covers it.
        let b = bytes.cast::<u64>().read_unaligned();
        // !0x7f7f... == 0x8080...: mark bit 7 of every byte whose
        // continuation (high) bit is clear, i.e. every terminator byte.
        let msbs = !b & !0x7f7f7f7f7f7f7f7f;
        // Position of the first terminator, counted in bits; +1 turns the
        // zero-based bit index 7/15/23/... into 8/16/24/...
        let len = msbs.trailing_zeros() + 1; // in bits
        (len / 8) as usize
    } else {
        // Up to 10 bytes: examine two consecutive u64 words.
        let b0 = bytes.cast::<u64>().read_unaligned();
        // `.add(1)` advances by one *u64*, i.e. 8 bytes.
        let b1 = bytes.cast::<u64>().add(1).read_unaligned();
        let msbs0 = !b0 & !0x7f7f7f7f7f7f7f7f;
        let msbs1 = !b1 & !0x7f7f7f7f7f7f7f7f;
        let len0 = msbs0.trailing_zeros() + 1;
        let len1 = msbs1.trailing_zeros() + 1;
        // No terminator in the first 8 bytes -> the varint continues into the
        // second word; account for the 64 bits already scanned.
        let len = if msbs0 == 0 { len1 + 64 } else { len0 };
        len as usize / 8
    }
}
/// Decodes a single varint from the input pointer. Returns a tuple containing the decoded number
/// and the number of bytes read.
///
/// # Safety
/// There must be at least 16 bytes of allocated memory after the beginning of the pointer.
/// Otherwise, there may be undefined behavior. Any data after the end of the varint are ignored.
/// A truncated value will be returned if the varint represents a number too large for the target
/// type.
///
/// You may prefer to use this unsafe interface if you know what you are doing and need a little
/// extra performance.
#[inline]
pub unsafe fn decode_unsafe<T: VarIntTarget>(bytes: *const u8) -> (T, usize) {
    // It looks like you're trying to understand what this code does. You should probably read
    // this first: https://developers.google.com/protocol-buffers/docs/encoding#varints
    if T::MAX_VARINT_BYTES <= 5 {
        // we can do everything in a normal 64-bit register
        let b = bytes.cast::<u64>().read_unaligned();
        // println!("{:#066b} b", b);
        // println!("{:#066b} op", !0x7f7f7f7f7f7f7f7fu64);
        // Isolate the continuation (MSB) bits, inverted: the first set bit marks the last byte.
        let msbs = !b & !0x7f7f7f7f7f7f7f7f;
        // println!("{:#066b} msbs", msbs);
        /*
        TODO: theoretically, we could delay the `+1` and/or do it in parallel with other parts, but
        moving it downwards absolutely tanks performance and I have no idea why
        */
        let len = msbs.trailing_zeros() + 1; // in bits
        // println!("{}", len);
        // b & blsmsk(msbs)
        // Mask off everything past the varint: msbs ^ (msbs - 1) is a mask up to and
        // including the lowest set bit of msbs (the BLSMSK bit trick).
        let varint_part = b & (msbs ^ msbs.wrapping_sub(1));
        // println!("{:#066b} varint_part", varint_part);
        let num = T::scalar_to_num(varint_part);
        (num, (len / 8) as usize)
    } else {
        // Up to 10 encoded bytes: process two u64 halves.
        // NOTE: `.add(1)` on a *const u64 advances by 8 bytes (one element).
        let b0 = bytes.cast::<u64>().read_unaligned();
        let b1 = bytes.cast::<u64>().add(1).read_unaligned();
        let msbs0 = !b0 & !0x7f7f7f7f7f7f7f7f;
        let msbs1 = !b1 & !0x7f7f7f7f7f7f7f7f;
        // TODO: could this be faster on CPUs without fast tzcnt?
        // let blsi0 = msbs0.wrapping_neg() & msbs0;
        // let blsi1 = msbs1.wrapping_neg() & msbs1;
        //
        // let len0 = ((blsi0.wrapping_mul(0x20406080a0c0e1)) >> 60) & 15;
        // let len1 = ((blsi1.wrapping_mul(0x20406080a0c0e1)) >> 60) & 15;
        let len0 = msbs0.trailing_zeros() + 1;
        let len1 = msbs1.trailing_zeros() + 1;
        // doing this is faster than using len0, len1 because tzcnt has significant latency
        // and if the caller does not need the length, the call can be optimized out entirely
        // b0 & blsmsk(msbs0)
        let varint_part0 = b0 & (msbs0 ^ msbs0.wrapping_sub(1));
        // b1 & blsmsk(msbs1)
        // The second half contributes only when the first half had no terminator
        // (msbs0 == 0); the multiply zeroes it out otherwise, branch-free.
        let varint_part1 = (b1 & (msbs1 ^ msbs1.wrapping_sub(1))) * ((msbs0 == 0) as u64);
        // let varint_part0 = b0 & !(0xffffffffffffffff << len0.min(63));
        // let varint_part1 = b1 & !(0xffffffffffffffff << (((msbs0 == 0) as u32) * len1.min(63)));
        let num = T::vector_to_num(core::mem::transmute::<[u64; 2], [u8; 16]>([
            varint_part0,
            varint_part1,
        ]));
        let len = if msbs0 == 0 { len1 + 64 } else { len0 } / 8;
        (num, len as usize)
    }
}
/// Decodes two adjacent varints simultaneously. Target types must fit within 16 bytes when varint
/// encoded. Requires SSSE3 support.
///
/// For example, it is permissible to decode `u32` and `u32`, and `u64` and `u32`, but it is not
/// possible to decode two `u64` values with this function simultaneously.
///
/// Returns a tuple containing the two decoded values and the two lengths of bytes read for each
/// value.
///
/// For best performance, ensure each target type is `u32` or smaller.
///
/// # Safety
/// There must be at least 16 bytes of allocated memory after the start of the pointer. Otherwise,
/// there may be undefined behavior. Any data after the two varints are ignored. Truncated values
/// will be returned if a varint exceeds the target type's limit.
#[inline]
#[cfg(any(target_feature = "ssse3", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "ssse3")))]
pub unsafe fn decode_two_unsafe<T: VarIntTarget, U: VarIntTarget>(
    bytes: *const u8,
) -> (T, U, u8, u8) {
    if T::MAX_VARINT_BYTES + U::MAX_VARINT_BYTES > 16 {
        // check will be eliminated at compile time
        panic!(
            "exceeded length limit: cannot decode {} and {}, total length {} exceeds 16 bytes",
            core::any::type_name::<T>(),
            core::any::type_name::<U>(),
            T::MAX_VARINT_BYTES + U::MAX_VARINT_BYTES
        );
    }
    if T::MAX_VARINT_BYTES <= 5 && U::MAX_VARINT_BYTES <= 5 {
        // This will work with our lookup table, use that version
        return decode_two_u32_unsafe(bytes);
    }
    let b = _mm_loadu_si128(bytes as *const __m128i);
    // First find where the boundaries are: movemask collects each byte's MSB
    // (continuation bit) into a scalar bitmask.
    let bitmask = _mm_movemask_epi8(b) as u32;
    // Find the number of bytes taken up by each varint
    let bm_not = !bitmask;
    let first_len = bm_not.trailing_zeros() + 1; // should compile to bsf or tzcnt
    let bm_not_2 = bm_not >> first_len;
    let second_len = bm_not_2.trailing_zeros() + 1;
    // Mask out the first varint's bytes with a "byte index < first_len" compare.
    let ascend = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
    let first_len_vec = _mm_set1_epi8(first_len as i8);
    let first_mask = _mm_cmplt_epi8(ascend, first_len_vec);
    let first = _mm_and_si128(b, first_mask);
    // Shuffle the second varint down to offset 0, then mask to its length.
    let second_shuf = _mm_add_epi8(ascend, first_len_vec);
    let second_shuffled = _mm_shuffle_epi8(b, second_shuf);
    let second_mask = _mm_cmplt_epi8(ascend, _mm_set1_epi8(second_len as i8));
    let second = _mm_and_si128(second_shuffled, second_mask);
    let first_num;
    let second_num;
    // Only use "turbo" mode if the numbers fit in 64-bit lanes
    let should_turbo = T::MAX_VARINT_BYTES <= 8
        && U::MAX_VARINT_BYTES <= 8
        && cfg!(not(all(target_feature = "bmi2", very_fast_pdep)));
    if should_turbo {
        // const, so optimized out
        // Pack both varints into one vector (first in the low lane, second in the high lane)
        // and strip the continuation bits for both at once.
        let comb = _mm_or_si128(first, _mm_bslli_si128(second, 8));
        let x = if T::MAX_VARINT_BYTES <= 2 && U::MAX_VARINT_BYTES <= 2 {
            dual_u8_stage2(comb)
        } else if T::MAX_VARINT_BYTES <= 3 && U::MAX_VARINT_BYTES <= 3 {
            dual_u16_stage2(comb)
        } else {
            dual_u32_stage2(comb)
        };
        let x: [u32; 4] = core::mem::transmute(x);
        // _mm_extract_epi32 requires SSE4.1
        first_num = T::cast_u32(x[0]);
        second_num = U::cast_u32(x[2]);
    } else {
        first_num = T::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(first));
        second_num = U::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(second));
    }
    (first_num, second_num, first_len as u8, second_len as u8)
}
/// Lookup-table fast path for [`decode_two_unsafe`] when both targets fit in 5
/// encoded bytes (i.e. u32 or smaller).
///
/// # Safety
/// Same as `decode_two_unsafe`: at least 16 readable bytes after the pointer.
#[inline]
#[cfg(any(target_feature = "ssse3", doc))]
unsafe fn decode_two_u32_unsafe<T: VarIntTarget, U: VarIntTarget>(
    bytes: *const u8,
) -> (T, U, u8, u8) {
    let b = _mm_loadu_si128(bytes as *const __m128i);
    // Get the movemask and mask out irrelevant parts (only the first 10 bytes
    // can belong to two <=5-byte varints).
    let bitmask = _mm_movemask_epi8(b) as u32 & 0b1111111111;
    // Use lookup table to get the shuffle mask plus both lengths in one fetch.
    let (lookup, first_len, second_len) =
        *lookup::LOOKUP_DOUBLE_STEP1.get_unchecked(bitmask as usize);
    let shuf = *lookup::LOOKUP_DOUBLE_VEC.get_unchecked(lookup as usize);
    // After the shuffle, the first varint sits in the low 64-bit lane and the
    // second in the high lane.
    let comb = _mm_shuffle_epi8(b, shuf);
    let first_num;
    let second_num;
    // Only use "turbo" mode if PDEP/PEXT are not faster
    let should_turbo = cfg!(not(all(target_feature = "bmi2", very_fast_pdep)));
    if should_turbo {
        // const, so optimized out
        let x = if T::MAX_VARINT_BYTES <= 2 && U::MAX_VARINT_BYTES <= 2 {
            dual_u8_stage2(comb)
        } else if T::MAX_VARINT_BYTES <= 3 && U::MAX_VARINT_BYTES <= 3 {
            dual_u16_stage2(comb)
        } else {
            dual_u32_stage2(comb)
        };
        let x: [u32; 4] = core::mem::transmute(x);
        // _mm_extract_epi32 requires SSE4.1
        first_num = T::cast_u32(x[0]);
        second_num = U::cast_u32(x[2]);
    } else {
        first_num = T::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(comb));
        second_num = U::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(_mm_bsrli_si128(
            comb, 8,
        )));
    }
    (first_num, second_num, first_len, second_len)
}
/// Second decode stage for a pair of <=2-byte varints: strips the continuation
/// bit from each 64-bit lane by shifting the upper 7-bit group into place.
#[inline(always)]
unsafe fn dual_u8_stage2(comb: __m128i) -> __m128i {
    _mm_or_si128(
        // low 7 bits of byte 0
        _mm_and_si128(comb, _mm_set_epi64x(0x000000000000007f, 0x000000000000007f)),
        // lowest bit of byte 1, shifted down past the removed MSB
        _mm_srli_epi64(
            _mm_and_si128(comb, _mm_set_epi64x(0x0000000000000100, 0x0000000000000100)),
            1,
        ),
    )
}
/// Second decode stage for a pair of <=3-byte varints: recombines the three
/// 7-bit groups in each 64-bit lane, removing the continuation bits.
#[inline(always)]
unsafe fn dual_u16_stage2(comb: __m128i) -> __m128i {
    _mm_or_si128(
        _mm_or_si128(
            // bits 0-6 from byte 0 (already in place)
            _mm_and_si128(comb, _mm_set_epi64x(0x000000000000007f, 0x000000000000007f)),
            // bits from byte 2, shifted down by the two removed MSBs
            _mm_srli_epi64(
                _mm_and_si128(comb, _mm_set_epi64x(0x0000000000030000, 0x0000000000030000)),
                2,
            ),
        ),
        // bits from byte 1, shifted down by one removed MSB
        _mm_srli_epi64(
            _mm_and_si128(comb, _mm_set_epi64x(0x0000000000007f00, 0x0000000000007f00)),
            1,
        ),
    )
}
/// Second decode stage for a pair of <=5-byte varints: recombines the five
/// 7-bit groups in each 64-bit lane, removing the continuation bits. Each byte
/// k is shifted right by k (the number of MSBs removed below it).
#[inline(always)]
unsafe fn dual_u32_stage2(comb: __m128i) -> __m128i {
    _mm_or_si128(
        _mm_or_si128(
            _mm_and_si128(comb, _mm_set_epi64x(0x000000000000007f, 0x000000000000007f)),
            _mm_srli_epi64(
                _mm_and_si128(comb, _mm_set_epi64x(0x0000000f00000000, 0x0000000f00000000)),
                4,
            ),
        ),
        _mm_or_si128(
            _mm_or_si128(
                _mm_srli_epi64(
                    _mm_and_si128(comb, _mm_set_epi64x(0x000000007f000000, 0x000000007f000000)),
                    3,
                ),
                _mm_srli_epi64(
                    _mm_and_si128(comb, _mm_set_epi64x(0x00000000007f0000, 0x00000000007f0000)),
                    2,
                ),
            ),
            _mm_srli_epi64(
                _mm_and_si128(comb, _mm_set_epi64x(0x0000000000007f00, 0x0000000000007f00)),
                1,
            ),
        ),
    )
}
/// **Experimental. May have relatively poor performance.** Decode two adjacent varints
/// simultaneously from the input pointer. Requires AVX2. Allows for decoding a pair of `u64`
/// values. For smaller values, the non-wide variation of this function will probably be faster.
///
/// Returns a tuple containing the two decoded values and the two lengths of bytes read for each
/// value.
///
/// # Safety
/// There must be at least 32 bytes of allocated memory after the beginning of the pointer.
/// Otherwise, there may be undefined behavior. Calling code should ensure that AVX2 is supported
/// before referencing this function.
#[inline]
#[cfg(any(target_feature = "avx2", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "avx2")))]
pub unsafe fn decode_two_wide_unsafe<T: VarIntTarget, U: VarIntTarget>(
    bytes: *const u8,
) -> (T, U, u8, u8) {
    let b = _mm256_loadu_si256(bytes as *const __m256i);
    // Get the most significant bits (the varint continuation bits)
    let bitmask = _mm256_movemask_epi8(b) as u32;
    // Find the number of bytes taken up by each varint
    let bm_not = !bitmask;
    let first_len = bm_not.trailing_zeros() + 1; // should compile to bsf or tzcnt
    let bm_not_2 = bm_not >> first_len;
    let second_len = bm_not_2.trailing_zeros() + 1;
    // Create and parse vector consisting solely of the first varint
    let ascend = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
    let first_mask = _mm_cmplt_epi8(ascend, _mm_set1_epi8(first_len as i8));
    let first = _mm_and_si128(_mm256_extracti128_si256(b, 0), first_mask);
    // The second is much more tricky.
    let shuf_gen = _mm256_setr_epi8(
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
        12, 13, 14, 15,
    );
    // Rearrange each 128-bit lane such that ORing them together results in the window of data we want)
    // (_mm256_shuffle_epi8 cannot move bytes across 128-bit lanes, hence the two-lane trick.)
    let shuf_add = _mm256_set_m128i(
        _mm_set1_epi8(-(16i8 - first_len as i8)),
        _mm_set1_epi8(first_len as i8),
    );
    let shuf_added = _mm256_add_epi8(shuf_gen, shuf_add);
    // Shuffle indices with the high bit set produce zero bytes, which discards
    // out-of-window positions.
    let shuf = _mm256_or_si256(
        shuf_added,
        _mm256_cmpgt_epi8(shuf_added, _mm256_set1_epi8(15)), // TODO: Is this really necessary?
    );
    let shuffled = _mm256_shuffle_epi8(b, shuf);
    // OR the halves together, and now we have a view of the second varint
    let second_shifted = _mm_or_si128(
        _mm256_extracti128_si256(shuffled, 0),
        _mm256_extracti128_si256(shuffled, 1),
    );
    let second_mask = _mm_cmplt_epi8(ascend, _mm_set1_epi8(second_len as i8));
    let second = _mm_and_si128(second_shifted, second_mask);
    let first_num;
    let second_num;
    // PEXT on the two halves is still slower, at least on Coffee Lake and Broadwell
    let should_turbo = true;
    if should_turbo {
        // Decode the two halves in parallel using SSE2
        // Interleave both varints: low 8 bytes of each in comb_lo, high bytes in comb_hi.
        let comb_lo = _mm_unpacklo_epi64(first, second);
        // Recombine the 7-bit groups of bytes 0-7; byte k shifts right by k.
        let x_lo = _mm_or_si128(
            _mm_or_si128(
                _mm_or_si128(
                    _mm_and_si128(comb_lo, _mm_set1_epi64x(0x000000000000007f)),
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x7f00000000000000)),
                        7,
                    ),
                ),
                _mm_or_si128(
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x007f000000000000)),
                        6,
                    ),
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x00007f0000000000)),
                        5,
                    ),
                ),
            ),
            _mm_or_si128(
                _mm_or_si128(
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x0000007f00000000)),
                        4,
                    ),
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x000000007f000000)),
                        3,
                    ),
                ),
                _mm_or_si128(
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x00000000007f0000)),
                        2,
                    ),
                    _mm_srli_epi64(
                        _mm_and_si128(comb_lo, _mm_set1_epi64x(0x0000000000007f00)),
                        1,
                    ),
                ),
            ),
        );
        // Bytes 8-9 carry the top bits of a 10-byte varint; shift them up into
        // the high end of the 64-bit result.
        let comb_hi = _mm_unpackhi_epi64(first, second);
        let x_hi = _mm_or_si128(
            _mm_slli_epi64(
                _mm_and_si128(comb_hi, _mm_set1_epi64x(0x0000000000000100)),
                55,
            ),
            _mm_slli_epi64(
                _mm_and_si128(comb_hi, _mm_set1_epi64x(0x000000000000007f)),
                56,
            ),
        );
        let x = _mm_or_si128(x_lo, x_hi);
        first_num = T::cast_u64(_mm_extract_epi64(x, 0) as u64);
        second_num = U::cast_u64(_mm_extract_epi64(x, 1) as u64);
    } else {
        first_num = T::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(first));
        second_num = U::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(second));
    }
    (first_num, second_num, first_len as u8, second_len as u8)
}
/// Decodes four adjacent varints simultaneously. Target types must fit within 16 bytes when varint
/// encoded. Requires SSSE3 support.
///
/// Returns a tuple containing the four encoded values, followed by the number of bytes read for
/// each encoded value, followed by a boolean indicator for whether the length values may be
/// incorrect due to overflow.
///
/// For best performance, ensure each target type is `u16` or smaller.
///
/// # Safety
/// There must be at least 16 bytes of allocated memory after the start of the pointer. Otherwise,
/// there may be undefined behavior. Any data after the four varints are ignored. Truncated values
/// will be returned if a varint exceeds the target type's limit.
#[inline]
#[cfg(any(target_feature = "ssse3", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "ssse3")))]
pub unsafe fn decode_four_unsafe<
    T: VarIntTarget,
    U: VarIntTarget,
    V: VarIntTarget,
    W: VarIntTarget,
>(
    bytes: *const u8,
) -> (T, U, V, W, u8, u8, u8, u8, bool) {
    if T::MAX_VARINT_BYTES + U::MAX_VARINT_BYTES + V::MAX_VARINT_BYTES + W::MAX_VARINT_BYTES > 16 {
        // check will be eliminated at compile time
        panic!(
            "exceeded length limit: cannot decode {}, {}, {}, and {}, total length {} exceeds 16 bytes",
            core::any::type_name::<T>(),
            core::any::type_name::<U>(),
            core::any::type_name::<V>(),
            core::any::type_name::<W>(),
            T::MAX_VARINT_BYTES + U::MAX_VARINT_BYTES + V::MAX_VARINT_BYTES + W::MAX_VARINT_BYTES
        );
    }
    if T::MAX_VARINT_BYTES <= 3
        && U::MAX_VARINT_BYTES <= 3
        && V::MAX_VARINT_BYTES <= 3
        && W::MAX_VARINT_BYTES <= 3
    {
        // All four fit the u16 lookup-table fast path.
        return decode_four_u16_unsafe(bytes);
    }
    let b = _mm_loadu_si128(bytes as *const __m128i);
    // First find where the boundaries are: movemask collects the continuation bits.
    let bitmask = _mm_movemask_epi8(b) as u32;
    // Find the number of bytes taken up by each varint
    let bm_not = !bitmask;
    let first_len = bm_not.trailing_zeros() + 1; // should compile to bsf or tzcnt
    let bm_not_2 = bm_not >> first_len;
    let second_len = bm_not_2.trailing_zeros() + 1;
    let bm_not_3 = bm_not_2 >> second_len;
    let third_len = bm_not_3.trailing_zeros() + 1;
    let bm_not_4 = bm_not_3 >> third_len;
    let fourth_len = bm_not_4.trailing_zeros() + 1;
    // Peel off each varint in turn: mask the current prefix, then shuffle the
    // remainder down to offset 0 for the next one.
    let ascend = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
    let first_len_vec = _mm_set1_epi8(first_len as i8);
    let first_mask = _mm_cmplt_epi8(ascend, first_len_vec);
    let first = _mm_and_si128(b, first_mask);
    let second_shuf = _mm_add_epi8(ascend, first_len_vec);
    let second_shuffled = _mm_shuffle_epi8(b, second_shuf);
    let second_len_vec = _mm_set1_epi8(second_len as i8);
    let second_mask = _mm_cmplt_epi8(ascend, second_len_vec);
    let second = _mm_and_si128(second_shuffled, second_mask);
    let third_shuf = _mm_add_epi8(ascend, second_len_vec);
    let third_shuffled = _mm_shuffle_epi8(second_shuffled, third_shuf);
    let third_len_vec = _mm_set1_epi8(third_len as i8);
    let third_mask = _mm_cmplt_epi8(ascend, third_len_vec);
    let third = _mm_and_si128(third_shuffled, third_mask);
    let fourth_shuf = _mm_add_epi8(ascend, third_len_vec);
    let fourth_shuffled = _mm_shuffle_epi8(third_shuffled, fourth_shuf);
    let fourth_len_vec = _mm_set1_epi8(fourth_len as i8);
    let fourth_mask = _mm_cmplt_epi8(ascend, fourth_len_vec);
    let fourth = _mm_and_si128(fourth_shuffled, fourth_mask);
    let first_num;
    let second_num;
    let third_num;
    let fourth_num;
    // Only use "turbo" mode if the numbers fit in 64-bit lanes
    let should_turbo = T::MAX_VARINT_BYTES <= 4
        && U::MAX_VARINT_BYTES <= 4
        && V::MAX_VARINT_BYTES <= 4
        && W::MAX_VARINT_BYTES <= 4
        // PDEP/PEXT are still a little faster here
        && cfg!(not(all(
            target_feature = "bmi2",
            very_fast_pdep
        )));
    if should_turbo {
        // const, so optimized out
        // Pack all four varints into one vector, one per 32-bit lane.
        let comb = _mm_or_si128(
            _mm_or_si128(first, _mm_bslli_si128(second, 4)),
            _mm_or_si128(_mm_bslli_si128(third, 8), _mm_bslli_si128(fourth, 12)),
        );
        // Strip the continuation bits across all four lanes at once.
        let x = if T::MAX_VARINT_BYTES <= 2
            && U::MAX_VARINT_BYTES <= 2
            && V::MAX_VARINT_BYTES <= 2
            && W::MAX_VARINT_BYTES <= 2
        {
            _mm_or_si128(
                _mm_and_si128(comb, _mm_set1_epi32(0x0000007f)),
                _mm_srli_epi32(_mm_and_si128(comb, _mm_set1_epi32(0x00000100)), 1),
            )
        } else {
            _mm_or_si128(
                _mm_or_si128(
                    _mm_and_si128(comb, _mm_set1_epi32(0x0000007f)),
                    _mm_srli_epi32(_mm_and_si128(comb, _mm_set1_epi32(0x00030000)), 2),
                ),
                _mm_srli_epi32(_mm_and_si128(comb, _mm_set1_epi32(0x00007f00)), 1),
            )
        };
        let x: [u32; 4] = core::mem::transmute(x);
        // _mm_extract_epi32 requires SSE4.1
        first_num = T::cast_u32(x[0]);
        second_num = U::cast_u32(x[1]);
        third_num = V::cast_u32(x[2]);
        fourth_num = W::cast_u32(x[3]);
    } else {
        first_num = T::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(first));
        second_num = U::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(second));
        third_num = V::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(third));
        fourth_num = W::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(fourth));
    }
    (
        first_num,
        second_num,
        third_num,
        fourth_num,
        first_len as u8,
        second_len as u8,
        third_len as u8,
        fourth_len as u8,
        // This path computes exact lengths, so the overflow indicator is always false.
        false,
    )
}
/// Lookup-table fast path for [`decode_four_unsafe`] when all four targets fit
/// in 3 encoded bytes (i.e. u16 or smaller).
///
/// # Safety
/// Same as `decode_four_unsafe`: at least 16 readable bytes after the pointer.
#[inline]
#[cfg(any(target_feature = "ssse3", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "ssse3")))]
unsafe fn decode_four_u16_unsafe<
    T: VarIntTarget,
    U: VarIntTarget,
    V: VarIntTarget,
    W: VarIntTarget,
>(
    bytes: *const u8,
) -> (T, U, V, W, u8, u8, u8, u8, bool) {
    let b = _mm_loadu_si128(bytes as *const __m128i);
    // First find where the boundaries are
    let bitmask = _mm_movemask_epi8(b) as u32;
    // Use the lookup table (only the low 12 mask bits matter for four <=3-byte varints)
    let lookup = *lookup::LOOKUP_QUAD_STEP1.get_unchecked((bitmask & 0b111111111111) as usize);
    // Fetch the shuffle mask
    let shuf = *lookup::LOOKUP_QUAD_VEC.get_unchecked((lookup & 0b11111111) as usize);
    // Extract the lengths while we're waiting (packed as 4-bit nibbles in the lookup word)
    let first_len = (lookup >> 8) & 0b1111;
    let second_len = (lookup >> 12) & 0b1111;
    let third_len = (lookup >> 16) & 0b1111;
    let fourth_len = (lookup >> 20) & 0b1111;
    // One varint per 32-bit lane after the shuffle.
    let comb = _mm_shuffle_epi8(b, shuf);
    // Top bit of the lookup word flags a possible overflow / invalid layout.
    let invalid = lookup >> 31;
    let first_num;
    let second_num;
    let third_num;
    let fourth_num;
    // PDEP/PEXT may be still a little faster here
    let should_turbo = cfg!(not(all(target_feature = "bmi2", very_fast_pdep)));
    if should_turbo {
        // const, so optimized out
        let x = if T::MAX_VARINT_BYTES <= 2
            && U::MAX_VARINT_BYTES <= 2
            && V::MAX_VARINT_BYTES <= 2
            && W::MAX_VARINT_BYTES <= 2
        {
            _mm_or_si128(
                _mm_and_si128(comb, _mm_set1_epi32(0x0000007f)),
                _mm_srli_epi32(_mm_and_si128(comb, _mm_set1_epi32(0x00000100)), 1),
            )
        } else {
            _mm_or_si128(
                _mm_or_si128(
                    _mm_and_si128(comb, _mm_set1_epi32(0x0000007f)),
                    _mm_srli_epi32(_mm_and_si128(comb, _mm_set1_epi32(0x00030000)), 2),
                ),
                _mm_srli_epi32(_mm_and_si128(comb, _mm_set1_epi32(0x00007f00)), 1),
            )
        };
        let x: [u32; 4] = core::mem::transmute(x);
        // _mm_extract_epi32 requires SSE4.1
        first_num = T::cast_u32(x[0]);
        second_num = U::cast_u32(x[1]);
        third_num = V::cast_u32(x[2]);
        fourth_num = W::cast_u32(x[3]);
    } else {
        first_num = T::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(comb));
        second_num = U::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(_mm_bsrli_si128(
            comb, 4,
        )));
        third_num = V::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(_mm_bsrli_si128(
            comb, 8,
        )));
        fourth_num = W::vector_to_num(core::mem::transmute::<__m128i, [u8; 16]>(_mm_bsrli_si128(
            comb, 12,
        )));
    }
    (
        first_num,
        second_num,
        third_num,
        fourth_num,
        first_len as u8,
        second_len as u8,
        third_len as u8,
        fourth_len as u8,
        invalid != 0,
    )
}
/// Decodes four adjacent varints into u8's simultaneously. Requires SSSE3 support. **Does not
/// perform overflow checking and may produce incorrect output.**
///
/// Returns a tuple containing an array of decoded values, and the total number of bytes read.
///
/// # Safety
/// There must be at least 16 bytes of allocated memory after the start of the pointer. Otherwise,
/// there may be undefined behavior. Truncated values will be returned if the varint represents
/// a number larger than what a u8 can handle.
///
/// This function does not perform overflow checking. If a varint exceeds two bytes in encoded
/// length, it may be interpreted as multiple varints, and the reported length of data read will
/// be shorter than expected. Caution is encouraged when using this function.
#[inline]
#[cfg(any(target_feature = "ssse3", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "ssse3")))]
pub unsafe fn decode_eight_u8_unsafe(bytes: *const u8) -> ([u8; 8], u8) {
let b = _mm_loadu_si128(bytes as *const __m128i);
let ones = _mm_set1_epi8(1);
let mut lens = _mm_setzero_si128();
let mut shift = _mm_and_si128(_mm_cmplt_epi8(b, _mm_setzero_si128()), ones);
let ascend = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
let asc_one = _mm_setr_epi8(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16);
let mut window_small = _mm_setr_epi8(1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
let broadcast_mask = _mm_setzero_si128();
// if the first byte is zero, shift down by 1, if the first byte is one, shift down by 2
// 0
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 1
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 2
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 3
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 4
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 5
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 6
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
window_small = _mm_bslli_si128(window_small, 1);
// 7
let first_byte = _mm_shuffle_epi8(shift, broadcast_mask);
// shift = _mm_shuffle_epi8(shift, _mm_add_epi8(asc_one, first_byte));
lens = _mm_or_si128(lens, _mm_and_si128(first_byte, window_small));
// window_small = _mm_bslli_si128(window_small, 1);
// Construct the shuffle
let lens_invert = _mm_sub_epi8(ones, lens);
let mut cumul_lens = _mm_add_epi8(lens_invert, _mm_bslli_si128(lens_invert, 1));
cumul_lens = _mm_add_epi8(cumul_lens, _mm_bslli_si128(cumul_lens, 2));
cumul_lens = _mm_add_epi8(cumul_lens, _mm_bslli_si128(cumul_lens, 4));
cumul_lens = _mm_add_epi8(cumul_lens, _mm_bslli_si128(cumul_lens, 8));
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | true |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/src/encode/mod.rs | src/encode/mod.rs | #[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
use crate::num::{SignedVarIntTarget, VarIntTarget};
/// Encodes a single number to a varint. Requires SSE2 support.
///
/// This is the safe entry point; it simply forwards to [`encode_unsafe`],
/// which has no safety preconditions on its input value.
///
/// Produces a tuple, with the encoded data followed by the number of bytes used to encode the
/// varint.
///
/// # Examples
/// ```
/// use varint_simd::encode;
///
/// let encoded = encode::<u32>(1337);
/// assert_eq!(encoded, ([185, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 2));
/// ```
#[inline]
#[cfg(any(target_feature = "sse2", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "sse2")))]
pub fn encode<T: VarIntTarget>(num: T) -> ([u8; 16], u8) {
    // SAFETY: `encode_unsafe` is sound for any input value; the `unsafe`
    // marker only reflects its internal use of intrinsics.
    unsafe { encode_unsafe(num) }
}
/// Convenience function for encoding a single signed integer in ZigZag format to a varint.
/// See also: [`encode`]
///
/// Maps the signed value through the ZigZag transform first, then varint-encodes
/// the resulting unsigned value.
///
/// # Examples
/// ```
/// use varint_simd::encode_zigzag;
///
/// let encoded = encode_zigzag::<i32>(-20);
/// assert_eq!(encoded, ([39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 1));
/// ```
#[inline]
#[cfg(any(target_feature = "sse2", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "sse2")))]
pub fn encode_zigzag<T: SignedVarIntTarget>(num: T) -> ([u8; 16], u8) {
    let zigzagged = T::Unsigned::zigzag(num);
    // SAFETY: `encode_unsafe` is sound for any input value.
    unsafe { encode_unsafe(zigzagged) }
}
/// Encodes a single number to a varint, and writes the resulting data to the slice. Returns the
/// number of bytes written (maximum 10 bytes).
///
/// See also: [`encode`]
///
/// **Panics:** if the slice is too small to contain the varint.
#[inline]
#[cfg(any(target_feature = "sse2", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "sse2")))]
pub fn encode_to_slice<T: VarIntTarget>(num: T, slice: &mut [u8]) -> u8 {
    let (encoded, len) = encode(num);
    let n = len as usize;
    // Copy only the meaningful prefix; panics (slice index) if `slice` is shorter.
    slice[..n].copy_from_slice(&encoded[..n]);
    len
}
/// Encodes a single number to a varint. Requires SSE2 support.
///
/// Produces a tuple, with the encoded data followed by the number of bytes used to encode the
/// varint.
///
/// # Safety
/// This should not have any unsafe behavior with any input. However, it still calls a large number
/// of unsafe functions.
#[inline]
#[cfg(any(target_feature = "sse2", doc))]
#[cfg_attr(rustc_nightly, doc(cfg(target_feature = "sse2")))]
pub unsafe fn encode_unsafe<T: VarIntTarget>(num: T) -> ([u8; 16], u8) {
    if T::MAX_VARINT_BYTES <= 5 {
        // Scalar path: a <=5-byte varint fits in one u64 after 7-bit spreading.
        // We could kick off a lzcnt here on the original number but that makes the math complicated and slow
        let stage1 = num.num_to_scalar_stage1();
        // We could OR the data with 1 to avoid undefined behavior, but for some reason it's still faster to take the branch
        let leading = stage1.leading_zeros();
        // Count how many high bytes of the spread value are zero to derive the encoded length.
        let unused_bytes = (leading - 1) / 8;
        let bytes_needed = 8 - unused_bytes;
        // set all but the last MSBs
        let msbs = 0x8080808080808080;
        let msbmask = 0xFFFFFFFFFFFFFFFF >> ((8 - bytes_needed + 1) * 8 - 1);
        let merged = stage1 | (msbs & msbmask);
        (
            core::mem::transmute::<[u64; 2], [u8; 16]>([merged, 0]),
            bytes_needed as u8,
        )
    } else {
        // Break the number into 7-bit parts and spread them out into a vector
        let stage1: __m128i = core::mem::transmute(num.num_to_vector_stage1());
        // Create a mask for where there exist values
        // This signed comparison works because all MSBs should be cleared at this point
        // Also handle the special case when num == 0
        let minimum = _mm_set_epi8(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xffu8 as i8);
        let exists = _mm_or_si128(_mm_cmpgt_epi8(stage1, _mm_setzero_si128()), minimum);
        let bits = _mm_movemask_epi8(exists);
        // Count the number of bytes used
        let bytes = 32 - bits.leading_zeros() as u8; // lzcnt on supported CPUs
        // TODO: Compiler emits an unnecessary branch here when using bsr/bsl fallback
        // Fill that many bytes into a vector
        let ascend = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
        let mask = _mm_cmplt_epi8(ascend, _mm_set1_epi8(bytes as i8));
        // Shift it down 1 byte so the last MSB is the only one set, and make sure only the MSB is set
        let shift = _mm_bsrli_si128(mask, 1);
        let msbmask = _mm_and_si128(shift, _mm_set1_epi8(128u8 as i8));
        // Merge the MSB bits into the vector
        let merged = _mm_or_si128(stage1, msbmask);
        (core::mem::transmute::<__m128i, [u8; 16]>(merged), bytes)
    }
}
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/fuzz/fuzz_targets/fuzz_target_1.rs | fuzz/fuzz_targets/fuzz_target_1.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
use integer_encoding::VarInt;
// Differential fuzz: compare `decode_unsafe` against the `integer-encoding`
// crate's reference decoder on the same 16-byte input. Only inputs the
// reference accepts are checked, since the SIMD decoder does not validate.
fuzz_target!(|data: [u8; 16]| {
    let reference = u64::decode_var(&data);
    let simd = unsafe { varint_simd::decode_unsafe(data.as_ptr()) };
    if let Some(reference) = reference {
        // Decoded value and consumed length must both match the reference.
        assert_eq!(reference.0, simd.0);
        assert_eq!(reference.1, simd.1);
    }
});
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/fuzz/fuzz_targets/fuzz_target_2.rs | fuzz/fuzz_targets/fuzz_target_2.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
use integer_encoding::VarInt;
// Differential fuzz: encode an arbitrary u64 with both the reference encoder
// and `encode_unsafe`, and require identical length and byte output.
fuzz_target!(|data: u64| {
    let mut reference_out = [0u8; 16];
    let reference_size = u64::encode_var(data, &mut reference_out);
    let (simd_out, simd_size) = unsafe { varint_simd::encode_unsafe(data) };
    assert_eq!(reference_size, simd_size as usize);
    assert_eq!(reference_out[0..reference_size], simd_out[0..reference_size]);
});
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/fuzz/fuzz_targets/fuzz_target_3.rs | fuzz/fuzz_targets/fuzz_target_3.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
use integer_encoding::VarInt;
// Differential fuzz: check that `decode_len_unsafe` reports the same consumed
// length as the reference decoder, for inputs the reference accepts.
fuzz_target!(|data: [u8; 16]| {
    let reference = u64::decode_var(&data);
    let len = unsafe { varint_simd::decode_len_unsafe::<u64>(data.as_ptr()) };
    if let Some(reference) = reference {
        assert_eq!(reference.1, len);
    }
});
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/benches/varint_bench/prost_varint.rs | benches/varint_bench/prost_varint.rs | use bytes::{Buf, BufMut};
use std::cmp::min;
/// Encodes an integer value into LEB128 variable length format, and writes it to the buffer.
/// The buffer must have enough remaining space (maximum 10 bytes).
#[inline]
pub fn encode_varint<B>(mut value: u64, buf: &mut B)
where
    B: BufMut,
{
    // Emit seven payload bits per byte, setting the continuation (MSB) bit on
    // every byte except the final one.
    while value >= 0x80 {
        buf.put_u8(((value & 0x7F) | 0x80) as u8);
        value >>= 7;
    }
    // Terminator byte: remaining value is < 0x80, so its MSB is clear.
    buf.put_u8(value as u8);
}
/// Decodes a LEB128-encoded variable length integer from the buffer.
///
/// Dispatches between three paths: a single-byte fast path, an unrolled slice
/// decoder when the chunk is known to contain a full varint, and a slow
/// byte-at-a-time fallback otherwise.
#[inline]
pub fn decode_varint<B>(buf: &mut B) -> Result<u64, ()>
where
    B: Buf,
{
    let bytes = buf.chunk();
    let len = bytes.len();
    if len == 0 {
        return Err(());
    }
    let byte = bytes[0];
    if byte < 0x80 {
        // Single-byte varint: the continuation bit is clear.
        buf.advance(1);
        Ok(u64::from(byte))
    } else if len > 10 || bytes[len - 1] < 0x80 {
        // The chunk is long enough (or ends on a terminator byte) to guarantee
        // `decode_varint_slice`'s precondition holds.
        let (value, advance) = decode_varint_slice(bytes)?;
        buf.advance(advance);
        Ok(value)
    } else {
        // The varint may straddle a chunk boundary; fall back to the slow path.
        decode_varint_slow(buf)
    }
}
/// Decodes a LEB128-encoded variable length integer from the slice, returning the value and the
/// number of bytes read.
///
/// Based loosely on [`ReadVarint64FromArray`][1] with a varint overflow check from
/// [`ConsumeVarint`][2].
///
/// ## Safety
///
/// The caller must ensure that `bytes` is non-empty and either `bytes.len() >= 10` or the last
/// element in bytes is < `0x80`.
///
/// [1]: https://github.com/google/protobuf/blob/3.3.x/src/google/protobuf/io/coded_stream.cc#L365-L406
/// [2]: https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/encoding/protowire/wire.go#L358
#[inline]
fn decode_varint_slice(bytes: &[u8]) -> Result<(u64, usize), ()> {
    // Fully unrolled varint decoding loop. Splitting into 32-bit pieces gives better performance.
    // Use assertions to ensure memory safety, but it should always be optimized after inline.
    assert!(!bytes.is_empty());
    assert!(bytes.len() > 10 || bytes[bytes.len() - 1] < 0x80);

    let mut b: u8;
    let mut part0: u32;
    // SAFETY: the asserts above guarantee that whenever index i (< 10) is
    // reached without having seen a terminator byte (< 0x80), index i is
    // still in bounds: either len > 10, or the terminator at len-1 has not
    // been passed yet. This argument covers every get_unchecked below.
    b = unsafe { *bytes.get_unchecked(0) };
    part0 = u32::from(b);
    if b < 0x80 {
        return Ok((u64::from(part0), 1));
    };
    // Each `-= 0x80 << k` cancels the continuation bit folded in above.
    part0 -= 0x80;
    b = unsafe { *bytes.get_unchecked(1) };
    part0 += u32::from(b) << 7;
    if b < 0x80 {
        return Ok((u64::from(part0), 2));
    };
    part0 -= 0x80 << 7;
    b = unsafe { *bytes.get_unchecked(2) };
    part0 += u32::from(b) << 14;
    if b < 0x80 {
        return Ok((u64::from(part0), 3));
    };
    part0 -= 0x80 << 14;
    b = unsafe { *bytes.get_unchecked(3) };
    part0 += u32::from(b) << 21;
    if b < 0x80 {
        return Ok((u64::from(part0), 4));
    };
    part0 -= 0x80 << 21;
    // part0 now holds result bits 0..27.
    let value = u64::from(part0);

    let mut part1: u32;
    b = unsafe { *bytes.get_unchecked(4) };
    part1 = u32::from(b);
    if b < 0x80 {
        return Ok((value + (u64::from(part1) << 28), 5));
    };
    part1 -= 0x80;
    b = unsafe { *bytes.get_unchecked(5) };
    part1 += u32::from(b) << 7;
    if b < 0x80 {
        return Ok((value + (u64::from(part1) << 28), 6));
    };
    part1 -= 0x80 << 7;
    b = unsafe { *bytes.get_unchecked(6) };
    part1 += u32::from(b) << 14;
    if b < 0x80 {
        return Ok((value + (u64::from(part1) << 28), 7));
    };
    part1 -= 0x80 << 14;
    b = unsafe { *bytes.get_unchecked(7) };
    part1 += u32::from(b) << 21;
    if b < 0x80 {
        return Ok((value + (u64::from(part1) << 28), 8));
    };
    part1 -= 0x80 << 21;
    // part1 holds result bits 28..55.
    let value = value + ((u64::from(part1)) << 28);

    let mut part2: u32;
    b = unsafe { *bytes.get_unchecked(8) };
    part2 = u32::from(b);
    if b < 0x80 {
        return Ok((value + (u64::from(part2) << 56), 9));
    };
    part2 -= 0x80;
    b = unsafe { *bytes.get_unchecked(9) };
    part2 += u32::from(b) << 7;
    // Check for u64::MAX overflow. See [`ConsumeVarint`][1] for details.
    // [1]: https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/encoding/protowire/wire.go#L358
    if b < 0x02 {
        return Ok((value + (u64::from(part2) << 56), 10));
    };

    // We have overrun the maximum size of a varint (10 bytes) or the final byte caused an overflow.
    // Assume the data is corrupt.
    Err(())
}
/// Decodes a LEB128-encoded variable length integer from the buffer, advancing the buffer as
/// necessary.
///
/// Contains a varint overflow check from [`ConsumeVarint`][1].
///
/// [1]: https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/encoding/protowire/wire.go#L358
#[inline(never)]
#[cold]
fn decode_varint_slow<B>(buf: &mut B) -> Result<u64, ()>
where
    B: Buf,
{
    // Byte-at-a-time fallback for varints that may straddle chunk boundaries.
    // Note: bytes examined here are consumed from `buf` even when Err is
    // returned (each get_u8 advances the buffer).
    let mut value = 0;
    for count in 0..min(10, buf.remaining()) {
        let byte = buf.get_u8();
        value |= u64::from(byte & 0x7F) << (count * 7);
        if byte <= 0x7F {
            // Check for u64::MAX overflow. See [`ConsumeVarint`][1] for details.
            // [1]: https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/encoding/protowire/wire.go#L358
            if count == 9 && byte >= 0x02 {
                return Err(());
            } else {
                return Ok(value);
            }
        }
    }

    // Ran out of input, or exceeded 10 bytes without seeing a terminator.
    Err(())
}
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/benches/varint_bench/main.rs | benches/varint_bench/main.rs | use criterion::{criterion_group, criterion_main, BatchSize, Criterion, Throughput};
use integer_encoding::VarInt;
use rand::distributions::{Distribution, Standard};
use rand::{thread_rng, Rng};
use varint_simd::{
decode,
decode_eight_u8_unsafe,
decode_four_unsafe,
decode_len,
decode_len_unsafe,
decode_two_unsafe, //decode_two_wide_unsafe,
decode_unsafe,
encode,
VarIntTarget,
};
mod leb128;
mod prost_varint;
#[inline(always)]
/// Returns a closure that produces `C` random varint-encoded values packed
/// back-to-back in a byte buffer, paired with a zeroed output vector of the
/// same element count.
///
/// The buffer is kept padded so that at least 16 readable bytes exist past
/// every value's start offset — the `*_unsafe` SIMD decoders load a full
/// 16-byte vector from the position they are given.
fn create_batched_encoded_generator<T: VarInt + Default, R: Rng, const C: usize>(
    rng: &mut R,
) -> impl FnMut() -> (Vec<u8>, Vec<T>) + '_
where
    Standard: Distribution<T>,
{
    move || {
        let mut encoded = Vec::new();
        let mut idx = 0;
        for _ in 0..C {
            // BUG FIX: the original grew the buffer only to `idx + 11` while
            // checking against `idx + 16`, under-padding relative to the
            // 16-byte SAFETY requirement stated at the decode call sites.
            // Pad to the full 16 bytes the check implies.
            if encoded.len() < idx + 16 {
                encoded.resize(idx + 16, 0);
            }
            let len = rng.gen::<T>().encode_var(&mut encoded[idx..]);
            idx += len;
        }
        (encoded, vec![Default::default(); C])
    }
}
#[inline(always)]
/// Measures the length of `C` consecutive varints using the safe
/// `decode_len` API, advancing through the encoded buffer.
fn decode_len_batched_varint_simd<T: VarIntTarget, const C: usize>(input: &mut (Vec<u8>, Vec<T>)) {
    let data = &input.0;
    let mut slice = &data[..];
    for _ in 0..C {
        // NOTE(review): this is the safe API, so the original SAFETY comment
        // here was stale; the 16-byte buffer padding (see the generator) only
        // matters for the `*_unsafe` variants.
        let len = decode_len::<T>(slice).unwrap();
        slice = &slice[len..];
    }
}
#[inline(always)]
fn decode_len_batched_varint_simd_unsafe<T: VarIntTarget, const C: usize>(
input: &mut (Vec<u8>, Vec<T>),
) {
let data = &input.0;
let mut slice = &data[..];
for _ in 0..C {
// SAFETY: the input slice should have at least 16 bytes of allocated padding at the end
let len = unsafe { decode_len_unsafe::<T>(slice.as_ptr()) };
slice = &slice[len..];
}
}
#[inline(always)]
/// Decodes `C` consecutive varints with the single-value unsafe SIMD
/// decoder, writing the results into the preallocated output vector.
fn decode_batched_varint_simd_unsafe<T: VarIntTarget, const C: usize>(
    input: &mut (Vec<u8>, Vec<T>),
) {
    let data = &input.0;
    let out = &mut input.1;

    let mut slice = &data[..];
    for i in 0..C {
        // SAFETY: the input slice should have at least 16 bytes of allocated padding at the end
        // (the generator maintains that invariant for every value offset).
        let (num, len) = unsafe { decode_unsafe::<T>(slice.as_ptr()) };
        out[i] = num;
        slice = &slice[len..];
    }
}
#[inline(always)]
fn decode_batched_varint_simd_2x_unsafe<T: VarIntTarget, const C: usize>(
input: &mut (Vec<u8>, Vec<T>),
) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..(C / 2) {
let (num1, num2, len1, len2) = unsafe { decode_two_unsafe::<T, T>(slice.as_ptr()) };
out[i * 2] = num1;
out[i * 2 + 1] = num2;
slice = &slice[((len1 + len2) as usize)..];
}
}
// #[inline(always)]
// fn decode_batched_varint_simd_2x_wide_unsafe<T: VarIntTarget, const C: usize>(
// input: &mut (Vec<u8>, Vec<T>),
// ) {
// let data = &input.0;
// let out = &mut input.1;
//
// let mut slice = &data[..];
// for i in 0..(C / 2) {
// let (num1, num2, len1, len2) = unsafe { decode_two_wide_unsafe::<T, T>(slice.as_ptr()) };
// out[i * 2] = num1;
// out[i * 2 + 1] = num2;
// slice = &slice[((len1 + len2) as usize)..];
// }
// }
#[inline(always)]
fn decode_batched_varint_simd_4x_unsafe<T: VarIntTarget, const C: usize>(
input: &mut (Vec<u8>, Vec<T>),
) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..(C / 4) {
let (num1, num2, num3, num4, len1, len2, len3, len4, _invalid) =
unsafe { decode_four_unsafe::<T, T, T, T>(slice.as_ptr()) };
out[i * 4] = num1;
out[i * 4 + 1] = num2;
out[i * 4 + 2] = num3;
out[i * 4 + 3] = num4;
slice = &slice[((len1 + len2 + len3 + len4) as usize)..];
}
}
#[inline(always)]
fn decode_batched_varint_simd_8x_u8_unsafe<const C: usize>(input: &mut (Vec<u8>, Vec<u8>)) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..(C / 8) {
let (nums, total_len) = unsafe { decode_eight_u8_unsafe(slice.as_ptr()) };
out[(i * 8)..(i * 8 + 8)].copy_from_slice(&nums);
slice = &slice[(total_len as usize)..];
}
}
#[inline(always)]
fn decode_batched_varint_simd_safe<T: VarIntTarget, const C: usize>(input: &mut (Vec<u8>, Vec<T>)) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..C {
let (num, len) = decode::<T>(slice).unwrap();
out[i] = num;
slice = &slice[len..];
}
}
#[inline(always)]
fn decode_batched_integer_encoding<T: VarInt, const C: usize>(input: &mut (Vec<u8>, Vec<T>)) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..C {
let (num, len) = T::decode_var(slice).unwrap();
out[i] = num;
slice = &slice[len..];
}
}
#[inline(always)]
/// Decodes `C` values with rustc's LEB128 reader. There is no u8 reader, so
/// values go through the u16 reader and are truncated back down.
fn decode_batched_rustc_u8<const C: usize>(input: &mut (Vec<u8>, Vec<u8>)) {
    let data = &input.0;
    let out = &mut input.1;

    let mut slice = &data[..];
    for i in 0..C {
        let (num, len) = leb128::read_u16_leb128(slice);
        // `num as u8` truncates; safe here because the generator only encodes
        // u8 values, whose decoded form always fits in u8.
        out[i] = num as u8;
        slice = &slice[len..];
    }
}
#[inline(always)]
fn decode_batched_rustc_u16<const C: usize>(input: &mut (Vec<u8>, Vec<u16>)) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..C {
let (num, len) = leb128::read_u16_leb128(slice);
out[i] = num;
slice = &slice[len..];
}
}
#[inline(always)]
fn decode_batched_rustc_u32<const C: usize>(input: &mut (Vec<u8>, Vec<u32>)) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..C {
let (num, len) = leb128::read_u32_leb128(slice);
out[i] = num;
slice = &slice[len..];
}
}
#[inline(always)]
fn decode_batched_rustc_u64<const C: usize>(input: &mut (Vec<u8>, Vec<u64>)) {
let data = &input.0;
let out = &mut input.1;
let mut slice = &data[..];
for i in 0..C {
let (num, len) = leb128::read_u64_leb128(slice);
out[i] = num;
slice = &slice[len..];
}
}
#[inline(always)]
/// Decodes `C` values using the vendored prost varint decoder.
fn decode_batched_prost<T: VarIntTarget, const C: usize>(input: &mut (Vec<u8>, Vec<T>)) {
    let data = &input.0;
    let out = &mut input.1;

    let mut slice = &data[..];
    for i in 0..C {
        // `decode_varint` takes `impl Buf`, so it advances `slice` itself —
        // no manual re-slicing is needed in this loop.
        let num = prost_varint::decode_varint(&mut slice).unwrap();
        out[i] = T::cast_u64(num);
    }
}
pub fn criterion_benchmark(c: &mut Criterion) {
let mut rng = thread_rng();
// Must be a multiple of 8
const SEQUENCE_LEN: usize = 256;
let mut group = c.benchmark_group("varint-u8/decode");
group.throughput(Throughput::Elements(SEQUENCE_LEN as u64));
group.bench_function("integer-encoding", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_integer_encoding::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("rustc", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_rustc_u8::<SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("prost-varint", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_prost::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_unsafe::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/safe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_safe::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/2x/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_2x_unsafe::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/4x/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_4x_unsafe::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/8x/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_8x_u8_unsafe::<SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.finish();
let mut group = c.benchmark_group("varint-u8/decode_len");
group.throughput(Throughput::Elements(SEQUENCE_LEN as u64));
group.bench_function("varint-simd/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_len_batched_varint_simd_unsafe::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/safe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u8, _, SEQUENCE_LEN>(&mut rng),
decode_len_batched_varint_simd::<u8, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.finish();
let mut group = c.benchmark_group("varint-u8/encode");
group.throughput(Throughput::Elements(1));
group.bench_function("integer-encoding", |b| {
b.iter_batched(
|| rng.gen::<u8>(),
|num| {
let mut target = [0u8; 16];
u8::encode_var(num, &mut target)
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("rustc", |b| {
b.iter_batched(
|| rng.gen::<u8>(),
|num| {
target.clear();
leb128::write_u16_leb128(&mut target, num as u16);
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("prost-varint", |b| {
b.iter_batched(
|| rng.gen::<u8>(),
|num| {
target.clear();
prost_varint::encode_varint(num as u64, &mut target)
},
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd", |b| {
b.iter_batched(|| rng.gen::<u8>(), encode, BatchSize::SmallInput)
});
group.finish();
let mut group = c.benchmark_group("varint-u16/decode");
group.throughput(Throughput::Elements(SEQUENCE_LEN as u64));
group.bench_function("integer-encoding", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_integer_encoding::<u16, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("rustc", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_rustc_u16::<SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("prost-varint", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_prost::<u16, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_unsafe::<u16, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/safe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_safe::<u16, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/2x/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_2x_unsafe::<u16, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/4x/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u16, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_4x_unsafe::<u16, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.finish();
let mut group = c.benchmark_group("varint-u16/encode");
group.throughput(Throughput::Elements(1));
group.bench_function("integer-encoding", |b| {
b.iter_batched(
|| rng.gen::<u16>(),
|num| {
let mut target = [0u8; 16];
u16::encode_var(num, &mut target)
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("rustc", |b| {
b.iter_batched(
|| rng.gen::<u16>(),
|num| {
target.clear();
leb128::write_u16_leb128(&mut target, num);
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("prost-varint", |b| {
b.iter_batched(
|| rng.gen::<u16>(),
|num| {
target.clear();
prost_varint::encode_varint(num as u64, &mut target)
},
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd", |b| {
b.iter_batched(|| rng.gen::<u16>(), encode, BatchSize::SmallInput)
});
group.finish();
let mut group = c.benchmark_group("varint-u32/decode");
group.throughput(Throughput::Elements(SEQUENCE_LEN as u64));
group.bench_function("integer-encoding", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u32, _, SEQUENCE_LEN>(&mut rng),
decode_batched_integer_encoding::<u32, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("rustc", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u32, _, SEQUENCE_LEN>(&mut rng),
decode_batched_rustc_u32::<SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("prost-varint", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u32, _, SEQUENCE_LEN>(&mut rng),
decode_batched_prost::<u32, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u32, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_unsafe::<u32, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/safe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u32, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_safe::<u32, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/2x/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u32, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_2x_unsafe::<u32, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.finish();
let mut group = c.benchmark_group("varint-u32/encode");
group.throughput(Throughput::Elements(1));
group.bench_function("integer-encoding", |b| {
b.iter_batched(
|| rng.gen::<u32>(),
|num| {
let mut target = [0u8; 16];
u32::encode_var(num, &mut target)
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("rustc", |b| {
b.iter_batched(
|| rng.gen::<u32>(),
|num| {
target.clear();
leb128::write_u32_leb128(&mut target, num);
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("prost-varint", |b| {
b.iter_batched(
|| rng.gen::<u32>(),
|num| {
target.clear();
prost_varint::encode_varint(num as u64, &mut target)
},
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd", |b| {
b.iter_batched(|| rng.gen::<u32>(), encode, BatchSize::SmallInput)
});
group.finish();
let mut group = c.benchmark_group("varint-u64/decode");
group.throughput(Throughput::Elements(SEQUENCE_LEN as u64));
group.bench_function("integer-encoding", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u64, _, SEQUENCE_LEN>(&mut rng),
decode_batched_integer_encoding::<u64, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("rustc", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u64, _, SEQUENCE_LEN>(&mut rng),
decode_batched_rustc_u64::<SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("prost-varint", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u64, _, SEQUENCE_LEN>(&mut rng),
decode_batched_prost::<u64, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/unsafe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u64, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_unsafe::<u64, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd/safe", |b| {
b.iter_batched_ref(
create_batched_encoded_generator::<u64, _, SEQUENCE_LEN>(&mut rng),
decode_batched_varint_simd_safe::<u64, SEQUENCE_LEN>,
BatchSize::SmallInput,
)
});
// group.bench_function("varint-simd/2x_wide/unsafe", |b| {
// b.iter_batched_ref(
// create_batched_encoded_generator::<u64, _, SEQUENCE_LEN>(&mut rng),
// decode_batched_varint_simd_2x_wide_unsafe::<u64, SEQUENCE_LEN>,
// BatchSize::SmallInput,
// )
// });
group.finish();
let mut group = c.benchmark_group("varint-u64/encode");
group.throughput(Throughput::Elements(1));
group.bench_function("integer-encoding", |b| {
b.iter_batched(
|| rng.gen::<u64>(),
|num| {
let mut target = [0u8; 16];
u64::encode_var(num, &mut target)
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("rustc", |b| {
b.iter_batched(
|| rng.gen::<u64>(),
|num| {
target.clear();
leb128::write_u64_leb128(&mut target, num);
},
BatchSize::SmallInput,
)
});
let mut target = Vec::with_capacity(16);
group.bench_function("prost-varint", |b| {
b.iter_batched(
|| rng.gen::<u64>(),
|num| {
target.clear();
prost_varint::encode_varint(num, &mut target)
},
BatchSize::SmallInput,
)
});
group.bench_function("varint-simd", |b| {
b.iter_batched(|| rng.gen::<u64>(), encode, BatchSize::SmallInput)
});
group.finish();
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
as-com/varint-simd | https://github.com/as-com/varint-simd/blob/0952d92bd547da51d189b9b34ee88ff4474db8b6/benches/varint_bench/leb128.rs | benches/varint_bench/leb128.rs | // leb128.rs from rustc
#![allow(dead_code)]
macro_rules! impl_write_unsigned_leb128 {
    ($fn_name:ident, $int_ty:ident) => {
        /// Appends `value` to `out` in unsigned LEB128 format.
        #[inline]
        pub fn $fn_name(out: &mut Vec<u8>, mut value: $int_ty) {
            // Emit 7 bits per byte, LSB first; set the continuation bit
            // (0x80) on every byte except the final one.
            while value >= 0x80 {
                out.push(((value & 0x7f) | 0x80) as u8);
                value >>= 7;
            }
            out.push(value as u8);
        }
    };
}

impl_write_unsigned_leb128!(write_u16_leb128, u16);
impl_write_unsigned_leb128!(write_u32_leb128, u32);
impl_write_unsigned_leb128!(write_u64_leb128, u64);
impl_write_unsigned_leb128!(write_u128_leb128, u128);
impl_write_unsigned_leb128!(write_usize_leb128, usize);
macro_rules! impl_read_unsigned_leb128 {
    ($fn_name:ident, $int_ty:ident) => {
        /// Reads an unsigned LEB128 value from the start of `slice`,
        /// returning the value and the number of bytes consumed.
        /// Panics (index out of bounds) on a truncated encoding.
        #[inline]
        pub fn $fn_name(slice: &[u8]) -> ($int_ty, usize) {
            let mut result: $int_ty = 0;
            let mut shift = 0;
            let mut position = 0;
            loop {
                let byte = slice[position];
                position += 1;
                // Fold the low 7 bits in; the high bit flags continuation.
                result |= ((byte & 0x7F) as $int_ty) << shift;
                if byte & 0x80 == 0 {
                    return (result, position);
                }
                shift += 7;
            }
        }
    };
}

impl_read_unsigned_leb128!(read_u16_leb128, u16);
impl_read_unsigned_leb128!(read_u32_leb128, u32);
impl_read_unsigned_leb128!(read_u64_leb128, u64);
impl_read_unsigned_leb128!(read_u128_leb128, u128);
impl_read_unsigned_leb128!(read_usize_leb128, usize);
/// Encodes an integer using signed LEB128 encoding, emitting each output
/// byte through the `write` callback (called once per byte, in order).
#[inline]
pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W)
where
    W: FnMut(u8),
{
    loop {
        let mut byte = (value as u8) & 0x7f;
        value >>= 7;
        // Encoding is complete once the remaining bits are pure sign
        // extension and this byte's bit 6 already carries the sign.
        let done = (value == 0 && byte & 0x40 == 0) || (value == -1 && byte & 0x40 != 0);
        if !done {
            byte |= 0x80; // Mark this byte to show that more bytes will follow.
        }
        write(byte);
        if done {
            break;
        }
    }
}
/// Encodes `value` as signed LEB128 and appends the bytes to `out`.
#[inline]
pub fn write_signed_leb128(out: &mut Vec<u8>, value: i128) {
    write_signed_leb128_to(value, |v| out.push(v))
}
/// Decodes a signed LEB128 value from `data` starting at `start_position`,
/// returning the value and the number of bytes consumed.
#[inline]
pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i128, usize) {
    let mut result: i128 = 0;
    let mut shift = 0;
    let mut position = start_position;
    // Accumulate 7-bit groups until a byte without the continuation bit.
    let byte = loop {
        let b = data[position];
        position += 1;
        result |= i128::from(b & 0x7F) << shift;
        shift += 7;
        if b & 0x80 == 0 {
            break b;
        }
    };
    if (shift < 64) && ((byte & 0x40) != 0) {
        // sign extend: bit 6 of the final byte is the sign bit.
        result |= -(1 << shift);
    }
    (result, position - start_position)
}
| rust | Apache-2.0 | 0952d92bd547da51d189b9b34ee88ff4474db8b6 | 2026-01-04T20:20:30.197063Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/app.rs | crates/netpurr/src/app.rs | use std::time::{SystemTime, UNIX_EPOCH};
use egui::{Context, Event, Frame, Margin, Visuals};
use log::info;
use poll_promise::Promise;
use netpurr_core::data::workspace_data::WorkspaceData;
use crate::data::config_data::ConfigData;
use crate::operation::operation::Operation;
use crate::panels::bottom_panel::BottomPanel;
use crate::panels::selected_collection_panel::SelectedCollectionPanel;
use crate::panels::selected_workspace_panel::SelectedWorkspacePanel;
use crate::panels::top_panel::TopPanel;
pub struct App {
    // Persisted state of the currently loaded workspace.
    workspace_data: WorkspaceData,
    // Shared dispatcher for UI operations (holds the cookie manager; see `new`).
    operation: Operation,
    // Application configuration, including the selected workspace/collection.
    config_data: ConfigData,
    selected_collection_panel: SelectedCollectionPanel,
    selected_workspace_panel: SelectedWorkspacePanel,
    // True while the quit-confirmation window is displayed.
    show_confirmation_dialog: bool,
    // Set once the user has confirmed quitting; allows the close request through.
    allowed_to_close: bool,
    // NOTE(review): not read in the visible code — presumably set elsewhere.
    current_workspace: String,
    // In-flight git synchronization result, if a sync is running.
    sync_promise: Option<Promise<rustygit::types::Result<()>>>,
    // Unix timestamp (seconds) of the last auto-save; throttles `auto_save`.
    auto_save_time: u64,
    top_panel: TopPanel,
    bottom_panel: BottomPanel,
}
impl App {
    /// Builds the application: installs fonts and spacing on the egui
    /// context, then loads the previously selected workspace, if any.
    pub fn new(cc: &eframe::CreationContext<'_>) -> Self {
        Self::configure_fonts(&cc.egui_ctx);
        // FIX: the original took a reference to this unit expression
        // (`&cc.egui_ctx.style_mut(...)`) and discarded it — a no-op wart.
        cc.egui_ctx.style_mut(|s| {
            s.spacing.item_spacing.x = 7.0;
            s.spacing.item_spacing.y = 7.0;
        });
        let mut workspace_data = WorkspaceData::default();
        let config_data = ConfigData::load();
        // `if let` instead of `Option::map` — this is a side effect, not a mapping.
        if let Some(workspace) = config_data.select_workspace() {
            workspace_data.load_all(workspace);
        }
        App {
            operation: Operation::new(workspace_data.get_cookies_manager()),
            config_data,
            selected_collection_panel: Default::default(),
            selected_workspace_panel: Default::default(),
            show_confirmation_dialog: false,
            allowed_to_close: false,
            current_workspace: "".to_string(),
            sync_promise: None,
            workspace_data,
            auto_save_time: 0,
            top_panel: Default::default(),
            bottom_panel: Default::default(),
        }
    }

    /// Registers the bundled CJK-capable font as the primary proportional
    /// font and a monospace fallback, plus the phosphor icon font.
    /// Returns `None` only if a default font family is missing.
    pub fn configure_fonts(ctx: &Context) -> Option<()> {
        let font_name = "NotoSansSC-Regular".to_string();
        let font_file_bytes = include_bytes!("./../font/NotoSansSC-Regular.ttf").to_vec();
        let font_data = egui::FontData::from_owned(font_file_bytes);
        let mut font_def = eframe::egui::FontDefinitions::default();
        font_def.font_data.insert(font_name.clone(), font_data);
        // First choice for proportional text...
        let font_family = eframe::epaint::FontFamily::Proportional;
        font_def
            .families
            .get_mut(&font_family)?
            .insert(0, font_name.clone());
        // ...and last-resort fallback for monospace.
        let font_family = eframe::epaint::FontFamily::Monospace;
        font_def
            .families
            .get_mut(&font_family)?
            .push(font_name.clone());
        egui_phosphor::add_to_fonts(&mut font_def, egui_phosphor::Variant::Regular);
        ctx.set_fonts(font_def);
        Some(())
    }

    /// Saves workspace data when the user typed something, throttled to at
    /// most once every 5 seconds.
    fn auto_save(&mut self, ctx: &Context) {
        let key_pressed = ctx.input(|i| i.events.iter().any(|e| matches!(e, Event::Key { .. })));
        if key_pressed {
            let now = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .expect("Time went backwards")
                .as_secs();
            if now - self.auto_save_time > 5 {
                self.auto_save_time = now;
                self.workspace_data.auto_save_crd();
                info!("auto save");
            }
        }
    }

    /// Intercepts window-close requests and shows a confirmation dialog;
    /// saves and closes only after the user confirms.
    fn quit_dialog(&mut self, ctx: &Context) {
        if ctx.input(|i| i.viewport().close_requested()) {
            if !self.allowed_to_close {
                // Cancel this close; re-issue it after the user confirms.
                ctx.send_viewport_cmd(egui::ViewportCommand::CancelClose);
                self.show_confirmation_dialog = true;
            }
        }
        if self.show_confirmation_dialog {
            // Block the rest of the UI while the dialog is up.
            self.operation.lock_ui("Quit".to_string(), true);
            egui::Window::new("Do you want to quit?")
                .collapsible(false)
                .resizable(false)
                .show(ctx, |ui| {
                    ui.horizontal(|ui| {
                        if ui.button("No").clicked() {
                            self.show_confirmation_dialog = false;
                            self.allowed_to_close = false;
                            self.operation.lock_ui("Quit".to_string(), false);
                        }
                        if ui.button("Yes").clicked() {
                            self.show_confirmation_dialog = false;
                            self.allowed_to_close = true;
                            // Persist before actually closing the window.
                            self.workspace_data.auto_save_crd();
                            ui.ctx().send_viewport_cmd(egui::ViewportCommand::Close);
                        }
                    });
                });
        }
    }
}
impl eframe::App for App {
/// Called each time the UI needs repainting, which may be many times per second.
fn update(&mut self, ctx: &Context, _frame: &mut eframe::Frame) {
self.operation
.show(ctx, &mut self.config_data, &mut self.workspace_data);
egui::TopBottomPanel::top("top_panel").show(ctx, |ui| {
self.top_panel.render(
ui,
&mut self.workspace_data,
self.operation.clone(),
&mut self.config_data,
)
});
egui::CentralPanel::default()
.frame(Frame {
inner_margin: Margin::ZERO,
outer_margin: Margin::ZERO,
rounding: Default::default(),
shadow: Default::default(),
fill: Default::default(),
stroke: Default::default(),
})
.show(ctx, |ui| {
if self.config_data.select_collection().is_some()
&& self.config_data.select_workspace().is_some()
{
ui.add_enabled_ui(!self.operation.get_ui_lock(), |ui| {
self.selected_collection_panel.set_and_render(
ui,
&self.operation,
&mut self.workspace_data,
&mut self.config_data,
);
});
} else {
self.selected_workspace_panel.set_and_render(
ui,
&self.operation,
&mut self.workspace_data,
&mut self.config_data,
);
}
});
egui::TopBottomPanel::bottom("bottom_panel").show(ctx, |ui| {
self.bottom_panel.render(
ui,
&mut self.workspace_data,
self.operation.clone(),
&mut self.config_data,
)
});
self.auto_save(ctx);
self.quit_dialog(ctx);
}
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/lib.rs | crates/netpurr/src/lib.rs | #![warn(clippy::all, rust_2018_idioms)]
use std::string::ToString;
pub use app::App;
pub const APP_NAME: &str = "Netpurr";
mod app;
mod data;
mod import;
mod operation;
mod panels;
mod utils;
mod widgets;
mod windows;
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/main.rs | crates/netpurr/src/main.rs | #![warn(clippy::all, rust_2018_idioms)]
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release
use std::sync::Arc;
use chrono::{Datelike, Local, Timelike};
use eframe::icon_data::from_png_bytes;
use log4rs::append::console::ConsoleAppender;
use log4rs::append::file::FileAppender;
use log4rs::config::{Appender, Root};
use log4rs::Config;
use log4rs::encode::pattern::PatternEncoder;
use log::LevelFilter;
use netpurr::APP_NAME;
// When compiling natively:
#[cfg(not(target_arch = "wasm32"))]
// Native entry point: configures logging, window icon and sizes, then runs
// the egui event loop with the Netpurr app.
fn main() -> eframe::Result<()> {
    set_log_config();
    let native_options = eframe::NativeOptions {
        viewport: egui::ViewportBuilder::default()
            .with_icon(Arc::new(
                from_png_bytes(&include_bytes!("../icon/icon128.png")[..]).expect("png get error"),
            ))
            .with_inner_size([1500.0, 600.0])
            .with_min_inner_size([800.0, 400.0]),
        ..Default::default()
    };
    eframe::run_native(
        APP_NAME,
        native_options,
        Box::new(|cc| {
            // Enable egui's image loaders (used for icons/previews in panels).
            egui_extras::install_image_loaders(&cc.egui_ctx);
            Box::new(netpurr::App::new(cc))
        }),
    )
}
/// Configures log4rs with two appenders: stdout and a per-launch file under
/// `~/Netpurr/logs/`, named after the local launch timestamp.
fn set_log_config() {
    let local_time = Local::now();
    // Build the timestamped log file name (YYYYMMDD_HHMMSS.log).
    let file_name = format!(
        "{:04}{:02}{:02}_{:02}{:02}{:02}.log",
        local_time.year(),
        local_time.month(),
        local_time.day(),
        local_time.hour(),
        local_time.minute(),
        local_time.second()
    );
    let log_path = dirs::home_dir()
        .expect("find home dir error")
        .join(APP_NAME)
        .join("logs")
        .join(file_name);
    let stdout = ConsoleAppender::builder().build();
    let file = FileAppender::builder()
        .encoder(Box::new(PatternEncoder::new("{d} - {m}{n}")))
        .build(log_path)
        .expect("create log file appender error");
    let config = Config::builder()
        .appender(Appender::builder().build("stdout", Box::new(stdout)))
        .appender(Appender::builder().build("file", Box::new(file)))
        .build(
            Root::builder()
                .appender("stdout")
                .appender("file")
                .build(LevelFilter::Info),
        )
        .expect("create log config error");
    log4rs::init_config(config).expect("init log error");
}
// When compiling to web using trunk:
#[cfg(target_arch = "wasm32")]
// Web entry point (built with trunk).
fn main() {
    // Redirect `log` message to `console.log` and friends:
    eframe::WebLogger::init(log::LevelFilter::Debug).ok();
    let web_options = eframe::WebOptions::default();
    wasm_bindgen_futures::spawn_local(async {
        eframe::WebRunner::new()
            .start(
                "the_canvas_id", // hardcode it
                web_options,
                // NOTE(review): this still constructs `eframe_template::TemplateApp`
                // — a leftover from the eframe template; the native build uses
                // `netpurr::App`. This likely fails to compile for wasm — confirm.
                Box::new(|cc| Box::new(eframe_template::TemplateApp::new(cc))),
            )
            .await
            .expect("failed to start eframe");
    });
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/operation/git.rs | crates/netpurr/src/operation/git.rs | use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use std::str::FromStr;
use futures_util::AsyncWriteExt;
use log::error;
use poll_promise::Promise;
use rustygit::Repository;
use rustygit::types::BranchName;
#[derive(Default, Clone)]
pub struct Git {}
impl Git {
pub fn if_enable_git(&self, repo_path: &PathBuf) -> bool {
match Repository::new(repo_path).cmd(["status"]) {
Ok(_) => true,
Err(_) => false,
}
}
pub fn enable_git(&self, repo_path: &PathBuf) {
let repo = Repository::init(repo_path);
if repo.is_err() {
error!("init git repo failed, path: {:?}", repo_path);
}
let gitignore_content = ".DS_Store\nrequests/";
let gitignore_path = repo_path.join("./.gitignore");
let mut file = File::create(gitignore_path);
if file.is_ok() {
file.unwrap().write_all(gitignore_content.as_bytes());
}
}
pub fn create_branch(
&self,
repo_path: &PathBuf,
branch_name: String,
) -> rustygit::types::Result<()> {
let repo = Repository::new(repo_path);
let branch = BranchName::from_str(branch_name.as_str())?;
repo.create_local_branch(&branch)?;
repo.cmd(["commit", "--allow-empty", "-m", "Init Repo"])
}
pub fn update_remote(
&self,
repo_path: &PathBuf,
remote_url: String,
) -> rustygit::types::Result<()> {
let repo = Repository::new(repo_path);
let remotes = repo.cmd_out(["remote"])?;
let origin = "origin".to_string();
if remotes.contains(&origin) {
repo.cmd(["remote", "set-url", "origin", remote_url.as_str()])
} else {
repo.cmd(["remote", "add", "origin", remote_url.as_str()])
}
}
pub fn git_sync_promise(&self, repo_path: PathBuf) -> Promise<rustygit::types::Result<()>> {
Promise::spawn_thread("git_thread", move || -> rustygit::types::Result<()> {
let repo = Repository::new(repo_path);
if let Ok(head) = repo.cmd_out(["branch", "--show-current"]) {
if let Some(branch_name) = head.get(0) {
repo.cmd([
"branch",
format!("--set-upstream-to=origin/{}", &branch_name).as_str(),
])?;
repo.cmd(["add", "."])?;
repo.cmd(["rm", "-rf", "--ignore-unmatch","--cached", "requests/*"])?;
repo.commit_all("auto commit");
repo.cmd(["pull", "--rebase"])?;
repo.cmd(["push", "--set-upstream", "origin", &branch_name])
} else {
Ok(())
}
} else {
Ok(())
}
})
}
pub fn git_force_pull_promise(
&self,
repo_path: PathBuf,
) -> Promise<rustygit::types::Result<()>> {
Promise::spawn_thread("git_thread", move || -> rustygit::types::Result<()> {
let repo = Repository::new(repo_path);
if let Ok(head) = repo.cmd_out(["branch", "--show-current"]) {
if let Some(branch_name) = head.get(0) {
repo.cmd([
"reset",
"--hard",
format!("origin/{}", &branch_name).as_str(),
])?;
repo.cmd(["fetch", "origin"])?;
repo.cmd(["pull", "origin", branch_name.as_str()])
} else {
Ok(())
}
} else {
Ok(())
}
})
}
pub fn git_force_push_promise(
&self,
repo_path: PathBuf,
) -> Promise<rustygit::types::Result<()>> {
Promise::spawn_thread("git_thread", move || -> rustygit::types::Result<()> {
let repo = Repository::new(repo_path);
if let Ok(head) = repo.cmd_out(["branch", "--show-current"]) {
if let Some(branch_name) = head.get(0) {
repo.cmd(["push", "--force", "origin", branch_name.as_str()])
} else {
Ok(())
}
} else {
Ok(())
}
})
}
pub fn switch_branch(&self, path: &PathBuf, branch_name: String) {
let repo = Repository::new(path);
if let Ok(branch) = BranchName::from_str(branch_name.as_str()) {
repo.switch_branch(&branch);
}
}
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/operation/windows.rs | crates/netpurr/src/operation/windows.rs | use std::cell::RefCell;
use std::rc::Rc;
use egui::{Context, Ui};
use netpurr_core::data::collections::{Collection, CollectionFolder};
use netpurr_core::data::http::HttpRecord;
use netpurr_core::data::workspace_data::WorkspaceData;
use crate::data::config_data::ConfigData;
use crate::operation::operation::Operation;
/// A closable egui window managed by [`Windows`].
pub trait Window {
    /// Static configuration (title, id, sizing, modality) for this window.
    fn window_setting(&self) -> WindowSetting;
    /// Sets whether the window is currently shown.
    fn set_open(&mut self, open: bool);
    /// Returns whether the window is currently shown.
    fn get_open(&self) -> bool;
    /// Draws the window contents for one frame.
    fn render(
        &mut self,
        ui: &mut Ui,
        config_data: &mut ConfigData,
        workspace_data: &mut WorkspaceData,
        operation: Operation,
    );
}
/// Declarative configuration for a managed egui window: title, identity,
/// sizing constraints and modality. Built fluently via the consuming
/// setters below.
#[derive(Default, Clone)]
pub struct WindowSetting {
    name: String,
    id: String,
    collapsible: bool,
    resizable: bool,
    default_width: Option<f32>,
    default_height: Option<f32>,
    max_width: Option<f32>,
    max_height: Option<f32>,
    min_width: Option<f32>,
    min_height: Option<f32>,
    modal: bool,
}
impl WindowSetting {
    /// Creates a setting with the given title and an empty id suffix.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            ..Self::default()
        }
    }
    /// Creates a setting with both a title and an explicit id suffix.
    pub fn new_with_id(name: impl Into<String>, id: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            id: id.into(),
            ..Self::default()
        }
    }
    /// Unique egui window id, combining title and id as `"name-id"`.
    pub fn get_windows_id(&self) -> String {
        [self.name.as_str(), self.id.as_str()].join("-")
    }
    /// Borrows the raw id suffix.
    pub fn id(&self) -> &str {
        self.id.as_str()
    }
    /// Whether the window may be collapsed.
    pub fn collapsible(self, collapsible: bool) -> Self {
        Self { collapsible, ..self }
    }
    /// Whether the window may be resized by the user.
    pub fn resizable(self, resizable: bool) -> Self {
        Self { resizable, ..self }
    }
    /// Whether the window locks the rest of the UI while open.
    pub fn modal(self, modal: bool) -> Self {
        Self { modal, ..self }
    }
    /// Upper bound on the window width, in points.
    pub fn max_width(self, max_width: f32) -> Self {
        Self { max_width: Some(max_width), ..self }
    }
    /// Upper bound on the window height, in points.
    pub fn max_height(self, max_height: f32) -> Self {
        Self { max_height: Some(max_height), ..self }
    }
    /// Lower bound on the window height, in points.
    pub fn min_height(self, min_height: f32) -> Self {
        Self { min_height: Some(min_height), ..self }
    }
    /// Lower bound on the window width, in points.
    pub fn min_width(self, min_width: f32) -> Self {
        Self { min_width: Some(min_width), ..self }
    }
    /// Initial window height, in points.
    pub fn default_height(self, default_height: f32) -> Self {
        Self { default_height: Some(default_height), ..self }
    }
    /// Initial window width, in points.
    pub fn default_width(self, default_width: f32) -> Self {
        Self { default_width: Some(default_width), ..self }
    }
}
/// The set of windows currently on screen. Entries are trait objects behind
/// `Rc<RefCell<..>>` so several panels can share and mutate them during one
/// frame.
#[derive(Default)]
pub struct Windows {
    pub show_windows: Vec<Rc<RefCell<Box<dyn Window>>>>,
}
impl Windows {
    /// Registers `window` for display unless a window with the same id is
    /// already shown; newly added windows start opened.
    pub fn add(&mut self, window: Rc<RefCell<Box<dyn Window>>>) {
        let id = window.borrow().window_setting().get_windows_id();
        // `any` replaces the previous `find(..).is_none()` and short-circuits.
        let already_shown = self
            .show_windows
            .iter()
            .any(|w| w.borrow().window_setting().get_windows_id() == id);
        if !already_shown {
            window.borrow_mut().set_open(true);
            self.show_windows.push(window);
        }
    }
    /// Drops every window whose `open` flag has been cleared.
    pub fn retain(&mut self) {
        self.show_windows.retain(|w| w.borrow().get_open());
    }
    /// Renders every shown window for one frame, applying each window's
    /// sizing/modality settings, and releases the UI lock when a window
    /// closes.
    pub fn show(
        &self,
        ctx: &Context,
        config_data: &mut ConfigData,
        workspace_data: &mut WorkspaceData,
        operation: Operation,
    ) {
        for window in self.show_windows.iter() {
            let mut open = window.borrow().get_open();
            // Modal windows lock the rest of the UI while visible.
            if window.borrow().window_setting().modal {
                operation.lock_ui(window.borrow().window_setting().name.clone(), true);
            }
            let mut w = egui::Window::new(window.borrow().window_setting().name).id(window
                .borrow()
                .window_setting()
                .get_windows_id()
                .into());
            // Apply only the size constraints that were actually configured.
            if let Some(v) = window.borrow().window_setting().max_width {
                w = w.max_width(v)
            }
            if let Some(v) = window.borrow().window_setting().min_width {
                w = w.min_width(v)
            }
            if let Some(v) = window.borrow().window_setting().max_height {
                w = w.max_height(v)
            }
            if let Some(v) = window.borrow().window_setting().min_height {
                w = w.min_height(v)
            }
            if let Some(v) = window.borrow().window_setting().default_width {
                w = w.default_width(v)
            }
            if let Some(v) = window.borrow().window_setting().default_height {
                w = w.default_height(v)
            }
            let collapsible = window.borrow().window_setting().collapsible;
            let resizable = window.borrow().window_setting().resizable;
            w.collapsible(collapsible)
                .resizable(resizable)
                .open(&mut open)
                .show(ctx, |ui| {
                    window
                        .borrow_mut()
                        .render(ui, config_data, workspace_data, operation.clone())
                });
            // The window may close itself during `render`, or via the title-bar
            // close button (which clears `open`).
            open = window.borrow().get_open() && open;
            window.borrow_mut().set_open(open);
            if !open {
                operation.lock_ui(window.borrow().window_setting().name.clone(), false);
            }
        }
    }
}
/// Aggregate of window-open flags plus the payloads those windows edit
/// (the record, collection and folder being acted on).
/// NOTE(review): looks superseded by the `Window`/`Windows` machinery above —
/// confirm before extending.
#[derive(Default, Clone, Debug)]
pub struct OpenWindows {
    pub save_opened: bool,
    pub edit: bool,
    pub collection_opened: bool,
    pub folder_opened: bool,
    pub cookies_opened: bool,
    // Payloads consumed by the windows flagged above.
    pub http_record: HttpRecord,
    pub default_path: Option<String>,
    pub collection: Option<Collection>,
    pub parent_folder: Rc<RefCell<CollectionFolder>>,
    pub folder: Option<Rc<RefCell<CollectionFolder>>>,
    pub crt_id: String,
    pub save_crt_opened: bool,
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/operation/mod.rs | crates/netpurr/src/operation/mod.rs | use std::str::FromStr;
use crate::operation::windows::Window;
mod git;
pub mod operation;
pub mod windows;
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/operation/operation.rs | crates/netpurr/src/operation/operation.rs | use std::cell::RefCell;
use std::collections::{BTreeMap, HashMap};
use std::rc::Rc;
use std::sync::{Arc, RwLock};
use egui::{emath, WidgetText};
use poll_promise::Promise;
use egui_toast::{Toast, ToastKind, ToastOptions, Toasts};
use netpurr_core::data::collections::{CollectionFolder, Testcase};
use netpurr_core::data::cookies_manager::CookiesManager;
use netpurr_core::data::environment::EnvironmentItemValue;
use netpurr_core::data::http::Request;
use netpurr_core::data::record::Record;
use netpurr_core::data::websocket::WebSocketSession;
use netpurr_core::data::workspace_data::WorkspaceData;
use netpurr_core::runner::{
Runner, RunRequestInfo, TestGroupRunResults, TestRunError, TestRunResult,
};
use netpurr_core::script::{Context, ScriptScope, ScriptTree};
use crate::data::config_data::ConfigData;
use crate::operation::git::Git;
use crate::operation::windows::{Window, Windows};
/// Frame-level service hub passed throughout the UI: runs requests/tests,
/// shows toasts, manages windows and modal locks, and exposes git helpers.
/// Cheap to clone — shared state lives behind `Rc`.
#[derive(Clone)]
pub struct Operation {
    // Executes REST/WebSocket requests, scripts and test groups.
    runner: Runner,
    // NOTE(review): initialized in `new` but never read here — `modal_flag`
    // appears to hold the authoritative lock state; confirm before removing.
    lock_ui: HashMap<String, bool>,
    modal_flag: Rc<RefCell<ModalFlag>>,
    toasts: Rc<RefCell<Toasts>>,
    // Windows currently on screen this frame.
    current_windows: Rc<RefCell<Windows>>,
    // Windows queued for display; merged into `current_windows` in `show`.
    add_windows: Rc<RefCell<Windows>>,
    git: Git,
}
/// Tracks per-window UI locks; the UI counts as locked while at least one
/// registered flag is `true`.
#[derive(Default)]
pub struct ModalFlag {
    // Keyed by window name; value is whether that window holds a lock.
    lock_ui: HashMap<String, bool>,
}
impl ModalFlag {
    /// Sets (or clears) the lock owned by `key`.
    pub fn lock_ui(&mut self, key: String, locked: bool) {
        self.lock_ui.insert(key, locked);
    }
    /// Returns `true` when any registered flag is set.
    pub fn get_ui_lock(&self) -> bool {
        // Short-circuiting `any` replaces the previous manual OR-fold.
        self.lock_ui.values().any(|locked| *locked)
    }
}
impl Operation {
    /// Builds the service hub, wiring the request runner to the shared cookie
    /// store and anchoring toasts to the bottom-right corner.
    pub fn new(cookies_manager: CookiesManager) -> Self {
        Operation {
            lock_ui: Default::default(),
            runner: Runner::new(cookies_manager.cookie_store.clone()),
            modal_flag: Rc::new(RefCell::new(ModalFlag::default())),
            toasts: Rc::new(RefCell::new(
                Toasts::default()
                    .anchor(emath::Align2::RIGHT_BOTTOM, (-10.0, -10.0))
                    .direction(egui::Direction::BottomUp),
            )),
            current_windows: Rc::new(RefCell::new(Windows::default())),
            add_windows: Rc::new(RefCell::new(Default::default())),
            git: Default::default(),
        }
    }
    /// Sends one REST request (with pre/test scripts) on a background promise.
    pub fn send_rest_with_script_promise(
        &self,
        run_request_info: RunRequestInfo,
    ) -> Promise<Result<TestRunResult, TestRunError>> {
        self.runner.send_rest_with_script_promise(run_request_info)
    }
    /// Runs every testcase under `folder` as a group; results accumulate in
    /// `test_group_run_result`. `fast` selects the runner's fast mode.
    pub fn run_test_group_promise(
        &self,
        fast: bool,
        envs: BTreeMap<String, EnvironmentItemValue>,
        script_tree: ScriptTree,
        test_group_run_result: Arc<RwLock<TestGroupRunResults>>,
        collection_path: String,
        parent_testcase: Option<Testcase>,
        folder: Rc<RefCell<CollectionFolder>>,
    ) -> Promise<()> {
        self.runner.run_test_group_promise(
            fast,
            envs,
            script_tree,
            test_group_run_result,
            collection_path,
            parent_testcase,
            folder,
        )
    }
    /// Runs a single record's tests; results accumulate in
    /// `test_group_run_result`.
    pub fn run_test_record_promise(
        &self,
        envs: BTreeMap<String, EnvironmentItemValue>,
        script_tree: ScriptTree,
        test_group_run_result: Arc<RwLock<TestGroupRunResults>>,
        collection_path: String,
        parent_testcase: Option<Testcase>,
        record: Record,
    ) -> Promise<()> {
        self.runner.run_test_record_promise(
            envs,
            script_tree,
            test_group_run_result,
            collection_path,
            parent_testcase,
            record,
        )
    }
    /// Opens a WebSocket session for `http_request`, running the given
    /// pre-request and test scripts around the connection.
    pub fn connect_websocket_with_script(
        &self,
        http_request: Request,
        envs: BTreeMap<String, EnvironmentItemValue>,
        pre_request_scripts: Vec<ScriptScope>,
        test_scripts: Vec<ScriptScope>,
    ) -> WebSocketSession {
        self.runner.connect_websocket_with_script(RunRequestInfo {
            shared_map: Default::default(),
            collection_path: None,
            request_name: "".to_string(),
            request: http_request,
            envs,
            pre_request_scripts,
            test_scripts,
            testcase: Default::default(),
        })
    }
    /// Evaluates `scripts` against `context` on a background promise.
    pub fn run_script(
        &self,
        scripts: Vec<ScriptScope>,
        context: Context,
    ) -> Promise<anyhow::Result<Context>> {
        self.runner.run_script(scripts, context)
    }
    /// Sets (or clears) the UI lock owned by `key` (typically a window name).
    pub fn lock_ui(&self, key: String, locked: bool) {
        self.modal_flag.borrow_mut().lock_ui(key, locked);
    }
    /// Whether any window currently holds the UI lock.
    pub fn get_ui_lock(&self) -> bool {
        // Read-only query: an immutable borrow is sufficient.
        self.modal_flag.borrow().get_ui_lock()
    }
    /// Queues a toast for display on the next `show`.
    pub fn add_toast(&self, toast: Toast) {
        self.toasts.borrow_mut().add(toast);
    }
    /// Shows a short-lived success toast (2 s, with progress bar).
    pub fn add_success_toast(&self, text: impl Into<WidgetText>) {
        self.add_toast(Toast {
            text: text.into(),
            kind: ToastKind::Success,
            options: ToastOptions::default()
                .show_icon(true)
                .duration_in_seconds(2.0)
                .show_progress(true),
        });
    }
    /// Shows an error toast (5 s, with progress bar).
    pub fn add_error_toast(&self, text: impl Into<WidgetText>) {
        self.add_toast(Toast {
            text: text.into(),
            kind: ToastKind::Error,
            options: ToastOptions::default()
                .show_icon(true)
                .duration_in_seconds(5.0)
                .show_progress(true),
        });
    }
    /// Queues `window` for display; it is merged into the visible set during
    /// the next `show`.
    pub fn add_window(&self, window: Box<dyn Window>) {
        self.add_windows
            .borrow_mut()
            .add(Rc::new(RefCell::new(window)));
    }
    /// Per-frame driver: paints toasts, merges newly queued windows into the
    /// visible set, renders every window, then drops the ones that closed.
    pub fn show(
        &self,
        ctx: &egui::Context,
        config_data: &mut ConfigData,
        workspace_data: &mut WorkspaceData,
    ) {
        self.toasts.borrow_mut().show(ctx);
        for w in &self.add_windows.borrow().show_windows {
            self.current_windows.borrow_mut().add(w.clone())
        }
        self.add_windows.borrow_mut().show_windows.clear();
        self.current_windows
            .borrow()
            .show(ctx, config_data, workspace_data, self.clone());
        self.current_windows.borrow_mut().retain()
    }
    /// Access to the git helper.
    pub fn git(&self) -> &Git {
        &self.git
    }
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/utils/openapi_help.rs | crates/netpurr/src/utils/openapi_help.rs | use openapiv3::{
Header, OpenAPI, Operation, Parameter, ReferenceOr, RequestBody, Response, Schema, SchemaKind,
Type,
};
use serde_json::{json, Value};
pub struct OpenApiHelp {
pub openapi: OpenAPI,
}
/// Resolves a (possibly `$ref`erenced) OpenAPI object into a concrete value,
/// looking references up in the document's `components` section.
pub trait GetItem<R> {
    /// Returns the resolved item, or `None` when the reference cannot be found.
    fn get_item(&self, openapi: &OpenAPI) -> Option<R>;
}
// Inline items are cloned out directly; references are resolved through the
// `OpenApiHelp::get_*_with_ref` lookups in `components`.
impl GetItem<Schema> for ReferenceOr<Schema> {
    fn get_item(&self, openapi: &OpenAPI) -> Option<Schema> {
        return match self {
            ReferenceOr::Reference { reference } => {
                OpenApiHelp::get_schema_with_ref(openapi, reference.clone())
            }
            ReferenceOr::Item(s) => Some(s.clone()),
        };
    }
}
// Boxed variant: deref-clone the box to return the owned schema.
impl GetItem<Schema> for ReferenceOr<Box<Schema>> {
    fn get_item(&self, openapi: &OpenAPI) -> Option<Schema> {
        return match self {
            ReferenceOr::Reference { reference } => {
                OpenApiHelp::get_schema_with_ref(openapi, reference.clone())
            }
            ReferenceOr::Item(s) => Some(*s.clone()),
        };
    }
}
impl GetItem<Parameter> for ReferenceOr<Parameter> {
    fn get_item(&self, openapi: &OpenAPI) -> Option<Parameter> {
        return match self {
            ReferenceOr::Reference { reference } => {
                OpenApiHelp::get_parameter_with_ref(openapi, reference.clone())
            }
            ReferenceOr::Item(s) => Some(s.clone()),
        };
    }
}
impl GetItem<Parameter> for ReferenceOr<Box<Parameter>> {
    fn get_item(&self, openapi: &OpenAPI) -> Option<Parameter> {
        return match self {
            ReferenceOr::Reference { reference } => {
                OpenApiHelp::get_parameter_with_ref(openapi, reference.clone())
            }
            ReferenceOr::Item(s) => Some(*s.clone()),
        };
    }
}
impl GetItem<RequestBody> for ReferenceOr<RequestBody> {
    fn get_item(&self, openapi: &OpenAPI) -> Option<RequestBody> {
        return match self {
            ReferenceOr::Reference { reference } => {
                OpenApiHelp::get_request_body_with_ref(openapi, reference.clone())
            }
            ReferenceOr::Item(s) => Some(s.clone()),
        };
    }
}
impl GetItem<RequestBody> for ReferenceOr<Box<RequestBody>> {
    fn get_item(&self, openapi: &OpenAPI) -> Option<RequestBody> {
        return match self {
            ReferenceOr::Reference { reference } => {
                OpenApiHelp::get_request_body_with_ref(openapi, reference.clone())
            }
            ReferenceOr::Item(s) => Some(*s.clone()),
        };
    }
}
// Reference resolvers: each strips the canonical `#/components/<kind>/`
// prefix and looks the remaining name up in the document's components.
// Returns `None` when the document has no components section, the name is
// missing, or the component entry is itself a reference.
impl OpenApiHelp {
    /// Resolves a `#/components/schemas/{name}` reference.
    fn get_schema_with_ref(openapi: &OpenAPI, ref_name: String) -> Option<Schema> {
        let name = ref_name.trim_start_matches("#/components/schemas/");
        openapi.components.as_ref()?.schemas.get(name)?.clone().into_item()
    }
    /// Resolves a `#/components/responses/{name}` reference.
    fn get_response_with_ref(openapi: &OpenAPI, ref_name: String) -> Option<Response> {
        let name = ref_name.trim_start_matches("#/components/responses/");
        openapi.components.as_ref()?.responses.get(name)?.clone().into_item()
    }
    /// Resolves a `#/components/parameters/{name}` reference.
    ///
    /// Fix: the OpenAPI spec key is lowercase `parameters`; the previous
    /// `#/components/Parameters/` prefix never matched real documents, so
    /// parameter refs always failed to resolve.
    fn get_parameter_with_ref(openapi: &OpenAPI, ref_name: String) -> Option<Parameter> {
        let name = ref_name.trim_start_matches("#/components/parameters/");
        openapi.components.as_ref()?.parameters.get(name)?.clone().into_item()
    }
    /// Resolves a `#/components/requestBodies/{name}` reference.
    ///
    /// Fix: the OpenAPI spec key is camelCase `requestBodies`, not the
    /// previous `request_bodies` (that is only the Rust field name).
    fn get_request_body_with_ref(openapi: &OpenAPI, ref_name: String) -> Option<RequestBody> {
        let name = ref_name.trim_start_matches("#/components/requestBodies/");
        openapi.components.as_ref()?.request_bodies.get(name)?.clone().into_item()
    }
    /// Resolves a `#/components/headers/{name}` reference.
    fn get_header_with_ref(openapi: &OpenAPI, ref_name: String) -> Option<Header> {
        let name = ref_name.trim_start_matches("#/components/headers/");
        openapi.components.as_ref()?.headers.get(name)?.clone().into_item()
    }
}
impl OpenApiHelp {
    /// Scans every path item for an operation (any HTTP method) whose
    /// `operationId` equals `operation_id`.
    pub fn get_operation(&self, operation_id: String) -> Option<Operation> {
        for (_, path_item) in self.openapi.paths.iter() {
            if let Some(item) = path_item.as_item() {
                // Collect all possible method slots; absent methods are `None`.
                let mut ops: Vec<Option<Operation>> = vec![];
                ops.push(item.options.clone());
                ops.push(item.get.clone());
                ops.push(item.post.clone());
                ops.push(item.put.clone());
                ops.push(item.delete.clone());
                ops.push(item.patch.clone());
                ops.push(item.head.clone());
                ops.push(item.trace.clone());
                for op in ops.iter() {
                    if let Some(options) = op {
                        if let Some(op_id) = options.operation_id.clone() {
                            if op_id == operation_id {
                                return Some(options.clone());
                            }
                        }
                    }
                }
            }
        }
        return None;
    }
    /// Builds an example JSON value for the `application/json` request body of
    /// the operation identified by `operation_id`, or `None` when the
    /// operation, body, media type or schema is missing.
    pub fn gen_openapi_schema(&self, operation_id: String) -> Option<Value> {
        if let Some(options) = self.get_operation(operation_id.clone()) {
            if let Some(op_id) = options.operation_id.clone() {
                if op_id == operation_id {
                    if let Some(request_body) = options.request_body.clone() {
                        if let Some(rb) = request_body.get_item(&self.openapi) {
                            if let Some(mt) = rb.content.get("application/json") {
                                if let Some(rs) = &mt.schema {
                                    if let Some(s) = rs.get_item(&self.openapi) {
                                        return self.gen_schema(s);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return None;
    }
    /// Recursively renders `s` as a placeholder JSON value: objects/arrays are
    /// walked, scalars become fixed examples ("string", 10000, 10, true).
    /// Non-`Type` schema kinds (oneOf/anyOf/...) yield `None`.
    pub fn gen_schema(&self, s: Schema) -> Option<Value> {
        return match s.schema_kind.clone() {
            SchemaKind::Type(t) => match t {
                Type::Object(ot) => {
                    let mut json_tree = json!({});
                    for (name, rs) in ot.properties.iter() {
                        if let Some(s) = rs.get_item(&self.openapi) {
                            let json_child = self.gen_schema(s);
                            match json_child {
                                None => {}
                                Some(child) => {
                                    json_tree
                                        .as_object_mut()
                                        .unwrap()
                                        .insert(name.clone(), child);
                                }
                            }
                        }
                    }
                    Some(json_tree)
                }
                Type::Array(at) => {
                    // Arrays are rendered with a single example element.
                    let mut json_tree = json!([]);
                    match at.items {
                        None => {}
                        Some(rs) => {
                            if let Some(s) = rs.get_item(&self.openapi) {
                                let json_child = self.gen_schema(s);
                                match json_child {
                                    None => {}
                                    Some(child) => json_tree.as_array_mut().unwrap().push(child),
                                }
                            }
                        }
                    }
                    Some(json_tree)
                }
                Type::String(_) => Some(json!("string")),
                Type::Number(_) => Some(json!(10000)),
                Type::Integer(_) => Some(json!(10)),
                Type::Boolean(_) => Some(json!(true)),
            },
            _ => None,
        };
    }
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/utils/mod.rs | crates/netpurr/src/utils/mod.rs | use std::cmp::min;
use std::collections::HashSet;
use chrono::format;
use eframe::emath::{Align, Pos2};
use eframe::epaint::text::LayoutJob;
use egui::{
Area, CollapsingHeader, CollapsingResponse, Color32, FontSelection, Frame, Id, InnerResponse,
Key, Layout, Order, Response, RichText, Style, TextBuffer, Ui, WidgetText,
};
use egui::text::TextWrapping;
use netpurr_core::data::record::Record;
use crate::panels::HORIZONTAL_GAP;
pub mod openapi_help;
/// Builds the single-row header label for a request record: a file icon,
/// the HTTP method (highlighted), and the record's name (falling back to its
/// base URL), optionally truncated to `max_char` characters with `"..."`.
pub fn build_rest_ui_header(record: Record, max_char: Option<usize>, ui: &Ui) -> LayoutJob {
    let mut lb = LayoutJob {
        text: Default::default(),
        sections: Default::default(),
        wrap: TextWrapping {
            max_width: 50.0,
            max_rows: 1,
            break_anywhere: true,
            overflow_character: Some('…'),
        },
        first_row_min_height: 0.0,
        break_on_newline: false,
        halign: Align::LEFT,
        justify: false,
    };
    let style = Style::default();
    RichText::new(format!("{} ", egui_phosphor::regular::FILE_TEXT)).append_to(&mut lb, &style, FontSelection::Default, Align::Center);
    if record.base_url() != "" {
        RichText::new(record.method() + " ")
            .color(ui.visuals().warn_fg_color)
            .strong()
            .append_to(&mut lb, &style, FontSelection::Default, Align::Center);
        // Prefer the explicit name; fall back to the URL.
        let mut new_name = if record.name() != "" {
            record.name()
        } else {
            record.base_url()
        };
        if let Some(size) = max_char {
            // Fix: measure and truncate in characters, not bytes, so
            // multi-byte names are neither over- nor under-truncated.
            if new_name.chars().count() > size {
                new_name = new_name.chars().take(size).collect::<String>() + "...";
            }
        }
        RichText::new(new_name)
            .color(ui.visuals().text_color())
            .append_to(&mut lb, &style, FontSelection::Default, Align::Center);
    } else {
        RichText::new("Untitled Request")
            .strong()
            .color(ui.visuals().text_color())
            .append_to(&mut lb, &style, FontSelection::Default, Align::Center);
    }
    lb
}
/// What to render after a section header: nothing, a green dot, a green
/// count, or an arbitrary string in a caller-chosen color.
pub enum HighlightValue {
    None,
    Has,
    Usize(usize),
    String(String, Color32),
}
/// Builds a header label of the form `"name (count)"` / `"name ●"`, where the
/// highlight suffix is rendered per [`HighlightValue`].
pub fn build_with_count_ui_header(
    name: String,
    highlight_value: HighlightValue,
    ui: &Ui,
) -> LayoutJob {
    let mut lb = LayoutJob::default();
    // Fixed highlight color for the `Has`/`Usize` variants (`String` carries
    // its own color). `Color32` is `Copy`, so no clones are needed.
    let color = Color32::GREEN;
    let style = Style::default();
    RichText::new(name + " ")
        .color(ui.visuals().text_color())
        .strong()
        .append_to(&mut lb, &style, FontSelection::Default, Align::Center);
    match highlight_value {
        HighlightValue::Has => {
            RichText::new("●").color(color).strong().append_to(
                &mut lb,
                &style,
                FontSelection::Default,
                Align::Center,
            );
        }
        HighlightValue::Usize(value) => {
            RichText::new(format!("({})", value))
                .color(color)
                .strong()
                .append_to(&mut lb, &style, FontSelection::Default, Align::Center);
        }
        HighlightValue::String(value, self_color) => {
            RichText::new(format!("({})", value))
                .color(self_color)
                .strong()
                .append_to(&mut lb, &style, FontSelection::Default, Align::Center);
        }
        HighlightValue::None => {}
    }
    lb
}
/// Lays out two resizable side-by-side panels inside `ui`, identified by
/// `id` (suffixed `_left`/`_right` for egui state persistence).
/// NOTE(review): the right panel is added first — presumably so the left
/// panel's `min_width` can be computed from the remaining width; confirm.
pub fn left_right_panel(
    ui: &mut Ui,
    id: String,
    left: impl FnOnce(&mut Ui),
    right: impl FnOnce(&mut Ui),
) -> InnerResponse<()> {
    let left_id = id.clone() + "_left";
    let right_id = id.clone() + "_right";
    ui.horizontal(|ui| {
        egui::SidePanel::right(right_id)
            .resizable(true)
            .show_separator_line(false)
            .show_inside(ui, |ui| {
                right(ui);
            });
        egui::SidePanel::left(left_id)
            .resizable(true)
            .min_width(ui.available_width() - HORIZONTAL_GAP * 2.0)
            .show_inside(ui, |ui| {
                left(ui);
            });
    })
}
/// Renders a popup at a fixed position when `popup_id` is open in egui memory.
/// Closes on Escape or on a click outside `widget_response`. Returns the
/// closure's result while the popup is open, `None` otherwise.
pub fn popup_widget<R>(
    ui: &Ui,
    popup_id: Id,
    widget_response: &Response,
    suggested_position: Pos2,
    add_contents: impl FnOnce(&mut Ui) -> R,
) -> Option<R> {
    if ui.memory(|mem| mem.is_popup_open(popup_id)) {
        // Foreground area so the popup draws above the rest of the UI.
        let inner = Area::new(popup_id)
            .order(Order::Foreground)
            .constrain(true)
            .fixed_pos(suggested_position)
            .show(ui.ctx(), |ui| {
                let frame = Frame::popup(ui.style());
                frame
                    .show(ui, |ui| {
                        ui.with_layout(Layout::left_to_right(Align::LEFT), |ui| add_contents(ui))
                            .inner
                    })
                    .inner
            })
            .inner;
        // Dismiss on Escape or an outside click.
        if ui.input(|i| i.key_pressed(Key::Escape)) || widget_response.clicked_elsewhere() {
            ui.memory_mut(|mem| mem.close_popup());
        }
        Some(inner)
    } else {
        None
    }
}
/// Full-width (cross-justified) selectable label, never shown as selected.
pub fn select_label(ui: &mut Ui, text: impl Into<WidgetText>) -> Response {
    ui.with_layout(
        Layout::top_down(Align::LEFT).with_cross_justify(true),
        |ui| ui.selectable_label(false, text),
    )
    .inner
}
/// Full-width variant of `Ui::selectable_value`: clicking assigns
/// `selected_value` to `current_value`.
pub fn select_value<Value: PartialEq>(
    ui: &mut Ui,
    current_value: &mut Value,
    selected_value: Value,
    text: impl Into<WidgetText>,
) -> Response {
    ui.with_layout(
        Layout::top_down(Align::LEFT).with_cross_justify(true),
        |ui| ui.selectable_value(current_value, selected_value, text),
    )
    .inner
}
/// Full-width single-line text editor.
pub fn text_edit_singleline_justify<S: TextBuffer>(ui: &mut Ui, text: &mut S) -> Response {
    ui.with_layout(
        Layout::top_down(Align::LEFT).with_cross_justify(true),
        |ui| ui.text_edit_singleline(text),
    )
    .inner
}
/// Sanitizes `text` in place so it stays identifier-like: `/` and spaces are
/// mapped to `_`, and any character that is not alphanumeric or `_` is
/// dropped. Shared by the two filtered editors below (previously duplicated).
fn sanitize_name_text<S: TextBuffer>(text: &mut S) {
    text.replace_with(
        text.as_str()
            .replace("/", "_")
            .as_str()
            .replace(" ", "_")
            .as_str(),
    );
    let filtered_string: String = text
        .as_str()
        .chars()
        // `is_alphabetic` already implies `is_ascii_alphabetic`.
        .filter(|&c| c.is_alphabetic() || c.is_numeric() || c == '_')
        .collect();
    text.replace_with(filtered_string.as_str());
}
/// Full-width single-line editor restricted to identifier-like input;
/// see [`sanitize_name_text`].
pub fn text_edit_singleline_filter_justify<S: TextBuffer>(ui: &mut Ui, text: &mut S) -> Response {
    sanitize_name_text(text);
    ui.with_layout(
        Layout::top_down(Align::LEFT).with_cross_justify(true),
        |ui| ui.text_edit_singleline(text),
    )
    .inner
}
/// Single-line editor restricted to identifier-like input;
/// see [`sanitize_name_text`].
pub fn text_edit_singleline_filter<S: TextBuffer>(ui: &mut Ui, text: &mut S) -> Response {
    sanitize_name_text(text);
    ui.text_edit_singleline(text)
}
/// Full-width (cross-justified) multi-line text editor.
pub fn text_edit_multiline_justify<S: TextBuffer>(ui: &mut Ui, text: &mut S) -> Response {
    ui.with_layout(
        Layout::top_down(Align::LEFT).with_cross_justify(true),
        |ui| ui.text_edit_multiline(text),
    )
    .inner
}
/// Produces a name not present in `names`: strips any existing `"Copy …"`
/// suffix from `name` to recover the base, then — only if the base itself is
/// taken — appends `" Copy {n}"` with `n` counting up from 2.
pub fn build_copy_name(mut name: String, names: HashSet<String>) -> String {
    // Everything before the first "Copy" marker, trimmed, is the base name.
    name = name
        .splitn(2, "Copy")
        .next()
        .unwrap_or_default()
        .trim()
        .to_string();
    let mut index = 2;
    let mut new_name = name.clone();
    while names.contains(new_name.as_str()) {
        new_name = format!("{} Copy {}", name, index);
        index += 1;
    }
    new_name
}
/// Radio-button-like checkbox: shows checked when `current_value` equals
/// `selected_value`, and clicking assigns `selected_value` (marking the
/// response changed). The checkbox itself toggles a throwaway temporary —
/// the real state lives in `current_value`.
pub fn selectable_check<Value: PartialEq>(
    ui: &mut Ui,
    current_value: &mut Value,
    selected_value: Value,
    text: impl Into<WidgetText>,
) -> Response {
    let mut response = ui.checkbox(&mut (*current_value == selected_value), text);
    if response.clicked() && *current_value != selected_value {
        *current_value = selected_value;
        response.mark_changed();
    }
    response
}
/// Pads the current row so subsequent widgets sit `space` points from the
/// right edge (no-op when there is not enough room).
pub fn add_right_space(ui: &mut Ui, space: f32) {
    let remaining = ui.available_width() - space;
    if remaining > 0.0 {
        ui.add_space(remaining)
    }
}
/// Adds a fixed left padding of `space` points (no-op for non-positive
/// values).
pub fn add_left_space(ui: &mut Ui, space: f32) {
    if space > 0.0 {
        ui.add_space(space)
    }
}
/// Collapsing header that starts expanded on first display.
pub fn open_collapsing<R>(
    ui: &mut Ui,
    heading: impl Into<WidgetText>,
    add_contents: impl FnOnce(&mut Ui) -> R,
) -> CollapsingResponse<R> {
    CollapsingHeader::new(heading)
        .default_open(true)
        .show(ui, add_contents)
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
tmtbe/netpurr | https://github.com/tmtbe/netpurr/blob/922e0e29e2a4685249fafe7eba87bb4fffe7d72e/crates/netpurr/src/widgets/matrix_label.rs | crates/netpurr/src/widgets/matrix_label.rs | use std::fs::File;
use std::io::Write;
use egui::{Direction, Label, Layout, RichText, Sense, Ui, Vec2, Widget};
use netpurr_core::data::workspace_data::WorkspaceData;
use crate::data::config_data::ConfigData;
use crate::data::export::{Export, ExportType};
use crate::operation::operation::Operation;
use crate::panels::{HORIZONTAL_GAP, VERTICAL_GAP};
use crate::utils;
use crate::widgets::empty_container::EmptyContainer;
use crate::windows::new_collection_windows::NewCollectionWindows;
/// A single tile in the collection-overview grid.
pub struct MatrixLabel {
    matrix_label_type: MatrixLabelType,
}
/// Tile kind: an existing collection (by name) or the trailing "+" tile
/// that creates a new collection.
pub enum MatrixLabelType {
    Collection(String),
    Add,
}
impl MatrixLabel {
    /// Creates a tile of the given kind.
    pub fn new(matrix_label_type: MatrixLabelType) -> Self {
        MatrixLabel { matrix_label_type }
    }
    /// Draws this 150x150 tile. Collection tiles show a header with a "..."
    /// context menu and select the collection on click; the Add tile shows a
    /// large "+" and opens the new-collection window on click.
    pub fn render(
        self,
        ui: &mut Ui,
        workspace_data: &mut WorkspaceData,
        config_data: &mut ConfigData,
        operation: &Operation,
    ) {
        let size = Vec2 { x: 150.0, y: 150.0 };
        EmptyContainer::default()
            .with_stroke(true)
            .default_size(size)
            .show(ui, |ui| {
                // Claim the full tile rect for click handling, then paint the
                // content into a child UI over the same rect.
                let max = ui.max_rect();
                let response = ui.allocate_rect(max, Sense::click());
                let mut content_ui = ui.child_ui(max, *ui.layout());
                match &self.matrix_label_type {
                    MatrixLabelType::Collection(collection_name) => {
                        content_ui.vertical(|ui| {
                            ui.add_space(VERTICAL_GAP);
                            ui.horizontal(|ui| {
                                ui.add_space(HORIZONTAL_GAP);
                                Label::new(RichText::from("Collection").strong())
                                    .selectable(false)
                                    .ui(ui);
                                utils::add_right_space(ui, 25.0);
                                // Context menu with edit/duplicate/remove/export.
                                ui.menu_button("...", |ui| {
                                    self.more_button(
                                        workspace_data,
                                        operation,
                                        collection_name.clone(),
                                        ui,
                                    );
                                });
                            });
                            ui.horizontal(|ui| {
                                ui.add_space(HORIZONTAL_GAP);
                                Label::new(RichText::from(collection_name.clone()).heading())
                                    .selectable(false)
                                    .ui(ui);
                            });
                        });
                        if response.clicked() {
                            // Selecting a collection clears any test selection.
                            workspace_data.selected_test_item=None;
                            config_data.set_select_collection(Some(collection_name.clone()));
                        }
                    }
                    MatrixLabelType::Add => {
                        content_ui.with_layout(
                            Layout::centered_and_justified(Direction::TopDown),
                            |ui| {
                                Label::new(RichText::from("+").heading())
                                    .selectable(false)
                                    .ui(ui);
                            },
                        );
                        if response.clicked() {
                            operation.add_window(Box::new(
                                NewCollectionWindows::default().with_open_collection(None),
                            ));
                        }
                    }
                }
            })
    }
    /// Body of the "..." context menu: Edit / Duplicate / Remove / Export for
    /// the named collection. Export serializes the collection to JSON and
    /// writes it to a user-chosen path, reporting success/failure via toasts.
    fn more_button(
        &self,
        workspace_data: &mut WorkspaceData,
        operation: &Operation,
        collection_name: String,
        ui: &mut Ui,
    ) {
        workspace_data
            .get_collection_by_name(collection_name.clone())
            .map(|collection| {
                if utils::select_label(ui, "Edit").clicked() {
                    operation.add_window(Box::new(
                        NewCollectionWindows::default()
                            .with_open_collection(Some(collection.clone())),
                    ));
                    ui.close_menu();
                }
                if utils::select_label(ui, "Duplicate").clicked() {
                    // Pick a fresh "<name> Copy n" not already in use.
                    let new_name = utils::build_copy_name(
                        collection_name.clone(),
                        workspace_data.get_collection_names(),
                    );
                    let new_collections = collection.duplicate(new_name);
                    workspace_data.add_collection(new_collections);
                    ui.close_menu();
                }
                if utils::select_label(ui, "Remove").clicked() {
                    workspace_data.remove_collection(collection.folder.borrow().name.clone());
                    ui.close_menu();
                }
                ui.separator();
                if utils::select_label(ui, "Export").clicked() {
                    ui.close_menu();
                    let export = Export {
                        openapi: None,
                        info: None,
                        export_type: ExportType::Collection,
                        collection: Some(collection.clone()),
                    };
                    if let Ok(json) = serde_json::to_string(&export) {
                        let file_name =
                            format!("collection-{}.json", collection.folder.borrow().name);
                        if let Some(path) =
                            rfd::FileDialog::new().set_file_name(file_name).save_file()
                        {
                            match File::create(path) {
                                Ok(mut file) => match file.write_all(json.as_bytes()) {
                                    Ok(_) => {
                                        operation.add_success_toast("Export collection success.");
                                    }
                                    Err(e) => {
                                        operation.add_error_toast(format!(
                                            "Export collection file failed: {}",
                                            e.to_string()
                                        ));
                                    }
                                },
                                Err(e) => {
                                    operation.add_error_toast(format!(
                                        "Export collection file failed: {}",
                                        e.to_string()
                                    ));
                                }
                            }
                        }
                    }
                }
            });
    }
}
| rust | Apache-2.0 | 922e0e29e2a4685249fafe7eba87bb4fffe7d72e | 2026-01-04T20:20:05.778379Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.