repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/main.rs | src/main.rs | use std::env;
use pakhi::start_pakhi;
use pakhi::common::io::{RealIO, IO};
fn main() {
let main_module_path = get_main_module_path();
match main_module_path {
Ok(path) => {
let mut io = RealIO::new();
if let Err(err) = start_pakhi(path, &mut io) {
io.panic(err);
}
},
Err(e) => eprintln!("Err: {}", e),
}
}
fn get_main_module_path() -> Result<String, &'static str> {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
Err("Needs src filename.")
} else if args.len() > 2 {
Err("Only one filename required.")
} else {
if args[1].ends_with(".pakhi") {
Ok(args[1].clone())
} else {
Err("Source file must have .pakhi extension.")
}
}
} | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/backend/built_ins.rs | src/backend/built_ins.rs | use std::collections::HashMap;
use std::path::Path;
use crate::backend::interpreter::DataType;
// Contains all built-in function and constant names
pub struct BuiltInFunctionList {
built_in_functions: HashMap<Vec<char>, String>,
}
impl BuiltInFunctionList {
pub(crate) fn new() -> Self {
let mut functions_map: HashMap<Vec<char>, String> = HashMap::new();
// this functions are built-in
let function_list = vec!["_স্ট্রিং", "_সংখ্যা", "_লিস্ট-পুশ", "_লিস্ট-পপ", "_লিস্ট-লেন", "_রিড-লাইন", "_এরর",
"_স্ট্রিং-স্প্লিট", "_স্ট্রিং-জয়েন", "_টাইপ", "_রিড-ফাইল", "_রাইট-ফাইল", "_ডিলিট-ফাইল",
"_নতুন-ডাইরেক্টরি", "_রিড-ডাইরেক্টরি", "_ডিলিট-ডাইরেক্টরি", "_ফাইল-নাকি-ডাইরেক্টরি"];
for f_name in function_list {
functions_map.insert(f_name.chars().collect(), f_name.to_string());
}
BuiltInFunctionList {
built_in_functions: functions_map,
}
}
pub(crate) fn is_built_in(&self, function_name: &Vec<char>) -> bool {
if self.built_in_functions.contains_key(function_name) {
true
} else { false }
}
pub(crate) fn get_name(&self, function_name: &Vec<char>) -> String {
self.built_in_functions.get(function_name).unwrap().clone()
}
// Converts DataType::Num to DataType::String
pub(crate) fn _to_string(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let number = arguments[0].clone();
if let DataType::Num(n) = number {
let bn_num_string = BuiltInFunctionList::replace_en_with_bn_digit(n.to_string());
return Ok(DataType::String(bn_num_string));
} else {
return Err("Datatype must be Number for converting to string".to_string());
}
} else { return Err("Function requires one arguments".to_string()); }
}
// Converts DataType::String to DataType::Num
pub(crate) fn _to_num(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let string = arguments[0].clone();
if let DataType::String(bangla_num_string) = string {
let eng_num_string = BuiltInFunctionList::replace_bn_with_en_digit(bangla_num_string);
let convert_result = eng_num_string.parse::<f64>();
match convert_result {
Ok(n) => return Ok(DataType::Num(n)),
Err(e) => return Err(format!("{:?}", e)),
}
} else {
return Err(format!("Datatype must be Number for converting to string"));
}
} else { return Err(format!("Function requires one arguments")); }
}
fn replace_bn_with_en_digit(bn_num_string: String) -> String {
let mut num_chars: Vec<char> = bn_num_string.chars().collect();
for (i, c) in num_chars.clone().iter().enumerate() {
num_chars[i] = BuiltInFunctionList::bn_digit_to_en_digit(c);
}
let num_string: String = num_chars.iter().collect();
num_string
}
fn replace_en_with_bn_digit(en_num_string: String) -> String {
let mut num_chars: Vec<char> = en_num_string.chars().collect();
for (i, c) in num_chars.clone().iter().enumerate() {
num_chars[i] = BuiltInFunctionList::en_digit_to_bn_digit(c);
}
let num_string: String = num_chars.iter().collect();
num_string
}
fn bn_digit_to_en_digit(digit: &char) -> char {
match digit {
'০' => '0',
'১' => '1',
'২' => '2',
'৩' => '3',
'৪' => '4',
'৫' => '5',
'৬' => '6',
'৭' => '7',
'৮' => '8',
'৯' => '9',
_ => digit.clone(),
}
}
fn en_digit_to_bn_digit(digit: &char) -> char {
match digit {
'0' => '০',
'1' => '১',
'2' => '২',
'3' => '৩',
'4' => '৪',
'5' => '৫',
'6' => '৬',
'7' => '৭',
'8' => '৮',
'9' => '৯',
_ => digit.clone(),
}
}
pub(crate) fn _list_push(arguments: Vec<DataType>, lists: &mut Vec<Vec<DataType>>) -> Result<DataType, String> {
if arguments.len() == 2 {
let list = arguments[0].clone();
if let DataType::List(index) = list {
let push_value = arguments[1].clone();
let actual_list = lists.get_mut(index).unwrap();
actual_list.push(push_value);
} else {
return Err(format!("Datatype must be array to push value"));
}
} else if arguments.len() == 3 {
let list = arguments[0].clone();
if let DataType::List(index) = list {
let push_at = arguments[1].clone();
let push_value = arguments[2].clone();
let actual_list = lists.get_mut(index).unwrap();
if let DataType::Num(push_at_i_f) = push_at {
let push_at_u = push_at_i_f as usize;
actual_list.insert(push_at_u, push_value);
} else { return Err(format!("Index must evaluate to number type")); }
} else { return Err(format!("Datatype must be array to push value")); }
} else { return Err(format!("Function requires two arguments")); }
return Ok(DataType::Nil);
}
pub(crate) fn _list_pop(arguments: Vec<DataType>, lists: &mut Vec<Vec<DataType>>) -> Result<DataType, String> {
if arguments.len() == 1 {
let list = arguments[0].clone();
if let DataType::List(index) = list {
let actual_list = lists.get_mut(index).unwrap();
actual_list.pop();
} else { return Err(format!("Datatype must be array to push value")); }
} else if arguments.len() == 2 {
let list = arguments[0].clone();
if let DataType::List(index) = list {
let pop_at = arguments[1].clone();
let actual_list = lists.get_mut(index).unwrap();
if let DataType::Num(pop_at_i_f) = pop_at {
let pop_at_i = pop_at_i_f as usize;
actual_list.remove(pop_at_i);
}
} else { return Err(format!("Datatype must be array to push value")); }
} else { return Err(format!("Function requires one argument")); }
return Ok(DataType::Nil);
}
pub(crate) fn _list_len(arguments: Vec<DataType>, lists: &mut Vec<Vec<DataType>>) -> Result<DataType, String> {
if arguments.len() == 1 {
let list = arguments[0].clone();
if let DataType::List(index) = list {
let actual_list = lists.get_mut(index).unwrap();
let length = actual_list.len();
return Ok(DataType::Num(length as f64));
} else { return Err(format!("Datatype must be list to get length")); }
} else { return Err(format!("Function requires one argument")); }
}
pub(crate) fn _read_line(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 0 {
let mut input = String::new();
match std::io::stdin().read_line(&mut input) {
Ok(_) => return Ok(DataType::String(input.trim_end().into())),
Err(e) => return Err(format!("{}", e)),
}
} else { return Err(format!("Function requires zero argument")); }
}
pub(crate) fn _error(arguments: Vec<DataType>) -> Result<String, String> {
if arguments.len() == 1 {
let error = arguments[0].clone();
match error {
DataType::String(err_message) => return Ok(err_message),
_ => return Err(format!("_এরর() functions arguments must be string")),
}
} else {
return Err(format!("_এরর() function expects one argument"));
}
}
pub(crate) fn _string_split(arguments: Vec<DataType>, lists: &mut Vec<Vec<DataType>>) -> Result<DataType, String> {
if arguments.len() == 2 {
let string = arguments[0].clone();
let split_by = arguments[1].clone();
match (string, split_by) {
(DataType::String(string), DataType::String(split_by)) => {
let mut splitted_string: Vec<&str> = string.split(&split_by).collect();
// For some reason split with "" causes splits to have "" at benginning and end
// Thats why removes character at start finish
if splitted_string[0] == "" && splitted_string[splitted_string.len() - 1] == "" {
splitted_string.remove(0);
splitted_string.remove(splitted_string.len() - 1);
}
let splitted_string: Vec<DataType> = splitted_string.iter()
.map(|s| DataType::String(String::from(s.clone()))).collect();
lists.push(splitted_string);
return Ok(DataType::List(lists.len() - 1));
},
_ => return Err(format!("_স্ট্রিং-স্প্লিট()); functions arguments must be string")),
}
} else {
return Err(format!("_স্ট্রিং-স্প্লিট()); function expects two argument"));
}
}
pub(crate) fn _string_join(arguments: Vec<DataType>, lists: &mut Vec<Vec<DataType>>) -> Result<DataType, String> {
if arguments.len() == 2 {
let list_of_strings = arguments[0].clone();
let join_by = arguments[1].clone();
match (list_of_strings, join_by) {
(DataType::List(list_index), DataType::String(join_by)) => {
let string_list = lists.get(list_index).unwrap();
let mut strings: Vec<String> = Vec::new();
for string in string_list {
if let DataType::String(string) = string.clone() {
strings.push(string);
} else { return Err(format!("_স্ট্রিং-জয়েন()); functions only accepts list of strings")); }
}
let joined_string = strings.join(&join_by);
return Ok(DataType::String(joined_string));
},
_ => return Err(format!("_স্ট্রিং-জয়েন()); functions arguments must be list and string")),
}
} else {
return Err(format!("_স্ট্রিং-জয়েন()); function expects two argument"));
}
}
pub(crate) fn _type(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let data = arguments[0].clone();
let d = match data {
DataType::Num(_) => DataType::String(String::from("_সংখ্যা")),
DataType::Bool(_) => DataType::String(String::from("_বুলিয়ান")),
DataType::String(_) => DataType::String(String::from("_স্ট্রিং")),
DataType::List(_) => DataType::String(String::from("_লিস্ট")),
DataType::NamelessRecord(_) => DataType::String(String::from("_রেকর্ড")),
DataType::Function(_) => DataType::String(String::from("_ফাং")),
DataType::Nil => DataType::String(String::from("_শূন্য")),
};
return Ok(d);
} else {
return Err(format!("_টাইপ() function expects one argument"));
}
}
pub(crate) fn _read_file(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let path_data = arguments[0].clone();
match path_data {
DataType::String(p) => {
let path = Path::new(&p);
let read_result = std::fs::read_to_string(path);
match read_result {
Ok(content) => Ok(DataType::String(content)),
Err(e) => return Err(format!("_রিড-ফাইল());: {}", e.to_string())),
}
},
_ => return Err(format!("_রিড-ফাইল());function's path argument must be of type string")),
}
} else {
return Err(format!("_রিড-ফাইল() function expects one argument"));
}
}
pub(crate) fn _write_file(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 2 {
let path_data = arguments[0].clone();
let content_data = arguments[1].clone();
match (path_data, content_data) {
(DataType::String(p), DataType::String(content)) => {
let path = Path::new(&p);
let write_result = std::fs::write(path, content);
match write_result {
Ok(_) => return Ok(DataType::Bool(true)),
Err(e) => return Err(format!("_রাইট-ফাইল(): {}", e.to_string())),
}
},
_ => return Err(format!("_রাইট-ফাইল() function's both argument must be of type string")),
}
} else {
return Err(format!("_রাইট-ফাইল() function expects two argument"));
}
}
pub(crate) fn _delete_file(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let path_data = arguments[0].clone();
match path_data {
DataType::String(p) => {
let path = Path::new(&p);
let delete_result = std::fs::remove_file(path);
match delete_result {
Ok(_) => Ok(DataType::Bool(true)),
Err(e) => return Err(format!("_ডিলিট-ফাইল(): {}", e.to_string())),
}
},
_ => return Err(format!("_ডিলিট-ফাইল() function's argument must be of type string")),
}
} else {
return Err(format!("_ডিলিট-ফাইল() function expects one argument"));
}
}
pub(crate) fn _create_dir(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let path_data = arguments[0].clone();
match path_data {
DataType::String(p) => {
let path = Path::new(&p);
let create_dir_result = std::fs::create_dir_all(path);
match create_dir_result {
Ok(_) => return Ok(DataType::Bool(true)),
Err(e) => return Err(format!("_ক্রিয়েট-ডাইরেক্টরি(): {}", e.to_string())),
}
},
_ => return Err(format!("_ক্রিয়েট-ডাইরেক্টরি() function's argument must be of type string")),
}
} else {
return Err(format!("_ক্রিয়েট-ডাইরেক্টরি() function expects one argument"));
}
}
pub(crate) fn _read_dir(arguments: Vec<DataType>) -> Result<Vec<String>, String> {
if arguments.len() == 1 {
let path_data = arguments[0].clone();
match path_data {
DataType::String(p) => {
let path = Path::new(&p);
let read_dir_result = std::fs::read_dir(path);
match read_dir_result {
Ok(paths) => {
let mut all_files_dirs: Vec<String> = Vec::new();
for path in paths {
let file_dir_name = path.unwrap().file_name().to_str().unwrap().to_string();
all_files_dirs.push(file_dir_name);
}
return Ok(all_files_dirs);
},
Err(e) => return Err(format!("_রিড-ডাইরেক্টরি(): {}, path: {}", e.to_string(), path.display())),
}
},
_ => return Err(format!("_রিড-ডাইরেক্টরি() function's argument must be of type string")),
}
} else {
return Err(format!("_রিড-ডাইরেক্টরি() function expects one argument"));
}
}
pub(crate) fn _delete_dir(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let path_data = arguments[0].clone();
match path_data {
DataType::String(p) => {
let path = Path::new(&p);
let delete_result = std::fs::remove_dir_all(path);
match delete_result {
Ok(_) => return Ok(DataType::Bool(true)),
Err(e) => return Err(format!("_ডিলিট-ডাইরেক্টরি(): {}", e.to_string()))
}
},
_ => return Err(format!("_ডিলিট-ডাইরেক্টরি() function's argument must be of type string")),
}
} else {
return Err(format!("_ডিলিট-ডাইরেক্টরি() function expects one argument"));
}
}
pub(crate) fn _file_or_dir(arguments: Vec<DataType>) -> Result<DataType, String> {
if arguments.len() == 1 {
let path_data = arguments[0].clone();
match path_data {
DataType::String(p) => {
let path = Path::new(&p);
let result = std::fs::metadata(path);
match result {
Ok(m) => {
match m.is_file() {
true => return Ok(DataType::String("ফাইল".to_string())),
false => return Ok(DataType::String("ডাইরেক্টরি".to_string())),
}
},
Err(e) => return Err(format!("_ফাইল-নাকি-ডাইরেক্টরি(): {}, path: {}", e.to_string(), path.display())),
}
},
_ => return Err(format!("_ফাইল-নাকি-ডাইরেক্টরি() function's argument must be of type string")),
}
} else {
return Err(format!("_ফাইল-নাকি-ডাইরেক্টরি() function expects one argument"));
}
}
}
| rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/backend/mark_sweep.rs | src/backend/mark_sweep.rs | use std::collections::HashMap;
use crate::backend::interpreter::DataType;
// Implementation of a mark-sweep garbage collector
pub(crate) struct GC<'a> {
envs: &'a mut Vec<HashMap<String, Option<DataType>>>,
lists: &'a mut Vec<Vec<DataType>>,
free_lists: &'a mut Vec<usize>,
nameless_records: &'a mut Vec<HashMap<String, DataType>>,
free_nameless_records: &'a mut Vec<usize>,
}
impl<'a> GC<'a> {
pub(crate) fn new(envs: &'a mut Vec<HashMap<String, Option<DataType>>>,
lists: &'a mut Vec<Vec<DataType>>,
free_lists: &'a mut Vec<usize>,
nameless_records: &'a mut Vec<HashMap<String, DataType>>,
free_nameless_records: &'a mut Vec<usize>,) -> Self
{
GC {
envs,
lists,
free_lists,
nameless_records,
free_nameless_records,
}
}
pub(crate) fn collect_garbage(&mut self) {
let (marked_lists, marked_nameless_records) = self.gc_mark();
self.gc_sweep(marked_lists, marked_nameless_records);
}
fn gc_sweep(&mut self, marked_lists: Vec<bool>, marked_record: Vec<bool>) {
for (index, alive) in marked_lists.iter().enumerate() {
if !alive {
// replacing list with empty list, which will be re_used later
self.lists[index] = Vec::new();
if !self.free_lists.contains(&index) {
self.free_lists.push(index);
}
}
}
for (index, alive) in marked_record.iter().enumerate() {
if !alive {
// replacing record with empty record, which will be re_used later
self.nameless_records[index] = HashMap::new();
if !self.free_nameless_records.contains(&index) {
self.free_nameless_records.push(index);
}
}
}
}
fn gc_mark(&mut self) -> (Vec<bool>, Vec<bool>) {
let mut marked_lists: Vec<bool> = vec![false; self.lists.len()];
let mut marked_records: Vec<bool> = vec![false; self.nameless_records.len()];
let (root_lists, root_records) = self.find_root_objects();
for root_list_index in root_lists {
marked_lists[root_list_index] = true;
let list = self.lists.get(root_list_index).unwrap();
self.mark_all_reachable_from_list(list, &mut marked_lists, &mut marked_records);
}
for root_record_index in root_records {
marked_records[root_record_index] = true;
let record = self.nameless_records.get(root_record_index).unwrap();
self.mark_all_reachable_from_record(record, &mut marked_lists, &mut marked_records);
}
(marked_lists, marked_records)
}
fn mark_all_reachable_from_list(&self, list: &Vec<DataType>, marked_lists: &mut Vec<bool>, marked_records: &mut Vec<bool>) {
for elem in list {
match elem {
DataType::List(index) => {
// If already marked true don't need to revisit
if !marked_lists[index.clone()] {
marked_lists[index.clone()] = true;
let list = self.lists.get(index.clone()).unwrap();
self.mark_all_reachable_from_list(list, marked_lists, marked_records);
}
},
DataType::NamelessRecord(index) => {
// If already marked true don't need to revisit
if !marked_records[index.clone()] {
marked_records[index.clone()] = true;
let record = self.nameless_records.get(index.clone()).unwrap();
self.mark_all_reachable_from_record(record, marked_lists, marked_records);
}
},
_ => {}
}
}
}
fn mark_all_reachable_from_record(&self, record: &HashMap<String, DataType>, marked_lists: &mut Vec<bool>, marked_records: &mut Vec<bool>) {
for (_, elem) in record.into_iter() {
match elem {
DataType::List(index) => {
// If already marked true don't need to revisit
if !marked_lists[index.clone()] {
marked_lists[index.clone()] = true;
let list = self.lists.get(index.clone()).unwrap();
self.mark_all_reachable_from_list(list, marked_lists, marked_records);
}
},
DataType::NamelessRecord(index) => {
// If already marked true don't need to revisit
if !marked_records[index.clone()] {
marked_records[index.clone()] = true;
let record = self.nameless_records.get(index.clone()).unwrap();
self.mark_all_reachable_from_record(record, marked_lists, marked_records);
}
},
_ => {}
}
}
}
fn find_root_objects(&self) -> (Vec<usize>, Vec<usize>) {
let mut root_lists: Vec<usize> = Vec::new();
let mut root_records: Vec<usize> = Vec::new();
for env in self.envs.iter() {
for (_, val) in env.into_iter() {
if let Some(data_type) = val {
match data_type {
DataType::List(index) => root_lists.push(index.clone()),
DataType::NamelessRecord(index) => root_records.push(index.clone()),
_ => {}
}
}
}
}
(root_lists, root_records)
}
} | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/backend/mod.rs | src/backend/mod.rs | pub mod interpreter;
pub mod built_ins;
mod mark_sweep; | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/backend/interpreter.rs | src/backend/interpreter.rs | use std::collections::HashMap;
use crate::common::io::{IO, RealIO};
use crate::frontend::parser;
use crate::frontend::lexer::{TokenKind, Token};
use crate::backend::built_ins::BuiltInFunctionList;
use crate::backend::mark_sweep;
use crate::common::pakhi_error::PakhiErr;
use std::iter::FromIterator;
use crate::common::pakhi_error::PakhiErr::{RuntimeError, TypeError};
enum Index {
List(usize),
NamelessRecord(String),
}
#[derive(Debug, Clone, PartialEq, PartialOrd)]
pub enum DataType {
Num(f64),
Bool(bool),
String(String),
// Array variant of DataType enum only stores the index of the actual array from arrays
// field in Interpreter, so multiple array reference implementation is easy.
List(usize),
NamelessRecord(usize),
Function(Func),
Nil,
}
#[derive(Debug, Clone, PartialEq, PartialOrd)]
pub struct Func {
starting_statement: usize,
args: Vec<String>,
}
#[derive(Debug)]
struct LoopEnv {
start: usize,
// this is needed to destroy envs created inside loop when using continue or break
total_envs_at_loop_creation: usize,
}
pub struct Interpreter<'a, T: IO> {
current: usize,
statements: Vec<parser::Stmt>,
loops: Vec<LoopEnv>,
return_addrs: Vec<usize>,
scopes: Vec<HashMap<String, Option<DataType>>>,
previous_if_was_executed: Vec<bool>,
lists: Vec<Vec<DataType>>,
// free list tracks which list indexes are free to be re-used for allocating as list datatype
free_lists: Vec<usize>,
nameless_records: Vec<HashMap<String, DataType>>,
// free records tracks which record indexes are free to be re-used for allocating as record datatype
free_nameless_records: Vec<usize>,
// This is used as parameter of gc to decide if it's time to collect garbage
total_allocated_object_count: usize,
io: &'a mut T,
// Storing all built-in function names because when modules identifiers are renamed
// we don't want to rename built-in functions
built_in_functions: BuiltInFunctionList,
}
impl<'a, T: 'a + IO> Interpreter<'a, T> {
pub fn new(statements: Vec<parser::Stmt>, io: &mut T) -> Interpreter<T> {
let mut root_scope : HashMap<String, Option<DataType>>= HashMap::new();
// Possible os value
// linux
// macos
// ios
// freebsd
// dragonfly
// netbsd
// openbsd
// solaris
// android
// windows
let os = std::env::consts::OS.to_string();
root_scope.insert("_প্ল্যাটফর্ম".to_string(), Some(DataType::String(os)));
Interpreter {
current: 0,
statements,
loops: Vec::new(),
return_addrs: Vec::new(),
scopes: vec![root_scope],
previous_if_was_executed: Vec::new(),
lists: Vec::new(),
free_lists: Vec::new(),
nameless_records: Vec::new(),
free_nameless_records: Vec::new(),
total_allocated_object_count: 0,
io,
built_in_functions: BuiltInFunctionList::new(),
}
}
pub fn run(&mut self) -> Result<(), PakhiErr> {
loop {
if let parser::Stmt::EOS(_, _) = self.statements[self.current] {
break;
}
self.interpret()?;
if self.total_allocated_object_count >= 1000 {
let mut gc = mark_sweep::GC::new(&mut self.scopes, &mut self.lists,
&mut self.free_lists,
&mut self.nameless_records,
&mut self.free_nameless_records);
gc.collect_garbage();
self.total_allocated_object_count = 0;
}
}
Ok(())
}
fn interpret(&mut self) -> Result<(), PakhiErr> {
match self.statements[self.current].clone() {
parser::Stmt::Print(expr, _, _) => self.interpret_print_stmt(expr)?,
parser::Stmt::PrintNoEOL(expr, _, _) => self.interpret_print_no_eol(expr)?,
parser::Stmt::Assignment(assign_stmt, _, _) => self.interpret_assign_stmt(assign_stmt)?,
parser::Stmt::If(cond_expr, _, _) => self.interpret_if_stmt(cond_expr)?,
parser::Stmt::Else(_, _) => self.interpret_else_stmt()?,
parser::Stmt::FuncDef(_, _) => self.interpret_funcdef()?,
parser::Stmt::Expression(expr, _, _) => {
self.interpret_expr(expr)?;
self.current += 1;
},
parser::Stmt::Loop(_, _) => {
// consuming loop
self.current += 1;
// saving loop start to reuse in continue statement
self.loops.push(LoopEnv { start: self.current, total_envs_at_loop_creation: self.scopes.len()});
},
parser::Stmt::Continue(_, _) => {
// destroying envs that was created inside loop
let last_loop_env_index = self.loops.len() - 1;
let total_envs_created_inside_loop = self.scopes.len() - self.loops[last_loop_env_index].total_envs_at_loop_creation;
for _ in 0..total_envs_created_inside_loop {
self.scopes.pop();
}
let loop_start = self.loops[last_loop_env_index].start;
self.current = loop_start;
},
parser::Stmt::Break(_, _) => {
self.current += 1;
// len <= 0 means no new environment was made inside loop
if self.loops.len() > 0 {
// destroying all envs that was created inside loop
let last_loop_env_index = self.loops.len() - 1;
let total_envs_created_inside_loop = self.scopes.len() - self.loops[last_loop_env_index].total_envs_at_loop_creation;
for _ in 0..total_envs_created_inside_loop {
self.scopes.pop();
}
}
// destroying loop env
self.loops.pop();
let mut stack: Vec<char> = Vec::new();
loop {
if let parser::Stmt::Loop(_, _) = self.statements[self.current] {
stack.push('{');
}
if let parser::Stmt::Continue(_, _) = self.statements[self.current] {
stack.pop();
if stack.is_empty() {
// consuming Stmt::Continue
self.current += 1;
break;
}
}
// skipping statements in block of loop
self.current += 1;
}
},
parser::Stmt::BlockStart(_, _) => {
self.current += 1;
// creating new scope
self.scopes.push(HashMap::new());
},
parser::Stmt::BlockEnd(_, _) => {
self.current += 1;
// BlockEnd means all statements in this blocks scope were interpreted
// so destroying scope created by Stmt::BlockStart
self.scopes.pop();
}
_ => {
let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
return Err(PakhiErr::RuntimeError(line, file_name,
format!("Debug Statement {:#?}", self.statements[self.current])));
},
}
Ok(())
}
fn interpret_print_no_eol(&mut self, expr: parser::Expr) -> Result<(), PakhiErr> {
match self.interpret_expr(expr)? {
DataType::Num(n) => {
let num = self.to_bn_num(n)?;
self.io.print( num.as_str())
},
DataType::Bool(b) => self.io.print( self.to_bn_bool(b).as_str()),
DataType::String(s) => self.io.print(s.as_str()),
DataType::List(arr_i) => {
let mut elems: Vec<(usize, DataType)> = Vec::new();
for (i, elem) in self.lists[arr_i].iter().enumerate() {
elems.push((i, elem.clone()));
}
self.io.print("[");
for (i, elem) in elems {
self.print_datatype(elem.clone())?;
if (i+1) < self.lists[arr_i].len() {
self.io.print(", ")
}
}
self.io.print("]");
},
DataType::NamelessRecord(record_i) => {
let nameless_record = self.nameless_records.get(record_i).unwrap().clone();
self.io.print("@{");
for (k, v) in nameless_record {
self.io.print(&*format!("\"{}\":", k));
self.print_datatype(v.clone())?;
self.io.print(",")
}
self.io.print("}");
},
_ => {
let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
return Err(PakhiErr::TypeError(line, file_name,
"_দেখাও statement doesn't support this datattype".to_string()));
},
}
self.current += 1;
Ok(())
}
fn print_datatype(&mut self, data: DataType) -> Result<(), PakhiErr> {
match data {
DataType::Num(n) => {
let num = self.to_bn_num(n)?;
self.io.print( num.as_str());
},
DataType::Bool(b) => self.io.print( self.to_bn_bool(b).as_str()),
DataType::String(s) => self.io.print(s.as_str()),
DataType::List(arr_i) => {
let mut elems: Vec<(usize, DataType)> = Vec::new();
for (i, elem) in self.lists[arr_i].iter().enumerate() {
elems.push((i.clone(), elem.clone()));
}
self.io.print("[");
for (i, elem) in elems.clone() {
self.print_datatype(elem.clone())?;
if (i+1) < self.lists[arr_i].len() {
self.io.print(", ")
}
}
self.io.print("]");
},
DataType::NamelessRecord(record_i) => {
let nameless_record = self.nameless_records.get(record_i).unwrap().clone();
self.io.print("@{");
for (k, v) in nameless_record {
self.io.print(&*format!("\"{}\":", k));
self.print_datatype(v.clone())?;
self.io.print(",")
}
self.io.print("}");
},
_ => {
let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
return Err(RuntimeError(line, file_name, "দেখাও doesn't support this datatype".to_string()));
},
}
Ok(())
}
fn interpret_print_stmt(&mut self, expr: parser::Expr) -> Result<(), PakhiErr> {
match self.interpret_expr(expr)? {
DataType::Num(n) => {
let num = self.to_bn_num(n)?;
self.io.println(num.as_str())
},
DataType::Bool(b) => self.io.println(self.to_bn_bool(b).as_str()),
DataType::String(s) => self.io.println( s.as_str()),
DataType::List(arr_i) => {
let mut elems: Vec<(usize, DataType)> = Vec::new();
for (i, elem) in self.lists[arr_i].iter().enumerate() {
elems.push((i, elem.clone()));
}
self.io.print("[");
for (i, elem) in elems.clone() {
self.print_datatype(elem.clone())?;
if (i+1) < self.lists[arr_i].len() {
self.io.print(", ")
}
}
self.io.println("]");
},
DataType::NamelessRecord(record_i) => {
let nameless_record = self.nameless_records.get(record_i).unwrap().clone();
self.io.print("@{");
for (k, v) in nameless_record {
self.io.print(&*format!("\"{}\":", k));
self.print_datatype(v.clone())?;
self.io.print(",")
}
self.io.println("}");
},
_ => {
let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
return Err(PakhiErr::TypeError(line, file_name,
"দেখাও statement doesn't support this datattype".to_string()));
},
}
self.current += 1;
Ok(())
}
fn interpret_assign_stmt(&mut self, assign_stmt: parser::Assignment) -> Result<(), PakhiErr> {
let var_key: String = assign_stmt.var_name.lexeme.clone().into_iter().collect();
match assign_stmt.kind {
parser::AssignmentKind::FirstAssignment => self.create_new_var(var_key, assign_stmt)?,
parser::AssignmentKind::Reassignment => self.reassign_to_old_var(var_key, assign_stmt)?,
}
self.current += 1;
Ok(())
}
fn create_new_var(&mut self, var_key: String, assign_stmt: parser::Assignment) -> Result<(), PakhiErr>
{
match assign_stmt.init_value {
Some(expr) => {
let init_value = self.interpret_expr(expr)?;
let env_i = self.scopes.len() - 1;
let current_env = &mut self.scopes[env_i];
current_env.insert(var_key, Some(init_value));
},
_ => {
let env_i = self.scopes.len() - 1;
let current_env = &mut self.scopes[env_i];
current_env.insert(var_key, Some(DataType::Nil));
},
}
Ok(())
}
fn reassign_to_old_var(&mut self, var_key: String,
assign_stmt: parser::Assignment) -> Result<(), PakhiErr>
{
let init_expr = assign_stmt.init_value.clone().unwrap();
let init_value = self.interpret_expr(init_expr)?;
// if variable wasn't found it evaluates to any negative number
let var_found_at_env_index: i32 = self.find_var_env_index(var_key.clone(), assign_stmt.init_value.clone());
if var_found_at_env_index >= 0 {
if assign_stmt.indexes.is_empty() {
// only simple variable assignment
self.scopes[var_found_at_env_index as usize].insert(var_key, Some(init_value));
} else {
// assignment to element in a list or record
self.reassign_to_list_or_record(assign_stmt, var_key, var_found_at_env_index, init_value)?;
}
} else {
let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
return Err(RuntimeError(line, file_name, format!("Variable wasn't declared {:#}", var_key)));
}
Ok(())
}
// Reassign a value to a list or record at provided index.
// Dispatches on the variable's runtime type: DataType::List takes numeric
// (positional) indexes, DataType::NamelessRecord takes string keys; any
// other type is a TypeError.
fn reassign_to_list_or_record(&mut self,
                              assign_stmt: parser::Assignment,
                              var_key: String,
                              var_found_at_env_index: i32,
                              init_value: DataType) -> Result<(), PakhiErr>
{
    // effective_index is index of deepest nested array, to which init_val will be assigned
    // NOTE(review): the last index expression is evaluated here AND again inside
    // evaluate_all_indexes below, so an index expression with side effects
    // (e.g. a function call) runs twice — confirm this is acceptable.
    let effective_index = self.interpret_expr(assign_stmt.indexes.last().unwrap().clone())?;
    let evaluated_indexes: Vec<Index> = self.evaluate_all_indexes(assign_stmt.indexes.clone())?;
    let var = self.get_var_from_env(var_key.as_str(), var_found_at_env_index as usize);
    match var {
        Some(DataType::List(i)) => {
            if assign_stmt.indexes.len() == 1 {
                // single dimensional list:
                // changing list element at only one level deep
                self.list_single_dim_assign(i, effective_index, init_value)?;
            } else {
                // multidimensional, so need to traverse nested lists/records
                self.list_multi_dim_assign(i, evaluated_indexes, init_value.clone())?;
            }
        },
        Some(DataType::NamelessRecord(record_ref)) => {
            if assign_stmt.indexes.len() == 1 {
                // single key deep: assign directly on this record
                self.record_single_dim_assign(record_ref, effective_index, init_value)?;
            } else {
                // nested: walk through inner records to reach the target
                self.record_multi_dim_assign(record_ref, evaluated_indexes, init_value.clone())?;
            }
        },
        _ => {
            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
            return Err(TypeError(line, file_name, "Datatype doesn't support index assignment".to_string()));
        },
    }
    Ok(())
}
// Assigns `init_value` at `index` of the list stored at self.lists[list_ref].
// `index` arrives wrapped as a single-element DataType::List whose element 0
// holds the numeric position — presumably how the expression evaluator
// represents bracketed index values (TODO confirm against interpret_expr).
// Fix: previously cloned the ENTIRE index list (`self.lists[j].clone()`) just
// to read element 0; now only that element is cloned.
// NOTE(review): `n as usize` on a negative or out-of-range number panics on
// the subsequent list indexing rather than producing a RuntimeError.
fn list_single_dim_assign(&mut self,
                          list_ref: usize,
                          index: DataType,
                          init_value: DataType) -> Result<(), PakhiErr>
{
    match index {
        DataType::List(j) => {
            match self.lists[j][0].clone() {
                DataType::Num(n) => {
                    let list = self.lists.get_mut(list_ref).unwrap();
                    list[n as usize] = init_value
                },
                _ => {
                    let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                    return Err(RuntimeError(line, file_name,
                            "List must be indexed with number type".to_string()));
                },
            }
        },
        _ => {
            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
            return Err(RuntimeError(line, file_name,
                    "Unexpected error while assigning to a index".to_string()));
        },
    }
    Ok(())
}
// Assigns `init_value` under key `index` of the record stored at
// self.nameless_records[record_ref]. Like the list variant, `index` arrives
// wrapped as a single-element DataType::List whose element 0 holds the key
// string — TODO confirm against interpret_expr.
// Fix: previously cloned the ENTIRE index list just to read element 0;
// now only that element is cloned.
fn record_single_dim_assign(&mut self,
                            record_ref: usize,
                            index: DataType,
                            init_value: DataType) -> Result<(), PakhiErr>
{
    match index {
        DataType::List(j) => {
            match self.lists[j][0].clone() {
                DataType::String(key) => {
                    let record = self.nameless_records
                        .get_mut(record_ref).unwrap();
                    record.insert(key, init_value);
                },
                _ => {
                    let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                    return Err(RuntimeError(line, file_name,
                            "Records must be indexed by a string type".to_string()));
                },
            }
        },
        _ => {
            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
            return Err(RuntimeError(line, file_name,
                    "Unexpected error while assigning to a index".to_string()));
        },
    }
    Ok(())
}
// Assigns `init_value` into a nested list, following `evaluated_indexes`
// from the outermost list reference down to the innermost element.
// `list_reference` indexes into self.lists (the interpreter's list storage);
// nested lists are stored by reference (DataType::List holds an index into
// self.lists), which is why each hop dereferences through self.lists.
fn list_multi_dim_assign(&mut self,
                         list_reference: usize,
                         evaluated_indexes: Vec<Index>,
                         init_value: DataType) -> Result<(), PakhiErr>
{
    let list = self.lists.get_mut(list_reference).unwrap();
    match evaluated_indexes.get(0).unwrap() {
        Index::List(list_ref) => {
            // current node while walking the index chain; starts one level in
            let mut assignee: DataType = list.get(list_ref.clone()).unwrap().clone();
            for i in 1..evaluated_indexes.len() {
                if i == evaluated_indexes.len() - 1 {
                    // last index: perform the actual assignment
                    match assignee {
                        DataType::List(arr_i) => {
                            let index = evaluated_indexes.get(i).unwrap();
                            match index {
                                Index::List(i) => {
                                    self.lists[arr_i][i.clone()] = init_value.clone();
                                    break;
                                },
                                _ => {
                                    let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                                    return Err(RuntimeError(line, file_name, "Error on assignment to index".to_string()));
                                }
                            }
                        }
                        _ => {
                            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                            return Err(RuntimeError(line, file_name, "Cannot assign at index if datatype is not list".to_string()));
                        },
                    }
                } else {
                    // intermediate index: descend one level deeper
                    match assignee {
                        DataType::List(arr_i) => {
                            let a = self.lists.get_mut(arr_i).unwrap();
                            let index = evaluated_indexes.get(i).unwrap();
                            match index {
                                Index::List(i) => {
                                    assignee = a.get(i.clone()).unwrap().clone();
                                },
                                _ => {
                                    let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                                    return Err(RuntimeError(line, file_name, "Error on assignment to index".to_string()));
                                },
                            }
                        },
                        _ => {
                            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                            return Err(RuntimeError(line, file_name, "Cannot index if datatype not list".to_string()));
                        },
                    }
                }
            }
        },
        _ => {
            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
            return Err(RuntimeError(line, file_name, "Only list and record datatype can be indexed".to_string()));
        },
    }
    Ok(())
}
// Assigns `init_value` into a nested record, following `evaluated_indexes`
// (string keys) from the outermost record down to the innermost field.
// `record_reference` indexes into self.nameless_records; nested records are
// stored by reference (DataType::NamelessRecord holds an index), which is
// why each hop dereferences through self.nameless_records.
// Mirror image of list_multi_dim_assign for record types.
fn record_multi_dim_assign(&mut self,
                           record_reference: usize,
                           evaluated_indexes: Vec<Index>,
                           init_value: DataType) -> Result<(), PakhiErr>
{
    let record = self.nameless_records.get_mut(record_reference).unwrap();
    match evaluated_indexes.get(0).unwrap() {
        Index::NamelessRecord(key) => {
            // current node while walking the key chain; starts one level in
            let mut assignee: DataType = record.get(key).unwrap().clone();
            for i in 1..evaluated_indexes.len() {
                if i == evaluated_indexes.len() - 1 {
                    // last key: perform the actual insertion
                    match assignee {
                        DataType::NamelessRecord(record_i) => {
                            let index = evaluated_indexes.get(i).unwrap();
                            match index {
                                Index::NamelessRecord(k) => {
                                    self.nameless_records[record_i].insert(k.clone(), init_value);
                                    break;
                                }
                                _ => {
                                    let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                                    return Err(RuntimeError(line, file_name,
                                            "Error on assignment to index".to_string()));
                                }
                            }
                        }
                        _ => {
                            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                            return Err(RuntimeError(line, file_name,
                                    "Cannot assign at index if datatype is not record".to_string()));
                        },
                    }
                } else {
                    // intermediate key: descend one level deeper
                    match assignee {
                        DataType::NamelessRecord(record_i) => {
                            let r = self.nameless_records.get_mut(record_i).unwrap();
                            let index = evaluated_indexes.get(i).unwrap();
                            match index {
                                Index::NamelessRecord(k) => {
                                    assignee = r.get(k).unwrap().clone();
                                },
                                _ => {
                                    let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                                    return Err(RuntimeError(line, file_name,
                                            "Cannot assign at index if datatype is not record".to_string()));
                                },
                            }
                        }
                        _ => {
                            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
                            return Err(RuntimeError(line, file_name, "Cannot assign at index if datatype is not record".to_string()));
                        },
                    }
                }
            }
        }
        _ => {
            let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
            return Err(RuntimeError(line, file_name, "Only list and record datatype can be indexed".to_string()));
        },
    }
    Ok(())
}
// Returns the scope index (into self.scopes) of the innermost scope that
// declares `var_key`, or a negative number (-1) when the variable is not
// declared anywhere. When `init_value` is None the lookup is skipped and a
// negative value is returned unconditionally, matching the caller's contract.
fn find_var_env_index(&mut self, var_key: String, init_value: Option<parser::Expr>) -> i32 {
    if init_value.is_some() {
        // scan from the innermost scope outward
        for (scope_index, scope) in self.scopes.iter().enumerate().rev() {
            if scope.contains_key(&var_key) {
                return scope_index as i32;
            }
        }
    }
    -1
}
// Fetches a clone of the variable slot stored under `var_name` in the scope
// at `env_index`. Panics if the key is absent — callers locate the scope via
// find_var_env_index first.
fn get_var_from_env(&mut self, var_name: &str, env_index: usize) -> Option<DataType> {
    self.scopes[env_index][var_name].clone()
}
// Evaluates every index expression of an assignment up front and normalizes
// each into an Index: a number becomes Index::List (positional index), a
// string becomes Index::NamelessRecord (record key).
// NOTE(review): each evaluated index arrives as a one-element DataType::List
// and only element [0] is read — presumably how interpret_expr represents
// bracketed index values; confirm against the expression evaluator.
fn evaluate_all_indexes(&mut self, index_exprs: Vec<parser::Expr>) -> Result<Vec<Index>, PakhiErr> {
    let mut evaluated_index_exprs: Vec<Index> = Vec::new();
    for i in 0..index_exprs.len() {
        let index = self.interpret_expr(index_exprs[i].clone())?;
        match index {
            DataType::List(arr_i) => {
                match self.lists[arr_i][0].clone() {
                    DataType::Num(i) => evaluated_index_exprs.push(Index::List(i as usize)),
                    DataType::String(key) => evaluated_index_exprs.push(Index::NamelessRecord(key)),
                    _ => {
                        let (line, file_name) = self.extract_expr_err_meta(&index_exprs[i]);
                        return Err(RuntimeError(line, file_name, "Index must be of number or string type".to_string()));
                    },
                }
            }
            _ => {
                let (line, file_name) = self.extract_expr_err_meta(&index_exprs[i]);
                return Err(RuntimeError(line, file_name, "Expected '[' for indexing".to_string()));
            },
        }
    }
    return Ok(evaluated_index_exprs);
}
// Handles a function-definition statement: records the function's name,
// argument names, and the statement index where its body starts into the
// current scope (as DataType::Function), then skips over the body without
// executing it. The body runs later, when the function is called.
fn interpret_funcdef(&mut self) -> Result<(), PakhiErr> {
    // consuming function definition statement
    self.current += 1;
    // the statement after FuncDef must be a call-shaped expression holding
    // the function name and its formal argument list
    if let parser::Stmt::Expression(parser::Expr::Call(function, _, _),
                                    line, file_name) = self.statements[self.current].clone()
    {
        match *function.expr {
            parser::Expr::Primary(parser::Primary::Var(func_token), line, file_name) => {
                let func_name: String = func_token.lexeme.iter().collect();
                let func_args = function.arguments;
                // each formal argument must be a bare variable expression
                let mut func_args_name: Vec<String> = Vec::new();
                for arg_expr in func_args {
                    match arg_expr {
                        parser::Expr::Primary(parser::Primary::Var(name_token), _, _) => {
                            func_args_name.push(name_token.lexeme.iter().collect());
                        },
                        _ => {
                            return Err(RuntimeError(line, file_name, "Error during function definition".to_string()));
                        },
                    }
                }
                // body starts right after the name/args statement
                let func = Func {
                    starting_statement: self.current + 1,
                    args: func_args_name,
                };
                let current_env_i = self.scopes.len() - 1;
                self.scopes[current_env_i].insert(func_name.clone(), Some(DataType::Function(func)));
            },
            _ => {
                return Err(RuntimeError(line, file_name, "Cannot interpret function definition".to_string()));
            },
        }
    } else {
        let (line, file_name) = self.extract_err_meta_stmt(self.current)?;
        return Err(RuntimeError(line, file_name, "Expected function definition".to_string()));
    }
    // consuming function name and args statement (Expr::Call)
    self.current += 1;
    // skipping all statements in function body;
    // statements in func body are not executed during func definition
    self.skip_block()?;
    // consuming return statement
    // NOTE(review): error metadata below is taken from the last statement in
    // the program (statements.len() - 1), not from self.current — the
    // reported line may not match the actual defect site; verify intent.
    if self.current >= self.statements.len() {
        let (line, file_name) = self.extract_err_meta_stmt(self.statements.len() - 1)?;
        return Err(RuntimeError(line, file_name, "Unexpected error at function call".to_string()));
    }
    if let parser::Stmt::Return(_, _, _) = self.statements[self.current].clone() {
        self.current += 1;
    } else {
        let (line, file_name) = self.extract_err_meta_stmt(self.statements.len() - 1)?;
        return Err(RuntimeError(line, file_name, "Expected a return statement".to_string()));
    }
    Ok(())
}
// Handles an if statement: evaluates the condition, remembers the outcome in
// previous_if_was_executed (consulted by the matching else), and skips the
// following block when the condition is false.
// Fix: replaced the non-idiomatic `condition == false` comparison and folded
// the duplicated push calls into a single `push(condition)`.
fn interpret_if_stmt(&mut self, expr: parser::Expr) -> Result<(), PakhiErr> {
    let (line, file_name) = self.extract_expr_err_meta(&expr);
    // consuming if token
    self.current += 1;
    let if_condition_expr = self.interpret_expr(expr)?;
    if let DataType::Bool(condition) = if_condition_expr {
        // record outcome first: skip_block_in_if runs with it already pushed,
        // same as the original ordering
        self.previous_if_was_executed.push(condition);
        if !condition {
            // condition is false, so skip the block statement that follows
            self.skip_block_in_if()?;
        }
    } else {
        return Err(RuntimeError(line, file_name,
                "If condition expression must evaluate to boolean value".to_string()));
    }
    Ok(())
}
fn interpret_else_stmt(&mut self) -> Result<(), PakhiErr> {
assert!(!self.previous_if_was_executed.is_empty());
// consuming else token
self.current += 1;
let last_if_condition_index = self.previous_if_was_executed.len() - 1;
if self.previous_if_was_executed[last_if_condition_index] {
self.skip_block_in_if()?;
}
| rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | true |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/common/io.rs | src/common/io.rs | use crate::common::pakhi_error::PakhiErr;
/// Abstraction over every side-effecting IO operation the interpreter
/// performs, so tests can substitute a mock (`MockIO`) for the real
/// console-backed implementation (`RealIO`).
pub trait IO {
    fn new() -> Self;
    /// Writes `m` without a trailing newline.
    fn print(&mut self, m: &str);
    /// Writes `m` followed by a newline.
    fn println(&mut self, m: &str);
    /// Reads an entire source file into a `String`.
    /// (The previous `match` that unwrapped and rewrapped the Ok/Err of
    /// `read_to_string` was redundant — the result is returned directly.)
    fn read_src_code_from_file(&mut self, file_path: &str) -> Result<String, std::io::Error> {
        std::fs::read_to_string(file_path)
    }
    /// Reports `err` to the user and aborts execution.
    fn panic(&mut self, err: PakhiErr);
}
/// Production IO implementation backed by stdin/stdout/stderr.
pub struct RealIO;
impl IO for RealIO {
    fn new() -> RealIO {
        RealIO
    }
    fn print(&mut self, m: &str) {
        print!("{}", m);
    }
    fn println(&mut self, m: &str) {
        println!("{}", m);
    }
    /// Prints the error to stderr and terminates the process with status 1.
    /// Fix: the four match arms previously duplicated the same
    /// print-location-exit sequence; the error kind and location are now
    /// destructured once and printed through a single code path, producing
    /// byte-identical output.
    fn panic(&mut self, err: PakhiErr) {
        let (kind, location, err_message) = match err {
            PakhiErr::SyntaxError(line, file_name, msg) => ("SyntaxError", Some((file_name, line)), msg),
            PakhiErr::RuntimeError(line, file_name, msg) => ("RuntimeError", Some((file_name, line)), msg),
            PakhiErr::TypeError(line, file_name, msg) => ("TypeError", Some((file_name, line)), msg),
            PakhiErr::UnexpectedError(msg) => ("UnexpectedError", None, msg),
        };
        eprintln!("{}: {}", kind, err_message);
        if let Some((file_name, line)) = location {
            eprintln!(" at file: {}, line: {}", file_name, line);
        }
        std::process::exit(1);
    }
}
/// Test double for `IO`: records every operation the interpreter performs so
/// tests can compare the recording against registered expectations.
#[derive(Debug, Clone)]
pub struct MockIO {
    // operations actually performed, in order of occurrence
    print: Vec<String>,
    println: Vec<String>,
    panic: Vec<PakhiErr>,
    op_order: Vec<String>,
    // operations the test expects, registered via the expect_* methods
    expected_print: Vec<String>,
    expected_println: Vec<String>,
    expected_panic: Vec<PakhiErr>,
    expected_op_order: Vec<String>,
}
impl MockIO {
    /// Registers an expectation that `m` will be printed without a newline.
    pub fn expect_print(&mut self, m: &str) {
        self.expected_print.push(String::from(m));
        self.expected_op_order.push(String::from("print"));
    }
    /// Registers an expectation that `m` will be printed with a newline.
    pub fn expect_println(&mut self, m: &str) {
        self.expected_println.push(String::from(m));
        self.expected_op_order.push(String::from("println"));
    }
    /// Registers an expectation that the interpreter will report `err`.
    pub fn expect_panic(&mut self, err: PakhiErr) {
        self.expected_panic.push(err);
        self.expected_op_order.push(String::from("panic"));
    }
    /// Verifies every recorded operation against the registered expectations.
    /// Fix: the previous version iterated over the *actual* vectors and
    /// indexed into the expected ones, so an expectation that never happened
    /// passed silently (and extra actual output panicked with an index error
    /// instead of an assertion message). Comparing whole vectors also checks
    /// the lengths.
    pub fn assert_all_true(&self) {
        assert_eq!(self.expected_print, self.print);
        assert_eq!(self.expected_println, self.println);
        assert_eq!(self.expected_panic, self.panic);
        assert_eq!(self.expected_op_order, self.op_order);
    }
}
impl IO for MockIO {
fn new() -> MockIO {
MockIO {
print: Vec::new(),
println: Vec::new(),
panic: Vec::new(),
op_order: Vec::new(),
expected_print: Vec::new(),
expected_println: Vec::new(),
expected_panic: Vec::new(),
expected_op_order: Vec::new(),
}
}
fn print(&mut self, m: &str) {
self.print.push(String::from(m));
self.op_order.push(String::from("print"));
}
fn println(&mut self, m: &str) {
self.println.push(String::from(m));
self.op_order.push(String::from("println"));
}
fn panic(&mut self, err: PakhiErr) {
self.panic.push(err);
self.op_order.push("panic".to_string());
}
} | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/common/mod.rs | src/common/mod.rs | pub mod io;
pub mod pakhi_error;
| rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/common/pakhi_error.rs | src/common/pakhi_error.rs | #[derive(Debug, Clone, PartialOrd, PartialEq)]
pub enum PakhiErr {
    // Every tuple below is (line_number, file_path, err_message) so the
    // reporter can point the user at the offending source location.
    SyntaxError(u32, String, String),
    TypeError(u32, String, String),
    RuntimeError(u32, String, String),
    UnexpectedError(String), // carries only an error message; no location known
}
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/frontend/lexer.rs | src/frontend/lexer.rs | use std::collections::HashMap;
use crate::common::pakhi_error::PakhiErr::SyntaxError;
use crate::common::pakhi_error::PakhiErr;
/// A single lexical token produced by `tokenize`.
#[derive(Clone, Debug, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    // the raw source characters this token was lexed from
    pub lexeme: Vec<char>,
    // 1-based source line the token starts on (0 for the synthetic EOT token)
    pub line: u32,
    pub src_file_path: String,
}
/// Every kind of token the Pakhi lexer can produce. Literal-carrying kinds
/// (`Num`, `String`, `Bool`) embed the parsed value directly.
#[derive(Clone, Debug, PartialEq)]
pub enum TokenKind {
    Num(f64),
    String(String),
    Identifier,
    If,
    Else,
    Loop,
    Var, // the নাম keyword, not a variable identifier
    Function,
    Plus,
    Minus,
    Multiply,
    Division,
    Remainder,
    At,
    Semicolon,
    Map, // the '->' operator used inside record literals
    Comment,
    Comma,
    ParenStart,
    ParenEnd,
    CurlyBraceStart,
    CurlyBraceEnd,
    SquareBraceStart,
    SquareBraceEnd,
    Equal,
    LessThan,
    GreaterThan,
    EqualEqual,
    NotEqual,
    LessThanOrEqual,
    GreaterThanOrEqual,
    And,
    Or,
    Not,
    Bool(bool),
    Break,
    Continue,
    Return,
    Print,
    Import,
    PrintNoEOL,
    EOT, // represents end of tokens, only needed for parsing to indicate
         // all previous tokens were consumed
}
/// Lexes the whole source character stream into tokens, appending a
/// synthetic EOT token at the end so the parser can detect exhaustion.
/// Whitespace and newlines yield no token but still advance the cursor and
/// line counter.
pub fn tokenize(src: Vec<char>, src_file_path: String) -> Result<Vec<Token>, PakhiErr> {
    let mut tokens: Vec<Token> = Vec::new();
    let mut pos = 0;
    let mut line_no: u32 = 1;
    while pos < src.len() {
        // chars_used / lines_used report how much input the token consumed
        let (maybe_token, chars_used, lines_used) =
            consume(&src, pos, line_no, src_file_path.clone())?;
        if let Some(tok) = maybe_token {
            tokens.push(tok);
        }
        pos += chars_used;
        line_no += lines_used;
    }
    tokens.push(Token {
        kind: TokenKind::EOT,
        lexeme: Vec::new(),
        line: 0,
        src_file_path,
    });
    Ok(tokens)
}
fn consume(src: &Vec<char>, start: usize, line: u32, src_file_path: String) -> Result<(Option<Token>, usize, u32), PakhiErr> {
let consumed_char: usize;
let consumed_line: u32;
let token: Token;
match src[start] {
'-'|'০'|'১'|'২'|'৩'|'৪'|'৫'|'৬'|'৭'|'৮'|'৯' => {
if src[start+1].is_numeric() || src[start].is_numeric() {
// negative number, unary '-' operator
let (val, consumed) = consume_num(src, start, line, &src_file_path)?;
consumed_char = consumed;
consumed_line = 0;
token = Token {
kind: TokenKind::Num(val),
lexeme: src[start..(start+consumed_char)].to_vec(),
line: line + consumed_line,
src_file_path,
}
} else {
// not a negative number, binary '-' operator or map operator '->' in record
if src[start+1] == '>' {
// map operator '->' in record
consumed_char = 2;
consumed_line = 0;
token = Token {
kind: TokenKind::Map,
lexeme: src[start..(start+2)].to_vec(),
line,
src_file_path,
}
} else {
// binary '-' operator
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Minus,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
}
}
},
'+' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Plus,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'*' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Multiply,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'/' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Division,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'%' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Remainder,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'&' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::And,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'|' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Or,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'!' => {
if start < src.len() && src[start+1] == '=' {
consumed_char = 2;
consumed_line = 0;
token = Token {
kind: TokenKind::NotEqual,
lexeme: src[start..(start+2)].to_vec(),
line,
src_file_path,
}
} else {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Not,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
}
},
'@' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::At,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'#' => {
let (char_skipped, lines_skipped) = skip_comment_block(src, start, line, &src_file_path)?;
consumed_char = char_skipped;
consumed_line = lines_skipped;
token = Token {
kind: TokenKind::Comment,
lexeme: src[start..(start+char_skipped)].to_vec(),
line,
src_file_path,
}
},
';' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Semicolon,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
',' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Comma,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'"' => {
let (val, consumed) = consume_string(src, start);
consumed_char = consumed;
consumed_line = 0;
token = Token {
kind: TokenKind::String(val),
// start + 1 for excluding first " and (start+consumed_char)-1 for excluding last "
lexeme: src[(start+1)..((start+consumed_char)-1)].to_vec(),
line: line + consumed_line,
src_file_path,
}
},
'(' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::ParenStart,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
')' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::ParenEnd,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'{' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::CurlyBraceStart,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'}' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::CurlyBraceEnd,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'[' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::SquareBraceStart,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
']' => {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::SquareBraceEnd,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
},
'=' => {
if start < src.len() && src[start+1] == '=' {
consumed_char = 2;
consumed_line = 0;
token = Token {
kind: TokenKind::EqualEqual,
lexeme: src[start..(start+2)].to_vec(),
line,
src_file_path,
}
} else {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::Equal,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
}
},
'<' => {
if start < src.len() && src[start+1] == '=' {
consumed_char = 2;
consumed_line = 0;
token = Token {
kind: TokenKind::LessThanOrEqual,
lexeme: src[start..(start+2)].to_vec(),
line,
src_file_path,
}
} else {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::LessThan,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
}
},
'>' => {
if start < src.len() && src[start+1] == '=' {
consumed_char = 2;
consumed_line = 0;
token = Token {
kind: TokenKind::GreaterThanOrEqual,
lexeme: src[start..(start+2)].to_vec(),
line,
src_file_path,
}
} else {
consumed_char = 1;
consumed_line = 0;
token = Token {
kind: TokenKind::GreaterThan,
lexeme: src[start..(start+1)].to_vec(),
line,
src_file_path,
}
}
},
' ' | '\r' | '\t' => {
consumed_char = 1;
consumed_line = 0;
return Ok((None, consumed_char, consumed_line));
},
'\n' => {
consumed_char = 1;
consumed_line = 1;
return Ok((None, consumed_char, consumed_line));
},
_ => {
// if nothing matches must be an identifier
let (t, consumed) = consume_identifier(src, start, line, src_file_path);
consumed_char = consumed;
consumed_line = 0;
token = t;
},
}
Ok((Some(token), consumed_char, consumed_line))
}
/// Parses a (possibly negative, possibly fractional) Bengali-digit number
/// starting at `src[start]`. Returns the value and how many chars it used.
///
/// Fix: the fractional part was previously scaled by
/// `10^fractional_val.to_string().len()`, which drops leading zeros — e.g.
/// "২.০৫" (2.05) lexed as 2.5 because the digits "05" collapse to the value 5
/// whose decimal string has length 1. The number of fractional digits
/// actually consumed is counted instead.
fn consume_num(src: &Vec<char>, start: usize, line: u32, src_file_path: &str) -> Result<(f64, usize), PakhiErr> {
    assert!(src[start].is_numeric() || src[start] == '-');
    let mut consumed = 0;
    let mut val = 0.0;
    let mut fractional_val = 0.0;
    // how many digits appeared after the decimal point; needed to scale
    // fractional_val correctly even when it has leading zeros
    let mut fractional_digits: i32 = 0;
    let mut i = start;
    let is_negative = if src[start] == '-' {
        // skipping negative sign
        consumed += 1;
        i += 1;
        true
    } else {
        false
    };
    let mut in_fractional_part = false;
    while i < src.len() && (src[i].is_numeric() || src[i] == '.') {
        if src[i] == '.' {
            if in_fractional_part {
                // a second '.' inside one number literal
                return Err(SyntaxError(line, src_file_path.to_string(),
                        "Number is not properly formatted".to_string()));
            }
            in_fractional_part = true;
        } else if in_fractional_part {
            fractional_val = (fractional_val * 10.0) + bn_digit_to_en_digit(src[i], line, src_file_path)?;
            fractional_digits += 1;
        } else {
            val = (val * 10.0) + bn_digit_to_en_digit(src[i], line, src_file_path)?;
        }
        consumed += 1;
        i += 1;
    }
    let magnitude = val + fractional_val / 10_f64.powi(fractional_digits);
    if is_negative {
        Ok((-magnitude, consumed))
    } else {
        Ok((magnitude, consumed))
    }
}
/// Collects the characters of a string literal beginning at the opening '"'
/// at `src[start]`. Returns the string contents (quotes excluded) and the
/// total chars consumed (contents plus both quote characters).
fn consume_string(src: &Vec<char>, start: usize) -> (String, usize) {
    assert_eq!('"', src[start]);
    let val: String = src[start + 1..]
        .iter()
        .take_while(|&&c| c != '"')
        .collect();
    // contents length plus the opening and closing '"'
    let consumed = val.chars().count() + 2;
    (val, consumed)
}
/// Consumes a maximal run of identifier characters starting at `src[start]`.
/// If the run spells a reserved word, the keyword token is returned;
/// otherwise an Identifier token carrying the run as its lexeme.
fn consume_identifier(src: &Vec<char>, start: usize, line: u32, src_file_path: String) -> (Token, usize) {
    let mut end = start;
    while end < src.len() && is_valid_identifier_char(src[end]) {
        end += 1;
    }
    let consumed = end - start;
    let char_vec: Vec<char> = src[start..end].to_vec();
    let token = match keyword(&char_vec, line, src_file_path.clone()) {
        Some(keyword_token) => keyword_token,
        None => Token {
            kind: TokenKind::Identifier,
            lexeme: char_vec,
            line,
            src_file_path,
        },
    };
    (token, consumed)
}
/// True when `c` may appear inside an identifier: '-', '_' and '/' are
/// explicitly allowed, and otherwise anything that is not ASCII whitespace,
/// punctuation, or a control character (which admits all Bengali letters).
fn is_valid_identifier_char(c: char) -> bool {
    matches!(c, '-' | '_' | '/')
        || !(c.is_ascii_whitespace() || c.is_ascii_punctuation() || c.is_ascii_control())
}
/// Returns the keyword token for `char_vec` if it spells a reserved word,
/// None otherwise (meaning it is a plain identifier).
/// Fix: the previous version allocated and populated a 13-entry HashMap on
/// EVERY call (once per identifier lexed); a direct `match` on the collected
/// string avoids all of that while preserving the exact keyword set.
fn keyword(char_vec: &Vec<char>, line: u32, src_file_path: String) -> Option<Token> {
    let word: String = char_vec.iter().collect();
    let kind = match word.as_str() {
        "নাম" => TokenKind::Var,
        "যদি" => TokenKind::If,
        "অথবা" => TokenKind::Else,
        "লুপ" => TokenKind::Loop,
        "ফাং" => TokenKind::Function,
        "ফেরত" => TokenKind::Return,
        "থামাও" => TokenKind::Break,
        "আবার" => TokenKind::Continue,
        "দেখাও" => TokenKind::Print,
        "_দেখাও" => TokenKind::PrintNoEOL,
        "সত্য" => TokenKind::Bool(true),
        "মিথ্যা" => TokenKind::Bool(false),
        "মডিউল" => TokenKind::Import,
        _ => return None,
    };
    Some(Token {
        kind,
        lexeme: char_vec.to_vec(),
        line,
        src_file_path,
    })
}
/// Maps a Bengali digit character to its numeric value (as f64, since Pakhi
/// numbers are f64). Any non-digit character is a SyntaxError.
fn bn_digit_to_en_digit(digit: char, line: u32, src_file_path: &str) -> Result<f64, PakhiErr> {
    let value = match digit {
        '০' => 0.0,
        '১' => 1.0,
        '২' => 2.0,
        '৩' => 3.0,
        '৪' => 4.0,
        '৫' => 5.0,
        '৬' => 6.0,
        '৭' => 7.0,
        '৮' => 8.0,
        '৯' => 9.0,
        _ => {
            return Err(SyntaxError(line, src_file_path.to_string(), format!("Cannot convert '{}' to bangla digit", digit)));
        },
    };
    Ok(value)
}
/// Skips a '#'-delimited comment block starting at the opening '#' at
/// `src[start]`. "\#" escapes a literal hash inside the comment. Returns how
/// many chars and how many newlines the comment spanned.
///
/// Fix: newlines were previously checked AFTER advancing the cursor, so the
/// first character of the comment (and the character following every escape
/// pair) was never tested — a comment like "#\n...#" under-counted its lines
/// and every later token's reported line number drifted. Each character is
/// now tested before the cursor advances.
fn skip_comment_block(src: &Vec<char>, start: usize, line: u32, src_file_path: &str) -> Result<(usize, u32), PakhiErr> {
    let mut char_skipped: usize = 1;
    let mut lines_skipped: u32 = 0;
    while src[start + char_skipped] != '#' {
        if (start + char_skipped + 1) > src.len() - 1 {
            return Err(SyntaxError(line, src_file_path.to_string(), "Comment block wasn't closed".to_string()))
        }
        if src[start + char_skipped] == '\\' && src[start + char_skipped + 1] == '#' {
            // "\#" escapes a hash inside the comment; skip both chars
            // (neither can be a newline, so no line accounting needed here)
            char_skipped += 2;
            continue;
        }
        if src[start + char_skipped] == '\n' {
            lines_skipped += 1;
        }
        char_skipped += 1;
    }
    // skipping the closing '#'
    char_skipped += 1;
    Ok((char_skipped, lines_skipped))
}
// Unit tests for the pure lexer helpers (number/string consumption and
// keyword recognition).
// NOTE(review): the import path `crate::lexer` (not `crate::frontend::lexer`)
// presumably relies on a crate-root re-export — verify against lib.rs.
#[cfg(test)]
mod tests {
    use crate::lexer::{consume_num, keyword, TokenKind, consume_string};
    #[test]
    fn lexer_consume_num_test_1() {
        // plain integer: ২৪৫ == 245
        let digits_1 = vec!['২', '৪', '৫'];
        let (val, consumed) = consume_num(&digits_1, 0, 1, "test.pakhi").unwrap();
        assert_eq!(245.0, val);
        assert_eq!(3, consumed);
    }
    #[test]
    fn lexer_consume_num_test_2() {
        // consumption stops at the first non-digit (the space)
        let digits_2 = vec!['২', '৪', '৫', ' ', '২'];
        let (val, consumed) = consume_num(&digits_2, 0, 1, "test.pakhi").unwrap();
        assert_eq!(245.0, val);
        assert_eq!(3, consumed);
    }
    #[test]
    fn lexer_consume_num_test_3() {
        // fractional number: ২৪৫.২৩৬ == 245.236
        let digits_3 = vec!['২', '৪', '৫', '.', '২', '৩', '৬'];
        let (val, consumed) = consume_num(&digits_3, 0, 1, "test.pakhi").unwrap();
        assert_eq!(245.236, val);
        assert_eq!(7, consumed);
    }
    #[test]
    fn lexer_consume_num_test_4() {
        // leading '-' produces a negative value and counts as a consumed char
        let digits_4 = vec!['-', '২', '৪', '৫', '.', '২', '৩', '৬'];
        let (val, consumed) = consume_num(&digits_4, 0, 1, "test.pakhi").unwrap();
        assert_eq!(-245.236, val);
        assert_eq!(8, consumed);
    }
    #[test]
    fn lexer_consume_num_test_5() {
        // single zero digit
        let digits_5 = vec!['০'];
        let (val, consumed) = consume_num(&digits_5, 0, 1, "test.pakhi").unwrap();
        assert_eq!(0.0, val);
        assert_eq!(1, consumed);
    }
    #[test]
    fn lexer_consume_string_test() {
        // quotes are consumed but excluded from the returned contents
        let string: Vec<char> = "\" var a = 45;\"".chars().collect();
        let (val, consumed) = consume_string(&string, 0);
        assert_eq!(" var a = 45;", val);
        assert_eq!(14, consumed);
    }
    #[test]
    fn lexer_keyword_test_1() {
        let kword: Vec<char> = "ফাং".chars().collect();
        let t = keyword(&kword, 0, "test.pakhi".to_string()).unwrap();
        assert_eq!(TokenKind::Function, t.kind);
    }
    #[test]
    fn lexer_keyword_test_2() {
        let kword: Vec<char> = "নাম".chars().collect();
        let t = keyword(&kword, 0, "test.pakhi".to_string()).unwrap();
        assert_eq!(TokenKind::Var, t.kind);
    }
    #[test]
    fn lexer_keyword_test_3() {
        let kword: Vec<char> = "লুপ".chars().collect();
        let t = keyword(&kword, 0, "test.pakhi".to_string()).unwrap();
        assert_eq!(TokenKind::Loop, t.kind);
    }
    #[test]
    fn lexer_keyword_test_4() {
        // non-keywords yield None so they lex as identifiers
        let kword: Vec<char> = "abc".chars().collect();
        assert!(keyword(&kword, 0, "test.pakhi".to_string()).is_none());
    }
}
| rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/frontend/parser.rs | src/frontend/parser.rs | use crate::frontend::lexer;
use crate::frontend::lexer::Token;
use crate::frontend::lexer::TokenKind;
use crate::common::io;
use crate::common::io::IO;
use std::path::Path;
use std::collections::HashMap;
use std::ffi::OsStr;
use crate::backend::built_ins::BuiltInFunctionList;
use crate::common::pakhi_error::PakhiErr;
/// One parsed Pakhi statement.
/// Here the u32 and String in every variant carry the line number and file
/// name so that users can locate errors in their source file.
#[derive(Debug, PartialEq, Clone)]
pub enum Stmt {
    Print(Expr, u32, String),
    PrintNoEOL(Expr, u32, String),
    Assignment(Assignment, u32, String),
    Expression(Expr, u32, String),
    BlockStart(u32, String),
    BlockEnd(u32, String),
    FuncDef(u32, String),
    Return(Expr, u32, String),
    If(Expr, u32, String),
    Loop(u32, String),
    Continue(u32, String),
    Break(u32, String),
    Else(u32, String),
    EOS(u32, String), // represents end of statements, only needed for interpreting
                      // to indicate all previous statements were consumed
}
/// A variable declaration or reassignment.
#[derive(Debug, PartialEq, Clone)]
pub struct Assignment {
    pub kind: AssignmentKind,
    pub var_name: Token,
    // assignment can target an element inside a list or record,
    // so the index expressions (possibly several levels deep) are kept
    pub indexes: Vec<Expr>,
    // None when a variable is declared without an initial value
    pub init_value: Option<Expr>,
}
/// Distinguishes a declaration (first assignment) from a reassignment.
#[derive(Debug, PartialEq, Clone)]
pub enum AssignmentKind {
    FirstAssignment,
    Reassignment,
}
/// Expression AST, one variant per precedence level.
/// The u32 and String in every variant carry the line number and file name
/// so errors can be located in the user's source file.
#[derive(Debug, PartialEq, Clone)]
pub enum Expr {
    Indexing(Box<Expr>, Box<Expr>, u32, String),
    Or(Or, u32, String),
    And(And, u32, String),
    Equality(Binary, u32, String),
    Comparison(Binary, u32, String),
    AddOrSub(Binary, u32, String),
    MulOrDivOrRemainder(Binary, u32, String),
    Unary(Unary, u32, String),
    Call(FunctionCall, u32, String),
    Primary(Primary, u32, String),
}
/// Leaf expressions: literals, variables and parenthesized groups.
#[derive(Debug, PartialEq, Clone)]
pub enum Primary {
    Nil,
    Bool(bool),
    Num(f64),
    String(String),
    List(Vec<Expr>),
    // parallel key / value expression lists of a record literal
    NamelessRecord((Vec<Expr>, Vec<Expr>)),
    Var(Token),
    Group(Box<Expr>),
}
/// Logical-or node: `left | right`.
#[derive(Debug, PartialEq, Clone)]
pub struct Or {
    pub left: Box<Expr>,
    pub right: Box<Expr>,
}
/// Logical-and node: `left & right`.
#[derive(Debug, PartialEq, Clone)]
pub struct And {
    pub left: Box<Expr>,
    pub right: Box<Expr>,
}
/// Generic binary operation; `operator` is the TokenKind of the operator.
#[derive(Debug, PartialEq, Clone)]
pub struct Binary {
    pub operator: TokenKind,
    pub left: Box<Expr>,
    pub right: Box<Expr>,
}
/// Prefix operation (e.g. negation, logical not).
#[derive(Debug, PartialEq, Clone)]
pub struct Unary {
    pub operator: TokenKind,
    pub right: Box<Expr>
}
/// A call: the callee expression plus its argument expressions.
#[derive(Debug, PartialEq, Clone)]
pub struct FunctionCall {
    pub expr: Box<Expr>,
    pub arguments: Vec<Expr>,
}
/// Recursive-descent parser state over a token stream.
struct Parser {
    tokens: Vec<Token>,
    // index of the next token to consume
    current: usize,
    main_module_path: String,
    // Stores all imported child modules' names for every parent module.
    // key: parent module name
    // value: every imported child module's name
    parent_child_relationship: HashMap<String, Vec<String>>,
    // Storing all built-in function names because when modules' identifiers
    // are renamed we don't want to rename built-in functions
    built_in_functions: BuiltInFunctionList,
}
impl Parser {
/// Creates a parser over `tokens` with no module context resolved yet.
fn new(tokens: Vec<Token>) -> Parser {
    Parser {
        tokens,
        current: 0,
        main_module_path: String::new(),
        parent_child_relationship: HashMap::new(),
        built_in_functions: BuiltInFunctionList::new(),
    }
}
/// Entry point: records the root module's direct imports, expands the
/// dirname constant, then consumes statements until Stmt::EOS is produced.
fn parse(&mut self) -> Result<Vec<Stmt>, PakhiErr> {
    // Figuring out which modules are direct children of the root module
    let parent_module_file_name = self.extract_filename(&self.main_module_path);
    let child_modules_paths = self.extract_all_import_paths(&self.tokens)?;
    let child_modules_file_name = self.extract_filenames(&child_modules_paths);
    let mut new_childs: Vec<String> = Vec::new();
    for new_child_name in child_modules_file_name {
        new_childs.push(new_child_name);
    }
    self.parent_child_relationship.insert(parent_module_file_name.clone(), new_childs);
    self.expand_dirname_constant_for_root_module();
    let mut statements: Vec<Stmt> = Vec::new();
    loop {
        let s = self.statements()?;
        if let Stmt::EOS(_, _) = s {
            statements.push(s);
            break;
        }
        statements.push(s);
        // guard: statements() may have consumed through the last token
        if self.current > self.tokens.len() - 1 {
            return Err(PakhiErr::UnexpectedError("Error at last line, Expected a ';'".to_string()));
        }
        if self.tokens[self.current].kind == TokenKind::Semicolon {
            // a useful semicolon would have been consumed by self.statements();
            // if it wasn't consumed we assume it's a trailing one (function
            // call statements need this) and skip it
            self.current += 1;
            continue;
        }
    }
    return Ok(statements)
}
    /// Dispatches on the current token to the matching statement parser.
    ///
    /// Returns `Stmt::EOS` at end of the token stream and a `SyntaxError`
    /// for any token that cannot start a statement.
    fn statements(&mut self) -> Result<Stmt, PakhiErr> {
        let (line, file_name) = self.get_token_line_file_name(self.current)?;
        match self.tokens[self.current].kind {
            TokenKind::Print => self.print_stmt(),
            TokenKind::PrintNoEOL => self.print_no_newline_stmt(),
            TokenKind::Var => self.assignment_stmt(),
            TokenKind::Identifier => self.re_assignment_or_func_call_stmt(),
            TokenKind::CurlyBraceStart => self.block_start(),
            TokenKind::CurlyBraceEnd => self.block_end(),
            TokenKind::If => self.if_statement(),
            TokenKind::Else => self.else_statement(),
            TokenKind::Loop => self.loop_stmt(),
            TokenKind::Continue => self.continue_stmt(),
            TokenKind::Break => self.break_stmt(),
            TokenKind::Function => self.func_def_stmt(),
            TokenKind::Return => self.return_stmt(),
            // NOTE(review): a statement starting with '@' is not implemented
            // and will panic via todo!() at runtime.
            TokenKind::At => todo!(),
            TokenKind::Comment => self.comment_block(),
            TokenKind::Import => self.module_import_stmt(),
            TokenKind::EOT => Ok(Stmt::EOS(line, file_name)),
            _ => {
                let (line, file_name) = self.extract_err_meta()?;
                return Err(PakhiErr::SyntaxError(line, file_name,
                    format!("Unexpected token debug Token: {:?}", self.tokens[self.current])));
            },
        }
    }
    /// Parses `Import name = "path";` — lexes the referenced module, splices
    /// its tokens into this parser's stream, then parses (and returns) the
    /// first statement produced by those spliced tokens.
    fn module_import_stmt(&mut self) -> Result<Stmt, PakhiErr> {
        // skipping module keyword token
        self.current += 1;
        if self.tokens[self.current].kind == TokenKind::Identifier {
            let module_import_name = self.tokens[self.current].lexeme.clone();
            match self.named_module_import(module_import_name) {
                Ok(_) => {},
                Err(e) => return Err(e),
            }
        } else {
            let (line, file_name) = self.extract_err_meta()?;
            return Err(PakhiErr::SyntaxError(line, file_name, "Expected a name for imported module".to_string()));
        }
        // skipping ; token
        self.current += 1;
        // Module doesn't generate statement, it only lexes and puts returned tokens to parser's token
        // queue. Then generates statement from those tokens. That's why self.statements() is called.
        let stmt = self.statements()?;
        return Ok(stmt);
    }
    // Module could be imported with giving a namespace which was called unnamed_module_import
    // but unnamed module import feature was removed
    // that's why this functions name is named_module_import instead of import_module
    /// Resolves one import: concatenates the (possibly `+`-joined) path
    /// string literals, lexes the module, checks for cyclic dependencies via
    /// `parent_child_relationship`, and splices the module's tokens into the
    /// stream right after the import statement's semicolon.
    ///
    /// On entry `self.current` points at the module-name identifier; on exit
    /// it points at the import statement's `;`.
    fn named_module_import(&mut self, module_import_name: Vec<char>) -> Result<(), PakhiErr> {
        // skipping module name identifier token and equal token
        self.current += 2;
        let module_path = match self.tokens[self.current].kind {
            TokenKind::String(ref path) => {
                let mut concated_module_path = Path::new(path).to_path_buf();
                self.current += 1;
                // Consume `+ "segment"` pairs until the terminating ';',
                // joining each literal onto the path.
                while self.tokens[self.current].kind != TokenKind::Semicolon {
                    match self.tokens[self.current].kind {
                        TokenKind::String(ref p) => {
                            let rest_of_the_path = Path::new(p);
                            concated_module_path = concated_module_path.join(rest_of_the_path);
                            self.current += 1;
                        },
                        TokenKind::Plus => {
                            self.current += 1;
                        },
                        _ => {
                            let (line, file_name) = self.extract_err_meta()?;
                            return Err(PakhiErr::SyntaxError(line, file_name,
                                "Module path must be static string literal".to_string()));
                        }
                    }
                }
                concated_module_path.to_str().unwrap().to_string()
            },
            _ => {
                let (line, file_name) = self.extract_err_meta()?;
                return Err(PakhiErr::SyntaxError(line, file_name,
                    "Module path must be static string literal".to_string()));
            },
        };
        // checking if importing file with .pakhi
        if !module_path.ends_with(".pakhi") {
            let (line, file_name) = self.extract_err_meta()?;
            return Err(PakhiErr::SyntaxError(line, file_name,
                "Not a valid module file name".to_string()));
        }
        let imported_tokens = self.get_tokens_from_module(&module_path, module_import_name)?;
        let parent_module_file_name = self.extract_filename(&module_path);
        let child_modules_paths = self.extract_all_import_paths(&imported_tokens)?;
        let child_modules_file_name = self.extract_filenames(&child_modules_paths);
        // Checking for cyclic module dependency
        // and figuring out who is parent of which modules
        match self.parent_child_relationship.get_mut(&*parent_module_file_name) {
            Some(childs) => {
                for new_child_name in child_modules_file_name {
                    if childs.contains(&new_child_name) {
                        return Err(PakhiErr::RuntimeError(0, "".to_string(),
                            format!("Cyclic module dependency. Can't import {} from {}",
                                parent_module_file_name, new_child_name)));
                    }
                    childs.push(new_child_name);
                }
            },
            None => {
                let mut new_childs: Vec<String> = Vec::new();
                for new_child_name in child_modules_file_name {
                    new_childs.push(new_child_name);
                }
                self.parent_child_relationship.insert(parent_module_file_name.clone(), new_childs);
            }
        }
        // tokens is inserted after whole module import statement
        // after importing module self.current will point to semicolon of module import statement
        let mut insert_token_at = self.current + 1; // + 1 required to insert after semicolon
        for token in imported_tokens {
            // The module's own EOT marker must not terminate the parent stream.
            if token.kind == TokenKind::EOT { continue }
            self.tokens.insert(insert_token_at, token);
            insert_token_at += 1;
        }
        Ok(())
    }
    /// Reads and lexes the module at `path` (resolved relative to the main
    /// module's directory), expands its `_ডাইরেক্টরি` constant, and namespaces
    /// every identifier with `prepend` + '/'.
    ///
    /// # Errors
    /// Returns `RuntimeError` when the file can't be read; lexer errors
    /// propagate via `?`.
    fn get_tokens_from_module(&self, path: &String, prepend: Vec<char>) -> Result<Vec<Token>, PakhiErr> {
        let module_path = Path::new(path.as_str());
        let current_module_root = Path::new(self.main_module_path.as_str()).parent().unwrap();
        let modules_relative_path_to_current_modules = current_module_root.join(module_path);
        let final_module_path = modules_relative_path_to_current_modules.as_path().to_str().unwrap();
        let mut io = io::RealIO::new();
        let src_string = io.read_src_code_from_file(final_module_path);
        match src_string {
            Ok(src) => {
                let src_chars: Vec<char> = src.chars().collect();
                let mut module_tokens = lexer::tokenize(src_chars,
                    final_module_path.to_string())?;
                // Must call this function before prepend
                self.expand_dirname_constant(&mut module_tokens, final_module_path);
                self.prepend_with_import_name(&mut module_tokens, prepend);
                return Ok(module_tokens);
            },
            Err(e) => {
                return Err(PakhiErr::RuntimeError(0, "".to_string(),
                    format!("Error opening file: {}. System error message: {}", final_module_path, e)));
            },
        }
    }
// Must call this function before prepend or without prepend
// Dynamically replace _ডাইরেক্টরি identifier token with String token that
// contains actual directory path String
fn expand_dirname_constant(&self, tokens: &mut Vec<Token>, module_file_location: &str) {
let mut tokens_to_mutate_index: Vec<usize> = Vec::new();
for (i, token) in tokens.iter().enumerate() {
if token.kind == TokenKind::Identifier && self.is_dirname_constant(&token.lexeme) {
tokens_to_mutate_index.push(i);
}
}
let modules_src_file_location= Path::new(module_file_location);
for i in tokens_to_mutate_index {
let mut module_src_file_dir;
if modules_src_file_location.is_relative() {
let absolute_path = std::env::current_dir().unwrap().join(&modules_src_file_location);
module_src_file_dir = absolute_path.parent().unwrap().to_str().unwrap().to_string();
} else {
module_src_file_dir = modules_src_file_location.parent().unwrap().to_str().unwrap().to_string();
}
if !module_src_file_dir.ends_with("/") || !module_src_file_dir.ends_with("/") {
module_src_file_dir.push_str("/");
}
tokens[i].kind = TokenKind::String(module_src_file_dir.clone());
tokens[i].lexeme = module_src_file_dir.chars().collect();
}
}
fn expand_dirname_constant_for_root_module(&mut self) {
let mut tokens_to_mutate_index: Vec<usize> = Vec::new();
for (i, token) in self.tokens.iter().enumerate() {
if token.kind == TokenKind::Identifier && self.is_dirname_constant(&token.lexeme) {
tokens_to_mutate_index.push(i);
}
}
let modules_src_file_location= Path::new(&self.main_module_path);
for i in tokens_to_mutate_index {
let mut module_src_file_dir;
if modules_src_file_location.is_relative() {
let absolute_path = std::env::current_dir().unwrap().join(&modules_src_file_location);
module_src_file_dir = absolute_path.parent().unwrap().to_str().unwrap().to_string();
} else {
module_src_file_dir = modules_src_file_location.parent().unwrap().to_str().unwrap().to_string();
}
if !module_src_file_dir.ends_with("/") || !module_src_file_dir.ends_with("/") {
module_src_file_dir.push_str("/");
}
self.tokens[i].kind = TokenKind::String(module_src_file_dir.clone());
self.tokens[i].lexeme = module_src_file_dir.chars().collect();
}
}
fn is_dirname_constant(&self, lexeme: &Vec<char>) -> bool {
let var_name: String = lexeme.iter().collect();
if var_name == "_ডাইরেক্টরি".to_string() {
true
} else { false }
}
fn prepend_with_import_name(&self, tokens: &mut Vec<Token>, prepend: Vec<char>) {
for token in tokens.iter_mut() {
if token.kind == TokenKind::Identifier {
if self.built_in_functions.is_built_in(&token.lexeme) {
continue;
}
let mut i = 0;
for c in prepend.iter() {
token.lexeme.insert(i, c.clone());
i += 1;
}
token.lexeme.insert(i, '/');
}
}
}
fn extract_filename(&self, path: &String) -> String {
let path = Path::new(path);
let file_name = OsStr::to_string_lossy(path.file_name().unwrap());
file_name.to_string()
}
fn extract_filenames(&self, paths: &Vec<String>) -> Vec<String> {
let mut file_names: Vec<String> = Vec::new();
for path in paths {
file_names.push(self.extract_filename(path));
}
file_names
}
    /// Collects the file name of every module imported anywhere in `tokens`.
    ///
    /// NOTE(review): despite the name this returns file names, not full
    /// paths — `get_module_path_from_import_stmt` already reduces each path
    /// to its file name, so the `extract_filenames` pass here is effectively
    /// a no-op second reduction.
    fn extract_all_import_paths(&self, tokens: &Vec<Token>) -> Result<Vec<String>, PakhiErr> {
        let import_stmt_start_token_indexes = self.find_all_imports_start(tokens);
        let mut modules_paths: Vec<String> = Vec::new();
        for i in import_stmt_start_token_indexes {
            let module_paths = self.get_module_path_from_import_stmt(tokens, i);
            match module_paths {
                Ok(path) => modules_paths.push(path),
                Err(e) => return Err(e),
            }
        }
        let file_names = self.extract_filenames(&modules_paths);
        return Ok(file_names);
    }
fn find_all_imports_start(&self, tokens: &Vec<Token>) -> Vec<usize> {
let mut all_imports_starting_token_index: Vec<usize> = Vec::new();
for (i, t) in tokens.iter().enumerate() {
if t.kind == TokenKind::Import {
all_imports_starting_token_index.push(i)
}
}
all_imports_starting_token_index
}
    /// Reads the module-path string literal out of one import statement.
    ///
    /// `import_stmt_start_index` points at the `Import` token; the path
    /// string is assumed to sit 3 tokens later (`Import name = "path"`).
    /// NOTE(review): only the first string literal is read — a path built
    /// from `"a" + "b"` concatenation is not resolved here (that case is
    /// handled separately in `named_module_import`) — and the returned value
    /// is already reduced to the file-name component.
    fn get_module_path_from_import_stmt(&self, tokens: &Vec<Token>,
                                        import_stmt_start_index: usize) -> Result<String, PakhiErr>
    {
        let import_path_offset = 3;
        match tokens[import_stmt_start_index + import_path_offset].kind.clone() {
            TokenKind::String(import_path) => {
                return Ok(self.extract_filename(&import_path));
            },
            _ => {
                let (line, file_name) = self.extract_err_meta()?;
                return Err(PakhiErr::SyntaxError(line, file_name,
                    "import path is not valid".to_string()));
            },
        }
    }
fn comment_block(&mut self) -> Result<Stmt, PakhiErr> {
// skipping comment block
self.current += 1;
// returning next statement
return self.statements();
}
fn print_no_newline_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// consuming token
self.current += 1;
let expr = self.expression()?;
//consuming last ';' of print statement
self.current += 1;
return Ok(Stmt::PrintNoEOL(expr, line, file_name));
}
fn re_assignment_or_func_call_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// probably array indexing after function call won't work
if self.tokens[self.current + 1].kind == TokenKind::ParenStart {
// assuming its a function call statement
let expr = self.expression()?;
return Ok(Stmt::Expression(expr, line, file_name));
}
// if next token is not paren assuming it's a reassignment statement, or expression
// statement which ony has identifier
let re_assignment_stmt = self.re_assignment_stmt()?;
return Ok(re_assignment_stmt);
}
fn assignment_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// consuming var token
self.current += 1;
if self.tokens[self.current].kind != TokenKind::Identifier {
let (line, file_name) = self.extract_err_meta()?;
return Err(PakhiErr::SyntaxError(line, file_name, "Expected an Identifier".to_string()));
}
let var_name = self.tokens[self.current].clone();
// consuming identifier token
self.current += 1;
let stmt;
if self.tokens[self.current].kind == TokenKind::Semicolon {
// no value provided to initialize variable
stmt = Stmt::Assignment(Assignment {
kind: AssignmentKind::FirstAssignment,
var_name,
indexes: Vec::new(),
init_value: None,
}, line, file_name);
} else {
// consuming '=' token
self.current += 1;
let expr = self.expression()?;
// init value provided for assigning to variable
stmt = Stmt::Assignment(Assignment {
kind: AssignmentKind::FirstAssignment,
var_name,
indexes: Vec::new(),
init_value: Some(expr),
}, line, file_name);
}
if self.tokens[self.current].kind != TokenKind::Semicolon {
// newline was consumed, os actual error was at previous line
if self.current >= self.tokens.len() {
return Err(PakhiErr::UnexpectedError("Unexpected error".to_string()));
} else {
let line = self.tokens[self.current - 1].line;
let file_name = self.tokens[self.current - 1].src_file_path.clone();
return Err(PakhiErr::SyntaxError(line, file_name, "Expected ';'".to_string()))
}
}
// consuming ; token
self.current += 1;
return Ok(stmt);
}
    /// Parses `name [index]... = expr ;` (reassignment, possibly into array
    /// elements), or falls through to a bare expression statement when the
    /// identifier is not followed by '=' or '['.
    fn re_assignment_stmt(&mut self) -> Result<Stmt, PakhiErr> {
        let (line, file_name) = self.get_token_line_file_name(self.current)?;
        if self.tokens[self.current+1].kind != TokenKind::Equal &&
            self.tokens[self.current+1].kind != TokenKind::SquareBraceStart {
            // not a reassignment, only expression statement;
            return self.expression_stmt();
        }
        let var_name = self.tokens[self.current].clone();
        // consuming Identifier token
        self.current += 1;
        // indexes will be populated only if assigning to array element, otherwise it will be empty
        // NOTE(review): each `[i]` apparently parses as a List primary, which
        // is why only Primary::List expressions are accepted as indexes —
        // confirm against how primary() handles '['.
        let mut indexes: Vec<Expr> = Vec::new();
        while self.tokens[self.current].kind != TokenKind::Equal {
            let index = self.expression()?;
            if let Expr::Primary(Primary::List(_), _, _) = index {
                indexes.push(index);
            } else {
                let (line, file_name) = self.extract_err_meta()?;
                return Err(PakhiErr::SyntaxError(line, file_name, "Array index expected".to_string()));
            }
        }
        if self.tokens[self.current].kind != TokenKind::Equal {
            let (line, file_name) = self.extract_err_meta()?;
            return Err(PakhiErr::SyntaxError(line, file_name, "Expected '='".to_string()));
        }
        // consuming '=' token
        self.current += 1;
        let expr = self.expression()?;
        // consuming ; token
        self.current += 1;
        let stmt = Stmt::Assignment(Assignment {
            kind: AssignmentKind::Reassignment,
            var_name,
            indexes,
            init_value: Some(expr),
        }, line, file_name);
        return Ok(stmt);
    }
// expression need to be wrapped in expression stmt because interpreter only accepts vec of stmts
fn expression_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
let expr = self.expression()?;
return Ok(Stmt::Expression(expr, line, file_name));
}
fn print_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// consuming print token
self.current += 1;
let expr = self.expression()?;
//consuming last ';' of print statement
self.current += 1;
return Ok(Stmt::Print(expr, line, file_name));
}
fn func_def_stmt(&mut self) -> Result<Stmt, PakhiErr> {
// consuming function token
self.current += 1;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
Ok(Stmt::FuncDef(line, file_name))
}
fn return_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// consuming return token
self.current += 1;
let mut return_value = Expr::Primary(Primary::Nil, line, file_name.clone());
if self.tokens[self.current].kind != TokenKind::Semicolon {
// if not semicolon function return a value
return_value = self.expression()?;
}
//consuming ; token
self.current += 1;
return Ok(Stmt::Return(return_value, line, file_name));
}
fn block_start(&mut self) -> Result<Stmt, PakhiErr> {
// consuming { token
self.current += 1;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
Ok(Stmt::BlockStart(line, file_name))
}
fn block_end(&mut self) -> Result<Stmt, PakhiErr> {
// consuming } token
self.current += 1;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
Ok(Stmt::BlockEnd(line, file_name))
}
fn loop_stmt(&mut self) -> Result<Stmt, PakhiErr> {
// consuming loop token
self.current += 1;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
Ok(Stmt::Loop(line, file_name))
}
fn continue_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// consuming loop token
self.current += 2;
Ok(Stmt::Continue(line, file_name))
}
fn break_stmt(&mut self) -> Result<Stmt, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current)?;
// consuming break and ; token
self.current += 2;
Ok(Stmt::Break(line, file_name))
}
fn if_statement(&mut self) -> Result<Stmt, PakhiErr> {
//consuming if token
self.current += 1;
let condition = self.expression()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
return Ok(Stmt::If(condition, line, file_name));
}
fn else_statement(&mut self) -> Result<Stmt, PakhiErr> {
//consuming else token
self.current += 1;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
Ok(Stmt::Else(line, file_name))
}
    /// Entry point of the expression grammar; precedence descends from `or`
    /// through `and`, equality, comparison, addition, multiplication, unary
    /// and call down to `primary`.
    fn expression(&mut self) -> Result<Expr, PakhiErr> {
        self.or()
    }
fn or(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.and()?;
while self.tokens[self.current].kind == TokenKind::Or {
self.current += 1;
let right = self.and()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
expr = Expr::Or(Or {
left: Box::new(expr),
right: Box::new(right),
}, line, file_name)
}
return Ok(expr);
}
fn and(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.equality()?;
while self.tokens[self.current].kind == TokenKind::And {
self.current += 1;
let right = self.equality()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
expr = Expr::And(And {
left: Box::new(expr),
right: Box::new(right),
}, line, file_name)
}
return Ok(expr);
}
fn equality(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.comparison()?;
while self.tokens[self.current].kind == TokenKind::NotEqual ||
self.tokens[self.current].kind == TokenKind:: EqualEqual
{
let operator = self.tokens[self.current].kind.clone();
self.current += 1;
let right = self.comparison()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
expr = Expr::Equality(Binary {
left: Box::new(expr),
right: Box::new(right),
operator,
}, line, file_name)
}
return Ok(expr);
}
fn comparison(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.addition()?;
while self.tokens[self.current].kind == TokenKind::GreaterThan ||
self.tokens[self.current].kind == TokenKind::GreaterThanOrEqual ||
self.tokens[self.current].kind == TokenKind::LessThan ||
self.tokens[self.current].kind == TokenKind::LessThanOrEqual
{
let operator = self.tokens[self.current].kind.clone();
self.current += 1;
let right = self.addition()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
expr = Expr::Comparison(Binary {
left: Box::new(expr),
right: Box::new(right),
operator,
}, line, file_name)
}
return Ok(expr);
}
fn addition(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.multiplication()?;
while self.tokens[self.current].kind == TokenKind::Plus ||
self.tokens[self.current].kind == TokenKind::Minus
{
let operator = self.tokens[self.current].kind.clone();
self.current += 1;
let right = self.multiplication()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
expr = Expr::AddOrSub(Binary {
left: Box::new(expr),
right: Box::new(right),
operator,
}, line, file_name)
}
return Ok(expr);
}
fn multiplication(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.unary()?;
while self.tokens[self.current].kind == TokenKind::Multiply ||
self.tokens[self.current].kind == TokenKind::Division ||
self.tokens[self.current].kind == TokenKind::Remainder
{
let operator = self.tokens[self.current].kind.clone();
self.current += 1;
let right = self.unary()?;
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
expr = Expr::MulOrDivOrRemainder(Binary {
left: Box::new(expr),
right: Box::new(right),
operator,
}, line, file_name)
}
return Ok(expr);
}
fn unary(&mut self) -> Result<Expr, PakhiErr> {
if self.tokens[self.current].kind == TokenKind::Not ||
self.tokens[self.current].kind == TokenKind::Minus
{
let operator = self.tokens[self.current].kind.clone();
let (line, file_name) = self.get_token_line_file_name(self.current)?;
self.current += 1;
let right = self.unary()?;
let expr = Expr::Unary(Unary {
operator,
right: Box::new(right),
}, line, file_name);
return Ok(expr);
}
return self.call();
}
fn finish_call(&mut self, calle: Expr) -> Result<Expr, PakhiErr> {
let (line, file_name) = self.get_token_line_file_name(self.current - 1)?;
let mut arguments: Vec<Expr> = Vec::new();
if self.tokens[self.current].kind != TokenKind::ParenEnd {
loop {
let expr = self.expression()?;
arguments.push(expr);
if self.tokens[self.current].kind == TokenKind::Comma {
// consuming , token
self.current += 1;
} else {
// no comma means all arguments consumed, so breaking out of
// arguments consuming loop
break;
}
}
}
//consuming parenEnd
self.current += 1;
let expr = Expr::Call(FunctionCall {
expr: Box::new(calle),
arguments,
}, line, file_name);
return Ok(expr);
}
fn call(&mut self) -> Result<Expr, PakhiErr> {
let mut expr = self.primary()?;
| rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | true |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/src/frontend/mod.rs | src/frontend/mod.rs | pub mod parser;
pub mod lexer; | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/tests/lexer.rs | tests/lexer.rs | use pakhi::frontend::lexer::{tokenize, TokenKind};
#[test]
fn lexer_var_declare() {
    // `নাম ল = ০;` (var declaration) lexes to: Var, Identifier, '=', Num(0), ';'.
    let tokens = tokenize("নাম ল = ০;".chars().collect::<Vec<char>>(),
                          "test.pakhi".to_string()).unwrap();
    assert_eq!(TokenKind::Var, tokens[0].kind);
    assert_eq!(TokenKind::Identifier, tokens[1].kind);
    assert_eq!(TokenKind::Equal, tokens[2].kind);
    assert_eq!(TokenKind::Num(0.0), tokens[3].kind);
    assert_eq!(TokenKind::Semicolon, tokens[4].kind);
}
#[test]
fn lexer_nameless_record_literal() {
    // `@{"key" -> ১,}` (anonymous record) lexes to: '@', '{', String, '->',
    // Num(1), ',', '}'.
    let tokens = tokenize(
        r#"@ {"key" -> ১,}"#.chars().collect::<Vec<char>>(),
        "test.pakhi".to_string()).unwrap();
    assert_eq!(TokenKind::At, tokens[0].kind);
    assert_eq!(TokenKind::CurlyBraceStart, tokens[1].kind);
    assert_eq!(TokenKind::String(String::from("key")), tokens[2].kind);
    assert_eq!(TokenKind::Map, tokens[3].kind);
    assert_eq!(TokenKind::Num(1.0), tokens[4].kind);
    assert_eq!(TokenKind::Comma, tokens[5].kind);
    assert_eq!(TokenKind::CurlyBraceEnd, tokens[6].kind);
}
#[test]
fn lexer_comment_block() {
    // `#...#` comment blocks each lex to a single Comment token surrounding
    // the var-declaration tokens.
    let tokens = tokenize("# this is a comment # \
                        নাম ল = ০;\
                        #this is a second comment#".chars().collect::<Vec<char>>(),
                          "test.pakhi".to_string()).unwrap();
    assert_eq!(TokenKind::Comment, tokens[0].kind);
    assert_eq!(TokenKind::Var, tokens[1].kind);
    assert_eq!(TokenKind::Identifier, tokens[2].kind);
    assert_eq!(TokenKind::Equal, tokens[3].kind);
    assert_eq!(TokenKind::Num(0.0), tokens[4].kind);
    assert_eq!(TokenKind::Semicolon, tokens[5].kind);
    assert_eq!(TokenKind::Comment, tokens[6].kind);
}
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/tests/parser.rs | tests/parser.rs | use pakhi::frontend::{lexer, parser};
use pakhi::frontend::parser::{Stmt, Primary, Expr, Binary, Unary, Assignment, AssignmentKind, And, Or, parse};
use pakhi::frontend::lexer::{TokenKind, Token};
use pakhi::frontend::parser::AssignmentKind::FirstAssignment;
use pakhi::frontend::lexer::TokenKind::{Identifier, Plus};
use pakhi::frontend::parser::Primary::{NamelessRecord, Num};
use pakhi::frontend::parser::Expr::AddOrSub;
#[test]
fn parse_test_primary_num() {
    // `দেখাও ৫৩.৬;` parses to Print(Num(53.6)).
    let tokens = lexer::tokenize("দেখাও ৫৩.৬;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Primary(Primary::Num(53.6), 1, "test.pakhi".to_string())
                , 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_binary_addition() {
    // `-৫৩.৬ + ৬` parses to AddOrSub(Num(-53.6), Num(6)); the leading minus
    // folds into the numeric literal.
    let tokens = lexer::tokenize("দেখাও -৫৩.৬ + ৬;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::AddOrSub(Binary {
                operator: TokenKind::Plus,
                left: Box::new(Expr::Primary(Primary::Num(-53.6), 1, "test.pakhi".to_string())),
                right: Box::new(Expr::Primary(Primary::Num(6.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_primary_string() {
    // A string literal parses to Print(String(...)).
    let tokens = lexer::tokenize("দেখাও \"this is a test\";".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Primary(Primary::String(String::from("this is a test")),
                1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_expr() {
    // `১ + ৩ * ২` — multiplication binds tighter than addition, so the AST
    // is Add(1, Mul(3, 2)).
    let tokens = lexer::tokenize("দেখাও ১ + ৩ * ২;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::AddOrSub(Binary {
                operator: TokenKind::Plus,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::MulOrDivOrRemainder(Binary {
                    operator: TokenKind::Multiply,
                    left: Box::from(Expr::Primary(Primary::Num(3.0), 1, "test.pakhi".to_string())),
                    right: Box::from(Expr::Primary(Primary::Num(2.0), 1, "test.pakhi".to_string())),
                }, 1, "test.pakhi".to_string()))
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_equality() {
    // `১ == ১` parses to Equality(EqualEqual, 1, 1).
    let tokens = lexer::tokenize("দেখাও ১ == ১;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Equality(Binary {
                operator: TokenKind::EqualEqual,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_not_equal() {
    // `১ != ১` parses to Equality(NotEqual, 1, 1).
    let tokens = lexer::tokenize("দেখাও ১ != ১;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Equality(Binary {
                operator: TokenKind::NotEqual,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_comparison_less() {
    // `১ < ১` parses to Comparison(LessThan, 1, 1).
    let tokens = lexer::tokenize("দেখাও ১ < ১;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Comparison(Binary {
                operator: TokenKind::LessThan,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        }
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
// NOTE(review): "comaprison" is a typo in the test name; kept to avoid
// breaking test-name filters.
fn parse_test_comaprison_greater() {
    // `১ > ১` parses to Comparison(GreaterThan, 1, 1).
    let tokens = lexer::tokenize("দেখাও ১ > ১;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Comparison(Binary {
                operator: TokenKind::GreaterThan,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        }
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_comparison_less_or_equal() {
    // `১ <= ১` parses to Comparison(LessThanOrEqual, 1, 1).
    let tokens = lexer::tokenize("দেখাও ১ <= ১;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Comparison(Binary {
                operator: TokenKind::LessThanOrEqual,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
// NOTE(review): "comaprison"/"equla" are typos in the test name; kept to
// avoid breaking test-name filters.
fn parse_test_comaprison_greater_or_equla() {
    // `১ >= ১` parses to Comparison(GreaterThanOrEqual, 1, 1).
    let tokens = lexer::tokenize("দেখাও ১ >= ১;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Comparison(Binary {
                operator: TokenKind::GreaterThanOrEqual,
                left: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Num(1.0), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_logical_and() {
    // `সত্য & মিথ্যা` (true & false) parses to And(Bool(true), Bool(false)).
    let tokens = lexer::tokenize("দেখাও সত্য & মিথ্যা;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::And(And {
                left: Box::from(Expr::Primary(Primary::Bool(true), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Bool(false), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_logical_or() {
    // `সত্য | মিথ্যা` (true | false) parses to Or(Bool(true), Bool(false)).
    let tokens = lexer::tokenize("দেখাও সত্য | মিথ্যা;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Or(Or {
                left: Box::from(Expr::Primary(Primary::Bool(true), 1, "test.pakhi".to_string())),
                right: Box::from(Expr::Primary(Primary::Bool(false), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_print_logical_not() {
    // `!সত্য` (!true) parses to Unary(Not, Bool(true)).
    let tokens = lexer::tokenize("দেখাও !সত্য;".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Print(Expr::Unary(Unary {
                operator: TokenKind::Not,
                right: Box::from(Expr::Primary(Primary::Bool(true), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string()), 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
// `নাম` introduces a variable: expect AssignmentKind::FirstAssignment with a string initializer.
#[test]
fn parse_test_assignment_string() {
    let tokens = lexer::tokenize("নাম ল = \"red\";".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Assignment(Assignment {
                kind: AssignmentKind::FirstAssignment,
                var_name: Token {
                    kind: TokenKind::Identifier,
                    lexeme: vec!['ল'],
                    line: 1,
                    src_file_path: "test.pakhi".to_string(),
                },
                indexes: Vec::new(),
                init_value: Some(Expr::Primary(Primary::String("red".to_string()), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
// Without `নাম` the same statement must parse as AssignmentKind::Reassignment.
#[test]
fn parse_test_re_assignment_string() {
    let tokens = lexer::tokenize("ল = \"red\";".chars().collect(),
                                 "test.pakhi".to_string()).unwrap();
    let parse_result = parse(String::from("test.pakhi"), tokens);
    match parse_result {
        Ok(ast) => {
            let expected_ast = Stmt::Assignment(Assignment {
                kind: AssignmentKind::Reassignment,
                var_name: Token {
                    kind: TokenKind::Identifier,
                    lexeme: vec!['ল'],
                    line: 1,
                    src_file_path: "test.pakhi".to_string(),
                },
                indexes: Vec::new(),
                init_value: Some(Expr::Primary(Primary::String("red".to_string()), 1, "test.pakhi".to_string())),
            }, 1, "test.pakhi".to_string());
            assert_eq!(expected_ast, ast[0]);
        },
        Err(e) => panic!("err: {:?}", e),
    }
}
#[test]
fn parse_test_namesless_record_literal() {
let tokens = lexer::tokenize(r#"নাম ক = @{
"key" -> ১,
"key_2" -> "string",
"key" -> ১ + ১,
};"#.chars().collect(),
"test.pakhi".to_string()).unwrap();
let parse_result = parse(String::from("test.pakhi"), tokens);
match parse_result {
Ok(ast) => {
let expected_ast = Stmt::Assignment(Assignment {
kind: FirstAssignment,
var_name: Token {
kind: Identifier,
lexeme: vec!['ক'],
line: 1,
src_file_path: "test.pakhi".to_string(),
},
indexes: vec![],
init_value: Some(
parser::Expr::Primary(NamelessRecord(
(
vec![
parser::Expr::Primary(Primary::String("key".to_string()), 2, "test.pakhi".to_string()),
parser::Expr::Primary(Primary::String("key_2".to_string()), 3, "test.pakhi".to_string()),
parser::Expr::Primary(Primary::String("key".to_string()), 4, "test.pakhi".to_string()),
],
vec![
parser::Expr::Primary(Num(1.0), 2, "test.pakhi".to_string()),
parser::Expr::Primary(Primary::String("string".to_string()), 3, "test.pakhi".to_string()),
AddOrSub(Binary {
operator: Plus,
left: Box::from(parser::Expr::Primary(Primary::Num(1.0), 4, "test.pakhi".to_string())),
right: Box::from(parser::Expr::Primary(Primary::Num(1.0), 4, "test.pakhi".to_string())),
}, 4, "test.pakhi".to_string()),
],
),
), 1, "test.pakhi".to_string()),
),
}, 1, "test.pakhi".to_string());
assert_eq!(expected_ast, ast[0]);
},
Err(e) => panic!("err: {:?}", e),
}
} | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/tests/interpreter.rs | tests/interpreter.rs | use pakhi::frontend::{lexer, parser};
use pakhi::frontend::parser::Stmt;
use pakhi::common::io::{MockIO, IO};
use pakhi::backend::interpreter::Interpreter;
use pakhi::common::pakhi_error::PakhiErr;
// Joins the source lines, tokenizes and parses them as module "test.pakhi";
// panics with the parser error's Debug representation on failure.
fn src_to_ast(src_lines: Vec<&str>) -> Vec<Stmt> {
    let source = src_lines.join("\n");
    let chars: Vec<char> = source.chars().collect();
    let tokens = lexer::tokenize(chars, "test.pakhi".to_string()).unwrap();
    parser::parse("test.pakhi".to_string(), tokens)
        .unwrap_or_else(|e| panic!("{:?}", e))
}
// Interprets `ast` against the mock IO, propagating runtime errors, then
// verifies that every expectation queued on `mock_io` was consumed.
fn run_assert_all_true(ast: Vec<Stmt>, mut mock_io: MockIO) -> Result<(), PakhiErr> {
    let mut interpreter = Interpreter::new(ast, &mut mock_io);
    interpreter.run()?;
    mock_io.assert_all_true();
    Ok(())
}
// দেখাও prints its value followed by a newline.
#[test]
fn println_test() {
    let ast = src_to_ast(vec![
        "দেখাও ০;",
        "দেখাও ০;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("০");
    mock_io.expect_println("০");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _দেখাও prints its value without a trailing newline.
#[test]
fn print_test() {
    let ast = src_to_ast(vec![
        "_দেখাও ০;",
        "_দেখাও ০;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_print("০");
    mock_io.expect_print("০");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Declaring a numeric variable and printing it echoes the number back.
#[test]
fn var_decl_num() {
    let ast = src_to_ast(vec![
        "নাম ক = ১;",
        "দেখাও ক;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("১");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Declaring a string variable and printing it echoes the string back.
#[test]
fn var_decl_string() {
    let ast = src_to_ast(vec![
        r#"নাম ক = "testing";"#,
        "দেখাও ক;"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("testing");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Single-level list indexing is zero-based.
#[test]
fn list_single_dim_indexing() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "দেখাও ক[১];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("২");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Nested lists support chained indexing.
#[test]
fn list_multi_dim_indexing() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, [১, ২, ৩], ৩];",
        "দেখাও ক[১][১];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("২");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Index chains may mix numeric (list) and string (record) subscripts.
#[test]
fn list_multi_dim_mixed_indexing() {
    let ast = src_to_ast(vec![
        r#"নাম ক = [১, ২, ৩, @{"key" -> [১,২], "key_2" -> ৪,}];"#,
        r#"দেখাও ক[৩]["key"][০];"#,
        r#"দেখাও ক[৩]["key_2"];"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("১");
    mock_io.expect_println("৪");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _লিস্ট-পুশ with two args appends to the end of the list in place.
#[test]
fn built_in_fn_list_mutate_push() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "_লিস্ট-পুশ(ক, ৪);",
        "দেখাও ক[৩];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৪");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _লিস্ট-পুশ with three args inserts at the given index.
#[test]
fn built_in_fn_list_push_middle() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "_লিস্ট-পুশ(ক, ১, ৪);",
        "দেখাও ক[১];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৪");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _লিস্ট-পপ with an index removes that element, shifting the rest left.
#[test]
fn built_in_fn_list_pop_middle() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "_লিস্ট-পপ(ক, ১);",
        "দেখাও ক[১];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৩");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _লিস্ট-লেন returns the element count; empty lists report ০.
#[test]
fn built_in_fn_list_len() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "দেখাও _লিস্ট-লেন(ক);",
        "নাম ক = [];",
        "দেখাও _লিস্ট-লেন(ক);",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৩");
    mock_io.expect_println("০");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Indexed assignment replaces an element in place.
#[test]
fn list_mutate() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "ক[১] = ৫;",
        "দেখাও ক[১];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৫");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Lists have reference semantics: mutating through either binding is
// visible through the other.
#[test]
fn list_consistent() {
    let ast = src_to_ast(vec![
        "নাম ক = [১, ২, ৩];",
        "নাম খ = ক;",
        "ক[১] = ২০;",
        "দেখাও খ[১];",
        "খ[১] = ৩০;",
        "দেখাও ক[১];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("২০");
    mock_io.expect_println("৩০");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Duplicate keys in a record literal: the last entry wins, and printing a
// record emits the "@{ key: value, }" form piecewise.
#[test]
fn nameless_record_literal() {
    let ast = src_to_ast(vec![
        "নাম ক = @{",
        "\"key\" -> ১,",
        "\"key\" -> ১ + ১,",
        "};",
        "দেখাও ক;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_print("@{");
    mock_io.expect_print("\"key\":");
    mock_io.expect_print("২");
    mock_io.expect_print(",");
    mock_io.expect_println("}");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Record entries can be reassigned through string-key indexing.
#[test]
fn nameless_record_single_dim_indexing() {
    let ast = src_to_ast(vec![
        "নাম ক = @{",
        "\"key\" -> ১,",
        "\"key\" -> ১ + ১,",
        "};",
        r#"ক["key"] = "string";"#,
        r#"দেখাও ক["key"];"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("string");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Records nest: chained string-key indexing reaches inner records.
#[test]
fn nameless_record_multi_dim_indexing() {
    let ast = src_to_ast(vec![
        "নাম ক = @{",
        "\"key\" -> @{\"key_2\" -> \"string\",},",
        "};",
        r#"দেখাও ক["key"]["key_2"];"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("string");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Records and lists nest freely; subscripts can alternate kinds.
#[test]
fn nameless_record_multi_dim_mixed_indexing() {
    let ast = src_to_ast(vec![
        r#"নাম ক = @{"key" -> [১, ২, ৩, @{"key" -> ১,}],};"#,
        r#"দেখাও ক["key"][২];"#,
        r#"দেখাও ক["key"][৩]["key"];"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৩");
    mock_io.expect_println("১");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Unary minus negates numbers (including already-negative ones); `!` flips booleans.
#[test]
fn expression_unary() {
    let ast = src_to_ast(vec![
        "নাম ক = ১;",
        "নাম খ = -১;",
        "দেখাও -ক;",
        "দেখাও -খ;",
        "দেখাও !সত্য;",
        "দেখাও !মিথ্যা;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("-১");
    mock_io.expect_println("১");
    mock_io.expect_println("মিথ্যা");
    mock_io.expect_println("সত্য");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Full truth table for logical `&`.
#[test]
fn expression_and() {
    let ast = src_to_ast(vec![
        "দেখাও মিথ্যা & মিথ্যা;",
        "দেখাও মিথ্যা & সত্য;",
        "দেখাও সত্য & মিথ্যা ;",
        "দেখাও সত্য & সত্য;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("মিথ্যা");
    mock_io.expect_println("মিথ্যা");
    mock_io.expect_println("মিথ্যা");
    mock_io.expect_println("সত্য");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Full truth table for logical `|`.
#[test]
fn expression_or() {
    let ast = src_to_ast(vec![
        "দেখাও মিথ্যা | মিথ্যা;",
        "দেখাও মিথ্যা | সত্য;",
        "দেখাও সত্য | মিথ্যা ;",
        "দেখাও সত্য | সত্য;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("মিথ্যা");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// `==` and `!=` on booleans. (Note: fn name has a typo, "equlaity";
// renaming would change `cargo test` filter names, so it is kept.)
#[test]
fn expression_equlaity() {
    let ast = src_to_ast(vec![
        "দেখাও মিথ্যা == মিথ্যা;",
        "দেখাও মিথ্যা != সত্য;",
        "দেখাও সত্য == মিথ্যা ;",
        "দেখাও সত্য != সত্য;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("মিথ্যা");
    mock_io.expect_println("মিথ্যা");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// যদি executes its body when the condition is true.
#[test]
fn if_test_condition_true() {
    let ast = src_to_ast(vec![
        "যদি সত্য {",
        "    দেখাও ০;",
        "}",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("০");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// অথবা (else) runs when the যদি condition is false.
#[test]
fn if_test_condition_false() {
    let ast = src_to_ast(vec![
        "যদি মিথ্যা {",
        "    দেখাও ১;",
        "} অথবা {",
        "    দেখাও ০;",
        "}",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("০");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// লুপ ... আবার repeats its body until থামাও (break) executes.
#[test]
fn loop_test() {
    let ast = src_to_ast(vec![
        "নাম ক = ০;",
        "লুপ {",
        "    দেখাও ১;",
        "    ক = ক + ১;",
        "    যদি ক >= ৩ {",
        "        থামাও;",
        "    }",
        "} আবার;"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("১");
    mock_io.expect_println("১");
    mock_io.expect_println("১");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// A loop body that breaks immediately runs exactly once.
#[test]
fn loop_no_new_env() {
    let ast = src_to_ast(vec![
        "লুপ {",
        "    দেখাও ১;",
        "    থামাও;",
        "} আবার;"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("১");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// ফাং declares a function; ফেরত returns a value to the caller.
#[test]
fn function_decl_call() {
    let ast = src_to_ast(vec![
        "ফাং দ্বিগুন(সংখ্যা) {",
        "    ফেরত সংখ্যা * ২;",
        "} ফেরত;",
        "দেখাও দ্বিগুন(২);"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("৪");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// Self-recursive calls work: each activation gets its own parameter binding.
#[test]
fn recursive_function_call() {
    let ast = src_to_ast(vec![
        "ফাং রি(ক) {",
        "    যদি ক > ৪ {",
        "        ফেরত ক;",
        "    }",
        "    দেখাও ক;",
        "    রি(ক + ১);",
        "} ফেরত;",
        "রি(০);",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("০");
    mock_io.expect_println("১");
    mock_io.expect_println("২");
    mock_io.expect_println("৩");
    mock_io.expect_println("৪");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _এরর aborts execution; the following statement must never print.
#[test]
#[should_panic]
fn built_in_fn_error() {
    let ast = src_to_ast(vec![
        r#"_এরর("এরর হয়েছে");"#,
        r#"দেখাও "দেখাবেনা";"#,
    ]);
    let mock_io: MockIO = MockIO::new();
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _স্ট্রিং-স্প্লিট splits on the separator and yields an indexable list.
#[test]
fn built_in_fn_string_split() {
    let ast = src_to_ast(vec![
        r#"নাম স্প্লিট = _স্ট্রিং-স্প্লিট("স্ট্রিং স্প্লিট", " ");"#,
        r#"দেখাও স্প্লিট[০];"#,
        r#"দেখাও স্প্লিট[১];"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("স্ট্রিং");
    mock_io.expect_println("স্প্লিট");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _স্ট্রিং-জয়েন concatenates list elements with the given separator.
#[test]
fn built_in_fn_string_join() {
    let ast = src_to_ast(vec![
        r#"নাম স্প্লিট = ["স্ট্রিং", "স্প্লিট"];"#,
        r#"দেখাও _স্ট্রিং-জয়েন(স্প্লিট, "-");"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("স্ট্রিং-স্প্লিট");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _টাইপ reports the runtime type tag for every value kind, including an
// uninitialized variable (_শূন্য) and a function value (_ফাং).
#[test]
fn built_in_fn_type() {
    let ast = src_to_ast(vec![
        r#"দেখাও _টাইপ(১);"#,
        r#"দেখাও _টাইপ(মিথ্যা);"#,
        r#"দেখাও _টাইপ("১");"#,
        r#"দেখাও _টাইপ([১]);"#,
        r#"দেখাও _টাইপ(@{"১" -> ১,});"#,
        r#"নাম ক;"#,
        r#"দেখাও _টাইপ(ক);"#,
        r#"ফাং খ() {"#,
        r#"} ফেরত;"#,
        r#"দেখাও _টাইপ(খ);"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("_সংখ্যা");
    mock_io.expect_println("_বুলিয়ান");
    mock_io.expect_println("_স্ট্রিং");
    mock_io.expect_println("_লিস্ট");
    mock_io.expect_println("_রেকর্ড");
    mock_io.expect_println("_শূন্য");
    mock_io.expect_println("_ফাং");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _স্ট্রিং formats numbers using Bengali digits; integral floats drop ".০".
#[test]
fn built_in_fn_to_string() {
    let ast = src_to_ast(vec![
        r#"দেখাও _স্ট্রিং(১) == "১";"#,
        r#"দেখাও _স্ট্রিং(১.০) == "১";"#,
        r#"দেখাও _স্ট্রিং(-১.০) == "-১";"#,
        r#"দেখাও _স্ট্রিং(১৩.৩২) == "১৩.৩২";"#,
        r#"দেখাও _স্ট্রিং(-৪৩.৪৩) == "-৪৩.৪৩";"#,
        r#"দেখাও _স্ট্রিং(-০.৪৩) == "-০.৪৩";"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
// _সংখ্যা parses Bengali-digit strings (signed, fractional) back to numbers.
#[test]
fn built_in_fn_to_num() {
    let ast = src_to_ast(vec![
        r#"দেখাও _সংখ্যা("১") == ১;"#,
        r#"দেখাও _সংখ্যা("১.০") == ১;"#,
        r#"দেখাও _সংখ্যা("-১.০") == -১;"#,
        r#"দেখাও _সংখ্যা("১৩.৩২") == ১৩.৩২;"#,
        r#"দেখাও _সংখ্যা("-৪৩.৪৩") == -৪৩.৪৩;"#,
        r#"দেখাও _সংখ্যা("-০.৪৩") == -০.৪৩;"#,
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    mock_io.expect_println("সত্য");
    if let Err(err) = run_assert_all_true(ast, mock_io) {
        panic!("{:?}", err);
    }
}
#[test]
fn built_in_const_platform() {
let ast = src_to_ast(vec![
r#"দেখাও _প্ল্যাটফর্ম;"#,
]);
let mut mock_io: MockIO = MockIO::new();
mock_io.expect_println(std::env::consts::OS);
if let Err(err) = run_assert_all_true(ast, mock_io) {
panic!("{:?}", err);
}
} | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
Shafin098/pakhi-bhasha | https://github.com/Shafin098/pakhi-bhasha/blob/9805017f595169a9b49c9f36d9b30bbbee3e7b28/tests/main_integration.rs | tests/main_integration.rs | use pakhi::common::io::{MockIO, IO};
use std::io::Write;
use std::sync::{Arc, PoisonError};
use std::sync::Mutex;
use lazy_static::lazy_static;
lazy_static! {
    // Serialises the tests below: they all share the on-disk __tmp directory.
    static ref MUTEX: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
}
// create_file creates `file_name` inside the ./__tmp folder (creating the
// folder first if needed) and writes `lines` joined by newlines into it.
fn create_file(file_name: &str, lines: Vec<&str>) {
    let current_dir = std::env::current_dir().unwrap();
    let tmp_dir = current_dir.join("__tmp");
    std::fs::create_dir_all(&tmp_dir).unwrap();
    let mut file = std::fs::File::create(tmp_dir.join(file_name)).unwrap();
    let l: String = lines.join("\n");
    file.write_all(l.as_bytes()).unwrap()
}
// Runs `module_name` from ./__tmp through the interpreter (panicking on any
// runtime error), deletes the __tmp directory, then checks that every
// expectation queued on `io` was satisfied.
fn run_module(module_name: &str, mut io: MockIO) {
    let root_path = std::env::current_dir().unwrap().join("__tmp");
    let module_path = root_path.join(module_name);
    let pakhi_result = pakhi::start_pakhi(module_path.to_str().unwrap().parse().unwrap(), &mut io);
    match pakhi_result {
        Ok(_) => {},
        Err(e) => panic!("{:?}", e),
    }
    // Clean up before asserting so a failed expectation doesn't leak __tmp.
    clean_test_tmp_dir();
    io.assert_all_true();
}
// Removes the shared ./__tmp scratch directory and everything in it.
fn clean_test_tmp_dir() {
    let current_dir = std::env::current_dir().unwrap();
    let tmp_dir = current_dir.join("__tmp");
    std::fs::remove_dir_all(tmp_dir).unwrap()
}
// মডিউল imports run the imported module (printing ২ once) and expose its
// names to the importer via the ম/ক path syntax.
#[test]
fn module_import() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("test.pakhi", vec![
        r#"মডিউল ম = "module.pakhi";"#,
        "দেখাও ম/ক;",
    ]);
    create_file("module.pakhi", vec![
        "নাম ক = ২;",
        "দেখাও ক;",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("২");
    mock_io.expect_println("২");
    run_module("test.pakhi", mock_io);
}
// Two modules importing each other must be rejected. The run happens on a
// separate thread so the poisoned/panicking interpreter can't skip cleanup;
// the join error is translated into the expected panic message.
#[test]
#[should_panic(expected="Cyclic module dependency. Can't import root.pakhi from module.pakhi")]
fn module_import_cyclic() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("root.pakhi", vec![
        r#"মডিউল ম = "module.pakhi";"#,
    ]);
    create_file("module.pakhi", vec![
        r#"মডিউল ম = "root.pakhi";"#,
    ]);
    let thread = std::thread::spawn(|| {
        let mock_io: MockIO = MockIO::new();
        run_module("root.pakhi", mock_io);
    });
    if thread.join().is_err() {
        clean_test_tmp_dir();
        panic!("Cyclic module dependency. Can't import root.pakhi from module.pakhi");
    }
}
// _রিড-ফাইল returns the file contents; _ডাইরেক্টরি is the module's directory.
#[test]
fn built_in_fn_read_file() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("test.txt", vec![
        "test passed",
    ]);
    create_file("test.pakhi", vec![
        "দেখাও _রিড-ফাইল(_ডাইরেক্টরি + \"./test.txt\");",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("test passed");
    run_module("test.pakhi", mock_io);
}
// _রাইট-ফাইল writes a file that _রিড-ফাইল can immediately read back.
#[test]
fn built_in_fn_write_file() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("test.pakhi", vec![
        "_রাইট-ফাইল(_ডাইরেক্টরি + \"./test.txt\", \"test passed\");",
        "দেখাও _রিড-ফাইল(_ডাইরেক্টরি + \"./test.txt\");",
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("test passed");
    run_module("test.pakhi", mock_io);
}
// _ডিলিট-ফাইল removes the file from disk.
#[test]
fn built_in_fn_delete_file() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("test.txt", vec![
        "test passed",
    ]);
    create_file("test.pakhi", vec![
        "দেখাও _ডিলিট-ফাইল(_ডাইরেক্টরি + \"./test.txt\");",
    ]);
    let mock_io: MockIO = MockIO::new();
    run_module("test.pakhi", mock_io);
    // Bug fix: the old assertion checked "./tmp/test.txt", a path that never
    // exists (create_file writes under "__tmp"), so it passed vacuously.
    // Verify the real on-disk location of the deleted file instead.
    let deleted = std::env::current_dir().unwrap().join("__tmp").join("test.txt");
    assert!(!deleted.exists());
}
// _রিড-ডাইরেক্টরি lists directory entries as a pakhi list.
#[test]
fn built_in_fn_read_dir() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    // create_file creates files inside the ./__tmp folder
    create_file("test.txt", vec!["test passed"]);
    create_file("test.pakhi", vec![
        "নাম ডার = _রিড-ডাইরেক্টরি(_ডাইরেক্টরি + \"./\");",
        "দেখাও ডার[০];"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("test.txt");
    run_module("test.pakhi", mock_io);
}
// _নতুন-ডাইরেক্টরি creates a directory that can then receive files.
#[test]
fn built_in_fn_create_dir() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("test.pakhi", vec![
        "_নতুন-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
        "_রাইট-ফাইল(_ডাইরেক্টরি + \"./test/test.txt\", \"test passed\");",
        "নাম ড = _রিড-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
        "দেখাও ড;"
    ]);
    let mut mock_io: MockIO = MockIO::new();
    mock_io.expect_println("[test.txt]");
    run_module("test.pakhi", mock_io);
}
// After _ডিলিট-ডাইরেক্টরি removes the directory, _রিড-ডাইরেক্টরি on it must
// fail, aborting the interpreter — hence #[should_panic]. Run on a separate
// thread so __tmp is still cleaned up before re-raising the panic.
#[test]
#[should_panic]
fn built_in_fn_delete_dir() {
    let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
    create_file("test.pakhi", vec![
        "_নতুন-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
        // Bug fix: the delete statement was missing its terminating `;`, so
        // the test panicked on a parse error instead of exercising the
        // intended "read a deleted directory" failure path.
        "_ডিলিট-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
        "_রিড-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
    ]);
    let thread = std::thread::spawn(|| {
        let mock_io: MockIO = MockIO::new();
        run_module("test.pakhi", mock_io);
    });
    if thread.join().is_err() {
        clean_test_tmp_dir();
        panic!()
    }
}
#[test]
fn built_in_fn_file_or_dir() {
let _m = MUTEX.lock().unwrap_or_else(PoisonError::into_inner);
create_file("test.pakhi", vec![
"_নতুন-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
"_রাইট-ফাইল(_ডাইরেক্টরি + \"./test.txt\", \"test passed\");",
"দেখাও _ফাইল-নাকি-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test.txt\");",
"দেখাও _ফাইল-নাকি-ডাইরেক্টরি(_ডাইরেক্টরি + \"./test\");",
]);
let mut mock_io: MockIO = MockIO::new();
mock_io.expect_println("ফাইল");
mock_io.expect_println("ডাইরেক্টরি");
run_module("test.pakhi", mock_io);
} | rust | MIT | 9805017f595169a9b49c9f36d9b30bbbee3e7b28 | 2026-01-04T20:18:15.998668Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/lib.rs | src/lib.rs | use crate::ids::{BlockId, DatabaseId};
use crate::models::error::ErrorResponse;
use crate::models::search::{DatabaseQuery, SearchRequest};
use crate::models::{Database, ListResponse, Object, Page};
use ids::{AsIdentifier, PageId};
use models::block::Block;
use models::PageCreateRequest;
use reqwest::header::{HeaderMap, HeaderValue};
use reqwest::{header, Client, ClientBuilder, RequestBuilder};
use tracing::Instrument;
pub mod ids;
pub mod models;
pub use chrono;
const NOTION_API_VERSION: &str = "2022-02-22";
/// A wrapper Error type for all errors produced by the [`NotionApi`](NotionApi) client.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// The supplied token could not be used as an HTTP header value.
    #[error("Invalid Notion API Token: {}", source)]
    InvalidApiToken { source: header::InvalidHeaderValue },
    /// The underlying reqwest client could not be constructed.
    #[error("Unable to build reqwest HTTP client: {}", source)]
    ErrorBuildingClient { source: reqwest::Error },
    /// Transport-level failure while sending the request.
    #[error("Error sending HTTP request: {}", source)]
    RequestFailed {
        #[from]
        source: reqwest::Error,
    },
    /// Failure while reading the response body.
    #[error("Error reading response: {}", source)]
    ResponseIoError { source: reqwest::Error },
    /// The response body was not valid JSON.
    #[error("Error parsing json response: {}", source)]
    JsonParseError { source: serde_json::Error },
    /// The API returned an object kind the caller did not expect.
    #[error("Unexpected API Response")]
    UnexpectedResponse { response: Object },
    /// The API itself returned an error object.
    #[error("API Error {}({}): {}", .error.code, .error.status, .error.message)]
    ApiError { error: ErrorResponse },
}
/// An API client for Notion.
/// Create a client by using [new(api_token: String)](Self::new()).
#[derive(Clone)]
pub struct NotionApi {
    // HTTP client with the auth and Notion-Version headers pre-installed.
    client: Client,
}
impl NotionApi {
    /// Creates an instance of NotionApi.
    /// May fail if the provided api_token is an improper value.
    pub fn new(api_token: String) -> Result<Self, Error> {
        let mut headers = HeaderMap::new();
        // Every request pins the API version this client was written against.
        headers.insert(
            "Notion-Version",
            HeaderValue::from_static(NOTION_API_VERSION),
        );
        let mut auth_value = HeaderValue::from_str(&format!("Bearer {}", api_token))
            .map_err(|source| Error::InvalidApiToken { source })?;
        // Keep the bearer token out of Debug output and logs.
        auth_value.set_sensitive(true);
        headers.insert(header::AUTHORIZATION, auth_value);
        let client = ClientBuilder::new()
            .default_headers(headers)
            .build()
            .map_err(|source| Error::ErrorBuildingClient { source })?;
        Ok(Self { client })
    }
    /// Sends `request`, reads the full response body as text and parses it
    /// into an [`Object`]. An API-level `Object::Error` is converted into
    /// [`Error::ApiError`]; transport, read and JSON failures map to their
    /// respective [`Error`] variants.
    async fn make_json_request(
        &self,
        request: RequestBuilder,
    ) -> Result<Object, Error> {
        let request = request.build()?;
        let url = request.url();
        tracing::trace!(
            method = request.method().as_str(),
            url = url.as_str(),
            "Sending request"
        );
        let json = self
            .client
            .execute(request)
            .instrument(tracing::trace_span!("Sending request"))
            .await
            .map_err(|source| Error::RequestFailed { source })?
            .text()
            .instrument(tracing::trace_span!("Reading response"))
            .await
            .map_err(|source| Error::ResponseIoError { source })?;
        tracing::debug!("JSON Response: {}", json);
        // In tests, dump the raw JSON so parse failures are easy to diagnose.
        #[cfg(test)]
        {
            dbg!(serde_json::from_str::<serde_json::Value>(&json)
                .map_err(|source| Error::JsonParseError { source })?);
        }
        let result =
            serde_json::from_str(&json).map_err(|source| Error::JsonParseError { source })?;
        match result {
            Object::Error { error } => Err(Error::ApiError { error }),
            response => Ok(response),
        }
    }
    /// List all the databases shared with the supplied integration token.
    /// > This method is apparently deprecated/"not recommended" and
    /// > [search()](Self::search()) should be used instead.
    pub async fn list_databases(&self) -> Result<ListResponse<Database>, Error> {
        let builder = self.client.get("https://api.notion.com/v1/databases");
        match self.make_json_request(builder).await? {
            Object::List { list } => Ok(list.expect_databases()?),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
    /// Search all pages in notion.
    /// `query` can either be a [SearchRequest] or a slightly more convenient
    /// [NotionSearch](models::search::NotionSearch) query.
    pub async fn search<T: Into<SearchRequest>>(
        &self,
        query: T,
    ) -> Result<ListResponse<Object>, Error> {
        let result = self
            .make_json_request(
                self.client
                    .post("https://api.notion.com/v1/search")
                    .json(&query.into()),
            )
            .await?;
        match result {
            Object::List { list } => Ok(list),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
    /// Get a database by [DatabaseId].
    pub async fn get_database<T: AsIdentifier<DatabaseId>>(
        &self,
        database_id: T,
    ) -> Result<Database, Error> {
        let result = self
            .make_json_request(self.client.get(format!(
                "https://api.notion.com/v1/databases/{}",
                database_id.as_id()
            )))
            .await?;
        match result {
            Object::Database { database } => Ok(database),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
    /// Get a page by [PageId].
    pub async fn get_page<T: AsIdentifier<PageId>>(
        &self,
        page_id: T,
    ) -> Result<Page, Error> {
        let result = self
            .make_json_request(self.client.get(format!(
                "https://api.notion.com/v1/pages/{}",
                page_id.as_id()
            )))
            .await?;
        match result {
            Object::Page { page } => Ok(page),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
    /// Creates a new page and return the created page
    pub async fn create_page<T: Into<PageCreateRequest>>(
        &self,
        page: T,
    ) -> Result<Page, Error> {
        let result = self
            .make_json_request(
                self.client
                    .post("https://api.notion.com/v1/pages")
                    .json(&page.into()),
            )
            .await?;
        match result {
            Object::Page { page } => Ok(page),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
    /// Query a database and return the matching pages.
    pub async fn query_database<D, T>(
        &self,
        database: D,
        query: T,
    ) -> Result<ListResponse<Page>, Error>
    where
        T: Into<DatabaseQuery>,
        D: AsIdentifier<DatabaseId>,
    {
        let result = self
            .make_json_request(
                self.client
                    .post(&format!(
                        "https://api.notion.com/v1/databases/{database_id}/query",
                        database_id = database.as_id()
                    ))
                    .json(&query.into()),
            )
            .await?;
        match result {
            Object::List { list } => Ok(list.expect_pages()?),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
    /// Get the children blocks of the block identified by `block_id`.
    /// Note that a page is itself a block, so a [PageId] converted to a
    /// [BlockId] fetches the page's top-level content.
    pub async fn get_block_children<T: AsIdentifier<BlockId>>(
        &self,
        block_id: T,
    ) -> Result<ListResponse<Block>, Error> {
        let result = self
            .make_json_request(self.client.get(&format!(
                "https://api.notion.com/v1/blocks/{block_id}/children",
                block_id = block_id.as_id()
            )))
            .await?;
        match result {
            Object::List { list } => Ok(list.expect_blocks()?),
            response => Err(Error::UnexpectedResponse { response }),
        }
    }
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/tests.rs | src/tests.rs | use crate::ids::BlockId;
use crate::models::search::PropertyCondition::Text;
use crate::models::search::{
DatabaseQuery, FilterCondition, FilterProperty, FilterValue, NotionSearch, TextCondition,
};
use crate::models::Object;
use crate::NotionApi;
// Resolves the integration token for live tests: the NOTION_API_TOKEN
// environment variable wins, then a local `.api_token` file; panics when
// neither is available. Surrounding whitespace/newlines are trimmed.
fn test_token() -> String {
    // Idiom fix: match the Result directly instead of `if let Some(_) = r.ok()`.
    let token = if let Ok(token) = std::env::var("NOTION_API_TOKEN") {
        token
    } else if let Ok(token) = std::fs::read_to_string(".api_token") {
        token
    } else {
        // Message now mentions both lookup locations, not just the env var.
        panic!("No API Token found in environment variable 'NOTION_API_TOKEN' or file '.api_token'!")
    };
    token.trim().to_string()
}
// Builds a NotionApi client from the locally configured integration token.
fn test_client() -> NotionApi {
    let api_token = test_token();
    NotionApi::new(api_token).unwrap()
}
// Smoke test: the (deprecated) list-databases endpoint responds without error.
#[tokio::test]
async fn list_databases() -> Result<(), Box<dyn std::error::Error>> {
    let api = test_client();
    dbg!(api.list_databases().await?);
    Ok(())
}
// Searching with an object==database filter must surface at least one
// database shared with the integration.
#[tokio::test]
async fn search_databases() -> Result<(), Box<dyn std::error::Error>> {
    let api = test_client();
    let response = api
        .search(NotionSearch::Filter {
            property: FilterProperty::Object,
            value: FilterValue::Database,
        })
        .await?;
    // Idiom fix (clippy::len_zero): use is_empty instead of `len() > 0`.
    assert!(!response.results.is_empty());
    Ok(())
}
// Searching with an object==page filter must surface at least one page
// shared with the integration.
#[tokio::test]
async fn search_pages() -> Result<(), Box<dyn std::error::Error>> {
    let api = test_client();
    let response = api
        .search(NotionSearch::Filter {
            property: FilterProperty::Object,
            value: FilterValue::Page,
        })
        .await?;
    // Idiom fix (clippy::len_zero): use is_empty instead of `len() > 0`.
    assert!(!response.results.is_empty());
    Ok(())
}
// Fetching a database by id must round-trip: the object returned by
// get_database equals the one found via search.
#[tokio::test]
async fn get_database() -> Result<(), Box<dyn std::error::Error>> {
    let api = test_client();
    let response = api
        .search(NotionSearch::Filter {
            value: FilterValue::Database,
            property: FilterProperty::Object,
        })
        .await?;
    let db = response
        .results()
        .iter()
        .filter_map(|o| match o {
            Object::Database { database } => Some(database),
            _ => None,
        })
        .next()
        .expect("Test expected to find at least one database in notion")
        .clone();
    // todo: fix this clone issue
    let db_result = api.get_database(db.clone()).await?;
    assert_eq!(db, db_result);
    Ok(())
}
// Every page found by search must yield its child blocks without error
// (a page id is convertible to a block id).
#[tokio::test]
async fn get_block_children() -> Result<(), Box<dyn std::error::Error>> {
    let api = test_client();
    let search_response = api
        .search(NotionSearch::Filter {
            value: FilterValue::Page,
            property: FilterProperty::Object,
        })
        .await?;
    println!("{:?}", search_response.results.len());
    for object in search_response.results {
        match object {
            Object::Page { page } => api
                .get_block_children(BlockId::from(page.id))
                .await
                .unwrap(),
            _ => panic!("Should not have received anything but pages!"),
        };
    }
    Ok(())
}
// Querying the first shared database with a Name-contains-"First" text
// filter must match exactly one page (depends on the test workspace data).
#[tokio::test]
async fn query_database() -> Result<(), Box<dyn std::error::Error>> {
    let api = test_client();
    let response = api
        .search(NotionSearch::Filter {
            value: FilterValue::Database,
            property: FilterProperty::Object,
        })
        .await?;
    let db = response
        .results()
        .iter()
        .filter_map(|o| match o {
            Object::Database { database } => Some(database),
            _ => None,
        })
        .next()
        .expect("Test expected to find at least one database in notion")
        .clone();
    let pages = api
        .query_database(
            db,
            DatabaseQuery {
                filter: Some(FilterCondition {
                    property: "Name".to_string(),
                    condition: Text(TextCondition::Contains("First".to_string())),
                }),
                ..Default::default()
            },
        )
        .await?;
    assert_eq!(pages.results().len(), 1);
    Ok(())
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/ids.rs | src/ids.rs | use std::fmt::Display;
use std::fmt::Error;
/// A displayable, string-backed Notion identifier.
pub trait Identifier: Display {
    /// Returns the raw identifier string.
    fn value(&self) -> &str;
}
/// Meant to be a helpful trait allowing anything that can be
/// identified by the type specified in `ById`.
pub trait AsIdentifier<ById: Identifier> {
    /// Borrows this value's identifier.
    fn as_id(&self) -> &ById;
}
// An identifier trivially identifies itself.
impl<T> AsIdentifier<T> for T
where
    T: Identifier,
{
    fn as_id(&self) -> &T {
        self
    }
}
// A reference to an identifier identifies the referenced id as well.
impl<T> AsIdentifier<T> for &T
where
    T: Identifier,
{
    fn as_id(&self) -> &T {
        self
    }
}
// Generates a serde-transparent newtype over `String` that implements
// `Identifier`, `Display` and an infallible `FromStr`.
// NOTE(review): the macro name is misspelled ("identifer"); it is kept as-is
// because the invocations below depend on it — renaming must touch both.
macro_rules! identifer {
    ($name:ident) => {
        #[derive(serde::Serialize, serde::Deserialize, Debug, Eq, PartialEq, Hash, Clone)]
        #[serde(transparent)]
        pub struct $name(String);
        impl Identifier for $name {
            fn value(&self) -> &str {
                &self.0
            }
        }
        impl std::fmt::Display for $name {
            // Delegates straight to the wrapped String.
            fn fmt(
                &self,
                f: &mut std::fmt::Formatter<'_>,
            ) -> std::fmt::Result {
                self.0.fmt(f)
            }
        }
        impl std::str::FromStr for $name {
            type Err = Error;
            // Never fails in practice: any string becomes an id.
            fn from_str(s: &str) -> Result<Self, Self::Err> {
                Ok($name(s.to_string()))
            }
        }
    };
}
// Concrete identifier newtypes used throughout the API surface.
identifer!(DatabaseId);
identifer!(PageId);
identifer!(BlockId);
identifer!(UserId);
identifer!(PropertyId);
impl From<PageId> for BlockId {
fn from(page_id: PageId) -> Self {
BlockId(page_id.0)
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/paging.rs | src/models/paging.rs | use serde::{Deserialize, Serialize};
/// Opaque cursor returned by the Notion API for resuming a paginated listing.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone)]
#[serde(transparent)]
pub struct PagingCursor(String);
/// Pagination parameters flattened into list/query request bodies.
#[derive(Serialize, Debug, Eq, PartialEq, Default, Clone)]
pub struct Paging {
    /// Cursor to resume from; omitted to start from the beginning.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub start_cursor: Option<PagingCursor>,
    /// Maximum number of results per page.
    // NOTE(review): `u8` caps this at 255; the API's own cap is not visible
    // here — confirm against the Notion pagination docs.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub page_size: Option<u8>,
}
/// Implemented by request types that support cursor-based pagination.
pub trait Pageable {
    /// Returns a copy of this request positioned at `starting_point`.
    fn start_from(
        self,
        starting_point: Option<PagingCursor>,
    ) -> Self;
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/properties.rs | src/models/properties.rs | use crate::models::text::RichText;
use crate::models::users::User;
use crate::ids::{DatabaseId, PageId, PropertyId};
use crate::models::{DateTime, Number, Utc};
use chrono::NaiveDate;
use serde::{Deserialize, Serialize};
pub mod formulas;
#[cfg(test)]
mod tests;
/// How the number is displayed in Notion.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Copy, Clone, Hash)]
#[serde(rename_all = "snake_case")]
pub enum NumberFormat {
    Number,
    NumberWithCommas,
    Percent,
    Dollar,
    Euro,
    Pound,
    Yen,
    Ruble,
    Rupee,
    Won,
    Yuan,
}
/// Configuration of a number property: its display format.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone)]
pub struct NumberDetails {
    pub format: NumberFormat,
}
/// Opaque identifier of a select / multi-select / status option.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone)]
#[serde(transparent)]
pub struct SelectOptionId(String);
/// Display color used by select options and status groups.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum Color {
    Default,
    Gray,
    Brown,
    Orange,
    Yellow,
    Green,
    Blue,
    Purple,
    Pink,
    Red,
}
/// A single option of a select or multi-select property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct SelectOption {
    pub name: String,
    pub id: SelectOptionId,
    pub color: Color,
}
/// Configuration of a select / multi-select property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Select {
    /// Sorted list of options available for this property.
    pub options: Vec<SelectOption>,
}
/// A named group of status options.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct StatusGroupOption {
    pub name: String,
    pub id: SelectOptionId,
    pub color: Color,
    /// Ids of the options belonging to this group.
    pub option_ids: Vec<SelectOptionId>,
}
/// Configuration of a status property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Status {
    /// Sorted list of options available for this property.
    pub options: Vec<SelectOption>,
    /// Sorted list of groups available for this property.
    pub groups: Vec<StatusGroupOption>,
}
/// Configuration of a formula property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Formula {
    /// Formula to evaluate for this property
    pub expression: String,
}
/// Configuration of a relation property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Relation {
    /// The database this relation refers to.
    /// New linked pages must belong to this database in order to be valid.
    pub database_id: DatabaseId,
    /// By default, relations are formed as two synced properties across databases:
    /// if you make a change to one property, it updates the synced property at the same time.
    /// `synced_property_name` refers to the name of the property in the related database.
    pub synced_property_name: Option<String>,
    /// By default, relations are formed as two synced properties across databases:
    /// if you make a change to one property, it updates the synced property at the same time.
    /// `synced_property_id` refers to the id of the property in the related database.
    /// This is usually a short string of random letters and symbols.
    pub synced_property_id: Option<PropertyId>,
}
/// The function used to roll up the values of the relation property.
/// <https://developers.notion.com/reference/page-property-values#rollup>
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum RollupFunction {
    Average,
    Checked,
    Count,
    CountPerGroup,
    CountValues,
    DateRange,
    EarliestDate,
    Empty,
    LatestDate,
    Max,
    Median,
    Min,
    NotEmpty,
    PercentChecked,
    PercentEmpty,
    PercentNotEmpty,
    PercentPerGroup,
    PercentUnchecked,
    Range,
    ShowOriginal,
    ShowUnique,
    Sum,
    Unchecked,
    Unique,
}
/// Configuration of a rollup property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Rollup {
    /// The name of the relation property this property is responsible for rolling up.
    pub relation_property_name: String,
    /// The id of the relation property this property is responsible for rolling up.
    pub relation_property_id: PropertyId,
    /// The name of the property of the pages in the related database
    /// that is used as an input to `function`.
    pub rollup_property_name: String,
    /// The id of the property of the pages in the related database
    /// that is used as an input to `function`.
    pub rollup_property_id: String,
    /// The function that is evaluated for every page in the relation of the rollup.
    pub function: RollupFunction,
}
/// Schema-level configuration of a single database property, tagged by the
/// JSON `type` field. See <https://developers.notion.com/reference/database>
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum PropertyConfiguration {
    /// Represents the special Title property required on every database.
    /// See <https://developers.notion.com/reference/database#title-configuration>
    Title {
        id: PropertyId,
    },
    /// Represents a Text property
    /// <https://developers.notion.com/reference/database#text-configuration>
    #[serde(rename = "rich_text")]
    Text {
        id: PropertyId,
    },
    /// Represents a Number Property
    /// See <https://developers.notion.com/reference/database#number-configuration>
    Number {
        id: PropertyId,
        /// How the number is displayed in Notion.
        number: NumberDetails,
    },
    /// Represents a Select Property
    /// See <https://developers.notion.com/reference/database#select-configuration>
    Select {
        id: PropertyId,
        select: Select,
    },
    /// Represents a Status property
    Status {
        id: PropertyId,
        status: Status,
    },
    /// Represents a Multi-select Property
    /// See <https://developers.notion.com/reference/database#multi-select-configuration>
    MultiSelect {
        id: PropertyId,
        multi_select: Select,
    },
    /// Represents a Date Property
    /// See <https://developers.notion.com/reference/database#date-configuration>
    Date {
        id: PropertyId,
    },
    /// Represents a People Property
    /// See <https://developers.notion.com/reference/database#people-configuration>
    People {
        id: PropertyId,
    },
    /// Represents a File Property
    /// See <https://developers.notion.com/reference/database#file-configuration>
    // Todo: File a bug with notion
    // Documentation issue: docs claim type name is `file` but it is in fact `files`
    Files {
        id: PropertyId,
    },
    /// Represents a Checkbox Property
    /// See <https://developers.notion.com/reference/database#checkbox-configuration>
    Checkbox {
        id: PropertyId,
    },
    /// Represents a URL Property
    /// See <https://developers.notion.com/reference/database#url-configuration>
    Url {
        id: PropertyId,
    },
    /// Represents a Email Property
    /// See <https://developers.notion.com/reference/database#email-configuration>
    Email {
        id: PropertyId,
    },
    /// Represents a Phone number Property
    /// See <https://developers.notion.com/reference/database#phone-number-configuration>
    PhoneNumber {
        id: PropertyId,
    },
    /// See <https://developers.notion.com/reference/database#formula-configuration>
    Formula {
        id: PropertyId,
        formula: Formula,
    },
    /// See <https://developers.notion.com/reference/database#relation-configuration>
    Relation {
        id: PropertyId,
        relation: Relation,
    },
    /// See <https://developers.notion.com/reference/database#rollup-configuration>
    Rollup {
        id: PropertyId,
        rollup: Rollup,
    },
    /// See <https://developers.notion.com/reference/database#created-time-configuration>
    CreatedTime {
        id: PropertyId,
    },
    /// See <https://developers.notion.com/reference/database#created-by-configuration>
    CreatedBy {
        id: PropertyId,
    },
    /// See <https://developers.notion.com/reference/database#last-edited-time-configuration>
    LastEditedTime {
        id: PropertyId,
    },
    /// See <https://developers.notion.com/reference/database#last-edited-by-configuration>
    LastEditBy {
        id: PropertyId,
    },
    /// Auto-incrementing unique id property.
    UniqueId {
        id: PropertyId,
    },
    /// Button property (no additional configuration payload).
    Button {
        id: PropertyId,
    },
}
/// A chosen select / multi-select / status option as stored on a page.
/// Either `id` or `name` may be omitted when writing values back.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct SelectedValue {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<SelectOptionId>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub color: Color,
}
/// A date value that may or may not carry a time component; serde picks the
/// variant that parses (`untagged`).
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(untagged)]
pub enum DateOrDateTime {
    Date(NaiveDate),
    DateTime(DateTime<Utc>),
}
/// A (possibly ranged) date property value.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct DateValue {
    pub start: DateOrDateTime,
    /// End of the range; `None` for a single date.
    pub end: Option<DateOrDateTime>,
    pub time_zone: Option<String>,
}
/// Formula property value objects represent the result of evaluating a formula
/// described in the database's properties.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum FormulaResultValue {
    String { string: Option<String> },
    Number { number: Option<Number> },
    Boolean { boolean: Option<bool> },
    Date { date: Option<DateValue> },
}
/// Relation property value objects contain an array of page references within the relation property.
/// A page reference is an object with an id property,
/// with a string value (UUIDv4) corresponding to a page ID in another database.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct RelationValue {
    pub id: PageId,
}
/// The computed result of a rollup, tagged by its `type`.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum RollupValue {
    Number { number: Option<Number> },
    Date { date: Option<DateTime<Utc>> },
    Array { array: Vec<RollupPropertyValue> },
}
/// A file attached to a page property.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct FileReference {
    pub name: String,
    pub url: String,
    pub mime_type: String,
}
/// A property value attached to a page, tagged by the JSON `type` field.
/// Each variant carries the property's `id` plus its typed payload.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum PropertyValue {
    /// <https://developers.notion.com/reference/property-object#title-configuration>
    Title {
        id: PropertyId,
        title: Vec<RichText>,
    },
    /// <https://developers.notion.com/reference/property-object#text-configuration>
    #[serde(rename = "rich_text")]
    Text {
        id: PropertyId,
        rich_text: Vec<RichText>,
    },
    /// <https://developers.notion.com/reference/property-object#number-configuration>
    Number {
        id: PropertyId,
        number: Option<Number>,
    },
    /// <https://developers.notion.com/reference/property-object#select-configuration>
    Select {
        id: PropertyId,
        select: Option<SelectedValue>,
    },
    /// <https://developers.notion.com/reference/property-object#status-configuration>
    Status {
        id: PropertyId,
        status: Option<SelectedValue>,
    },
    /// <https://developers.notion.com/reference/property-object#multi-select-configuration>
    MultiSelect {
        id: PropertyId,
        multi_select: Option<Vec<SelectedValue>>,
    },
    /// <https://developers.notion.com/reference/property-object#date-configuration>
    Date {
        id: PropertyId,
        date: Option<DateValue>,
    },
    /// <https://developers.notion.com/reference/property-object#formula-configuration>
    Formula {
        id: PropertyId,
        formula: FormulaResultValue,
    },
    /// <https://developers.notion.com/reference/property-object#relation-configuration>
    /// It is actually an array of relations
    Relation {
        id: PropertyId,
        relation: Option<Vec<RelationValue>>,
    },
    /// <https://developers.notion.com/reference/property-object#rollup-configuration>
    Rollup {
        id: PropertyId,
        rollup: Option<RollupValue>,
    },
    /// <https://developers.notion.com/reference/property-object#people-configuration>
    People {
        id: PropertyId,
        people: Vec<User>,
    },
    /// <https://developers.notion.com/reference/property-object#files-configuration>
    Files {
        id: PropertyId,
        files: Option<Vec<FileReference>>,
    },
    /// <https://developers.notion.com/reference/property-object#checkbox-configuration>
    Checkbox {
        id: PropertyId,
        checkbox: bool,
    },
    /// <https://developers.notion.com/reference/property-object#url-configuration>
    Url {
        id: PropertyId,
        url: Option<String>,
    },
    /// <https://developers.notion.com/reference/property-object#email-configuration>
    Email {
        id: PropertyId,
        email: Option<String>,
    },
    /// <https://developers.notion.com/reference/property-object#phone-number-configuration>
    PhoneNumber {
        id: PropertyId,
        phone_number: String,
    },
    /// <https://developers.notion.com/reference/property-object#created-time-configuration>
    CreatedTime {
        id: PropertyId,
        created_time: DateTime<Utc>,
    },
    /// <https://developers.notion.com/reference/property-object#created-by-configuration>
    CreatedBy {
        id: PropertyId,
        created_by: User,
    },
    /// <https://developers.notion.com/reference/property-object#last-edited-time-configuration>
    LastEditedTime {
        id: PropertyId,
        last_edited_time: DateTime<Utc>,
    },
    /// <https://developers.notion.com/reference/property-object#last-edited-by-configuration>
    LastEditedBy {
        id: PropertyId,
        last_edited_by: User,
    },
    /// Auto-generated unique id value.
    UniqueId {
        id: PropertyId,
        unique_id: UniqueidValue,
    },
    /// Button property; carries no value payload.
    Button {
        id: PropertyId,
    },
}
/// Value of a `unique_id` property: an optional string prefix plus a number.
// NOTE(review): the non-standard casing (`UniqueidValue` rather than
// `UniqueIdValue`) is public API and therefore kept as-is.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct UniqueidValue {
    pub prefix: Option<String>,
    pub number: u32,
}
/// <https://developers.notion.com/reference/page#rollup-property-value-element>
/// Variants mirror [`PropertyValue`] but without per-property `id` fields,
/// since rollup elements are not addressable properties themselves.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum RollupPropertyValue {
    /// <https://developers.notion.com/reference/page#rich-text-property-values>
    #[serde(rename = "rich_text")]
    Text {
        rich_text: Vec<RichText>,
    },
    /// <https://developers.notion.com/reference/page#number-property-values>
    Number {
        number: Option<Number>,
    },
    /// <https://developers.notion.com/reference/page#select-property-values>
    Select {
        select: Option<SelectedValue>,
    },
    Status {
        status: Option<SelectedValue>,
    },
    MultiSelect {
        multi_select: Option<Vec<SelectedValue>>,
    },
    Date {
        date: Option<DateValue>,
    },
    /// <https://developers.notion.com/reference/page#formula-property-values>
    Formula {
        formula: FormulaResultValue,
    },
    /// <https://developers.notion.com/reference/page#relation-property-values>
    /// It is actually an array of relations
    Relation {
        relation: Option<Vec<RelationValue>>,
    },
    /// <https://developers.notion.com/reference/page#rollup-property-values>
    Rollup {
        rollup: Option<RollupValue>,
    },
    People {
        people: Vec<User>,
    },
    Files {
        files: Option<Vec<FileReference>>,
    },
    Checkbox {
        checkbox: bool,
    },
    Url {
        url: Option<String>,
    },
    Email {
        email: Option<String>,
    },
    PhoneNumber {
        phone_number: String,
    },
    CreatedTime {
        created_time: DateTime<Utc>,
    },
    CreatedBy {
        created_by: User,
    },
    LastEditedTime {
        last_edited_time: DateTime<Utc>,
    },
    LastEditedBy {
        last_edited_by: User,
    },
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/tests.rs | src/models/tests.rs | use crate::ids::UserId;
use crate::models::properties::{DateOrDateTime, DateValue};
use crate::models::text::{
Annotations, Link, MentionObject, RichText, RichTextCommon, Text, TextColor,
};
use crate::models::users::{Person, User, UserCommon};
use crate::models::{ListResponse, Object, Page};
use chrono::{DateTime, NaiveDate};
use std::str::FromStr;
// Deserialization smoke tests: the fixtures are stored API payloads; a panic
// from `unwrap` (i.e. a serde error) fails the test.
#[test]
fn deserialize_page() {
    let _page: Page = serde_json::from_str(include_str!("tests/page.json")).unwrap();
}
#[test]
fn deserialize_query_result() {
    let _page: ListResponse<Page> =
        serde_json::from_str(include_str!("tests/query_result.json")).unwrap();
}
// Presumably a regression fixture for issue #15 (per the fixture file name).
#[test]
fn deserialize_number_format() {
    let _search_results: ListResponse<Object> =
        serde_json::from_str(include_str!("tests/issue_15.json")).unwrap();
}
// Round-trip check: a plain `text` rich-text element with all annotations set.
#[test]
fn rich_text() {
    let rich_text_text: RichText =
        serde_json::from_str(include_str!("tests/rich_text_text.json")).unwrap();
    assert_eq!(
        rich_text_text,
        RichText::Text {
            rich_text: RichTextCommon {
                plain_text: "Rich".to_string(),
                href: Some("https://github.com/jakeswenson/notion".to_string()),
                annotations: Some(Annotations {
                    bold: Some(true),
                    code: Some(true),
                    color: Some(TextColor::Default),
                    italic: Some(true),
                    strikethrough: Some(true),
                    underline: Some(true),
                }),
            },
            text: Text {
                content: "Rich".to_string(),
                link: Some(Link {
                    url: "https://github.com/jakeswenson/notion".to_string()
                }),
            },
        }
    )
}
// A `mention` element referencing a person-type user.
#[test]
fn rich_text_mention_user_person() {
    let rich_text_mention_user_person: RichText =
        serde_json::from_str(include_str!("tests/rich_text_mention_user_person.json")).unwrap();
    assert_eq!(
        rich_text_mention_user_person,
        RichText::Mention {
            rich_text: RichTextCommon {
                plain_text: "@John Doe".to_string(),
                href: None,
                annotations: Some(Annotations {
                    bold: Some(false),
                    code: Some(false),
                    color: Some(TextColor::Default),
                    italic: Some(false),
                    strikethrough: Some(false),
                    underline: Some(false),
                }),
            },
            mention: MentionObject::User {
                user: User::Person {
                    common: UserCommon {
                        id: UserId::from_str("1118608e-35e8-4fa3-aef7-a4ced85ce8e0").unwrap(),
                        name: Some("John Doe".to_string()),
                        avatar_url: Some(
                            "https://secure.notion-static.com/e6a352a8-8381-44d0-a1dc-9ed80e62b53d.jpg"
                                .to_string()
                        ),
                    },
                    person: Person {
                        email: "john.doe@gmail.com".to_string()
                    },
                }
            },
        }
    )
}
// A date mention with only a start date (no time, no end).
#[test]
fn rich_text_mention_date() {
    let rich_text_mention_date: RichText =
        serde_json::from_str(include_str!("tests/rich_text_mention_date.json")).unwrap();
    assert_eq!(
        rich_text_mention_date,
        RichText::Mention {
            rich_text: RichTextCommon {
                plain_text: "2022-04-16 → ".to_string(),
                href: None,
                annotations: Some(Annotations {
                    bold: Some(false),
                    code: Some(false),
                    color: Some(TextColor::Default),
                    italic: Some(false),
                    strikethrough: Some(false),
                    underline: Some(false),
                }),
            },
            mention: MentionObject::Date {
                date: DateValue {
                    start: DateOrDateTime::Date(NaiveDate::from_str("2022-04-16").unwrap()),
                    end: None,
                    time_zone: None,
                }
            },
        }
    )
}
// A date mention whose start includes a time component (parsed as DateTime).
#[test]
fn rich_text_mention_date_with_time() {
    let rich_text_mention_date_with_time: RichText =
        serde_json::from_str(include_str!("tests/rich_text_mention_date_with_time.json")).unwrap();
    assert_eq!(
        rich_text_mention_date_with_time,
        RichText::Mention {
            rich_text: RichTextCommon {
                plain_text: "2022-05-14T09:00:00.000-04:00 → ".to_string(),
                href: None,
                annotations: Some(Annotations {
                    bold: Some(false),
                    code: Some(false),
                    color: Some(TextColor::Default),
                    italic: Some(false),
                    strikethrough: Some(false),
                    underline: Some(false),
                }),
            },
            mention: MentionObject::Date {
                date: DateValue {
                    start: DateOrDateTime::DateTime(
                        DateTime::from_str("2022-05-14T09:00:00.000-04:00").unwrap()
                    ),
                    end: None,
                    time_zone: None,
                }
            },
        }
    )
}
// A date-range mention: start and end dates, no time component.
#[test]
fn rich_text_mention_date_with_end() {
    let rich_text_mention_date_with_end: RichText =
        serde_json::from_str(include_str!("tests/rich_text_mention_date_with_end.json")).unwrap();
    assert_eq!(
        rich_text_mention_date_with_end,
        RichText::Mention {
            rich_text: RichTextCommon {
                plain_text: "2022-05-12 → 2022-05-13".to_string(),
                href: None,
                annotations: Some(Annotations {
                    bold: Some(false),
                    code: Some(false),
                    color: Some(TextColor::Default),
                    italic: Some(false),
                    strikethrough: Some(false),
                    underline: Some(false),
                }),
            },
            mention: MentionObject::Date {
                date: DateValue {
                    start: DateOrDateTime::Date(NaiveDate::from_str("2022-05-12").unwrap()),
                    end: Some(DateOrDateTime::Date(
                        NaiveDate::from_str("2022-05-13").unwrap()
                    )),
                    time_zone: None,
                }
            },
        }
    )
}
// A date-range mention where both endpoints carry a time component.
#[test]
fn rich_text_mention_date_with_end_and_time() {
    let rich_text_mention_date_with_end_and_time: RichText = serde_json::from_str(include_str!(
        "tests/rich_text_mention_date_with_end_and_time.json"
    ))
    .unwrap();
    assert_eq!(
        rich_text_mention_date_with_end_and_time,
        RichText::Mention {
            rich_text: RichTextCommon {
                plain_text: "2022-04-16T12:00:00.000-04:00 → 2022-04-16T12:00:00.000-04:00"
                    .to_string(),
                href: None,
                annotations: Some(Annotations {
                    bold: Some(false),
                    code: Some(false),
                    color: Some(TextColor::Default),
                    italic: Some(false),
                    strikethrough: Some(false),
                    underline: Some(false),
                }),
            },
            mention: MentionObject::Date {
                date: DateValue {
                    start: DateOrDateTime::DateTime(
                        DateTime::from_str("2022-04-16T12:00:00.000-04:00").unwrap()
                    ),
                    end: Some(DateOrDateTime::DateTime(
                        DateTime::from_str("2022-04-16T12:00:00.000-04:00").unwrap()
                    )),
                    time_zone: None,
                }
            },
        }
    )
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/search.rs | src/models/search.rs | use crate::ids::{PageId, UserId};
use crate::models::paging::{Pageable, Paging, PagingCursor};
use crate::models::Number;
use chrono::{DateTime, Utc};
use serde::ser::SerializeMap;
use serde::{Serialize, Serializer};
/// Direction for sorting search results.
#[derive(Serialize, Debug, Eq, PartialEq, Hash, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum SortDirection {
    Ascending,
    Descending,
}
/// Timestamp the search endpoint can sort by (only `last_edited_time`).
#[derive(Serialize, Debug, Eq, PartialEq, Hash, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum SortTimestamp {
    LastEditedTime,
}
/// Object kinds the search endpoint can filter to.
#[derive(Serialize, Debug, Eq, PartialEq, Hash, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum FilterValue {
    Page,
    Database,
}
/// Property the search filter applies to (only `object` is supported).
#[derive(Serialize, Debug, Eq, PartialEq, Hash, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum FilterProperty {
    Object,
}
/// Sort clause of a search request.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
pub struct Sort {
    /// The name of the timestamp to sort against.
    timestamp: SortTimestamp,
    direction: SortDirection,
}
/// Filter clause of a search request.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
pub struct Filter {
    property: FilterProperty,
    value: FilterValue,
}
/// Wire-format body of the search endpoint; all parts are optional and
/// skipped when absent. Built via `From<NotionSearch>`.
#[derive(Serialize, Debug, Eq, PartialEq, Default)]
pub struct SearchRequest {
    #[serde(skip_serializing_if = "Option::is_none")]
    query: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    sort: Option<Sort>,
    #[serde(skip_serializing_if = "Option::is_none")]
    filter: Option<Filter>,
    #[serde(flatten)]
    paging: Option<Paging>,
}
/// Filter conditions applicable to text-like property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum TextCondition {
    Equals(String),
    DoesNotEqual(String),
    Contains(String),
    DoesNotContain(String),
    StartsWith(String),
    EndsWith(String),
    /// Serialized as `{"is_empty": true}`, the shape the API expects.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// serde helper: emits the JSON literal `true` for unit variants
/// (e.g. `"is_empty": true`).
fn serialize_to_true<S>(serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    serializer.serialize_bool(true)
}
/// serde helper: emits an empty JSON object `{}` for unit variants
/// (e.g. `"past_week": {}`).
fn serialize_to_empty_object<S>(serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    // Todo: there has to be a better way?
    serializer.serialize_map(Some(0))?.end()
}
/// Filter conditions applicable to number property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum NumberCondition {
    Equals(Number),
    DoesNotEqual(Number),
    GreaterThan(Number),
    LessThan(Number),
    GreaterThanOrEqualTo(Number),
    LessThanOrEqualTo(Number),
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// Filter conditions applicable to checkbox property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum CheckboxCondition {
    Equals(bool),
    DoesNotEqual(bool),
}
/// Filter conditions applicable to select property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum SelectCondition {
    /// Only return pages where the page property value matches the provided value exactly.
    Equals(String),
    /// Only return pages where the page property value does not match the provided value exactly.
    DoesNotEqual(String),
    /// Only return pages where the page property value is empty.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    /// Only return pages where the page property value is present.
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// Filter conditions applicable to multi-select property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum MultiSelectCondition {
    /// Only return pages where the page property value contains the provided value.
    Contains(String),
    /// Only return pages where the page property value does not contain the provided value.
    DoesNotContain(String),
    /// Only return pages where the page property value is empty.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    /// Only return pages where the page property value is present.
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// Filter conditions applicable to date property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum DateCondition {
    /// Only return pages where the page property value matches the provided date exactly.
    /// Note that the comparison is done against the date.
    /// Any time information sent will be ignored.
    Equals(DateTime<Utc>),
    /// Only return pages where the page property value is before the provided date.
    /// Note that the comparison is done against the date.
    /// Any time information sent will be ignored.
    Before(DateTime<Utc>),
    /// Only return pages where the page property value is after the provided date.
    /// Note that the comparison is done against the date.
    /// Any time information sent will be ignored.
    After(DateTime<Utc>),
    /// Only return pages where the page property value is on or before the provided date.
    /// Note that the comparison is done against the date.
    /// Any time information sent will be ignored.
    OnOrBefore(DateTime<Utc>),
    /// Only return pages where the page property value is on or after the provided date.
    /// Note that the comparison is done against the date.
    /// Any time information sent will be ignored.
    OnOrAfter(DateTime<Utc>),
    /// Only return pages where the page property value is empty.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    /// Only return pages where the page property value is present.
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
    /// Only return pages where the page property value is within the past week.
    #[serde(serialize_with = "serialize_to_empty_object")]
    PastWeek,
    /// Only return pages where the page property value is within the past month.
    #[serde(serialize_with = "serialize_to_empty_object")]
    PastMonth,
    /// Only return pages where the page property value is within the past year.
    #[serde(serialize_with = "serialize_to_empty_object")]
    PastYear,
    /// Only return pages where the page property value is within the next week.
    #[serde(serialize_with = "serialize_to_empty_object")]
    NextWeek,
    /// Only return pages where the page property value is within the next month.
    #[serde(serialize_with = "serialize_to_empty_object")]
    NextMonth,
    /// Only return pages where the page property value is within the next year.
    #[serde(serialize_with = "serialize_to_empty_object")]
    NextYear,
}
/// Filter conditions applicable to people property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum PeopleCondition {
    Contains(UserId),
    /// Only return pages where the page property value does not contain the provided value.
    DoesNotContain(UserId),
    /// Only return pages where the page property value is empty.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    /// Only return pages where the page property value is present.
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// Filter conditions applicable to files property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum FilesCondition {
    /// Only return pages where the page property value is empty.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    /// Only return pages where the page property value is present.
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// Filter conditions applicable to relation property values.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum RelationCondition {
    /// Only return pages where the page property value contains the provided value.
    Contains(PageId),
    /// Only return pages where the page property value does not contain the provided value.
    DoesNotContain(PageId),
    /// Only return pages where the page property value is empty.
    #[serde(serialize_with = "serialize_to_true")]
    IsEmpty,
    /// Only return pages where the page property value is present.
    #[serde(serialize_with = "serialize_to_true")]
    IsNotEmpty,
}
/// Filter conditions applicable to formula property values, dispatched on the
/// formula's result type.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum FormulaCondition {
    /// Only return pages where the result type of the page property formula is "text"
    /// and the provided text filter condition matches the formula's value.
    Text(TextCondition),
    /// Only return pages where the result type of the page property formula is "number"
    /// and the provided number filter condition matches the formula's value.
    Number(NumberCondition),
    /// Only return pages where the result type of the page property formula is "checkbox"
    /// and the provided checkbox filter condition matches the formula's value.
    Checkbox(CheckboxCondition),
    /// Only return pages where the result type of the page property formula is "date"
    /// and the provided date filter condition matches the formula's value.
    Date(DateCondition),
}
/// A condition on a single property, keyed by the property's type; the serde
/// tag (e.g. `rich_text`, `number`) selects the condition payload.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum PropertyCondition {
    RichText(TextCondition),
    Number(NumberCondition),
    Checkbox(CheckboxCondition),
    Select(SelectCondition),
    MultiSelect(MultiSelectCondition),
    Date(DateCondition),
    People(PeopleCondition),
    Files(FilesCondition),
    Relation(RelationCondition),
    Formula(FormulaCondition),
}
/// A (possibly compound) database-query filter; `untagged` so each variant
/// serializes to exactly the JSON shape the API expects.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
#[serde(untagged)]
pub enum FilterCondition {
    /// A leaf condition on a single named property.
    Property {
        property: String,
        #[serde(flatten)]
        condition: PropertyCondition,
    },
    /// Returns pages when **all** of the filters inside the provided vector match.
    And { and: Vec<FilterCondition> },
    /// Returns pages when **any** of the filters inside the provided vector match.
    Or { or: Vec<FilterCondition> },
}
/// Timestamps a database query can sort by.
#[derive(Serialize, Debug, Eq, PartialEq, Hash, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum DatabaseSortTimestamp {
    CreatedTime,
    LastEditedTime,
}
/// A single sort clause of a database query: either by a named property or
/// by a built-in timestamp.
#[derive(Serialize, Debug, Eq, PartialEq, Clone)]
pub struct DatabaseSort {
    // Todo: Should property and timestamp be mutually exclusive? (i.e a flattened enum?)
    // the documentation is not clear:
    // https://developers.notion.com/reference/post-database-query#post-database-query-sort
    #[serde(skip_serializing_if = "Option::is_none")]
    pub property: Option<String>,
    /// The name of the timestamp to sort against.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<DatabaseSortTimestamp>,
    pub direction: SortDirection,
}
/// Wire-format body of the database query endpoint: optional sorts, an
/// optional filter tree, and flattened pagination parameters.
#[derive(Serialize, Debug, Eq, PartialEq, Default, Clone)]
pub struct DatabaseQuery {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sorts: Option<Vec<DatabaseSort>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub filter: Option<FilterCondition>,
    #[serde(flatten)]
    pub paging: Option<Paging>,
}
impl Pageable for DatabaseQuery {
    /// Returns a copy of this query that resumes from `starting_point`,
    /// carrying over any previously configured page size.
    fn start_from(
        self,
        starting_point: Option<PagingCursor>,
    ) -> Self {
        // Preserve the page size from the existing paging settings, if any.
        let page_size = self.paging.and_then(|existing| existing.page_size);
        let paging = Paging {
            start_cursor: starting_point,
            page_size,
        };
        DatabaseQuery {
            paging: Some(paging),
            ..self
        }
    }
}
/// High-level description of a search request; converted to the wire format
/// via `From<NotionSearch> for SearchRequest`.
#[derive(Debug, Eq, PartialEq)]
pub enum NotionSearch {
    /// When supplied, limits which pages are returned by comparing the query to the page title.
    Query(String),
    /// When supplied, sorts the results based on the provided criteria.
    ///
    /// Limitation: Currently only a single sort is allowed and is limited to `last_edited_time`
    Sort {
        timestamp: SortTimestamp,
        direction: SortDirection,
    },
    /// When supplied, filters the results based on the provided criteria.
    ///
    /// Limitation: Currently the only filter allowed is `object` which will filter by type of object (either page or database)
    Filter {
        /// The name of the property to filter by.
        /// Currently the only property you can filter by is the `object` type.
        property: FilterProperty,
        /// The value of the property to filter the results by.
        /// Possible values for object type include `page` or `database`.
        value: FilterValue,
    },
}
impl NotionSearch {
    /// Convenience constructor: a search filtered to return only databases.
    pub fn filter_by_databases() -> Self {
        Self::Filter {
            property: FilterProperty::Object,
            value: FilterValue::Database,
        }
    }
    /// Convenience constructor: a search filtered to return only pages.
    /// Added for symmetry with [`Self::filter_by_databases`]; `object` is the
    /// only filterable property and `page`/`database` its only values.
    pub fn filter_by_pages() -> Self {
        Self::Filter {
            property: FilterProperty::Object,
            value: FilterValue::Page,
        }
    }
}
impl From<NotionSearch> for SearchRequest {
fn from(search: NotionSearch) -> Self {
match search {
NotionSearch::Query(query) => SearchRequest {
query: Some(query),
..Default::default()
},
NotionSearch::Sort {
direction,
timestamp,
} => SearchRequest {
sort: Some(Sort {
timestamp,
direction,
}),
..Default::default()
},
NotionSearch::Filter { property, value } => SearchRequest {
filter: Some(Filter { property, value }),
..Default::default()
},
}
}
}
#[cfg(test)]
mod tests {
mod text_filters {
use crate::models::search::PropertyCondition::{Checkbox, Number, RichText, Select};
use crate::models::search::{
CheckboxCondition, FilterCondition, NumberCondition, SelectCondition, TextCondition,
};
use serde_json::json;
#[test]
fn text_property_equals() -> Result<(), Box<dyn std::error::Error>> {
let json = serde_json::to_value(&FilterCondition::Property {
property: "Name".to_string(),
condition: RichText(TextCondition::Equals("Test".to_string())),
})?;
assert_eq!(
json,
json!({"property":"Name","rich_text":{"equals":"Test"}})
);
Ok(())
}
#[test]
fn text_property_contains() -> Result<(), Box<dyn std::error::Error>> {
let json = serde_json::to_value(&FilterCondition::Property {
property: "Name".to_string(),
condition: RichText(TextCondition::Contains("Test".to_string())),
})?;
assert_eq!(
dbg!(json),
json!({"property":"Name","rich_text":{"contains":"Test"}})
);
Ok(())
}
#[test]
fn text_property_is_empty() -> Result<(), Box<dyn std::error::Error>> {
let json = serde_json::to_value(&FilterCondition::Property {
property: "Name".to_string(),
condition: RichText(TextCondition::IsEmpty),
})?;
assert_eq!(
dbg!(json),
json!({"property":"Name","rich_text":{"is_empty":true}})
);
Ok(())
}
#[test]
fn text_property_is_not_empty() -> Result<(), Box<dyn std::error::Error>> {
let json = serde_json::to_value(&FilterCondition::Property {
property: "Name".to_string(),
condition: RichText(TextCondition::IsNotEmpty),
})?;
assert_eq!(
dbg!(json),
json!({"property":"Name","rich_text":{"is_not_empty":true}})
);
Ok(())
}
#[test]
fn compound_query_and() -> Result<(), Box<dyn std::error::Error>> {
let json = serde_json::to_value(&FilterCondition::And {
and: vec![
FilterCondition::Property {
property: "Seen".to_string(),
condition: Checkbox(CheckboxCondition::Equals(false)),
},
FilterCondition::Property {
property: "Yearly visitor count".to_string(),
condition: Number(NumberCondition::GreaterThan(serde_json::Number::from(
1000000,
))),
},
],
})?;
assert_eq!(
dbg!(json),
json!({"and":[
{"property":"Seen","checkbox":{"equals":false}},
{"property":"Yearly visitor count","number":{"greater_than":1000000}}
]})
);
Ok(())
}
#[test]
fn compound_query_or() -> Result<(), Box<dyn std::error::Error>> {
let json = serde_json::to_value(&FilterCondition::Or {
or: vec![
FilterCondition::Property {
property: "Description".to_string(),
condition: RichText(TextCondition::Contains("fish".to_string())),
},
FilterCondition::And {
and: vec![
FilterCondition::Property {
property: "Food group".to_string(),
condition: Select(SelectCondition::Equals(
"🥦Vegetable".to_string(),
)),
},
FilterCondition::Property {
property: "Is protein rich?".to_string(),
condition: Checkbox(CheckboxCondition::Equals(true)),
},
],
},
],
})?;
assert_eq!(
dbg!(json),
json!({"or":[
{"property":"Description","rich_text":{"contains":"fish"}},
{"and":[
{"property":"Food group","select":{"equals":"🥦Vegetable"}},
{"property":"Is protein rich?","checkbox":{"equals":true}}
]}
]})
);
Ok(())
}
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/block.rs | src/models/block.rs | use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use crate::ids::{AsIdentifier, BlockId, DatabaseId, PageId};
use crate::models::text::{RichText, TextColor};
use crate::models::users::UserCommon;
mod tests;
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct BlockCommon {
pub id: BlockId,
pub created_time: DateTime<Utc>,
pub last_edited_time: DateTime<Utc>,
pub has_children: bool,
pub created_by: UserCommon,
pub last_edited_by: UserCommon,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct TextAndChildren {
pub rich_text: Vec<RichText>,
#[serde(skip_serializing_if = "Option::is_none")]
pub children: Option<Vec<Block>>,
pub color: TextColor,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Text {
pub rich_text: Vec<RichText>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct InternalFileObject {
url: String,
expiry_time: DateTime<Utc>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ExternalFileObject {
url: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum FileOrEmojiObject {
Emoji { emoji: String },
File { file: InternalFileObject },
External { external: ExternalFileObject },
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum FileObject {
File { file: InternalFileObject },
External { external: ExternalFileObject },
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Callout {
pub rich_text: Vec<RichText>,
pub icon: FileOrEmojiObject,
pub color: TextColor,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ToDoFields {
pub rich_text: Vec<RichText>,
pub checked: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub children: Option<Vec<Block>>,
pub color: TextColor,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ChildPageFields {
pub title: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ChildDatabaseFields {
pub title: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct EmbedFields {
pub url: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct BookmarkFields {
pub url: String,
pub caption: Vec<RichText>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum CodeLanguage {
Abap,
Arduino,
Bash,
Basic,
C,
Clojure,
Coffeescript,
#[serde(rename = "c++")]
CPlusPlus,
#[serde(rename = "c#")]
CSharp,
Css,
Dart,
Diff,
Docker,
Elixir,
Elm,
Erlang,
Flow,
Fortran,
#[serde(rename = "f#")]
FSharp,
Gherkin,
Glsl,
Go,
Graphql,
Groovy,
Haskell,
Html,
Java,
Javascript,
Json,
Julia,
Kotlin,
Latex,
Less,
Lisp,
Livescript,
Lua,
Makefile,
Markdown,
Markup,
Matlab,
Mermaid,
Nix,
#[serde(rename = "objective-c")]
ObjectiveC,
Ocaml,
Pascal,
Perl,
Php,
#[serde(rename = "plain text")]
PlainText,
Powershell,
Prolog,
Protobuf,
Python,
R,
Reason,
Ruby,
Rust,
Sass,
Scala,
Scheme,
Scss,
Shell,
Sql,
Swift,
Typescript,
#[serde(rename = "vb.net")]
VbNet,
Verilog,
Vhdl,
#[serde(rename = "visual basic")]
VisualBasic,
Webassembly,
Xml,
Yaml,
#[serde(rename = "java/c/c++/c#")]
JavaCAndCPlusPlusAndCSharp,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct CodeFields {
pub rich_text: Vec<RichText>,
pub caption: Vec<RichText>,
pub language: CodeLanguage,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Equation {
pub expression: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct TableOfContents {
pub color: TextColor,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ColumnListFields {
pub children: Vec<Block>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ColumnFields {
pub children: Vec<Block>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct LinkPreviewFields {
pub url: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct TemplateFields {
pub rich_text: Vec<RichText>,
pub children: Vec<Block>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum LinkToPageFields {
PageId { page_id: PageId },
DatabaseId { database_id: DatabaseId },
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct SyncedFromObject {
pub block_id: BlockId,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct SyncedBlockFields {
pub synced_from: Option<SyncedFromObject>,
pub children: Vec<Block>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct TableFields {
pub table_width: u64,
pub has_column_header: bool,
pub has_row_header: bool,
pub children: Vec<Block>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct TableRowFields {
pub cells: Vec<RichText>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum Block {
Paragraph {
#[serde(flatten)]
common: BlockCommon,
paragraph: TextAndChildren,
},
#[serde(rename = "heading_1")]
Heading1 {
#[serde(flatten)]
common: BlockCommon,
heading_1: Text,
},
#[serde(rename = "heading_2")]
Heading2 {
#[serde(flatten)]
common: BlockCommon,
heading_2: Text,
},
#[serde(rename = "heading_3")]
Heading3 {
#[serde(flatten)]
common: BlockCommon,
heading_3: Text,
},
Callout {
#[serde(flatten)]
common: BlockCommon,
callout: Callout,
},
Quote {
#[serde(flatten)]
common: BlockCommon,
quote: TextAndChildren,
},
BulletedListItem {
#[serde(flatten)]
common: BlockCommon,
bulleted_list_item: TextAndChildren,
},
NumberedListItem {
#[serde(flatten)]
common: BlockCommon,
numbered_list_item: TextAndChildren,
},
ToDo {
#[serde(flatten)]
common: BlockCommon,
to_do: ToDoFields,
},
Toggle {
#[serde(flatten)]
common: BlockCommon,
toggle: TextAndChildren,
},
Code {
#[serde(flatten)]
common: BlockCommon,
code: CodeFields,
},
ChildPage {
#[serde(flatten)]
common: BlockCommon,
child_page: ChildPageFields,
},
ChildDatabase {
#[serde(flatten)]
common: BlockCommon,
child_page: ChildDatabaseFields,
},
Embed {
#[serde(flatten)]
common: BlockCommon,
embed: EmbedFields,
},
Image {
#[serde(flatten)]
common: BlockCommon,
image: FileObject,
},
Video {
#[serde(flatten)]
common: BlockCommon,
video: FileObject,
},
File {
#[serde(flatten)]
common: BlockCommon,
file: FileObject,
caption: Text,
},
Pdf {
#[serde(flatten)]
common: BlockCommon,
pdf: FileObject,
},
Bookmark {
#[serde(flatten)]
common: BlockCommon,
bookmark: BookmarkFields,
},
Equation {
#[serde(flatten)]
common: BlockCommon,
equation: Equation,
},
Divider {
#[serde(flatten)]
common: BlockCommon,
},
TableOfContents {
#[serde(flatten)]
common: BlockCommon,
table_of_contents: TableOfContents,
},
Breadcrumb {
#[serde(flatten)]
common: BlockCommon,
},
ColumnList {
#[serde(flatten)]
common: BlockCommon,
column_list: ColumnListFields,
},
Column {
#[serde(flatten)]
common: BlockCommon,
column: ColumnFields,
},
LinkPreview {
#[serde(flatten)]
common: BlockCommon,
link_preview: LinkPreviewFields,
},
Template {
#[serde(flatten)]
common: BlockCommon,
template: TemplateFields,
},
LinkToPage {
#[serde(flatten)]
common: BlockCommon,
link_to_page: LinkToPageFields,
},
Table {
#[serde(flatten)]
common: BlockCommon,
table: TableFields,
},
SyncedBlock {
#[serde(flatten)]
common: BlockCommon,
synced_block: SyncedBlockFields,
},
TableRow {
#[serde(flatten)]
common: BlockCommon,
table_row: TableRowFields,
},
Unsupported {
#[serde(flatten)]
common: BlockCommon,
},
#[serde(other)]
Unknown,
}
impl AsIdentifier<BlockId> for Block {
    /// Returns this block's unique identifier from its shared `BlockCommon`
    /// metadata.
    ///
    /// # Panics
    /// Panics when called on `Block::Unknown`, the fallback variant for
    /// unrecognized block types, which carries no metadata at all.
    fn as_id(&self) -> &BlockId {
        use Block::*;
        match self {
            // Every concrete variant embeds `BlockCommon`, so a single
            // or-pattern extracts the shared `id` field.
            Paragraph { common, .. }
            | Heading1 { common, .. }
            | Heading2 { common, .. }
            | Heading3 { common, .. }
            | Callout { common, .. }
            | Quote { common, .. }
            | BulletedListItem { common, .. }
            | NumberedListItem { common, .. }
            | ToDo { common, .. }
            | Toggle { common, .. }
            | Code { common, .. }
            | ChildPage { common, .. }
            | ChildDatabase { common, .. }
            | Embed { common, .. }
            | Image { common, .. }
            | Video { common, .. }
            | File { common, .. }
            | Pdf { common, .. }
            | Bookmark { common, .. }
            | Equation { common, .. }
            | Divider { common, .. }
            | TableOfContents { common, .. }
            | Breadcrumb { common, .. }
            | ColumnList { common, .. }
            | Column { common, .. }
            | LinkPreview { common, .. }
            | Template { common, .. }
            | LinkToPage { common, .. }
            | SyncedBlock { common, .. }
            | Table { common, .. }
            | TableRow { common, .. }
            | Unsupported { common, .. } => &common.id,
            Unknown => {
                panic!("Trying to reference identifier for unknown block!")
            }
        }
    }
}
/// Strips the server-assigned metadata (`BlockCommon`) from a retrieved
/// [`Block`], producing the payload shape accepted by the block-creation
/// endpoints.
///
/// Implemented as `From` rather than `Into`, per Rust convention (clippy's
/// `from_over_into`): the standard library's blanket impl derives
/// `Into<CreateBlock> for Block` automatically, so existing `.into()` callers
/// keep working, and `CreateBlock::from(block)` becomes available too.
impl From<Block> for CreateBlock {
    fn from(block: Block) -> Self {
        match block {
            Block::Paragraph { paragraph, .. } => CreateBlock::Paragraph { paragraph },
            Block::Heading1 { heading_1, .. } => CreateBlock::Heading1 { heading_1 },
            Block::Heading2 { heading_2, .. } => CreateBlock::Heading2 { heading_2 },
            Block::Heading3 { heading_3, .. } => CreateBlock::Heading3 { heading_3 },
            Block::Callout { callout, .. } => CreateBlock::Callout { callout },
            Block::Quote { quote, .. } => CreateBlock::Quote { quote },
            Block::BulletedListItem {
                bulleted_list_item, ..
            } => CreateBlock::BulletedListItem { bulleted_list_item },
            Block::NumberedListItem {
                numbered_list_item, ..
            } => CreateBlock::NumberedListItem { numbered_list_item },
            Block::ToDo { to_do, .. } => CreateBlock::ToDo { to_do },
            Block::Toggle { toggle, .. } => CreateBlock::Toggle { toggle },
            Block::Code { code, .. } => CreateBlock::Code { code },
            Block::ChildPage { child_page, .. } => CreateBlock::ChildPage { child_page },
            Block::ChildDatabase { child_page, .. } => CreateBlock::ChildDatabase { child_page },
            Block::Embed { embed, .. } => CreateBlock::Embed { embed },
            Block::Image { image, .. } => CreateBlock::Image { image },
            Block::Video { video, .. } => CreateBlock::Video { video },
            Block::File { file, caption, .. } => CreateBlock::File { file, caption },
            Block::Pdf { pdf, .. } => CreateBlock::Pdf { pdf },
            Block::Bookmark { bookmark, .. } => CreateBlock::Bookmark { bookmark },
            Block::Equation { equation, .. } => CreateBlock::Equation { equation },
            Block::Divider { .. } => CreateBlock::Divider,
            Block::TableOfContents {
                table_of_contents, ..
            } => CreateBlock::TableOfContents { table_of_contents },
            Block::Breadcrumb { .. } => CreateBlock::Breadcrumb,
            Block::ColumnList { column_list, .. } => CreateBlock::ColumnList { column_list },
            Block::Column { column, .. } => CreateBlock::Column { column },
            Block::LinkPreview { link_preview, .. } => CreateBlock::LinkPreview { link_preview },
            Block::Template { template, .. } => CreateBlock::Template { template },
            Block::LinkToPage { link_to_page, .. } => CreateBlock::LinkToPage { link_to_page },
            Block::Table { table, .. } => CreateBlock::Table { table },
            Block::SyncedBlock { synced_block, .. } => CreateBlock::SyncedBlock { synced_block },
            Block::TableRow { table_row, .. } => CreateBlock::TableRow { table_row },
            Block::Unsupported { .. } => CreateBlock::Unsupported,
            Block::Unknown => CreateBlock::Unknown,
        }
    }
}
/// Block payload accepted by the block-creation endpoints: the same shape as
/// [`Block`] but without the server-assigned metadata (`BlockCommon`).
/// See <https://developers.notion.com/reference/block>
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum CreateBlock {
    Paragraph {
        paragraph: TextAndChildren,
    },
    #[serde(rename = "heading_1")]
    Heading1 {
        heading_1: Text,
    },
    #[serde(rename = "heading_2")]
    Heading2 {
        heading_2: Text,
    },
    #[serde(rename = "heading_3")]
    Heading3 {
        heading_3: Text,
    },
    Callout {
        callout: Callout,
    },
    Quote {
        quote: TextAndChildren,
    },
    BulletedListItem {
        bulleted_list_item: TextAndChildren,
    },
    NumberedListItem {
        numbered_list_item: TextAndChildren,
    },
    ToDo {
        to_do: ToDoFields,
    },
    Toggle {
        toggle: TextAndChildren,
    },
    Code {
        code: CodeFields,
    },
    ChildPage {
        child_page: ChildPageFields,
    },
    ChildDatabase {
        /// The API nests this payload under a `child_database` key (matching
        /// the variant tag), not `child_page`. The Rust field keeps its
        /// historical `child_page` name for source compatibility, so the wire
        /// name is pinned explicitly.
        #[serde(rename = "child_database")]
        child_page: ChildDatabaseFields,
    },
    Embed {
        embed: EmbedFields,
    },
    Image {
        image: FileObject,
    },
    Video {
        video: FileObject,
    },
    File {
        file: FileObject,
        caption: Text,
    },
    Pdf {
        pdf: FileObject,
    },
    Bookmark {
        bookmark: BookmarkFields,
    },
    Equation {
        equation: Equation,
    },
    Divider,
    TableOfContents {
        table_of_contents: TableOfContents,
    },
    Breadcrumb,
    ColumnList {
        column_list: ColumnListFields,
    },
    Column {
        column: ColumnFields,
    },
    LinkPreview {
        link_preview: LinkPreviewFields,
    },
    Template {
        template: TemplateFields,
    },
    LinkToPage {
        link_to_page: LinkToPageFields,
    },
    Table {
        table: TableFields,
    },
    SyncedBlock {
        synced_block: SyncedBlockFields,
    },
    TableRow {
        table_row: TableRowFields,
    },
    Unsupported,
    /// Fallback for block types this enum does not model yet.
    #[serde(other)]
    Unknown,
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/text.rs | src/models/text.rs | use crate::models::properties::DateValue;
use crate::models::users::User;
use crate::{Database, Page};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Copy, Clone)]
#[serde(rename_all = "snake_case")]
pub enum TextColor {
Default,
Gray,
Brown,
Orange,
Yellow,
Green,
Blue,
Purple,
Pink,
Red,
GrayBackground,
BrownBackground,
OrangeBackground,
YellowBackground,
GreenBackground,
BlueBackground,
PurpleBackground,
PinkBackground,
RedBackground,
}
/// Rich text annotations
/// See <https://developers.notion.com/reference/rich-text#annotations>
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Annotations {
pub bold: Option<bool>,
pub code: Option<bool>,
pub color: Option<TextColor>,
pub italic: Option<bool>,
pub strikethrough: Option<bool>,
pub underline: Option<bool>,
}
/// Properties common on all rich text objects
/// See <https://developers.notion.com/reference/rich-text#all-rich-text>
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct RichTextCommon {
pub plain_text: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub href: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub annotations: Option<Annotations>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Link {
pub url: String,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Text {
pub content: String,
pub link: Option<Link>,
}
/// See https://developers.notion.com/reference/rich-text#mention-objects
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum MentionObject {
User {
user: User,
},
// TODO: need to add tests
Page {
page: Page,
},
// TODO: need to add tests
Database {
database: Database,
},
Date {
date: DateValue,
},
// TODO: need to add LinkPreview
// LinkPreview {
//
// },
#[serde(other)]
Unknown,
}
/// Rich text objects contain data for displaying formatted text, mentions, and equations.
/// A rich text object also contains annotations for style information.
/// Arrays of rich text objects are used within property objects and property
/// value objects to create what a user sees as a single text value in Notion.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum RichText {
/// See <https://developers.notion.com/reference/rich-text#text-objects>
Text {
#[serde(flatten)]
rich_text: RichTextCommon,
text: Text,
},
/// See <https://developers.notion.com/reference/rich-text#mention-objects>
Mention {
#[serde(flatten)]
rich_text: RichTextCommon,
mention: MentionObject,
},
/// See <https://developers.notion.com/reference/rich-text#equation-objects>
Equation {
#[serde(flatten)]
rich_text: RichTextCommon,
},
}
impl RichText {
pub fn plain_text(&self) -> &str {
use RichText::*;
match self {
Text { rich_text, .. } | Mention { rich_text, .. } | Equation { rich_text, .. } => {
&rich_text.plain_text
}
}
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/error.rs | src/models/error.rs | use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
#[derive(Serialize, Deserialize, Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)]
#[serde(transparent)]
pub struct StatusCode(u16);
impl StatusCode {
pub fn code(&self) -> u16 {
self.0
}
}
impl Display for StatusCode {
    /// Formats exactly like the underlying `u16`, forwarding any width/fill
    /// flags to its `Display` implementation.
    fn fmt(
        &self,
        f: &mut Formatter<'_>,
    ) -> std::fmt::Result {
        Display::fmt(&self.0, f)
    }
}
/// <https://developers.notion.com/reference/errors>
#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)]
pub struct ErrorResponse {
pub status: StatusCode,
pub code: ErrorCode,
pub message: String,
}
/// Machine-readable error codes returned by the Notion API.
/// <https://developers.notion.com/reference/errors>
#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum ErrorCode {
    InvalidJson,
    InvalidRequestUrl,
    InvalidRequest,
    ValidationError,
    /// The API sends this code as `missing_version`. The variant name carries
    /// a historical typo, so the wire name is pinned explicitly (snake_case
    /// would otherwise derive `mission_version`, which the API never sends,
    /// making real `missing_version` errors deserialize as `Unknown`).
    /// Keeping the variant name avoids breaking callers' `match` arms.
    #[serde(rename = "missing_version")]
    MissionVersion,
    Unauthorized,
    RestrictedResource,
    ObjectNotFound,
    ConflictError,
    RateLimited,
    InternalServerError,
    ServiceUnavailable,
    /// Fallback for codes not modeled here.
    #[serde(other)] // serde issue #912
    Unknown,
}
impl Display for ErrorCode {
    /// Renders the error code via its `Debug` form, i.e. the Rust variant
    /// name (`ValidationError`), not the snake_case wire name.
    fn fmt(
        &self,
        f: &mut Formatter<'_>,
    ) -> std::fmt::Result {
        write!(f, "{:?}", self)
    }
}
#[cfg(test)]
mod tests {
use crate::models::error::{ErrorCode, ErrorResponse};
#[test]
fn deserialize_error() {
let error: ErrorResponse = serde_json::from_str(include_str!("tests/error.json")).unwrap();
assert_eq!(error.code, ErrorCode::ValidationError)
}
#[test]
fn deserialize_unknown_error() {
let error: ErrorResponse =
serde_json::from_str(include_str!("tests/unknown_error.json")).unwrap();
assert_eq!(error.code, ErrorCode::Unknown)
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/users.rs | src/models/users.rs | use crate::ids::UserId;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct UserCommon {
pub id: UserId,
pub name: Option<String>,
pub avatar_url: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct Person {
pub email: String,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct Bot {
pub email: String,
}
/// A Notion user: either a human workspace member or an integration bot.
/// Externally tagged on the wire via a snake_case `"type"` field.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum User {
    /// A human user (`"type": "person"`).
    Person {
        #[serde(flatten)]
        common: UserCommon,
        person: Person,
    },
    /// An integration bot (`"type": "bot"`).
    Bot {
        #[serde(flatten)]
        common: UserCommon,
        bot: Bot,
    },
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/mod.rs | src/models/mod.rs | pub mod block;
pub mod error;
pub mod paging;
pub mod properties;
pub mod search;
#[cfg(test)]
mod tests;
pub mod text;
pub mod users;
use crate::models::properties::{PropertyConfiguration, PropertyValue};
use crate::models::text::RichText;
use crate::Error;
use block::ExternalFileObject;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
use crate::ids::{AsIdentifier, DatabaseId, PageId};
use crate::models::block::{Block, CreateBlock, FileObject};
use crate::models::error::ErrorResponse;
use crate::models::paging::PagingCursor;
use crate::models::users::User;
pub use chrono::{DateTime, Utc};
pub use serde_json::value::Number;
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Copy, Clone)]
#[serde(rename_all = "snake_case")]
enum ObjectType {
Database,
List,
}
/// Represents a Notion Database
/// See <https://developers.notion.com/reference/database>
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Database {
/// Unique identifier for the database.
pub id: DatabaseId,
/// Date and time when this database was created.
pub created_time: DateTime<Utc>,
/// Date and time when this database was updated.
pub last_edited_time: DateTime<Utc>,
/// Name of the database as it appears in Notion.
pub title: Vec<RichText>,
/// Schema of properties for the database as they appear in Notion.
//
// key string
// The name of the property as it appears in Notion.
//
// value object
// A Property object.
pub icon: Option<IconObject>,
pub properties: HashMap<String, PropertyConfiguration>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum IconObject {
File {
#[serde(flatten)]
file: FileObject,
},
External {
external: ExternalFileObject,
},
Emoji {
emoji: String,
},
}
impl AsIdentifier<DatabaseId> for Database {
    /// Borrows this database's unique identifier.
    fn as_id(&self) -> &DatabaseId {
        &self.id
    }
}
impl Database {
    /// Concatenates the plain-text content of every rich-text segment in the
    /// database title into a single owned `String`.
    pub fn title_plain_text(&self) -> String {
        let mut text = String::new();
        for segment in &self.title {
            text.push_str(segment.plain_text());
        }
        text
    }
}
/// <https://developers.notion.com/reference/pagination#responses>
#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)]
pub struct ListResponse<T> {
pub results: Vec<T>,
pub next_cursor: Option<PagingCursor>,
pub has_more: bool,
}
impl<T> ListResponse<T> {
    /// Borrows the current page of results as a slice.
    pub fn results(&self) -> &[T] {
        self.results.as_slice()
    }
}
impl ListResponse<Object> {
pub fn only_databases(self) -> ListResponse<Database> {
let databases = self
.results
.into_iter()
.filter_map(|object| match object {
Object::Database { database } => Some(database),
_ => None,
})
.collect();
ListResponse {
results: databases,
has_more: self.has_more,
next_cursor: self.next_cursor,
}
}
pub(crate) fn expect_databases(self) -> Result<ListResponse<Database>, crate::Error> {
let databases: Result<Vec<_>, _> = self
.results
.into_iter()
.map(|object| match object {
Object::Database { database } => Ok(database),
response => Err(Error::UnexpectedResponse { response }),
})
.collect();
Ok(ListResponse {
results: databases?,
has_more: self.has_more,
next_cursor: self.next_cursor,
})
}
pub(crate) fn expect_pages(self) -> Result<ListResponse<Page>, crate::Error> {
let items: Result<Vec<_>, _> = self
.results
.into_iter()
.map(|object| match object {
Object::Page { page } => Ok(page),
response => Err(Error::UnexpectedResponse { response }),
})
.collect();
Ok(ListResponse {
results: items?,
has_more: self.has_more,
next_cursor: self.next_cursor,
})
}
pub(crate) fn expect_blocks(self) -> Result<ListResponse<Block>, crate::Error> {
let items: Result<Vec<_>, _> = self
.results
.into_iter()
.map(|object| match object {
Object::Block { block } => Ok(block),
response => Err(Error::UnexpectedResponse { response }),
})
.collect();
Ok(ListResponse {
results: items?,
has_more: self.has_more,
next_cursor: self.next_cursor,
})
}
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum Parent {
#[serde(rename = "database_id")]
Database {
database_id: DatabaseId,
},
#[serde(rename = "page_id")]
Page {
page_id: PageId,
},
Workspace,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Properties {
#[serde(flatten)]
pub properties: HashMap<String, PropertyValue>,
}
impl Properties {
    /// Scans the property map for a `Title` property and returns its content
    /// concatenated to plain text, or `None` when no title property exists.
    /// (Iteration order follows the underlying `HashMap`; a page has at most
    /// one title property, so this is deterministic in practice.)
    pub fn title(&self) -> Option<String> {
        for value in self.properties.values() {
            if let PropertyValue::Title { title, .. } = value {
                return Some(title.iter().map(|t| t.plain_text()).collect());
            }
        }
        None
    }
}
#[derive(Serialize, Debug, Eq, PartialEq)]
pub struct PageCreateRequest {
pub parent: Parent,
pub properties: Properties,
#[serde(skip_serializing_if = "Option::is_none")]
pub children: Option<Vec<CreateBlock>>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct Page {
pub id: PageId,
/// Date and time when this page was created.
pub created_time: DateTime<Utc>,
/// Date and time when this page was updated.
pub last_edited_time: DateTime<Utc>,
/// The archived status of the page.
pub archived: bool,
pub properties: Properties,
pub icon: Option<IconObject>,
pub parent: Parent,
}
impl Page {
pub fn title(&self) -> Option<String> {
self.properties.title()
}
}
impl AsIdentifier<PageId> for Page {
    /// Borrows this page's unique identifier.
    fn as_id(&self) -> &PageId {
        &self.id
    }
}
#[derive(Eq, Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(tag = "object")]
#[serde(rename_all = "snake_case")]
pub enum Object {
Block {
#[serde(flatten)]
block: Block,
},
Database {
#[serde(flatten)]
database: Database,
},
Page {
#[serde(flatten)]
page: Page,
},
List {
#[serde(flatten)]
list: ListResponse<Object>,
},
User {
#[serde(flatten)]
user: User,
},
Error {
#[serde(flatten)]
error: ErrorResponse,
},
}
impl Object {
    /// Returns `true` when this object is the `Database` variant.
    pub fn is_database(&self) -> bool {
        match self {
            Object::Database { .. } => true,
            _ => false,
        }
    }
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/block/tests.rs | src/models/block/tests.rs | #[cfg(test)]
mod tests {
use crate::ids::{BlockId, UserId};
use crate::models::block::{
Block, BlockCommon, Callout, ExternalFileObject, FileOrEmojiObject, InternalFileObject,
Text as TextBlockModel,
};
use crate::models::text::{Annotations, RichText, RichTextCommon, Text, TextColor};
use crate::models::users::UserCommon;
use crate::models::Object;
use chrono::DateTime;
use std::str::FromStr;
#[test]
fn heading_1() {
let heading_1: Block = serde_json::from_str(include_str!("tests/heading_1.json")).unwrap();
assert_eq!(
heading_1,
Block::Heading1 {
common: BlockCommon {
id: BlockId::from_str("9e891834-6a03-475c-a2b8-421e17f0f3aa").unwrap(),
created_time: DateTime::from_str("2022-05-12T21:15:00.000Z").unwrap(),
last_edited_time: DateTime::from_str("2022-05-12T22:10:00.000Z").unwrap(),
has_children: false,
created_by: UserCommon {
id: UserId::from_str("6419f912-5293-4ea8-b2c8-9c3ce44f90e3").unwrap(),
name: None,
avatar_url: None,
},
last_edited_by: UserCommon {
id: UserId::from_str("6419f912-5293-4ea8-b2c8-9c3ce44f90e3").unwrap(),
name: None,
avatar_url: None,
},
},
heading_1: TextBlockModel {
rich_text: vec![
RichText::Text {
rich_text: RichTextCommon {
plain_text: "This".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(true),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: "This".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: " ".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: " ".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: "is".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(true),
}),
},
text: Text {
content: "is".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: " ".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: " ".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: "a".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(true),
strikethrough: Some(false),
underline: Some(true),
}),
},
text: Text {
content: "a".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: " ".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: " ".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: "Heading".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(true),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: "Heading".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: " ".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: " ".to_string(),
link: None,
},
},
RichText::Text {
rich_text: RichTextCommon {
plain_text: "1".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(true),
underline: Some(false),
}),
},
text: Text {
content: "1".to_string(),
link: None,
},
},
]
},
}
)
}
#[test]
fn emoji_object() {
let emoji_object: FileOrEmojiObject =
serde_json::from_str(include_str!("tests/emoji_object.json")).unwrap();
assert_eq!(
emoji_object,
FileOrEmojiObject::Emoji {
emoji: "💡".to_string()
}
)
}
#[test]
fn file_object() {
let file_object: FileOrEmojiObject =
serde_json::from_str(include_str!("tests/file_object.json")).unwrap();
assert_eq!(file_object, FileOrEmojiObject::File {
file: InternalFileObject {
url: "https://s3.us-west-2.amazonaws.com/secure.notion-static.com/2703e742-ace5-428c-a74d-1c587ceddc32/DiRT_Rally.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Content-Sha256=UNSIGNED-PAYLOAD&X-Amz-Credential=AKIAT73L2G45EIPT3X45%2F20220513%2Fus-west-2%2Fs3%2Faws4_request&X-Amz-Date=20220513T201035Z&X-Amz-Expires=3600&X-Amz-Signature=714b49bde0b499fb8f3aae1a88a8cbd374f2b09c1d128e91cac49e85ce0e00fb&X-Amz-SignedHeaders=host&x-id=GetObject".to_string(),
expiry_time: DateTime::from_str("2022-05-13T21:10:35.817Z").unwrap(),
}
})
}
#[test]
fn external_file_object() {
let external_file_object: FileOrEmojiObject =
serde_json::from_str(include_str!("tests/external_file_object.json")).unwrap();
assert_eq!(
external_file_object,
FileOrEmojiObject::External {
external: ExternalFileObject {
url: "https://nerdist.com/wp-content/uploads/2020/07/maxresdefault.jpg"
.to_string(),
}
}
)
}
#[test]
fn callout() {
let callout: Object = serde_json::from_str(include_str!("tests/callout.json")).unwrap();
assert_eq!(
callout,
Object::Block {
block: Block::Callout {
common: BlockCommon {
id: BlockId::from_str("00e8829a-a7b8-4075-884a-8f53be145d2f").unwrap(),
created_time: DateTime::from_str("2022-05-13T20:08:00.000Z").unwrap(),
last_edited_time: DateTime::from_str("2022-05-13T20:08:00.000Z").unwrap(),
has_children: true,
created_by: UserCommon {
id: UserId::from_str("e2507360-468c-4e0f-a928-7bbcbbb45353").unwrap(),
name: None,
avatar_url: None,
},
last_edited_by: UserCommon {
id: UserId::from_str("e2507360-468c-4e0f-a928-7bbcbbb45353").unwrap(),
name: None,
avatar_url: None,
},
},
callout: Callout {
rich_text: vec![RichText::Text {
rich_text: RichTextCommon {
plain_text: "Test callout".to_string(),
href: None,
annotations: Some(Annotations {
bold: Some(false),
code: Some(false),
color: Some(TextColor::Default),
italic: Some(false),
strikethrough: Some(false),
underline: Some(false),
}),
},
text: Text {
content: "Test callout".to_string(),
link: None
},
}],
icon: FileOrEmojiObject::Emoji {
emoji: "💡".to_string()
},
color: TextColor::Green,
},
}
}
)
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/properties/tests.rs | src/models/properties/tests.rs | use crate::models::{
properties::{DateOrDateTime, RollupPropertyValue, RollupValue},
PropertyValue,
};
use chrono::NaiveDate;
#[test]
fn verify_date_parsing() {
let date = NaiveDate::from_ymd_opt(2021, 01, 02).unwrap();
let result = serde_json::to_string(&DateOrDateTime::Date(date)).unwrap();
let parsed: DateOrDateTime = serde_json::from_str(&result).unwrap();
println!("{:?}", parsed);
}
#[test]
fn parse_date_property() {
let _property: PropertyValue =
serde_json::from_str(include_str!("tests/date_property.json")).unwrap();
}
#[test]
fn parse_null_select_property() {
let _property: PropertyValue =
serde_json::from_str(include_str!("tests/null_select_property.json")).unwrap();
}
#[test]
fn parse_select_property() {
let _property: PropertyValue =
serde_json::from_str(include_str!("tests/select_property.json")).unwrap();
}
#[test]
fn parse_text_property_with_link() {
let _property: PropertyValue =
serde_json::from_str(include_str!("tests/text_with_link.json")).unwrap();
}
#[test]
fn parse_rollup_property() {
let property: PropertyValue =
serde_json::from_str(include_str!("tests/rollup_property.json")).unwrap();
assert!(matches!(
property,
PropertyValue::Rollup {
rollup: Some(RollupValue::Array { .. }),
..
}
));
if let PropertyValue::Rollup {
rollup: Some(RollupValue::Array { array }),
..
} = property
{
assert!(matches!(array[0], RollupPropertyValue::Text { .. }))
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/src/models/properties/formulas.rs | src/models/properties/formulas.rs | #[cfg(test)]
mod tests {
use crate::models::properties::{FormulaResultValue, PropertyValue};
#[test]
fn parse_number_formula_prop() {
let _property: PropertyValue =
serde_json::from_str(include_str!("tests/formula_number_value.json")).unwrap();
}
#[test]
fn parse_date_formula_prop() {
let _property: PropertyValue =
serde_json::from_str(include_str!("tests/formula_date_value.json")).unwrap();
}
#[test]
fn parse_number_formula() {
let _value: FormulaResultValue = serde_json::from_str(
r#"{
"type": "number",
"number": 0
}"#,
)
.unwrap();
}
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/examples/todo/commands.rs | examples/todo/commands.rs | pub mod configure;
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/examples/todo/main.rs | examples/todo/main.rs | mod commands;
use anyhow::{Context, Result};
use clap::Parser;
use notion::ids::DatabaseId;
use notion::NotionApi;
use serde::{Deserialize, Serialize};
// From <https://docs.rs/clap/3.0.0-beta.2/clap/>
#[derive(Parser, Debug)]
#[clap(version = "1.0", author = "Jake Swenson")]
struct Opts {
#[clap(subcommand)]
command: SubCommand,
}
#[derive(Parser, Debug)]
enum SubCommand {
/// Configure what database this notion-todo example uses
Config,
/// List all todos
List,
/// Add a todo item to the notion database
Add,
/// Complete a todo item
Check,
}
#[derive(Deserialize, Serialize)]
struct TodoConfig {
api_token: Option<String>,
task_database_id: Option<DatabaseId>,
}
#[tokio::main]
async fn main() -> Result<()> {
let opts: Opts = Opts::parse();
// https://docs.rs/config/0.11.0/config/
let config = config::Config::default()
.with_merged(config::File::with_name("todo_config"))
.unwrap_or_default()
.with_merged(config::Environment::with_prefix("NOTION"))?;
let config: TodoConfig = config.try_into().context("Failed to read config")?;
let notion_api = NotionApi::new(
std::env::var("NOTION_API_TOKEN")
.or(config
.api_token
.ok_or(anyhow::anyhow!("No api token from config")))
.context(
"No Notion API token found in either the environment variable \
`NOTION_API_TOKEN` or the config file!",
)?,
)?;
match opts.command {
SubCommand::Config => commands::configure::configure(notion_api).await,
SubCommand::List => list_tasks(notion_api),
SubCommand::Add => add_task(notion_api),
SubCommand::Check => complete_task(notion_api),
}
}
fn list_tasks(_notion_api: NotionApi) -> Result<()> {
Ok(())
}
fn add_task(_notion_api: NotionApi) -> Result<()> {
Ok(())
}
fn complete_task(_notion_api: NotionApi) -> Result<()> {
Ok(())
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
jakeswenson/notion | https://github.com/jakeswenson/notion/blob/aaff357c1b8ef6d266d68820f243122a2880fb3d/examples/todo/commands/configure.rs | examples/todo/commands/configure.rs | use crate::TodoConfig;
use anyhow::Result;
use notion::ids::{AsIdentifier, DatabaseId};
use notion::models::search::NotionSearch;
use notion::models::Database;
use notion::NotionApi;
use skim::{Skim, SkimItem, SkimItemReceiver, SkimItemSender, SkimOptions};
use std::borrow::Cow;
use std::ops::Deref;
use std::sync::Arc;
fn skim_select_database(databases: Vec<Database>) -> Result<DatabaseId> {
let options = SkimOptions::default();
let (sender, receiver): (SkimItemSender, SkimItemReceiver) = crossbeam_channel::bounded(500);
struct SkimDB {
db: Database,
}
impl SkimItem for SkimDB {
fn text(&self) -> Cow<str> {
Cow::Owned(self.db.title_plain_text())
}
}
for db in databases {
sender.send(Arc::new(SkimDB { db }))?;
}
// `run_with` would read and show items from the stream
let selected_items = Skim::run_with(&options, Some(receiver))
.filter(|out| !out.is_abort)
.map(|out| out.selected_items)
.unwrap_or_else(|| Vec::new());
let db = selected_items
.first()
.expect("No database selected, aborting...")
.clone();
let db: &SkimDB = db
.deref()
.as_any()
.downcast_ref()
.expect("Couldn't cast back to SkimDB");
let database_id = db.db.as_id();
Ok(database_id.clone())
}
pub async fn configure(notion_api: NotionApi) -> Result<()> {
let databases: Vec<Database> = notion_api
.search(NotionSearch::filter_by_databases())
.await?
.only_databases()
.results;
let database_id = skim_select_database(databases)?;
println!("Selected database's id: {}", database_id);
let bytes = toml::to_vec(&TodoConfig {
api_token: None,
task_database_id: Some(database_id),
})?;
std::fs::write("../todo_config.toml", bytes)?;
Ok(())
}
| rust | MIT | aaff357c1b8ef6d266d68820f243122a2880fb3d | 2026-01-04T20:18:07.998626Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/build.rs | src-tauri/build.rs | fn main() {
tauri_build::build()
}
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/lib.rs | src-tauri/src/lib.rs | mod command;
use std::sync::{Arc, Mutex};
mod utils;
use command::model::ServerState;
use tauri::menu::*;
pub fn run() {
tauri::Builder::default()
.manage(Arc::new(Mutex::new(ServerState {
server_handle: None,
})))
.menu(|handle| {
let menu = Menu::with_items(
handle,
&[
#[cfg(target_os = "macos")]
&Submenu::with_items(
handle,
"Edit",
true,
&[
&PredefinedMenuItem::undo(handle, None)?,
&PredefinedMenuItem::redo(handle, None)?,
&PredefinedMenuItem::cut(handle, None)?,
&PredefinedMenuItem::copy(handle, None)?,
&PredefinedMenuItem::paste(handle, None)?,
&PredefinedMenuItem::select_all(handle, None)?,
],
)?,
],
);
menu
})
.plugin(tauri_plugin_os::init())
.plugin(tauri_plugin_fs::init())
.plugin(tauri_plugin_dialog::init())
.plugin(tauri_plugin_http::init())
.plugin(tauri_plugin_process::init())
.plugin(tauri_plugin_clipboard_manager::init())
.plugin(tauri_plugin_updater::Builder::new().build())
.plugin(tauri_plugin_store::Builder::default().build())
.invoke_handler(tauri::generate_handler![
command::cmds::open_window,
command::cmds::preview_from_config,
command::cmds::update_build_file,
command::cmds::update_config_file,
command::cmds::update_cargo_file,
command::cmds::update_main_rust,
command::cmds::rust_lib_window,
command::cmds::update_custom_js,
command::cmds::get_custom_js,
command::cmds::content_to_base64,
command::cmds::update_config_json,
command::cmds::rust_main_window,
command::cmds::open_url,
command::cmds::open_devtools,
command::cmds::update_init_rs,
command::cmds::start_server,
command::cmds::stop_server,
command::cmds::get_machine_uid,
command::cmds::compress_folder,
command::cmds::decompress_file,
command::cmds::download_file,
command::cmds::notification,
command::cmds::run_command,
command::cmds::get_env_var,
command::cmds::find_port,
command::cmds::get_exe_dir,
command::cmds::windows_build,
command::cmds::macos_build,
command::cmds::linux_build,
command::cmds::build_local,
])
.setup(|app| {
tauri::async_runtime::block_on(async move {
let _ = utils::init::resolve_setup(app).await;
});
Ok(())
})
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/main.rs | src-tauri/src/main.rs | // Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
fn main() {
PacBao_lib::run()
}
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/command/model.rs | src-tauri/src/command/model.rs | pub struct ServerState {
pub server_handle: Option<tokio::task::JoinHandle<()>>,
}
#[derive(serde::Serialize)]
pub struct ProgressPayload {
pub file_id: String,
pub downloaded: u64,
pub total: Option<u64>,
}
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/command/mod.rs | src-tauri/src/command/mod.rs | pub mod cmds;
pub mod model;
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/command/cmds.rs | src-tauri/src/command/cmds.rs | use crate::command::model::ServerState;
use base64::prelude::*;
use futures::StreamExt;
use notify_rust::Notification;
use reqwest::Client;
use serde::Serialize;
use std::env;
use std::fs;
use std::fs::File;
use std::io;
use std::io::Read;
use std::io::Write;
use std::net::TcpListener;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::{Arc, Mutex};
use std::time::Instant;
use tauri::WindowEvent;
use tauri::{
path::BaseDirectory, utils::config::WindowConfig, AppHandle, Emitter, LogicalSize, Manager,
};
use tauri_plugin_http::reqwest;
use tokio::time::{sleep, Duration};
use walkdir::WalkDir;
use warp::Filter;
use zip::write::FileOptions;
use zip::ZipArchive;
use zip::ZipWriter;
#[tauri::command]
pub async fn start_server(
state: tauri::State<'_, Arc<Mutex<ServerState>>>,
path: String,
) -> Result<u16, String> {
let mut state = state.lock().unwrap();
if state.server_handle.is_some() {
return Err("Server is already running".into());
}
let path_clone = path.clone();
let port = find_port().unwrap();
// println!("port: {}", port);
let server_handle = tokio::spawn(async move {
let route = warp::fs::dir(path_clone)
.map(|reply| {
warp::reply::with_header(
reply,
"Cache-Control",
"no-store, no-cache, must-revalidate, max-age=0",
)
})
.map(|reply| warp::reply::with_header(reply, "Vary", "*"))
.map(|reply| warp::reply::with_header(reply, "Surrogate-Control", "no-store"))
.map(|reply| warp::reply::with_header(reply, "Pragma", "no-cache"))
.map(|reply| warp::reply::with_header(reply, "Expires", "0"));
warp::serve(route).run(([127, 0, 0, 1], port)).await;
});
state.server_handle = Some(server_handle);
Ok(port)
}
#[tauri::command]
pub async fn stop_server(state: tauri::State<'_, Arc<Mutex<ServerState>>>) -> Result<(), String> {
let mut state = state.lock().unwrap();
if let Some(handle) = state.server_handle.take() {
handle.abort();
Ok(())
} else {
Err("Server is not running".into())
}
}
#[tauri::command]
pub async fn open_window(
handle: AppHandle,
app_url: String,
app_name: String,
platform: String,
user_agent: String,
resize: bool,
width: f64,
height: f64,
js_content: String,
) {
let window_label = "previewWeb";
if let Some(existing_window) = handle.get_webview_window(window_label) {
if resize {
let new_size = LogicalSize::new(width, height);
match existing_window.set_size(new_size) {
Ok(_) => println!("Window resized to {}x{}", width, height),
Err(e) => eprintln!("Failed to resize window: {}", e),
}
} else {
existing_window.close().unwrap();
println!("Existing window closed.");
let start = Instant::now();
while handle.get_webview_window(window_label).is_some() {
if start.elapsed().as_secs() > 2 {
println!("Window close took too long. Aborting.");
return;
}
std::thread::yield_now();
}
}
}
println!("Opening docs in external window: {}, {}", app_url, platform);
let resource_path = handle
.path()
.resolve("data/custom.js", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut custom_js = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
custom_js.read_to_string(&mut contents).unwrap();
contents += js_content.as_str();
println!("js file contents: {}", contents);
if !resize {
let _window = tauri::WebviewWindowBuilder::new(
&handle,
"previewWeb", /* the unique window label */
tauri::WebviewUrl::External(app_url.parse().unwrap()),
)
.title(app_name)
.inner_size(width, height)
.user_agent(user_agent.as_str())
.center()
.build()
.unwrap();
}
}
#[tauri::command]
pub async fn preview_from_config(
handle: AppHandle,
resize: bool,
config: WindowConfig,
js_content: String,
devbug: bool,
) {
let window_label = "PreView";
if let Some(existing_window) = handle.get_webview_window(window_label) {
if resize {
let new_size = LogicalSize::new(config.width, config.height);
match existing_window.set_size(new_size) {
Ok(_) => println!("Window resized to {}x{}", config.width, config.height),
Err(e) => eprintln!("Failed to resize window: {}", e),
}
} else {
existing_window.close().unwrap();
let start = Instant::now();
while handle.get_webview_window(window_label).is_some() {
if start.elapsed().as_secs() > 2 {
println!("Window close took too long. Aborting.");
return;
}
std::thread::yield_now();
}
}
}
let mut contents = String::new();
if devbug {
contents += include_str!("../../data/vconsole.min.js");
contents += "var vConsole = new window.VConsole();";
}
// custom js
contents += js_content.as_str();
if !resize {
let pre_window = tauri::WebviewWindowBuilder::from_config(&handle, &config)
.unwrap()
.initialization_script(contents.as_str())
.build()
.unwrap();
pre_window.on_window_event(move |event| {
if let WindowEvent::Destroyed = event {
handle.emit("stop_server", "0").unwrap();
}
});
}
}
#[tauri::command]
pub async fn update_build_file(handle: tauri::AppHandle, name: String, body: String) -> String {
let resource_path = handle
.path()
.resolve("data/build.yml", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut build_file = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
build_file.read_to_string(&mut contents).unwrap();
contents = contents
.replace("PROJECTNAME", name.as_str())
.replace("RELEASEBODY", body.as_str());
// println!("Updated build file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn update_config_file(
handle: tauri::AppHandle,
name: String,
version: String,
id: String,
ascii: bool,
window_config: String,
tauri_api: bool,
) -> String {
let resource_path = handle
.path()
.resolve("data/config.json", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut config_file = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
config_file.read_to_string(&mut contents).unwrap();
contents = contents
.replace("PROJECTNAME", name.as_str())
.replace("PROJECTVERSION", version.as_str())
.replace("PROJECTID", id.as_str());
if tauri_api {
contents = contents.replace("-2", r#"true"#);
} else {
contents = contents.replace("-2", r#"false"#);
}
if ascii {
contents = contents.replace("-3", r#""all""#);
} else {
contents = contents.replace("-3", r#"["deb", "appimage", "nsis", "app", "dmg"]"#);
}
contents = contents.replace("-1", window_config.as_str());
// println!("Updated config file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn update_config_json(
handle: tauri::AppHandle,
name: String,
version: String,
id: String,
ascii: bool,
) -> String {
let resource_path = handle
.path()
.resolve("data/config.json", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut config_file = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
config_file.read_to_string(&mut contents).unwrap();
contents = contents
.replace("PROJECTNAME", name.as_str())
.replace("PROJECTVERSION", version.as_str())
.replace("PROJECTID", id.as_str());
if ascii {
contents = contents.replace("-3", r#""all""#);
} else {
contents = contents.replace("-3", r#"["deb", "appimage", "nsis", "app", "dmg"]"#);
}
// println!("Updated config file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn update_cargo_file(
handle: tauri::AppHandle,
name: String,
version: String,
desc: String,
debug: bool,
) -> String {
let resource_path = handle
.path()
.resolve("data/cargo.txt", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut config_file = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
config_file.read_to_string(&mut contents).unwrap();
contents = contents
.replace("PROJECTNAME", name.as_str())
.replace("PROJECTVERSION", version.as_str())
.replace("PROJECTDESC", desc.as_str());
if debug {
// "shell-open", "devtools"
contents = contents.replace("-3", r#""protocol-asset", "devtools""#);
} else {
contents = contents.replace("-3", r#""protocol-asset""#);
}
// println!("Updated config file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn update_main_rust(
handle: tauri::AppHandle,
app_url: String,
app_name: String,
user_agent: String,
width: f64,
height: f64,
) -> String {
let resource_path = handle
.path()
.resolve("data/lib.rs", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut main_rust = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
main_rust.read_to_string(&mut contents).unwrap();
contents = contents
.replace("PROJECTNAME", app_name.as_str())
.replace("PROJECTURL", app_url.as_str())
.replace("PROJECTUSERAGENT", user_agent.as_str())
.replace("-1", width.to_string().as_str())
.replace("-2", height.to_string().as_str());
// println!("Updated config file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn rust_main_window(handle: tauri::AppHandle, config: String) -> String {
let resource_path = handle
.path()
.resolve("data/main.rs", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut main_rust = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
main_rust.read_to_string(&mut contents).unwrap();
// test replace
contents = contents.replace("WINDOWCONFIG", config.as_str());
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn rust_lib_window(handle: tauri::AppHandle, config: String) -> String {
let resource_path = handle
.path()
.resolve("data/lib.rs", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut main_rust = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
main_rust.read_to_string(&mut contents).unwrap();
contents = contents.replace("WINDOWCONFIG", config.as_str());
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn get_custom_js(handle: tauri::AppHandle) -> String {
let resource_path = handle
.path()
.resolve("data/custom.js", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut custom_js = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
custom_js.read_to_string(&mut contents).unwrap();
return contents;
}
#[tauri::command]
pub async fn update_custom_js(_: tauri::AppHandle, js_content: String) -> String {
// let resource_path = handle
// .path()
// .resolve("data/custom.js", BaseDirectory::Resource)
// .expect("failed to resolve resource");
// let mut custom_js = std::fs::File::open(&resource_path).unwrap();
// let mut contents = String::new();
// custom_js.read_to_string(&mut contents).unwrap();
// contents += js_content.as_str();
// println!("Updated config file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(js_content);
return encoded_contents;
}
#[tauri::command]
pub async fn content_to_base64(_: tauri::AppHandle, content: String) -> String {
// println!("Updated config file: {}", contents);
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(content);
return encoded_contents;
}
#[tauri::command]
pub async fn open_url(_: tauri::AppHandle, url: String) {
open::that(url).unwrap();
}
// open devtools
#[tauri::command]
pub async fn open_devtools(handle: AppHandle) {
if let Some(_) = handle.get_webview_window("main") {
println!("open devtools");
// existing_window.open_devtools();
}
}
#[tauri::command]
pub async fn update_init_rs(handle: tauri::AppHandle, config: String, state: bool) -> String {
let resource_path = handle
.path()
.resolve("data/init.rs", BaseDirectory::Resource)
.expect("failed to resolve resource");
let mut main_rust = std::fs::File::open(&resource_path).unwrap();
let mut contents = String::new();
main_rust.read_to_string(&mut contents).unwrap();
contents = contents.replace("WINDOWCONFIG", config.as_str());
// 替换state
if state {
println!("state: true");
} else {
contents = contents.replace("if true {", "if false {");
}
// The new file content, using Base64 encoding
let encoded_contents = BASE64_STANDARD.encode(contents);
return encoded_contents;
}
#[tauri::command]
pub async fn run_command(command: String) -> Result<String, String> {
#[cfg(target_os = "windows")]
let output = tokio::process::Command::new("powershell")
.arg("-Command")
.arg(&command)
.creation_flags(0x08000000)
.output()
.await
.map_err(|e| e.to_string())?;
#[cfg(not(target_os = "windows"))]
let output = tokio::process::Command::new("sh")
.arg("-c")
.arg(&command)
.output()
.await
.map_err(|e| e.to_string())?;
if output.status.success() {
#[cfg(target_os = "windows")]
{
use encoding_rs::GBK;
let (decoded, _, _) = GBK.decode(&output.stdout);
Ok(decoded.into_owned())
}
#[cfg(not(target_os = "windows"))]
{
Ok(String::from_utf8_lossy(&output.stdout).to_string())
}
} else {
#[cfg(target_os = "windows")]
{
use encoding_rs::GBK;
let (decoded, _, _) = GBK.decode(&output.stderr);
Err(decoded.into_owned())
}
#[cfg(not(target_os = "windows"))]
{
Err(String::from_utf8_lossy(&output.stderr).to_string())
}
}
}
#[tauri::command]
pub fn get_machine_uid() -> String {
let uid: String = machine_uid::get().unwrap();
uid
}
fn zip_folder(src_path: &str, dst_path: &str) -> std::io::Result<()> {
let file = File::create(dst_path)?;
let mut zip = ZipWriter::new(file);
print!("src_path = {src_path}");
print!("dst_path = {dst_path}");
let options: FileOptions<()> =
FileOptions::default().compression_method(zip::CompressionMethod::Deflated);
let src_path = Path::new(src_path);
let walkdir = WalkDir::new(src_path);
let it = walkdir.into_iter();
for entry in it.filter_map(|e| e.ok()) {
let path = entry.path();
let name = path.strip_prefix(src_path).unwrap().to_str().unwrap();
if path.is_file() {
zip.start_file(name, options)?;
let mut f = File::open(path)?;
std::io::copy(&mut f, &mut zip)?;
} else if !name.is_empty() {
zip.add_directory(name, options)?;
}
}
zip.finish()?;
Ok(())
}
fn unzip_file(src_path: &str, dst_path: &str) -> std::io::Result<()> {
let file = File::open(src_path)?;
let mut archive = ZipArchive::new(file)?;
let dst_path = Path::new(dst_path);
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let outpath = dst_path.join(file.mangled_name());
if file.name().ends_with('/') {
std::fs::create_dir_all(&outpath)?;
} else {
if let Some(p) = outpath.parent() {
if !p.exists() {
std::fs::create_dir_all(p)?;
}
}
let mut outfile = File::create(&outpath)?;
std::io::copy(&mut file, &mut outfile)?;
}
}
Ok(())
}
#[tauri::command]
pub async fn compress_folder(source: String, destination: String) -> Result<(), String> {
zip_folder(&source, &destination).map_err(|e| e.to_string())
}
#[tauri::command]
pub async fn decompress_file(source: String, destination: String) -> Result<(), String> {
unzip_file(&source, &destination).map_err(|e| e.to_string())
}
#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
struct DownloadProgress {
file_id: String,
downloaded: u64,
total: u64,
}
#[tauri::command]
pub async fn download_file(
app: AppHandle,
url: String,
save_path: String,
file_id: String,
) -> Result<(), String> {
let client = Client::new();
let resp = client.get(&url).send().await.map_err(|e| e.to_string())?;
// if save path is empty
let mut save_path = save_path;
let file_name = url.split('/').last().unwrap();
if save_path.is_empty() {
let file_path = app
.path()
.resolve(file_name, BaseDirectory::Download)
.expect("failed to resolve resource");
save_path = file_path.to_str().unwrap().to_string();
}
let total_size = resp.content_length();
let mut stream = resp.bytes_stream();
let mut file = File::create(&save_path).map_err(|e| e.to_string())?;
let mut downloaded: u64 = 0;
while let Some(chunk) = stream.next().await {
let chunk = chunk.map_err(|e| e.to_string())?;
file.write_all(&chunk).map_err(|e| e.to_string())?;
downloaded += chunk.len() as u64;
app.emit(
"download_progress",
DownloadProgress {
file_id: file_id.clone(),
downloaded,
total: total_size.unwrap_or(0),
},
)
.unwrap();
}
Ok(())
}
#[derive(serde::Deserialize)]
pub struct NotificationParams {
title: String,
body: String,
icon: String,
}
#[tauri::command]
pub fn notification(app: AppHandle, params: NotificationParams) -> Result<(), String> {
let mut notifi_app = Notification::new();
#[cfg(target_os = "macos")]
{
let _ = notify_rust::set_application(if tauri::is_dev() {
"com.apple.Terminal"
} else {
&app.config().identifier
});
}
#[cfg(windows)]
{
use std::path::MAIN_SEPARATOR as SEP;
let curr_dir = get_exe_dir(true);
// set the notification's System.AppUserModel.ID only when running the installed app
if !(curr_dir.ends_with(format!("{SEP}target{SEP}debug").as_str())
|| curr_dir.ends_with(format!("{SEP}target{SEP}release").as_str()))
{
notifi_app.app_id(&app.config().identifier);
}
}
if !params.icon.is_empty() {
notifi_app.icon(¶ms.icon);
} else {
notifi_app.auto_icon();
}
tauri::async_runtime::spawn(async move {
let _ = notifi_app
.summary(¶ms.title)
.body(¶ms.body)
.show()
.expect("show notification failed");
});
Ok(())
}
#[tauri::command]
pub fn get_exe_dir(parent: bool) -> String {
let exe_dir = env::current_exe().unwrap();
if parent {
exe_dir.parent().unwrap().to_str().unwrap().to_string()
} else {
exe_dir.to_str().unwrap().to_string()
}
}
// load man.json
pub fn load_man(base_dir: &str) -> Result<String, io::Error> {
let mut man_path = PathBuf::from(base_dir);
man_path.push("config");
man_path.push("man");
match fs::read_to_string(&man_path) {
Ok(man_base64) => match BASE64_STANDARD.decode(man_base64.trim()) {
Ok(decoded_bytes) => match String::from_utf8(decoded_bytes) {
Ok(decoded_str) => Ok(decoded_str),
Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),
},
Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e)),
},
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(String::new()),
Err(e) => Err(e),
}
}
// server config www dir
/// If `base_dir/config/www` exists and is non-empty, serve it over a local
/// warp server on an ephemeral port and return its `http://127.0.0.1:{port}`
/// URL; otherwise return an empty string.
#[tauri::command]
pub fn get_www_dir(base_dir: &str) -> Result<String, io::Error> {
    let mut www_dir = PathBuf::from(base_dir);
    www_dir.push("config");
    www_dir.push("www");
    if fs::metadata(&www_dir).is_ok() {
        let files = fs::read_dir(&www_dir)?;
        // Only start a server when the directory actually contains files.
        if files.count() > 0 {
            // NOTE(review): find_port() binds then drops the listener, so the
            // port is free again before warp binds it — a small race window in
            // which another process could take the port. Confirm acceptable.
            let port = find_port().unwrap();
            let route = warp::fs::dir(www_dir);
            // Serve in the background for the lifetime of the process.
            tokio::spawn(async move {
                warp::serve(route).run(([127, 0, 0, 1], port)).await;
            });
            return Ok(format!("http://127.0.0.1:{}", port));
        } else {
            return Ok(String::new());
        }
    }
    Ok(String::new())
}
// get config custom js
/// Read the injected-JS file `base_dir/config/inject/custom.js`.
///
/// Returns the file's content, or an empty string when the file does not
/// exist. Other read failures (e.g. permission errors) are propagated.
///
/// Reads directly and matches on the error kind instead of the previous
/// `fs::metadata` pre-check, which was racy (TOCTOU) and silently returned
/// an empty string for *any* metadata failure. This mirrors how `load_man`
/// treats a missing file.
#[tauri::command]
pub fn get_config_js(base_dir: &str) -> Result<String, io::Error> {
    let mut config_path = PathBuf::from(base_dir);
    config_path.push("config");
    config_path.push("inject");
    config_path.push("custom.js");
    match fs::read_to_string(&config_path) {
        Ok(content) => Ok(content),
        // Missing file: no custom JS configured — not an error.
        Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(String::new()),
        Err(e) => Err(e),
    }
}
/// Look up an environment variable, mapping lookup failures to their
/// display string (missing variable or non-UTF-8 value).
#[tauri::command]
pub fn get_env_var(name: String) -> Result<String, String> {
    match std::env::var(name) {
        Ok(value) => Ok(value),
        Err(err) => Err(err.to_string()),
    }
}
/// Find a free TCP port on localhost by binding to port 0 and reading back
/// the OS-assigned port. The listener is dropped on return, so the port is
/// only *likely* free for a subsequent bind.
///
/// Propagates bind/address failures as `Err` instead of panicking — the
/// signature already promised `Result`, but the previous body `unwrap()`ed.
#[tauri::command]
pub fn find_port() -> Result<u16, String> {
    let listener = TcpListener::bind("127.0.0.1:0").map_err(|e| e.to_string())?;
    let port = listener.local_addr().map_err(|e| e.to_string())?.port();
    Ok(port)
}
// copy dir all
/// Recursively copy the contents of `src` into `dst`, creating `dst` (and
/// any intermediate directories) as needed. Files are copied; directories
/// recurse.
///
/// Errors are returned as `Err(String)` rather than panicking — the previous
/// body used `expect` throughout, which aborted the command instead of
/// reporting the failure through the declared `Result`.
#[tauri::command]
pub fn copy_dir(src: &Path, dst: &Path) -> Result<(), String> {
    if !dst.exists() {
        fs::create_dir_all(dst).map_err(|e| format!("create dst dir failed: {}", e))?;
    }
    for entry in fs::read_dir(src).map_err(|e| format!("read src dir failed: {}", e))? {
        let entry = entry.map_err(|e| format!("read src dir entry failed: {}", e))?;
        let ty = entry
            .file_type()
            .map_err(|e| format!("read src dir entry file type failed: {}", e))?;
        if ty.is_dir() {
            copy_dir(&entry.path(), &dst.join(entry.file_name()))?;
        } else {
            fs::copy(entry.path(), dst.join(entry.file_name()))
                .map_err(|e| format!("copy file failed: {}", e))?;
        }
    }
    Ok(())
}
/// Package a Windows build: lays out `base_dir/exe_name/config/{inject,www,man}`
/// with the injected JS, optional static HTML payload, and the base64 manifest,
/// then invokes Resource Hacker (`data/rh.exe` next to the current executable)
/// with the generated script.
///
/// Fix: the original body copied the HTML payload into `config/www` twice with
/// identical arguments (two verbatim duplicate blocks); the copy now happens
/// once. Panicking `expect`/`unwrap` calls are replaced with `Err` returns.
#[tauri::command]
pub async fn windows_build(
    base_dir: &str,
    exe_name: &str,
    config: String,
    custom_js: String,
    html_path: String,
    script_path: String,
) -> Result<(), String> {
    let base_path = Path::new(base_dir).join(exe_name);
    if !base_path.exists() {
        fs::create_dir_all(&base_path).map_err(|e| e.to_string())?;
    }
    let config_dir = base_path.join("config").join("inject");
    if !config_dir.exists() {
        fs::create_dir_all(&config_dir).map_err(|e| e.to_string())?;
    }
    // Optional static site payload, copied once into config/www.
    let www_dir = base_path.join("config").join("www");
    if !html_path.is_empty() {
        let html_dir = Path::new(&html_path);
        if html_dir.exists() {
            copy_dir(html_dir, &www_dir)?;
        }
    }
    let custom_js_path = config_dir.join("custom.js");
    fs::write(custom_js_path, custom_js).map_err(|e| e.to_string())?;
    // `config` is the base64-encoded manifest produced upstream.
    let man_path = base_path.join("config").join("man");
    fs::write(man_path, config).map_err(|e| e.to_string())?;
    let exe_path = env::current_exe().map_err(|e| e.to_string())?;
    let exe_dir = exe_path.parent().ok_or("executable has no parent dir")?;
    let rhexe_dir = exe_dir.join("data").join("rh.exe");
    let rh_command = format!(
        "& \"{}\" -script \"{}\"",
        rhexe_dir.to_str().ok_or("rh.exe path is not valid UTF-8")?,
        script_path
    );
    run_command(rh_command).await?;
    Ok(())
}
/// Package a macOS `.app` bundle under `base_dir`.
///
/// Creates the `Contents/` layout, copies the *currently running* executable
/// in as the bundle binary (named `PacBao`), copies `Info.plist` from the
/// directory above the executable, writes the base64 manifest and injected JS,
/// optionally converts the supplied PNG into an `.icns`, and finally renames
/// the staging directory `<exe_name>` to `<exe_name>.app`, replacing any
/// previous bundle. Failures panic via `expect` rather than returning `Err`.
#[tauri::command]
pub async fn macos_build(
    base_dir: &str,
    exe_name: &str,
    config: String,
    base64_png: String,
    custom_js: String,
    html_path: String,
) -> Result<(), String> {
    let base_path = Path::new(base_dir).join(exe_name);
    let app_dir = base_path.join("Contents");
    if !app_dir.exists() {
        fs::create_dir_all(&app_dir).expect("create app dir failed");
    }
    let config_dir = base_path.join("Contents/MacOS/config/inject");
    let resources_dir = base_path.join("Contents/Resources");
    if !config_dir.exists() {
        fs::create_dir_all(&config_dir).expect("create config dir failed");
    }
    if !resources_dir.exists() {
        fs::create_dir_all(&resources_dir).expect("create resources dir failed");
    }
    // Optional static site payload served by the packaged app.
    let www_dir = base_path.join("Contents/MacOS/config/www");
    if !html_path.is_empty() {
        let html_dir = Path::new(&html_path);
        if html_dir.exists() {
            copy_dir(html_dir, &www_dir).expect("copy html dir failed");
        }
    }
    let custom_js_path = config_dir.join("custom.js");
    fs::write(custom_js_path, custom_js).expect("write custom.js failed");
    // The bundle reuses the currently running executable as its binary;
    // Info.plist is expected one directory above it.
    let exe_path = env::current_exe().unwrap();
    let exe_dir = exe_path.parent().unwrap();
    let exe_parent_dir = exe_dir.parent().unwrap();
    let info_plist_source = exe_parent_dir.join("Info.plist");
    let info_plist_target = base_path.join("Contents/Info.plist");
    fs::copy(&info_plist_source, &info_plist_target).expect("copy info.plist failed");
    let pp_app_target = base_path.join("Contents/MacOS/PacBao");
    fs::copy(&exe_path, &pp_app_target).expect("copy PacBao app failed");
    // `config` is the base64-encoded manifest produced by `build_local`.
    let man_path = base_path.join("Contents/MacOS/config/man");
    fs::write(man_path, config).expect("write man failed");
    if !base64_png.is_empty() {
        // Strip the data-URL prefix before handing the raw base64 to sips/iconutil.
        let _ = png_to_icns(
            base64_png.replace("data:image/png;base64,", ""),
            resources_dir.to_str().unwrap().to_string(),
        )
        .expect("convert png to icns failed");
    }
    // Replace any previous bundle, then give the staging dir its final .app name.
    let base_app = Path::new(base_dir).join(format!("{}.app", exe_name));
    if base_app.exists() {
        fs::remove_dir_all(&base_app).expect("delete old app failed");
    }
    fs::rename(base_path, base_app).expect("rename app failed");
    Ok(())
}
/// Linux packaging entry point.
///
/// TODO(review): not implemented yet — this command accepts the same argument
/// shape as `windows_build`/`macos_build` so `build_local` can call it
/// uniformly, but it currently succeeds as a no-op (all parameters unused).
#[tauri::command]
pub async fn linux_build(
    base_dir: &str,
    exe_name: &str,
    config: String,
    base64_png: String,
    custom_js: String,
    html_path: String,
) -> Result<(), String> {
    Ok(())
}
/// Orchestrate a local package build for the current OS.
///
/// Reads the bundled `data/man.json` template, overlays the caller's window
/// config / debug flag / project name, base64-encodes the resulting manifest,
/// and dispatches to the platform-specific builder (`windows_build`,
/// `macos_build`, or `linux_build`). Progress percentages are emitted to the
/// frontend on the `local-progress` event channel.
#[tauri::command]
pub async fn build_local(
    handle: AppHandle,
    target_dir: &str,
    project_name: &str,
    exe_name: &str,
    config: WindowConfig,
    base64_png: String,
    debug: bool,
    custom_js: String,
    html_path: String,
) -> Result<(), String> {
    handle.emit("local-progress", "10").unwrap();
    let resource_path = handle
        .path()
        .resolve("data/man.json", BaseDirectory::Resource)
        .expect("failed to resolve resource");
    handle.emit("local-progress", "20").unwrap();
    let man_json = fs::read_to_string(&resource_path).expect("read man.json failed");
    handle.emit("local-progress", "30").unwrap();
    // Overlay the caller's settings on the bundled manifest template.
    let mut man_json =
        serde_json::from_str::<serde_json::Value>(&man_json).expect("parse man.json failed");
    man_json["window"] = serde_json::to_value(config).unwrap();
    man_json["debug"] = serde_json::to_value(debug).unwrap();
    man_json["name"] = serde_json::to_value(project_name).unwrap();
    man_json["visible"] = serde_json::to_value(false).unwrap();
    #[cfg(target_os = "windows")]
    {
        // Windows embeds the icon in the manifest (stripped of its data-URL prefix).
        if !base64_png.is_empty() {
            man_json["icon"] =
                serde_json::to_value(base64_png.replace("data:image/png;base64,", "")).unwrap();
        }
    }
    // The manifest is shipped base64-encoded; see `load_man` for the decode side.
    let man_json_base64 = BASE64_STANDARD.encode(man_json.to_string());
    handle.emit("local-progress", "40").unwrap();
    #[cfg(target_os = "windows")]
    {
        let script_path = handle
            .path()
            .resolve("rhscript.txt", BaseDirectory::AppData)
            .expect("failed to resolve resource");
        windows_build(
            target_dir,
            exe_name,
            man_json_base64,
            custom_js,
            html_path,
            script_path.to_str().unwrap().to_string(),
        )
        .await?;
    }
    handle.emit("local-progress", "60").unwrap();
    #[cfg(target_os = "macos")]
    macos_build(
        target_dir,
        exe_name,
        man_json_base64,
        base64_png,
        custom_js,
        html_path,
    )
    .await?;
    handle.emit("local-progress", "80").unwrap();
    #[cfg(target_os = "linux")]
    linux_build(
        target_dir,
        exe_name,
        man_json_base64,
        base64_png,
        custom_js,
        html_path,
    )
    .await?;
    handle.emit("local-progress", "100").unwrap();
    Ok(())
}
#[tauri::command]
pub fn png_to_icns(base64_png: String, output_dir: String) -> Result<(), String> {
let iconset_path = format!("{}/temp.iconset", output_dir);
if Path::new(&iconset_path).exists() {
fs::remove_dir_all(&iconset_path)
.map_err(|e| format!("delete old iconset dir failed: {}", e))?;
}
fs::create_dir_all(&iconset_path).map_err(|e| format!("create iconset dir failed: {}", e))?;
let png_data = BASE64_STANDARD
.decode(&base64_png)
.map_err(|e| format!("decode base64 png failed: {}", e))?;
let input_png_path = format!("{}/icon.png", output_dir);
let mut png_file =
File::create(&input_png_path).map_err(|e| format!("write png failed: {}", e))?;
png_file
.write_all(&png_data)
.map_err(|e| format!("write png content failed: {}", e))?;
let sizes = vec![16, 32, 128, 256, 512];
for size in sizes {
let double = size * 2;
let filename = format!("{}/icon_{}x{}.png", iconset_path, size, size);
let filename2x = format!("{}/icon_{}x{}@2x.png", iconset_path, size, size);
let status1 = Command::new("sips")
.args([
"-z",
&size.to_string(),
&size.to_string(),
&input_png_path,
"--out",
&filename,
])
.status()
.map_err(|e| format!("execute sips failed: {}", e))?;
let status2 = Command::new("sips")
.args([
"-z",
&double.to_string(),
&double.to_string(),
&input_png_path,
"--out",
&filename2x,
])
.status()
.map_err(|e| format!("execute sips 2x failed: {}", e))?;
if !status1.success() || !status2.success() {
return Err("sips convert failed".into());
}
}
let icns_path = format!("{}/icon.icns", output_dir);
let status = Command::new("iconutil")
.args(["-c", "icns", &iconset_path, "-o", &icns_path])
.status()
.map_err(|e| format!("execute iconutil failed: {}", e))?;
if !status.success() {
return Err("iconutil convert failed".into());
}
let _ = fs::remove_file(&input_png_path);
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | true |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/utils/mod.rs | src-tauri/src/utils/mod.rs | pub mod init; | rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
Sjj1024/PacBao | https://github.com/Sjj1024/PacBao/blob/23ef96a8a12be8536587548611ceeab76e800c54/src-tauri/src/utils/init.rs | src-tauri/src/utils/init.rs | use crate::command::cmds::{get_config_js, get_exe_dir, get_www_dir, load_man};
use base64::{prelude::BASE64_STANDARD, Engine};
use serde::{Deserialize, Serialize};
use serde_json::{json, Error, Value};
use tauri::{utils::config::WindowConfig, App, Url, WebviewUrl, WindowEvent};
use tauri_plugin_store::StoreExt;
#[derive(Debug, Serialize, Deserialize)]
pub struct Man {
pub name: String,
pub version: String,
pub description: String,
pub author: String,
pub license: String,
pub window: WindowConfig,
pub debug: bool,
pub icon: String,
}
/// Append a percent-encoded `args` query parameter to `original_url`,
/// choosing `&` when the URL already has a query string and `?` otherwise.
pub fn append_param(original_url: &str, value: &str) -> String {
    let sep = match original_url.contains('?') {
        true => "&",
        false => "?",
    };
    let mut result = String::from(original_url);
    result.push_str(sep);
    result.push_str("args=");
    result.push_str(&url_encode(value));
    result
}
/// Percent-encode `input` byte-by-byte, leaving only the RFC 3986
/// unreserved characters (ALPHA / DIGIT / `-` `_` `.` `~`) unescaped.
/// Hex escapes use uppercase digits (e.g. space -> `%20`).
pub fn url_encode(input: &str) -> String {
    let mut encoded = String::with_capacity(input.len());
    for byte in input.bytes() {
        let unreserved =
            byte.is_ascii_alphanumeric() || matches!(byte, b'-' | b'_' | b'.' | b'~');
        if unreserved {
            encoded.push(byte as char);
        } else {
            encoded.push_str(&format!("%{:02X}", byte));
        }
    }
    encoded
}
// handle something when start app
/// One-time startup: builds the main window from either the embedded default
/// config or a packaged `man` manifest, injects custom JS, and restores the
/// persisted window size/position/maximized/fullscreen state from the store.
pub async fn resolve_setup(app: &mut App) -> Result<(), Error> {
    // Forward CLI args to the frontend as a base64-encoded `args` query param.
    let args: Vec<String> = std::env::args().collect();
    let args_str = args[1..].join("|");
    let args_base64 = BASE64_STANDARD.encode(args_str.as_bytes());
    let app_handle = app.handle();
    // Default window config used when no packaged manifest is present.
    let window_json = r#"
    {
        "title": "PacBao",
        "visible": false,
        "url": "index.html",
        "width": 1024,
        "height": 720
    }
    "#;
    let mut json_value: Value = serde_json::from_str(window_json)?;
    if !args_base64.is_empty() {
        if let Some(url) = json_value.get_mut("url") {
            if let Some(original_url) = url.as_str() {
                let new_url = append_param(original_url, args_base64.as_str());
                *url = Value::String(new_url);
            }
        }
    }
    let mut store_name = "app_data.json".to_string();
    let mut config: WindowConfig = serde_json::from_value(json_value).unwrap();
    // load man — the packaged manifest (if any) overrides the defaults above.
    let startup_dir = get_exe_dir(true);
    let man = load_man(&startup_dir);
    let man_content = man.unwrap();
    // custom js injected into the page before load.
    let mut contents = String::new();
    #[cfg(target_os = "windows")]
    let mut icon_bytes: Vec<u8> = Vec::new();
    // let mut ico_byte =
    if man_content.len() > 0 {
        let mut man_config: Man = serde_json::from_str(&man_content).unwrap();
        // When a bundled www payload exists, serve it locally and point the
        // window at the served URL instead of the embedded index.html.
        let www_dir = get_www_dir(&startup_dir);
        let www_dir_str = www_dir.unwrap();
        man_config.window.label = "main".to_string();
        // Keep hidden until size/position are restored, then show at the end.
        man_config.window.visible = false;
        // Per-project store file so different packaged apps don't share state.
        store_name = format!("{}.json", man_config.name.as_str());
        if www_dir_str.len() > 0 {
            man_config.window.url = WebviewUrl::External(Url::parse(&www_dir_str).unwrap());
        }
        config = man_config.window;
        // custom js
        let custom_js = get_config_js(&startup_dir);
        if custom_js.is_ok() {
            contents = custom_js.unwrap();
        }
        // debug: append the vConsole bootstrap when the manifest asks for it.
        if man_config.debug {
            contents += "var vConsole = new window.VConsole();";
        }
        // icon (Windows only: applied to the window after creation).
        #[cfg(target_os = "windows")]
        if man_config.icon.len() > 0 {
            let icon_base64 = BASE64_STANDARD.decode(man_config.icon.trim());
            icon_bytes = icon_base64.unwrap();
        }
    }
    // init window
    let window = tauri::WebviewWindowBuilder::from_config(app_handle, &config)
        .unwrap()
        .initialization_script(contents.as_str())
        .build()
        .unwrap();
    let store = app.store(store_name).unwrap();
    // store.clear();
    // Restore persisted window size (if previously saved).
    let window_size: Option<serde_json::Value> = store.get("window_size");
    let mut width = 0.0;
    let mut height = 0.0;
    if let Some(window_size) = window_size {
        let size = window_size.as_object().unwrap();
        width = size["width"].as_f64().unwrap();
        height = size["height"].as_f64().unwrap();
    }
    #[cfg(target_os = "windows")]
    if icon_bytes.len() > 0 {
        use tauri::image::Image;
        let png_image = Image::from_bytes(&icon_bytes).unwrap();
        window.set_icon(png_image).unwrap();
    }
    // Restore persisted window position (if previously saved).
    let window_position: Option<serde_json::Value> = store.get("window_position");
    let mut x = 0.0;
    let mut y = 0.0;
    // println!("windows_position: {:?}", window_position);
    if let Some(window_position) = window_position {
        let position = window_position.as_object().unwrap();
        x = position["x"].as_f64().unwrap();
        y = position["y"].as_f64().unwrap();
    }
    // position: center when requested or when no valid saved position exists.
    if config.center || x <= 0.0 || y <= 0.0 {
        window.center().unwrap();
    } else {
        window
            .set_position(tauri::PhysicalPosition::new(x, y))
            .unwrap();
    }
    // State precedence: fullscreen > maximized > explicit saved size.
    if config.fullscreen
        || store
            .get("fullscreen")
            .unwrap_or(serde_json::Value::Bool(false))
            .as_bool()
            .unwrap()
    {
        window.set_fullscreen(true).unwrap();
    } else if config.maximized
        || store
            .get("maximized")
            .unwrap_or(serde_json::Value::Bool(false))
            .as_bool()
            .unwrap()
    {
        window.maximize().unwrap();
    } else if width > 0.0 && height > 0.0 {
        window
            .set_size(tauri::PhysicalSize::new(width, height))
            .unwrap();
    }
    // Persist size/position/state changes back to the store as they happen.
    let window_clone = window.clone();
    window.on_window_event(move |event| {
        if let WindowEvent::Resized(size) = event {
            // println!("window_size: {:?}", size);
            if window_clone.is_maximized().unwrap_or(false) {
                let _ = store.set("maximized", true);
            } else if size.width > 0
                && size.height > 0
                && !window_clone.is_minimized().unwrap_or(false)
            {
                let _ = store.set(
                    "window_size",
                    json!({
                        "width": size.width,
                        "height": size.height
                    }),
                );
                let _ = store.set("maximized", false);
            }
            if window_clone.is_fullscreen().unwrap_or(false) {
                // println!("Window entered fullscreen mode.");
                let _ = store.set("fullscreen", true);
            } else {
                let _ = store.set("fullscreen", false);
            }
        } else if let WindowEvent::Moved(position) = event {
            // println!("window_position: {:?}", position);
            // Ignore moves while minimized/maximized so transient positions
            // don't overwrite the user's real placement.
            if position.x > 0
                && position.y > 0
                && !window_clone.is_minimized().unwrap_or(false)
                && !window_clone.is_maximized().unwrap_or(false)
            {
                let _ = store.set(
                    "window_position",
                    json!({ "x": position.x, "y": position.y }),
                );
            }
        } else if let WindowEvent::DragDrop(drag_drop) = event {
            println!("drag_drop: {:?}", drag_drop);
        }
    });
    // Everything restored: reveal and focus the window.
    window.show().unwrap();
    window.set_focus().unwrap();
    Ok(())
}
| rust | MIT | 23ef96a8a12be8536587548611ceeab76e800c54 | 2026-01-04T20:17:44.488910Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/lib.rs | src/lib.rs | //! Wrap REST calls with Rust traits.
//!
//! ```rust
//! #[macro_use] extern crate anterofit;
//! # fn main() {}
//!
//! service! {
//! /// Trait wrapping `myservice.com` API.
//! pub trait MyService {
//! /// Get the version of this API.
//! fn api_version(&self) -> String {
//! GET("/version")
//! }
//!
//! /// Register a user with the API.
//! fn register(&self, username: &str, password: &str) {
//! POST("/register");
//! fields! {
//! username, password
//! }
//! }
//! }
//! }
//! ```
//!
//! # Important Types
//!
//! ## Service Traits
//! Created with the `service!{}` macro as shown above, service traits encompass the actual request
//! submission and response parsing. Each service trait is automatically implemented for
//! `Adapter`, and is object-safe by default, so you can use generic bounds or trait object coercion
//! to narrow the scope:
//!
//! ```rust,ignore
//! fn print_api_version(service: &MyService) {
//! // This completes synchronously, blocking until the request is complete.
//! let api_version = service.api_version().exec_here().unwrap();
//! println!("API version: {}", api_version);
//! }
//!
//! fn register_user<S: MyService>(service: &S, username: &str, password: &str) {
//! // By default, this will complete asynchronously.
//! service.register(username, password)
//! // exec() queues the request on the executor,
//! // and ignore() silences the `unused_result` lint for `Call`.
//! .exec().ignore();
//!
//! // This function returns immediately; all the work is done on the executor.
//! }
//! ```
//!
//! For more details, see the [`service!{}` macro](macro.service.html).
//!
//! ## Adapter
//! Built via `Adapter::builder()`, this is the starting point for all requests. It encompasses
//! five core components, and one very important property:
//!
//! * The `Executor` is responsible for taking prepared requests and executing them. Since Anterofit
//! is primarily designed to be asynchronous, the executor should submit jobs to be completed in the
//! background. Several executors are provided in the `executor` module, but a sane default
//! for low-volume asynchronous requests is provided automatically.
//!
//! * The `Interceptor` is a non-essential but endlessly useful component which can modify
//! request parameters before they are submitted. This currently encompasses modifying the request
//! URL and adding or overwriting HTTP headers. If your app requires some sort of API key or
//! authentication header, you can add an interceptor to your adapter to automatically include
//! the appropriate credentials with each request:
//!
//! ```rust,no_run
//! use anterofit::{Adapter, Url};
//! use anterofit::net::intercept::AddHeader;
//! use anterofit::net::header::{Headers, Authorization, Bearer};
//!
//! let adapter = Adapter::builder()
//! .base_url(Url::parse("https://myservice.com/api").unwrap())
//! .interceptor(AddHeader(Authorization (
//! Bearer {
//! token: "asdf1234hjkl5678".to_string()
//! }
//! )))
//! .build();
//! ```
//!
//! `Interceptor` is also implemented for closures of the kind `Fn(&mut anterofit::net::request::RequestHead)`,
//! but common operations are implemented as types in the `anterofit::net::intercept` module.
//! You can also chain interceptors together; they will be called in declaration order.
//!
//! * The `Serializer` is responsible for taking a strongly typed request body and converting
//! it to something that can be read into the HTTP stream, such as JSON or a raw byte sequence.
//!
//! * Conversely, the `Deserializer` is responsible for taking a response body in some predetermined
//! format, such as JSON or XML, and reading out a strongly typed value.
//!
//! If you just want JSON serialization and deserialization and don't care about the details,
//! use the `serialize_json()` method of your adapter builder to set the serializer and deserializer
//! simultaneously.
//!
//! * The `Client` (`hyper::client::Client`) is responsible for managing proxies, DNS resolution,
//! and bootstrapping connections. A default instance will be constructed automatically if one is
//! not provided, but you can configure your own instance to tweak some low-level stuff like
//! timeouts or to use a particular proxy.
//!
//! * Finally, the `base_url`, if provided, is automatically prepended to every request URL. This would
//! generally be the protocol, domain and perhaps a path prefix, while request URLs can be standalone paths.
//! That way you can easily swap between, for example, testing and production endpoints implementing
//! the same REST API:
//!
//! ```rust
//! # extern crate anterofit;
//! # fn print_api_version<T>(_: &T) {}
//! # fn register_user<T>(_: &T, _: &str, _: &str) {}
//! # fn main() {
//! use anterofit::{Adapter, Url};
//!
//! let adapter = Adapter::builder()
//! .base_url(Url::parse("https://test.myservice.com/api").unwrap())
//! .build();
//!
//! print_api_version(&adapter);
//! register_user(&adapter, "username", "password");
//!
//! let adapter = Adapter::builder()
//! .base_url(Url::parse("https://prod.myservice.com/api").unwrap())
//! .build();
//!
//! print_api_version(&adapter);
//! register_user(&adapter, "username", "password");
//! # }
//! ```
//!
//! ## `Request`
//! This type wraps the return value of every service trait method. Unlike in Retrofit,
//! where the request is determined to
//! be synchronous or asynchronous at the service method declaration site^1, `Request` gives the power
//! over this choice to the caller so that no change to the trait is needed to change the execution
//! context:
//!
//! ```rust,ignore
//! fn print_api_version(service: &MyService) {
//! service.api_version()
//! // This closure will be called with the `String` value on the executor
//! .on_complete(|api_version| println!("API version: {}", api_version))
//! // We don't care about the result since it's `()` anyway.
//! .exec().ignore();
//! }
//! ```
//! ^1 : Retrofit v1 established synchronicity at the declaration site; v2 follows the same
//! pattern as Anterofit, but the two were developed independently.
//!
//! ## `Call`
//! Returned by `Request::exec()`, this type is a pollable `Future` which will yield the result
//! of the request when it is ready. If there was an error in constructing the request,
//! the result will be available immediately. `Call` provides alternative methods wrapping
//! `Future::poll()` and `Future::wait()` without external types so you
//! have a choice over whether you want to use futures in your app or not.
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
#![cfg_attr(feature="clippy", deny(clippy))]
#![warn(missing_docs)]
#![cfg_attr(feature = "nightly", feature(specialization))]
#![recursion_limit="100"]
#[macro_use]
extern crate mime as mime_;
#[macro_use]
extern crate quick_error;
extern crate futures;
extern crate crossbeam;
extern crate parking_lot;
extern crate multipart;
extern crate serde;
extern crate url;
pub extern crate hyper;
mod adapter;
#[macro_use]
mod macros;
mod mpmc;
pub mod mime;
pub mod net;
pub mod serialize;
pub mod executor;
pub mod error;
pub use error::Error;
pub use hyper::Url;
pub use adapter::{Adapter, AbsAdapter, AdapterBuilder, InterceptorMut};
#[cfg(feature = "serde_json")]
pub use adapter::JsonAdapter;
pub use net::body::RawBody;
pub use net::request::Request;
use std::sync::Arc;
/// The result type for this crate; used frequently in public APIs.
///
/// Recommended to be used as `anterofit::Result` to avoid confusing
/// shadowing of `std::result::Result`.
pub type Result<T> = ::std::result::Result<T, Error>;
/// Strong typing hint for delegate adapter-getters.
///
/// Simply applies `map` to `delegate`; its purpose is to pin down the
/// `FnOnce(&D) -> &A` shape (with `A: AbsAdapter`) for macro-generated code.
#[doc(hidden)]
pub fn get_adapter<D, A: AbsAdapter, F: FnOnce(&D) -> &A>(delegate: &D, map: F) -> &A {
    map(delegate)
}
/// Unsizeable service trait. Used with `Adapter::arc_service()`.
///
/// Use `unsizeable!()` to create an impl of this trait for your service trait.
///
/// Mutually exclusive with delegate service impls for the foreseeable future.
pub trait UnsizeService {
    /// Unsize the given `Arc<A: AbsAdapter>` to the service trait object.
    ///
    /// Implementations coerce the concrete adapter into `Arc<Self>` where
    /// `Self` is the (object-safe) service trait.
    fn from_adapter<A>(adpt: Arc<A>) -> Arc<Self> where A: AbsAdapter;
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/mpmc.rs | src/mpmc.rs | use crossbeam::sync::SegQueue;
use parking_lot::{Condvar, Mutex};
use std::iter::IntoIterator;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use executor::ExecBox;
/// Create a linked sender/receiver pair sharing one lock-free job queue,
/// plus the mutex/condvar used to park idle receivers and a closed flag
/// set when the `Sender` is dropped.
pub fn channel() -> (Sender, Receiver) {
    let shared = Inner {
        queue: SegQueue::new(),
        mutex: Mutex::new(()),
        cvar: Condvar::new(),
        closed: AtomicBool::new(false),
    };
    let for_sender = Arc::new(shared);
    let for_receiver = for_sender.clone();
    (Sender(for_sender), Receiver(for_receiver))
}
/// The sending half of the MPMC job queue; dropping it closes the channel.
pub struct Sender(Arc<Inner>);
/// The receiver half of an MPMC queue of executor jobs.
///
/// Poll with `recv()`, when it returns `None` the job queue is closed.
pub struct Receiver(Arc<Inner>);
// Shared state behind both halves.
struct Inner {
    // Lock-free queue holding the pending jobs.
    queue: SegQueue<Box<ExecBox>>,
    // The mutex exists only to park receivers on `cvar`; it guards no data.
    mutex: Mutex<()>,
    cvar: Condvar,
    // Set (with Release) when the Sender is dropped; read with Acquire.
    closed: AtomicBool,
}
impl Sender {
    /// Enqueue a job and wake all parked receivers so one can claim it.
    pub fn send(&self, exec: Box<ExecBox>) {
        self.0.queue.push(exec);
        self.0.cvar.notify_all();
    }
}
impl Drop for Sender {
    // Closing the channel: mark closed, then wake every parked receiver so
    // they can observe the flag and return `None` from `recv()`.
    fn drop(&mut self) {
        self.0.closed.store(true, Ordering::Release);
        self.0.cvar.notify_all();
    }
}
impl Receiver {
    // Park this thread on the condvar until a sender (or sibling receiver)
    // notifies. The mutex guards nothing; it only services the condvar.
    fn wait(&self) {
        // RFC: should this have a timeout?
        // NOTE(review): the queue/closed checks in `recv()` happen outside
        // this mutex, so a push+notify landing between the empty check and
        // this wait could be missed until the next notification — confirm
        // whether that window is acceptable here.
        self.0.cvar.wait(&mut self.0.mutex.lock());
    }
    /// Poll the queue, blocking if it is empty.
    ///
    /// Returns `None` when the sending half of the queue is closed.
    pub fn recv(&self) -> Option<Box<ExecBox>> {
        loop {
            if let Some(val) = self.0.queue.try_pop() {
                // Wake another thread so it can check if there's more work in the queue
                self.0.cvar.notify_one();
                return Some(val);
            }
            // Check closed only after the queue is drained so pending jobs
            // are still delivered after the Sender drops.
            if self.0.closed.load(Ordering::Acquire) {
                // Wake any remaining blocked threads so they can observe the closed status
                self.0.cvar.notify_all();
                return None;
            }
            self.wait();
        }
    }
    /// Get a blocking iterator that yields `None` when the queue is closed.
    ///
    /// `IntoIter` is also implemented for `&Receiver`.
    pub fn iter(&self) -> RecvIter {
        RecvIter(self)
    }
}
// Cloning a Receiver yields another handle on the same shared queue
// (multi-consumer); only the Arc is cloned.
impl Clone for Receiver {
    fn clone(&self) -> Self {
        Receiver(self.0.clone())
    }
}
// Borrowed iteration: `for job in &receiver` blocks per item.
impl<'a> IntoIterator for &'a Receiver {
    type Item = Box<ExecBox>;
    type IntoIter = RecvIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
// Owned iteration: `for job in receiver` consumes this handle.
impl IntoIterator for Receiver {
    type Item = Box<ExecBox>;
    type IntoIter = RecvIntoIter;
    fn into_iter(self) -> Self::IntoIter {
        RecvIntoIter(self)
    }
}
/// Blocking owned iterator type.
pub struct RecvIntoIter(Receiver);
impl Iterator for RecvIntoIter {
    type Item = Box<ExecBox>;
    // Delegates to `Receiver::recv()`: blocks while empty, ends on close.
    fn next(&mut self) -> Option<Self::Item> {
        self.0.recv()
    }
}
/// Blocking shared iterator type.
pub struct RecvIter<'a>(&'a Receiver);
impl<'a> Iterator for RecvIter<'a> {
    type Item = Box<ExecBox>;
    // Delegates to `Receiver::recv()`: blocks while empty, ends on close.
    fn next(&mut self) -> Option<Self::Item> {
        self.0.recv()
    }
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/adapter.rs | src/adapter.rs | use hyper::Url;
use hyper::client::Client;
use std::sync::Arc;
use std::fmt;
use executor::{DefaultExecutor, Executor};
use mpmc::{self, Sender};
use net::intercept::{Interceptor, Chain, NoIntercept};
use serialize::{self, Serializer, Deserializer};
use serialize::none::NoSerializer;
use serialize::FromStrDeserializer;
use UnsizeService;
/// A builder for `Adapter`. Call `Adapter::builder()` to get an instance.
pub struct AdapterBuilder<S, D, E, I> {
    // Optional URL prepended to every request; absolute URLs assumed if None.
    base_url: Option<Url>,
    // Hyper client; a default is constructed in `build()` when None.
    client: Option<Client>,
    executor: E,
    interceptor: I,
    serializer: S,
    deserializer: D,
}
impl AdapterBuilder<NoSerializer, FromStrDeserializer, DefaultExecutor, NoIntercept> {
    // Private entry point used by `Adapter::builder()`: starts from inert
    // defaults — no serializer, `FromStr`-based deserialization, the default
    // executor, and no interceptor.
    fn new() -> Self {
        AdapterBuilder {
            base_url: None,
            client: None,
            executor: DefaultExecutor::new(),
            interceptor: NoIntercept,
            serializer: NoSerializer,
            deserializer: FromStrDeserializer,
        }
    }
}
impl<S, D, E, I> AdapterBuilder<S, D, E, I> {
    /// Set the base URL that the adapter will use for all requests.
    ///
    /// If a base URL is not provided, then all service method URLs are assumed to be absolute.
    pub fn base_url(self, url: Url) -> Self {
        AdapterBuilder { base_url: Some(url), .. self }
    }
    /// Set a `hyper::Client` instance to use with the adapter.
    ///
    /// If not supplied, a default instance will be constructed.
    pub fn client(mut self, client: Client) -> Self {
        self.client = Some(client);
        self
    }
    /// Set a new executor for the adapter.
    // Fix: the bound belongs on the *incoming* executor type `E_`, matching
    // the sibling setters (`I_: Interceptor`, `S_: Serializer`,
    // `D_: Deserializer`); the previous `where E: Executor` constrained the
    // executor being replaced, deferring any bound error to `build()`.
    pub fn executor<E_>(self, executor: E_) -> AdapterBuilder<S, D, E_, I>
    where E_: Executor {
        AdapterBuilder {
            base_url: self.base_url,
            client: self.client,
            executor: executor,
            interceptor: self.interceptor,
            serializer: self.serializer,
            deserializer: self.deserializer,
        }
    }
    /// Set a new interceptor for the adapter.
    pub fn interceptor<I_>(self, interceptor: I_) -> AdapterBuilder<S, D, E, I_>
    where I_: Interceptor {
        AdapterBuilder {
            base_url: self.base_url,
            client: self.client,
            executor: self.executor,
            interceptor: interceptor,
            serializer: self.serializer,
            deserializer: self.deserializer,
        }
    }
    /// Chain a new interceptor with the current one. They will be called in-order.
    pub fn chain_interceptor<I_>(self, next: I_) -> AdapterBuilder<S, D, E, Chain<I, I_>>
    where I: Interceptor, I_: Interceptor {
        AdapterBuilder {
            base_url: self.base_url,
            client: self.client,
            executor: self.executor,
            interceptor: self.interceptor.chain(next),
            serializer: self.serializer,
            deserializer: self.deserializer,
        }
    }
    /// Set a new `Serializer` impl for the adapter.
    pub fn serializer<S_>(self, serialize: S_) -> AdapterBuilder<S_, D, E, I>
    where S_: Serializer {
        AdapterBuilder {
            base_url: self.base_url,
            client: self.client,
            executor: self.executor,
            interceptor: self.interceptor,
            serializer: serialize,
            deserializer: self.deserializer,
        }
    }
    /// Set a new `Deserializer` impl for the adapter.
    pub fn deserializer<D_>(self, deserialize: D_) -> AdapterBuilder<S, D_, E, I>
    where D_: Deserializer {
        AdapterBuilder {
            base_url: self.base_url,
            client: self.client,
            executor: self.executor,
            interceptor: self.interceptor,
            serializer: self.serializer,
            deserializer: deserialize,
        }
    }
}
#[cfg(any(feature = "rustc-serialize", feature = "serde_json"))]
impl<S, D, E, I> AdapterBuilder<S, D, E, I> {
    /// Convenience method for using JSON serialization.
    ///
    /// Enabled with either the `rust-serialize` feature or the `serde-json` feature.
    pub fn serialize_json(self) -> AdapterBuilder<serialize::json::Serializer, serialize::json::Deserializer, E, I> {
        // Shorthand for calling `serializer()` and `deserializer()` with the
        // crate's JSON implementations.
        self.serializer(serialize::json::Serializer)
            .deserializer(serialize::json::Deserializer)
    }
}
impl<S, D, E, I> AdapterBuilder<S, D, E, I>
    where S: Serializer, D: Deserializer, E: Executor, I: Interceptor {
    /// Using the supplied types, complete the adapter.
    ///
    /// `<E as Executor>::start()` will be called here.
    pub fn build(self) -> Adapter<S, D> {
        // Job channel: the adapter keeps the sender; the executor consumes
        // jobs from the receiver.
        let (tx, rx) = mpmc::channel();
        self.executor.start(rx);
        let consts = AdapterConsts {
            base_url: self.base_url,
            // Fall back to a default hyper client when none was supplied.
            client: self.client.unwrap_or_else(Client::new),
            serializer: self.serializer,
            deserializer: self.deserializer,
            sender: tx,
        };
        Adapter {
            inner: Arc::new(
                Adapter_ {
                    consts: Arc::new(consts),
                    // Interceptor erased to an optional trait object so the
                    // adapter's type is independent of `I`.
                    interceptor: self.interceptor.into_opt_obj(),
                }
            ),
        }
    }
}
/// A shorthand for an adapter with JSON serialization enabled.
#[cfg(any(feature = "rustc-serialize", feature = "serde_json"))]
pub type JsonAdapter= Adapter<serialize::json::Serializer, serialize::json::Deserializer>;
/// The starting point of all Anterofit requests.
///
/// Use `builder()` to start constructing an instance.
#[derive(Debug)]
pub struct Adapter<S = NoSerializer, D = FromStrDeserializer> {
    // Shared state: cloning an Adapter only bumps this Arc's refcount.
    inner: Arc<Adapter_<S, D>>,
}
// Manual Clone: only the inner Arc is cloned, so no `S: Clone`/`D: Clone`
// bounds are required (a derive would add them).
impl<S, D> Clone for Adapter<S, D> {
    fn clone(&self) -> Self {
        Adapter {
            inner: self.inner.clone(),
        }
    }
}
impl Adapter<NoSerializer, FromStrDeserializer> {
    /// Start building an impl of `Adapter` using the default inner types.
    pub fn builder() -> AdapterBuilder<NoSerializer, FromStrDeserializer, DefaultExecutor, NoIntercept> {
        AdapterBuilder::new()
    }
}
impl<S, D> Adapter<S, D> {
    /// Modify this adaptor's interceptor.
    ///
    /// ## Note
    /// Any existing service trait objects and copies of this adapter will be unaffected
    /// by this change.
    pub fn interceptor_mut(&mut self) -> InterceptorMut {
        // Copy-on-write: `Arc::make_mut` clones the inner state if other
        // handles still share it — which is why existing copies are unaffected.
        InterceptorMut(&mut Arc::make_mut(&mut self.inner).interceptor)
    }
}
// Debug for the internal wrapper; deliberately reported as
// "anterofit::Adapter" since users only ever see it through the public
// `Adapter`'s derived Debug output.
impl<S, D> fmt::Debug for Adapter_<S, D>
    where S: fmt::Debug, D: fmt::Debug {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("anterofit::Adapter")
            .field("base_url", &self.consts.base_url)
            .field("client", &self.consts.client)
            .field("serializer", &self.consts.serializer)
            .field("deserializer", &self.consts.deserializer)
            .field("interceptor", &self.interceptor)
            .finish()
    }
}
/// A mutator for modifying the `Interceptor` of an `Adapter`.
pub struct InterceptorMut<'a>(&'a mut Option<Arc<Interceptor>>);
impl<'a> InterceptorMut<'a> {
    /// Remove the interceptor from the adapter.
    pub fn remove(&mut self) {
        *self.0 = None;
    }
    /// Set a new interceptor, discarding the old one.
    pub fn set<I>(&mut self, new: I) where I: Interceptor {
        *self.0 = new.into_opt_obj();
    }
    /// Chain the given `Interceptor` before the one currently in the adapter.
    ///
    /// Equivalent to `set(before)` if the adapter does not have an interceptor or was constructed
    /// with `NoIntercept` as the interceptor.
    pub fn chain_before<I>(&mut self, before: I) where I: Interceptor {
        // `take()` moves the current interceptor out so it can be re-chained.
        *self.0 = match self.0.take() {
            Some(current) => before.chain(current).into_opt_obj(),
            None => before.into_opt_obj(),
        };
    }
    /// Chain the given `Interceptor` after the one currently in the adapter.
    ///
    /// Equivalent to `set(after)` if the adapter does not have an interceptor or was constructed
    /// with `NoIntercept` as the interceptor.
    pub fn chain_after<I>(&mut self, after: I) where I: Interceptor {
        *self.0 = match self.0.take() {
            Some(current) => current.chain(after).into_opt_obj(),
            None => after.into_opt_obj(),
        };
    }
    /// Chain the given `Interceptor`s before and after the one currently in the adapter.
    ///
    /// This saves a level of boxing over calling `chain_before()` and `chain_after()`
    /// separately.
    ///
    /// Equivalent to `set(before.chain(after))` if the adapter does not have an interceptor or
    /// was constructed with `NoIntercept` as the interceptor.
    pub fn chain_around<I1, I2>(&mut self, before: I1, after: I2)
    where I1: Interceptor, I2: Interceptor {
        // `chain2` builds before -> current -> after in a single combinator.
        *self.0 = match self.0.take() {
            Some(current) => before.chain2(current, after).into_opt_obj(),
            None => before.chain(after).into_opt_obj(),
        };
    }
}
/// Constant types in an adapter
///
/// Immutable after construction; shared between the adapter and all of its
/// clones via `Arc`.
pub struct AdapterConsts<S, D> {
    /// Base URL for requests, if one was configured.
    pub base_url: Option<Url>,
    /// The hyper `Client` used to execute requests.
    pub client: Client,
    /// Sending half of the channel whose receiving end was handed to the
    /// executor at build time.
    pub sender: Sender,
    /// Serializer for request bodies.
    pub serializer: S,
    /// Deserializer for response bodies.
    pub deserializer: D,
}
/// Public but not accessible
///
/// The shared innards of `Adapter`: the constant state plus the optional
/// interceptor. Also implements `AbsAdapter` itself so service trait objects
/// can hold it directly (see `arc_service`).
pub struct Adapter_<S, D> {
    // Shared, immutable adapter state (client, base URL, serializers).
    consts: Arc<AdapterConsts<S, D>>,
    // Optional request interceptor.
    interceptor: Option<Arc<Interceptor>>,
}
impl<S, D> Clone for Adapter_<S, D> {
    // Cheap clone: both fields are refcounted handles (`Arc`), so this only
    // bumps reference counts.
    fn clone(&self) -> Self {
        let consts = self.consts.clone();
        let interceptor = self.interceptor.clone();
        Adapter_ { consts: consts, interceptor: interceptor }
    }
}
impl<S, D> Adapter<S, D> where S: Serializer, D: Deserializer {
    /// Get a service trait object from an existing shared allocation.
    ///
    /// Requires that the service implement `UnsizeService`.
    ///
    /// Hands the service a clone of the shared `Adapter_` handle, so the
    /// returned trait object is unaffected by later `interceptor_mut()` calls
    /// on this adapter.
    pub fn arc_service<Serv: ?Sized>(&self) -> Arc<Serv> where Serv: UnsizeService {
        Serv::from_adapter(self.inner.clone())
    }
}
/// Used by Anterofit's various APIs.
///
/// Marker trait over the (doc-hidden) `PrivAdapter`; implemented below for
/// both `Adapter` and `Adapter_`.
pub trait AbsAdapter: PrivAdapter {}
/// Private adapter trait
///
/// The minimal accessor surface the request machinery needs from an adapter.
pub trait PrivAdapter: Send + 'static {
    /// The adapter's serializer type.
    type Ser: Serializer;
    /// The adapter's deserializer type.
    type De: Deserializer;

    /// Borrow the adapter's constant state.
    fn ref_consts(&self) -> &AdapterConsts<Self::Ser, Self::De>;

    /// Get a shared handle to the adapter's constant state.
    fn consts(&self) -> Arc<AdapterConsts<Self::Ser, Self::De>>;

    /// Get the adapter's interceptor, if any.
    fn interceptor(&self) -> Option<Arc<Interceptor>>;
}
impl<S, D> AbsAdapter for Adapter<S, D> where S: Serializer, D: Deserializer {}

/// Forwards every accessor through the shared `inner` allocation.
impl<S, D> PrivAdapter for Adapter<S, D> where S: Serializer, D: Deserializer {
    type Ser = S;
    type De = D;

    fn ref_consts(&self) -> &AdapterConsts<S, D> {
        let inner = &*self.inner;
        &inner.consts
    }

    fn consts(&self) -> Arc<AdapterConsts<S, D>> {
        Arc::clone(&self.inner.consts)
    }

    fn interceptor(&self) -> Option<Arc<Interceptor>> {
        self.inner.interceptor.as_ref().map(|icept| icept.clone())
    }
}
impl<S, D> AbsAdapter for Adapter_<S, D> where S: Serializer, D: Deserializer {}

/// Same accessors as the public `Adapter`, reading fields directly.
impl<S, D> PrivAdapter for Adapter_<S, D> where S: Serializer, D: Deserializer {
    type Ser = S;
    type De = D;

    fn ref_consts(&self) -> &AdapterConsts<S, D> {
        &*self.consts
    }

    fn consts(&self) -> Arc<AdapterConsts<S, D>> {
        Arc::clone(&self.consts)
    }

    fn interceptor(&self) -> Option<Arc<Interceptor>> {
        match self.interceptor {
            Some(ref icept) => Some(icept.clone()),
            None => None,
        }
    }
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/error.rs | src/error.rs | //! Assorted error types and helper functions used by this crate.
/// Error type from the `hyper` crate.
///
/// Associated with errors from connection issues or I/O issues with sockets.
pub use hyper::Error as HyperError;
/// Error type from the `url` crate.
///
/// Associated with errors with URL string parsing or concatenation.
pub use hyper::error::ParseError as UrlError;
/// Error type from the `multipart` crate.
///
/// Associated with errors writing out `multipart/form-data` requests.
pub type MultipartError = ::multipart::client::lazy::LazyIoError<'static>;
use net::request::RequestHead;
use serialize::none::NoSerializeError;
use std::io::Error as IoError;
use std::error::Error as StdError;
use std::fmt;
// The crate-wide error enum, generated via `quick_error!`:
// `from()` emits a `From` impl for the variant's payload type,
// `cause(..)`/`description(..)` fill in the `std::error::Error` impl.
quick_error! {
    /// The error type for this crate.
    ///
    /// Can be converted from basically any error returned by any crate used here.
    #[derive(Debug)]
    pub enum Error {
        /// Error type from the `hyper` crate.
        ///
        /// Associated with errors from connection issues or I/O issues with sockets.
        Hyper(e: HyperError) {
            from()
            cause(e)
            description(e.description())
        }
        /// Error type from the `url` crate.
        ///
        /// Associated with errors with URL string parsing or concatenation.
        Url(e: UrlError) {
            from()
            cause(e)
            description(e.description())
        }
        /// Errors that occur during serialization.
        // No `from()` here: constructed through `Error::map_serialize` below,
        // avoiding ambiguity with the boxed `Other` variant.
        Serialize(e: Box<StdError + Send + 'static>) {
            cause(&**e)
            description(e.description())
        }
        /// Errors that occur during deserialization.
        // Constructed through `Error::map_deserialize` / `Error::deserialize`.
        Deserialize(e: Box<StdError + Send + 'static>) {
            cause(&**e)
            description(e.description())
        }
        /// The `std::io::Error` type.
        ///
        /// Associated with miscellaneous errors dealing with I/O streams.
        StdIo(e: IoError){
            from()
            cause(e)
            description(e.description())
        }
        /// Error type from the `multipart` crate.
        ///
        /// Associated with errors writing out `multipart/form-data` requests.
        Multipart(e: MultipartError) {
            from()
            cause(e)
            description(e.description())
        }
        /// Returned when a service method requests (de)serialization, but no (de)serializer was provided.
        ///
        /// Check the error description for which.
        NoSerialize(e: NoSerializeError) {
            from()
            cause(e)
            description(e.description())
        }
        /// The miscellaneous error type, can be anything.
        Other(e: Box<StdError + Send + 'static>){
            from()
            cause(&**e)
            description(e.description())
        }
        /// Error returned when a panic occurred while completing a request.
        ///
        /// The request head is provided for inspection.
        Panic(e: RequestPanicked) {
            from()
            cause(e)
            description(e.description())
        }
        /// A `Request` callback (`on_complete()` or `on_request()`) panicked.
        // `futures::Canceled` converts into this variant.
        UnknownPanic {
            from(::futures::Canceled)
            description("A panic occurred during a callback assigned to a request.")
        }
        /// Returned by methods on `Call` if the result was already taken.
        ResultTaken {
            description("The result has already been taken from this Call.")
        }
    }
}
impl Error {
/// Map the result, boxing and wrapping the error as `Error::Serialize`
pub fn map_serialize<T, E: StdError + Send + 'static>(res: Result<T, E>) -> Result<T, Self> {
res.map_err(|e| Error::Serialize(Box::new(e)))
}
/// Map the result, boxing and wrapping the error as `Error::Deserialize`
pub fn map_deserialize<T, E: StdError + Send + 'static>(res: Result<T, E>) -> Result<T, Self> {
res.map_err(|e| Error::Deserialize(Box::new(e)))
}
/// Create a value of `Error::Deserialize`
pub fn deserialize<E: Into<Box<StdError + Send + Sync + 'static>>>(err: E) -> Self {
Error::Deserialize(err.into())
}
}
/// Flatten a `Result` of a `Result` where the outer's error type is convertible to `anterofit::Result`.
///
/// On an outer `Err(e)`, returns `Err(Error::from(e))`; otherwise yields the
/// inner `Result` unchanged.
pub fn flatten_res<T, E>(res: Result<Result<T, Error>, E>) -> Result<T, Error> where Error: From<E> {
    // `?` replaces the deprecated `try!` macro (the file already uses `?`
    // elsewhere); semantics are identical.
    res?
}
/// Error returned when a panic occurred while completing a request.
///
/// The request head is provided for inspection.
///
/// Converts into [`Error::Panic`] via the generated `From` impl.
#[derive(Debug)]
pub struct RequestPanicked(pub RequestHead);
impl fmt::Display for RequestPanicked {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Panic while executing request: \"{}\"", self.0)
}
}
impl StdError for RequestPanicked {
    // Static description; per-request detail lives in the `Display` impl.
    fn description(&self) -> &str {
        "A panic occurred while executing a request."
    }
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/mime.rs | src/mime.rs | //! Shorthands for various MIME types.
pub use mime_::Mime;
/// `application/octet-stream`
///
/// The conventional MIME type for arbitrary binary data.
pub fn octet_stream() -> Mime {
    mime!(Application/OctetStream)
}
/// `application/json`
pub fn json() -> Mime {
    mime!(Application/Json)
}
/// `application/x-www-form-urlencoded`
// Doc fix: the standard type carries the `x-` prefix, which is what the
// `WwwFormUrlEncoded` sublevel produces; the old doc omitted it.
pub fn form_urlencoded() -> Mime {
    mime!(Application/WwwFormUrlEncoded)
}
/// `multipart/form-data; boundary={boundary}`
///
/// `boundary` is embedded verbatim as the `boundary` parameter.
pub fn formdata(boundary: &str) -> Mime {
    mime!(Multipart/FormData; ("boundary")=(boundary))
}
/// `text/plain; charset=utf8`
pub fn text_plain_utf8() -> Mime {
    mime!(Text/Plain; Charset=Utf8)
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/serialize/none.rs | src/serialize/none.rs | //! No-op serializers which return errors when invoked.
use std::io::{Read, Write};
use super::{Serializer, Deserializer, Serialize, Deserialize};
use mime::Mime;
use ::Result;
/// A no-op serializer which returns an error when attempting to use it.
///
/// The default serializer type of `Adapter`; every call fails with
/// `NoSerializeError::Serialize`.
#[derive(Debug)]
pub struct NoSerializer;
impl Serializer for NoSerializer {
    // Always fails: the adapter was built without a serializer.
    fn serialize<T: Serialize, W: Write>(&self, _: &T, _: &mut W) -> Result<()> {
        let err = NoSerializeError::Serialize;
        Err(err.into())
    }

    // No body is ever produced, so there is no content type.
    fn content_type(&self) -> Option<Mime> {
        None
    }
}
/// A no-op deserializer which returns an error when attempting to use it.
///
/// Every call fails with `NoSerializeError::Deserialize`.
#[derive(Debug)]
pub struct NoDeserializer;
impl Deserializer for NoDeserializer {
    // Always fails: the adapter was built without a deserializer.
    fn deserialize<T: Deserialize, R: Read>(&self, _: &mut R) -> Result<T> {
        let err = NoSerializeError::Deserialize;
        Err(err.into())
    }
}
// Error type shared by both no-op (de)serializers; converts into the crate's
// `Error::NoSerialize` variant.
quick_error! {
    /// Error returned by `NoSerializer` and `NoDeserializer`
    #[derive(Debug)]
    pub enum NoSerializeError {
        /// "A request method requested serialization, but no serializer was provided"
        Serialize {
            description("A request method requested serialization, but no serializer was provided")
        }
        /// "A request method requested deserialization, but no deserializer was provided"
        Deserialize {
            description("A request method requested deserialization, but no deserializer was provided")
        }
    }
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/serialize/json.rs | src/serialize/json.rs | //! Integration with the `serde_json` crate providing JSON serialization.
extern crate serde_json;
use mime::{self, Mime};
use std::io::{Read, Write};
use super::{Serialize, Deserialize};
use serialize;
use ::{Error, Result};
/// Serializer for JSON request bodies with compact output.
#[derive(Clone, Debug, Default)]
pub struct Serializer;

impl serialize::Serializer for Serializer {
    /// Serialize `val` as compact JSON into `write`.
    ///
    /// Any `serde_json` error is wrapped as `Error::Serialize`.
    fn serialize<T: Serialize, W: Write>(&self, val: &T, write: &mut W) -> Result<()> {
        Error::map_serialize(self::serde_json::to_writer(write, val))
    }

    /// Returns `application/json`.
    fn content_type(&self) -> Option<Mime> {
        Some(mime::json())
    }
}
/// Serializer for JSON request bodies which pretty-prints its output.
#[derive(Clone, Debug, Default)]
pub struct PrettySerializer;

impl serialize::Serializer for PrettySerializer {
    /// Serialize `val` as human-readable (indented) JSON into `write`.
    fn serialize<T: Serialize, W: Write>(&self, val: &T, write: &mut W) -> Result<()> {
        Error::map_serialize(self::serde_json::to_writer_pretty(write, val))
    }

    /// Returns `application/json`.
    fn content_type(&self) -> Option<Mime> {
        Some(mime::json())
    }
}
/// Deserializer for pulling values from JSON response bodies.
#[derive(Clone, Debug, Default)]
pub struct Deserializer;

impl serialize::Deserializer for Deserializer {
    /// Deserialize `T` from the JSON in `read`.
    ///
    /// Any `serde_json` error is wrapped as `Error::Deserialize`.
    fn deserialize<T: Deserialize, R: Read>(&self, read: &mut R) -> Result<T> {
        Error::map_deserialize(self::serde_json::from_reader(read))
    }
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/serialize/xml.rs | src/serialize/xml.rs | //! Integration with the `serde_xml` crate providing XML serialization.
//!
//! ##Note
//! As of November 2016, only deserialization is supported by `serde_xml`.
extern crate serde_xml;
use std::io::Read;
use super::Deserialize;
use serialize;
use ::{Error, Result};
/// Deserializer for pulling values from XML responses.
#[derive(Clone, Debug, Default)]
pub struct Deserializer;

impl serialize::Deserializer for Deserializer {
    /// Deserialize `T` from the XML in `read`, consuming it byte-by-byte.
    fn deserialize<T: Deserialize, R: Read>(&self, read: &mut R) -> Result<T> {
        Error::map_deserialize(self::serde_xml::de::from_iter(read.bytes()))
    }
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/serialize/mod.rs | src/serialize/mod.rs | //! Types used to serialize and deserialize request and response bodies, respectively.
//!
//! ## Note
//! If you get an error about duplicate types or items in this module, make sure you don't have both
//! the `rustc-serialize` and `serde` features enabled.
use mime::Mime;
use std::fmt;
use std::io::{Read, Write};
pub mod none;
#[cfg(feature = "serde_json")]
pub mod json;
#[cfg(feature = "serde_xml")]
pub mod xml;
pub use serde::Serialize;
pub use serde::de::DeserializeOwned as Deserialize;
use serde::de::IntoDeserializer;
use serde::ser::SerializeMap;
/// A trait describing types which can concurrently serialize other types into byte-streams.
///
/// `Send + Sync + 'static` so a single instance can be shared freely across
/// threads.
pub trait Serializer: Send + Sync + 'static {
    /// Serialize `T` to `write`, returning any errors.
    fn serialize<T: Serialize, W: Write>(&self, val: &T, write: &mut W) -> ::Result<()>;

    /// Return the MIME type of the serialized content, if applicable.
    ///
    /// Used to set the `Content-Type` header of the request this serializer
    /// is being used for.
    fn content_type(&self) -> Option<Mime>;
}
/// A trait describing types which can concurrently deserialize other types from byte-streams.
///
/// `Send + Sync + 'static` so a single instance can be shared freely across
/// threads.
pub trait Deserializer: Send + Sync + 'static {
    /// Deserialize `T` from `read`, returning the result.
    fn deserialize<T: Deserialize, R: Read>(&self, read: &mut R) -> ::Result<T>;
}
/// A deserializer which attempts to parse values from the response as a string.
///
/// Reads the whole body as a UTF-8 string, then deserializes the target type
/// from that string via serde's `IntoDeserializer`.
pub struct FromStrDeserializer;
/// A simple series of key-value pairs that can be serialized as a map.
///
/// Nothing will be done with duplicate keys.
#[derive(Clone)]
pub struct PairMap<K, V> {
    // Pairs kept in insertion order; duplicates are preserved as-is.
    pairs: Vec<(K, V)>,
}
impl<K, V> PairMap<K, V> {
    /// Create an empty series.
    pub fn new() -> Self {
        PairMap { pairs: vec![] }
    }

    /// Add a key-value pair to the end of this series.
    pub fn insert(&mut self, key: K, val: V) {
        let pair = (key, val);
        self.pairs.push(pair);
    }

    /// Get the current series of pairs as a slice.
    pub fn pairs(&self) -> &[(K, V)] {
        &self.pairs[..]
    }

    /// Get the current series of pairs as a mutable reference to a vector.
    pub fn pairs_mut(&mut self) -> &mut Vec<(K, V)> {
        &mut self.pairs
    }

    /// Take the key-value pair series as a vector of 2-tuples.
    pub fn into_pairs(self) -> Vec<(K, V)> {
        self.pairs
    }
}
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for PairMap<K, V> {
    // Renders as a map (`{k: v, ...}`) in insertion order.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut map = f.debug_map();
        for pair in &self.pairs {
            let (ref key, ref val) = *pair;
            map.entry(key, val);
        }
        map.finish()
    }
}
use serde::de::Error;
use std::error::Error as StdError;
use std::fmt::Display;
/// JSON only allows string keys, so all keys are converted to strings.
impl<K: Display, V: Serialize> Serialize for PairMap<K, V> {
    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> where S: ::serde::Serializer,
    S::SerializeMap: SerializeMap {
        use std::fmt::Write;
        let pairs = self.pairs();
        // Entry count is known up front, so give the serializer a size hint.
        let mut map_s = s.serialize_map(Some(pairs.len()))?;
        // One shared buffer for the stringified keys, cleared between
        // entries, so at most one allocation is usually made.
        let mut key_buf = String::new();
        for &(ref key, ref val) in pairs {
            key_buf.clear();
            // Writing into a `String` only fails if the key's `Display`
            // impl itself errors.
            write!(key_buf, "{}", key).expect("Error formatting key");
            map_s.serialize_entry(&key_buf, val)?;
        }
        map_s.end()
    }
}
/// Lets the crate's `Error` serve as a serde deserialization error type
/// (needed by `FromStrDeserializer` below).
impl Error for ::Error {
    fn custom<T: Display>(msg: T) -> Self {
        // Box the message's string form and wrap it as a deserialize error.
        let error: Box<StdError + Send + Sync> = msg.to_string().into();
        ::Error::Deserialize(error)
    }
}
impl Deserializer for FromStrDeserializer {
    /// Read the entire stream as a UTF-8 string, then deserialize `T` from
    /// that string.
    ///
    /// Read errors convert via `From<io::Error>`; parse errors come from
    /// `T::deserialize`.
    fn deserialize<T: Deserialize, R: Read>(&self, read: &mut R) -> ::Result<T> {
        let mut string = String::new();
        // BUG FIX: `read_to_string` returns the number of bytes read; the
        // previous code shadowed `string` with that count and then
        // deserialized the *count* instead of the text that was read.
        read.read_to_string(&mut string)?;
        T::deserialize(string.into_deserializer())
    }
}
#[test]
fn pair_map_is_serialize() {
    use std::io;

    // Compile-time check that `PairMap` satisfies the `Serialize` bound
    // required by `Serializer::serialize`; the no-op serializer's
    // (always-`Err`) result is deliberately ignored.
    let map: PairMap<String, String> = PairMap::new();
    let _ = none::NoSerializer.serialize(&map, &mut io::sink());
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/macros/mod.rs | src/macros/mod.rs | //! Macros for Anterofit.
#[macro_use]
mod request;
/// Define a service trait whose methods make HTTP requests.
///
/// ##Example
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// pub type ApiToken = String;
///
/// service! {
/// pub trait MyService {
/// /// Get the version of this API.
/// fn api_version(&self) -> String {
/// GET("/version")
/// }
///
/// /// Register a new user with the API.
/// fn register(&self, username: &str, password: &str) {
/// POST("/register");
/// fields! {
/// username, password
/// }
/// }
///
/// /// Login an existing user with the API, returning the API token.
/// fn login(&self, username: &str, password: &str) -> ApiToken {
/// POST("/login");
/// fields! {
/// username, password
/// }
/// }
/// }
/// }
/// ```
///
/// ##Generics and `where` clauses
/// Both of these are supported; however, the Rust grammar must be changed slightly
/// so that they can be parsed and transformed properly by the `service!{}` macro without
/// making its implementation details too complex.
///
/// Put simply, use `[]` instead of `<>` to wrap your generic declarations,
/// and wrap your entire `where` clause, if present, with `[]`:
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// pub type ApiToken = String;
///
/// service! {
/// pub trait MyService {
/// /// Register a new user with the API.
/// fn register[U: ToString, P: ToString](&self, username: U, password: P) {
/// POST("/register");
/// fields! {
/// username, password
/// }
/// }
///
/// /// Login an existing user with the API.
/// fn login[U, P](&self, username: U, password: P) -> ApiToken
/// [where U: ToString, P: ToString] {
/// POST("/login");
/// fields! {
/// username, password
/// }
/// }
/// }
/// }
/// ```
///
/// ##Delegates
/// By default, every service trait declared with `service!{}` has a blanket-impl for
/// `T: anterofit::AbsAdapter`, which makes it most useful for the default use-case, where you're
/// using Anterofit to make HTTP requests within your application.
///
/// However, if you want to use Anterofit to create a library wrapping some REST API, such as [Github's](https://developer.github.com/v3/),
/// this blanket impl is not so useful as you will probably want to create your own wrapper for Anterofit's
/// adapter that always uses the correct base URL, serializer/deserializer, adds auth tokens, etc.
///
/// In this case, you can declare one or more delegate impls which will be used instead of the default
/// blanket impl; the only requirement of these delegate impl declarations is that they provide an
/// accessor for an underlying `AbsAdapter` implementation (which is only required to be
/// visible to the declaring module, allowing an opaque abstraction while using service traits
/// in a public API). The accessor is an expression that resolves to an `FnOnce()` closure
/// which is passed the `self` parameter, and is expected to return `&T` where `T: AbsAdapter`.
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # // This mess of cfg's is required to make sure this is a no-op when the `serde` feature is enabled.
/// # #[cfg(feature = "rustc-serialize")]
/// extern crate rustc_serialize;
/// # fn main() {}
/// # #[cfg(feature = "rustc-serialize")]
/// # mod only_rustc_serialize {
///
/// use anterofit::{Adapter, JsonAdapter, Url};
///
/// pub struct DelegateAdapter {
/// // Notice that this field remains private but due to visibility rules,
/// // the impls of `DelegatedService` still get to access it.
/// // This allows you to hide the adapter as an implementation detail.
/// inner: JsonAdapter
/// }
///
/// impl DelegateAdapter {
/// pub fn new() -> Self {
/// let adapter = Adapter::builder()
/// .serialize_json()
/// .base_url(Url::parse("https://myservice.com").unwrap())
/// .build();
///
/// DelegateAdapter {
/// inner: adapter,
/// }
/// }
/// }
///
/// // If using the `serde` feature, you would use `#[derive(Deserialize)]` instead
/// // and `extern crate serde;` at the crate root.
/// #[derive(RustcDecodable)]
/// pub struct Record {
/// pub id: u64,
/// pub title: String,
/// pub body: String,
/// }
///
/// service! {
/// pub trait DelegatedService {
/// /// Create a new record, returning the record ID.
/// fn create_record(&self, title: &str, body: &str) -> u64 {
/// POST("/record");
/// fields! { title, body }
/// }
///
/// /// Get an existing record by ID.
/// fn get_record(&self, record_id: u64) -> Record {
/// GET("/record/{}", record_id)
/// }
/// }
///
/// // This generates `impl DelegatedService for DelegateAdapter {}`
/// impl for DelegateAdapter {
/// // Closure parameter is just `&self` from the service method body.
/// |this| &this.inner
/// }
///
/// // Generics and `where` clauses are allowed in their usual positions, however `[]` is
/// // required in the same places as mentioned under the previous header.
/// impl[T] for T [where T: AsRef<DelegateAdapter>] {
/// |this| &this.as_ref().inner
/// }
///
/// // As shown here, multiple declarations are allowed as well.
/// }
/// # }
/// ```
// `service!{}` has four arms: (non-pub | pub) × (no delegates | explicit
// delegates). The first two arms just re-invoke the macro with the default
// blanket `AbsAdapter` delegate appended; the last two emit the trait
// (prototypes via `method_proto!`) and one impl per delegate declaration
// (via `delegate_impl!`).
#[cfg(not(feature = "service-attr"))]
#[macro_export]
macro_rules! service {
    // Non-pub trait, no delegates: add the default blanket delegate.
    (
        $(#[$meta:meta])*
        trait $servicenm:ident {
            $($guts:tt)*
        }
    ) => (
        service! {
            $(#[$meta])*
            trait $servicenm {
                $($guts)*
            }
            impl[T: $crate::AbsAdapter] for T {
                |this| this
            }
        }
    );
    // Pub trait, no delegates: same, preserving `pub`.
    (
        $(#[$meta:meta])*
        pub trait $servicenm:ident {
            $($guts:tt)*
        }
    ) => (
        service! {
            $(#[$meta])*
            pub trait $servicenm {
                $($guts)*
            }
            impl[T: $crate::AbsAdapter] for T {
                |this| this
            }
        }
    );
    // Non-pub trait with explicit delegate impls.
    (
        $(#[$meta:meta])*
        trait $servicenm:ident {
            $($guts:tt)*
        }
        $($delegates:tt)+
    ) => (
        $(#[$meta])*
        trait $servicenm {
            method_proto!($($guts)*);
        }
        delegate_impl!($servicenm; [$($guts)*] $($delegates)+);
    );
    // Pub trait with explicit delegate impls.
    (
        $(#[$meta:meta])*
        pub trait $servicenm:ident {
            $($guts:tt)*
        }
        $($delegates:tt)+
    ) => (
        $(#[$meta])*
        pub trait $servicenm {
            method_proto!($($guts)*);
        }
        delegate_impl!($servicenm; [$($guts)*] $($delegates)+);
    );
}
/// Create an implementation of `UnsizeService` for the given service trait.
///
/// Note that this only works with object-safe traits, and does *not* work with
/// traits that have delegate impls. Can be invoked with more than one name at a time.
#[macro_export]
macro_rules! unsizeable(
    ($($servicenm:ty),+) => (
        // Returning `adpt` unchanged relies on the unsizing coercion from
        // `Arc<A>` to `Arc<dyn Trait>`, which is why the trait must be
        // object-safe.
        $(impl ::anterofit::UnsizeService for $servicenm {
            fn from_adapter<A>(adpt: ::std::sync::Arc<A>) -> ::std::sync::Arc<Self>
            where A: ::anterofit::AbsAdapter {
                adpt
            }
        })*
    )
);
// Emits trait method *prototypes* from service-method declarations: the body
// is discarded and the declared return type is wrapped in `Request<...>`.
// `$(-> $ret:ty)*` captures zero-or-one return types; when omitted it expands
// to `Request<>` — NOTE(review): presumably `Request` has a default type
// parameter making that legal; confirm in `net::Request`.
#[doc(hidden)]
#[macro_export]
macro_rules! method_proto(
    // Plain declaration
    (
        $(#[$fnmeta:meta])*
        fn $fnname:ident (&self $($args:tt)*) $(-> $ret:ty)* {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname (&self $($args)*) -> $crate::net::Request<$($ret)*>;
        method_proto!($($rem)*);
    );
    // Generics (declared with `[]` instead of `<>`)
    (
        $(#[$fnmeta:meta])*
        fn $fnname:ident [$($generics:tt)+] (&self $($args:tt)*) $(-> $ret:ty)* {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname <$($generics)+> (&self $($args)*) -> $crate::net::Request<$($ret)*>;
        method_proto!($($rem)*);
    );
    // Where clause (wrapped in `[]`)
    (
        $(#[$fnmeta:meta])*
        fn $fnname:ident (&self $($args:tt)*) $(-> $ret:ty)* [where $($wheres:tt)+] {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname (&self $($args)*) -> $crate::net::Request<$($ret)*> where $($wheres)+ ;
        method_proto!($($rem)*);
    );
    // Generics + where clause
    (
        $(#[$fnmeta:meta])*
        fn $fnname:ident [$($generics:tt)+] (&self $($args:tt)*) $(-> $ret:ty)* [where $($wheres:tt)+] {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname <$($generics)+> (&self $($args)*) -> $crate::net::Request<$($ret)*> where $($wheres)+;
        method_proto!($($rem)*);
    );
    // Empty end case for recursion
    () => ();
);
// Emits trait method *implementations*: each declared body expands through
// `request_impl!`, and `$getadapt` (a closure expression from the delegate
// declaration) is used via `get_adapter` to obtain the adapter from `self`.
// The four arms mirror `method_proto!` (plain / generics / where / both).
#[doc(hidden)]
#[macro_export]
macro_rules! method_impl(
    // Plain declaration
    (
        $getadapt:expr;
        $(#[$fnmeta:meta])*
        fn $fnname:ident (&self $($args:tt)*) $(-> $ret:ty)* {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname (&self $($args)*) -> $crate::net::Request<$($ret)*> {
            request_impl! {
                $crate::get_adapter(self, $getadapt); $($body)+
            }
        }
        method_impl!($getadapt; $($rem)*);
    );
    // Generics
    (
        $getadapt:expr;
        $(#[$fnmeta:meta])*
        fn $fnname:ident [$($generics:tt)+] (&self $($args:tt)*) $(-> $ret:ty)* {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname <$($generics)+> (&self $($args)*) -> $crate::net::Request<$($ret)*> {
            request_impl! {
                $crate::get_adapter(self, $getadapt); $($body)+
            }
        }
        method_impl!($getadapt; $($rem)*);
    );
    // Where clause
    (
        $getadapt:expr;
        $(#[$fnmeta:meta])*
        fn $fnname:ident (&self $($args:tt)*) $(-> $ret:ty)* [where $($wheres:tt)+] {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname (&self $($args)*) -> $crate::net::Request<$($ret)*> where $($wheres)+ {
            request_impl! {
                $crate::get_adapter(self, $getadapt); $($body)+
            }
        }
        method_impl!($getadapt; $($rem)*);
    );
    // Generics + Where clause
    (
        $getadapt:expr;
        $(#[$fnmeta:meta])*
        fn $fnname:ident [$($generics:tt)+] (&self $($args:tt)*) $(-> $ret:ty)* [where $($wheres:tt)+] {
            $($body:tt)+
        }
        $($rem:tt)*
    ) => (
        $(#[$fnmeta])*
        fn $fnname <$($generics)+> (&self $($args)*) -> $crate::net::Request<$($ret)*> where $($wheres)+ {
            request_impl! {
                $crate::get_adapter(self, $getadapt); $($body)+
            }
        }
        method_impl!($getadapt; $($rem)*);
    );
    // Empty end-case for recursion
    ($_getadapt:expr; ) => ();
);
// Expands each delegate declaration from `service!{}` into a real
// `impl Trait for Type`, forwarding the trait's method bodies (the saved
// `$guts`) through `method_impl!` with that delegate's adapter accessor.
// Arms cover: plain, `[generics]`, `[where ...]`, and both.
#[doc(hidden)]
#[macro_export]
macro_rules! delegate_impl {
    // Plain `impl for Type`
    (
        $servicenm:ident; [$($guts:tt)*]
        impl for $delegate:ty {
            $getadapt:expr
        }
        $($rem:tt)*
    ) => (
        impl $servicenm for $delegate {
            method_impl!($getadapt; $($guts)*);
        }
        delegate_impl!($servicenm; [$($guts)*] $($rem)*);
    );
    // `impl [generics] for Type`
    (
        $servicenm:ident; [$($guts:tt)*]
        impl [$($decls:tt)*] for $delegate:ty {
            $getadapt:expr
        }
        $($rem:tt)*
    ) => (
        impl<$($decls)*> $servicenm for $delegate {
            method_impl!($getadapt; $($guts)*);
        }
        delegate_impl!($servicenm; [$($guts)*] $($rem)*);
    );
    // `impl for Type [where ...]`
    (
        $servicenm:ident; [$($guts:tt)*]
        impl for $delegate:ty [where $($wheres:tt)+]{
            $getadapt:expr
        }
        $($rem:tt)*
    ) => (
        impl $servicenm for $delegate where $($wheres)+ {
            method_impl!($getadapt; $($guts)*);
        }
        delegate_impl!($servicenm; [$($guts)*] $($rem)*);
    );
    // `impl [generics] for Type [where ...]`
    (
        $servicenm:ident; [$($guts:tt)*]
        impl [$($decls:tt)*] for $delegate:ty [where $($wheres:tt)+]{
            $getadapt:expr
        }
        $($rem:tt)*
    ) => (
        impl<$($decls)*> $servicenm for $delegate where $($wheres)+ {
            method_impl!($getadapt; $($guts)*);
        }
        delegate_impl!($servicenm; [$($guts)*] $($rem)*);
    );
    // Empty end-case for recursion
    ($servicenm:ident; [$($guts:tt)*]) => ();
}
/// Create a meta-service trait which combines the listed service traits.
///
/// This can be used as a shorthand to combine several service traits in generics
/// or trait objects, and then upcast at-will:
///
/// ```no_run
/// #[macro_use] extern crate anterofit;
///
/// use anterofit::Adapter;
///
/// service! {
/// trait FooService {
/// fn foo(&self) -> String {
/// GET("/foo")
/// }
/// }
/// }
///
/// service! {
/// trait BarService {
/// fn bar(&self, param: &str) {
/// POST("/bar");
/// query! { "param" => param }
/// }
/// }
/// }
///
/// meta_service! { trait BazService: FooService + BarService }
///
/// fn use_baz<T: BazService>(service: &T) {
/// service.foo().exec_here().unwrap();
/// service.bar("Hello, world!").exec_here().unwrap();
/// }
///
/// fn obj_baz(service: &BazService) {
/// service.foo().exec_here().unwrap();
/// service.bar("Hello, world!").exec_here().unwrap();
/// }
///
/// # fn main() {
/// // Useless adapter, just for demonstration
/// let adapter = Adapter::builder().build();
///
/// use_baz(&adapter);
/// obj_baz(&adapter);
/// # }
/// ```
#[macro_export]
macro_rules! meta_service (
    // Non-pub variant: empty combining trait + blanket impl for anything
    // implementing all the supertraits.
    (trait $metanm:ident : $($superr:tt)+ ) => (
        trait $metanm : $($superr)+ {}
        impl<T: $($superr)+> $metanm for T {}
    );
    // Pub variant.
    (pub trait $metanm:ident : $($superr:tt)+ ) => (
        pub trait $metanm : $($superr)+ {}
        impl<T: $($superr)+> $metanm for T {}
    );
);
// No-op macro to silence errors in IDEs when using `#[service]`
// NOTE(review): presumably the `#[service]` attribute consumes `delegate!`
// invocations itself — confirm against the attribute implementation.
#[cfg(feature = "service-attr")]
#[doc(hidden)]
#[macro_export]
macro_rules! delegate (
    ($($a:tt)*) => ()
);
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/macros/request.rs | src/macros/request.rs | /// A `try!()` macro replacement for service method bodies.
///
/// Instead of returning the error in a method returning `Result`,
/// this returns a `Request<T>` which will immediate return the error when it is executed;
/// no network or disk activity will occur.
// Expands to a `match` that, on `Err`, converts the error with `.into()` and
// early-returns an immediately-failed `Request` from the enclosing service
// method (whose return type is `Request<T>`, not `Result`).
#[macro_export]
#[doc(hidden)]
macro_rules! try_request (
    ($try:expr) => (
        match $try {
            Ok(val) => val,
            Err(e) => return $crate::net::Request::immediate(Err(e.into())),
        }
    )
);
// Builds the request URL: a bare expression passes through unchanged; with
// extra arguments it becomes a `format!` call.
#[macro_export]
#[doc(hidden)]
macro_rules! url (
    ($urlstr:expr) => (
        $urlstr
    );
    ($urlstr:expr, $($fmt:tt)+) => (
        format!($urlstr, $($fmt)+)
    );
);
// Shared expansion for service method bodies: creates a `RequestBuilder` for
// the verb + URL, applies each builder expression in order (aborting early
// through `try_request!` on error), then finalizes with `build()`.
#[macro_export]
#[doc(hidden)]
macro_rules! request_impl {
    ($adapter:expr; $method:ident($($urlpart:tt)+) $(; $buildexpr:expr)*) => ({
        use $crate::net::RequestBuilder;
        let builder = RequestBuilder::new(
            $adapter, http_verb!($method), url!($($urlpart)+).into()
        );
        $(
            // Each expression shadows `builder`, threading it through.
            let builder = try_request!(builder.apply($buildexpr));
        )*
        builder.build()
    })
}
/// Allows the inside expression to set a body on a request which doesn't regularly take one.
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
///
/// service! {
/// pub trait ForcedBodyService {
/// fn get_with_fields(&self) -> String {
/// // `GET` requests are not regularly allowed to take bodies;
/// // this is checked at compile time.
/// GET("/foo");
/// force_body! {
/// fields! {
/// "Hello" => "World"
/// }
/// }
/// }
/// }
/// }
/// ```
#[macro_export]
macro_rules! force_body {
    ($expr:expr) => (
        // Temporarily swap the builder to the `ForceBody` method marker
        // (which permits bodies), apply the inner builder expression, then
        // restore the original method before returning.
        move | builder | {
            let (builder, method) = builder.swap_method(::anterofit::net::method::ForceBody);
            builder.apply($expr).map(move |builder| builder.swap_method(method).0)
        }
    )
}
/// Serialize the given value as the request body.
///
/// Serialization will be performed on the adapter's executor, using the adapter's serializer,
/// when the request is submitted.
///
/// If the value is intended to be read directly as the request body, wrap it with `RawBody`.
///
/// By default, serialization will be done on the adapter's executor,
/// so the body type must be `Send + 'static`.
///
/// If you want to serialize borrowed values or other types which cannot be sent to other threads,
/// use the `EAGER:` contextual keyword, which will cause the body to be immediately serialized
/// on the current thread.
///
/// ## Overwrites Body
/// Setting a new body will overwrite any previous body on the request.
///
/// ## Disallowed verbs: `GET, DELETE`
/// `GET` and `DELETE` requests are generally not expected to have bodies. As an
/// anti-footgun, Anterofit does not allow bodies on these requests by default.
///
/// Wrap this invocation in `force_body!()` if you want to set a body anyways.
///
/// See [`net::method::TakesBody`](net/method/trait.TakesBody.html) for more details.
#[macro_export]
macro_rules! body (
    // Deferred form: serialization happens later (body must be Send+'static).
    ($body:expr) => (
        move | builder | Ok(builder.body($body))
    );
    // `EAGER:` form: serializes immediately on the current thread.
    (EAGER: $body:expr) => (
        move | builder | builder.body_eager($body)
    )
);
/// Serialize a series of key-value pairs as the request body.
///
/// The series will be serialized as if it were a map, like `HashMap` or `BTreeMap`
/// but with much less restrictive trait bounds: keys must be `Display` and values
/// must be `Serialize`; thus, keys will not be deduplicated
/// or appear in any different order than provided.
///
/// By default, serialization will be done on the adapter's executor,
/// so the key and value types must be `Send + 'static`.
///
/// If you want to serialize borrowed values or other types which cannot be sent to other threads,
/// use the `EAGER:` contextual keyword, which will cause the map to be immediately serialized on
/// the current thread.
///
/// ## Overwrites Body
/// Setting a new body will overwrite any previous body on the request.
///
/// ## Disallowed verbs: `GET, DELETE`
/// `GET` and `DELETE` requests are generally not expected to have bodies. As an
/// anti-footgun, Anterofit does not allow bodies on these requests by default.
///
/// Wrap this invocation in `force_body!()` if you want to set a body anyways.
///
/// See [`net::method::TakesBody`](net/method/trait.TakesBody.html) for more details.
#[macro_export]
macro_rules! body_map {
    ($($key:expr => $val:expr),+) => ({
        // Pairs are kept in insertion order; `PairMap` does not deduplicate
        // keys (see the doc comment above).
        let mut pairs = $crate::serialize::PairMap::new();
        $(
            pairs.insert($key, $val);
        )+;
        move |builder| Ok(builder.body(pairs))
    });
    // Same as above, but serialized immediately on the current thread.
    (EAGER: $($key:expr => $val:expr),+) => ({
        let mut pairs = $crate::serialize::PairMap::new();
        $(
            pairs.insert($key, $val);
        )+;
        move |builder| builder.body_eager(pairs)
    });
}
/// Serialize a series of fields as the request body (form-encode them).
///
/// Each field can be a key-value pair, or a single identifier. The key (field name) should be a
/// string literal, and the value can be anything that is `Display`.
///
/// For a single identifier, the identifier will be stringified for the field name, and its
/// value will become the field value:
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// service! {
/// pub trait RegisterService {
/// fn register(&self, username: &str, password: &str) {
/// POST("/register");
/// fields! {
/// "username" => username,
/// // Equivalent to "password" => password
/// password
/// }
/// }
/// }
/// }
/// ```
///
/// By default, this will serialize to a `www-form-urlencoded` body.
///
/// However, if you use the `path!()` or `stream!()` macros as a value expression,
/// it will transform the request to a `multipart/form-data` request.
///
/// ```rust,no_run
/// # #[macro_use] extern crate anterofit;
/// # fn main() {::anterofit::Adapter::builder().build().upload_file("file.txt".as_ref()).exec_here().unwrap()}
/// use std::path::Path;
///
/// service! {
/// pub trait UploadService {
/// fn upload_file(&self, file: &Path) {
/// POST("/upload");
/// fields! {
/// "file" => path!(file),
/// }
/// }
/// }
/// }
/// ```
///
/// In some server stacks (e.g. PHP), these would be called `POST` parameters.
///
/// ## Overwrites Body
/// Setting a new body will overwrite any previous body on the request.
///
/// ## Disallowed verbs: `GET, DELETE`
/// `GET` and `DELETE` requests are generally not expected to have bodies. As an
/// anti-footgun, Anterofit does not allow bodies on these requests by default.
///
/// Wrap this invocation in `force_body!()` if you want to set a body anyways.
///
/// See [`net::method::TakesBody`](net/method/trait.TakesBody.html) for more details.
#[macro_export]
macro_rules! fields {
    ($($key:expr $(=> $val:expr)*),*) => ({
        use $crate::net::body::EmptyFields;
        // Start from an empty collection and fold each `field!()` closure
        // over it; adding a file field upgrades the collection to multipart.
        let fields = EmptyFields;
        $(
            let fields = (field!($key, $($val)*)) (fields);
        )*;
        move |builder| Ok(builder.body(fields))
    });
    // Trailing-comma form: delegate to the arm above so the expansion logic
    // is defined in exactly one place instead of being duplicated.
    ($($key:expr $(=> $val:expr)*),*,) => (
        fields!($($key $(=> $val)*),*)
    );
}
#[doc(hidden)]
#[macro_export]
macro_rules! field {
    // Explicit key/value pair.
    ($key:expr, $val:expr) => (
        move |fields| $crate::net::body::AddField::add_to($val, $key, fields)
    );
    // Bare identifier: its stringified name becomes the field key.
    ($keyval:expr, ) => (
        move |fields| $crate::net::body::AddField::add_to($keyval, stringify!($keyval), fields)
    )
}
/// A field value for anything that is `Read + Send + 'static`.
///
/// Adding a stream field to the request will turn it into a `multipart/form-data` request
/// and treat it as a file field.
///
/// If given, the `filename` and `content_type` keys will be supplied with the request.
/// `filename` can be a borrowed or owned string, and `content_type` should be a `Mime`
/// value from the `mime` crate.
#[macro_export]
macro_rules! stream (
    // Each arm forwards to `FileField::from_stream`, filling the optional
    // filename / content-type slots as provided by the caller.
    ($stream:expr) => (
        $crate::net::body::FileField::from_stream($stream, None, None)
    );
    ($stream:expr, filename: $filename:expr) => (
        $crate::net::body::FileField::from_stream($stream, Some($filename), None)
    );
    ($stream:expr, content_type: $conttype:expr) => (
        $crate::net::body::FileField::from_stream($stream, None, Some($conttype))
    );
    ($stream:expr, filename: $filename:expr, content_type: $conttype:expr) => (
        $crate::net::body::FileField::from_stream($stream, Some($filename), Some($conttype))
    );
);
/// A field value that resolves to a path on the filesystem.
///
/// The value can be anything that implements `Into<PathBuf>`, such as `&Path` or `&str`.
///
/// This will make the request into a `multipart/form-data` request and upload the file
/// that this path points to.
///
/// The filename and `Content-Type` header to be supplied with the field will be inferred from
/// the file name and extension, respectively.
///
/// To supply these values yourself, and/or your own opened file handle, see the `stream!()` macro.
#[macro_export]
macro_rules! path (
    ($path:expr) => (
        // `from_path` accepts anything `Into<PathBuf>` (`&Path`, `&str`, ...).
        $crate::net::body::FileField::from_path($path)
    )
);
/// Append a series of query pairs to the URL of the request.
///
/// `$key` and `$val` can be anything that is `Display`; neither `Send` nor `'static` is required.
///
/// Can be invoked multiple times. Duplicate keys are left to be handled by the server.
///
/// In some server stacks (e.g. PHP), these would be called `GET` parameters.
#[macro_export]
macro_rules! query {
    ($($key:expr => $val:expr),+) => (
        |mut builder| {
            // Coerce both sides to `Display` trait objects so heterogeneous
            // key/value types can share one slice.
            builder.head_mut().query(&[
                $((&$key as &::std::fmt::Display, &$val as &::std::fmt::Display)),+
            ]);
            Ok(builder)
        }
    )
}
/// Use in a service method body to perform an arbitrary transformation on the builder.
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// use anterofit::RawBody;
///
/// service! {
/// trait MyService {
/// fn send_whatever(&self) {
/// POST("/whatever");
/// // `move` and `mut` are allowed in their expected positions as well
/// map_builder!(|builder| builder.body(RawBody::text("Hello, world!")))
/// }
/// }
/// }
/// ```
///
/// You can even use `try!()` as long as the error type is convertible to `anterofit::Error`:
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// use anterofit::RawBody;
/// use std::fs::File;
///
/// service! {
/// trait MyService {
/// fn put_log_file(&self) {
/// PUT("/log");
/// map_builder!(|builder| {
/// let logfile = try!(File::open("/etc/log"));
/// builder.body(RawBody::new(logfile, None))
/// })
/// }
/// }
/// }
/// ```
///
/// If you just want to return a `Result<RequestBuilder>`, use a bare closure in your service
/// method body.
#[macro_export]
macro_rules! map_builder {
    // Four arms cover the `move` / `mut` combinations of the closure header;
    // each wraps the resulting builder expression in `Ok`.
    (|$builder:ident| $expr:expr) => (
        |$builder| Ok($expr)
    );
    (move |$builder:ident| $expr:expr) => (
        move |$builder| Ok($expr)
    );
    (|mut $builder:ident| $expr:expr) => (
        |mut $builder| Ok($expr)
    );
    (move |mut $builder:ident| $expr:expr) => (
        move |mut $builder| Ok($expr)
    );
}
/// Use in a service body to access the builder without consuming it.
///
/// The expression can resolve to anything, as the result is silently discarded.
///
/// ```rust
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// service! {
/// trait MyService {
/// fn get_whatever(&self) {
/// GET("/whatever");
/// with_builder!(|builder| println!("Request: {:?}", builder.head()))
/// }
/// }
/// }
/// ```
///
/// You can even use `try!()` as long as the error type is convertible to `anterofit::Error`:
///
/// ```rust,no_run
/// # #[macro_use] extern crate anterofit;
/// # fn main() {}
/// use std::fs::OpenOptions;
/// // Required for `write!()`
/// use std::io::Write;
///
/// service! {
/// trait MyService {
/// fn get_whatever(&self) {
/// GET("/whatever");
/// with_builder!(|builder| {
/// let mut logfile = try!(OpenOptions::new()
/// .append(true).create(true).open("/etc/log"));
/// try!(write!(logfile, "Request: {:?}", builder.head()));
/// })
/// }
/// }
/// }
/// ```
///
/// (In practice, logging requests should probably be done in an `Interceptor` instead;
/// this is merely an example demonstrating a plausible use-case.)
#[macro_export]
macro_rules! with_builder {
    // Four arms cover the `move` / `mut` combinations of the closure header.
    // Each evaluates `$expr` for its side effects (discarding the value) and
    // passes the builder through untouched.
    (|$builder:ident| $expr:expr) => (
        |$builder| {
            let _ = $expr;
            Ok($builder)
        }
    );
    (move |$builder:ident| $expr:expr) => (
        // Fixed: the expansion was missing the closing `|` of the closure
        // header (`move |$builder {`), which made this arm a syntax error
        // whenever it was used.
        move |$builder| {
            let _ = $expr;
            Ok($builder)
        }
    );
    (|mut $builder:ident| $expr:expr) => (
        |mut $builder| {
            let _ = $expr;
            Ok($builder)
        }
    );
    (move |mut $builder:ident| $expr:expr) => (
        // Fixed: this arm dropped the `move` keyword from its expansion, so
        // `move |mut b| ...` silently produced a non-`move` closure.
        move |mut $builder| {
            let _ = $expr;
            Ok($builder)
        }
    );
}
#[doc(hidden)]
#[macro_export]
macro_rules! http_verb {
(GET) => ($crate::net::method::Get);
(POST) => ($crate::net::method::Post);
(PUT) => ($crate::net::method::Put);
(PATCH) => ($crate::net::method::Patch);
(DELETE) => ($crate::net::method::Delete);
} | rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/call.rs | src/net/call.rs | use futures::{Future, Canceled, Complete, Oneshot, Async, Poll};
use futures::executor::{self, Unpark, Spawn};
use ::{Result, Error};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use error::RequestPanicked;
use super::request::RequestHead;
/// A handle representing a pending result to an executed request.
///
/// May be polled for its status (compatible with `futures`) or blocked on.
///
/// Depending on the stage of the request, this may return immediately.
#[must_use = "Result of request is unknown unless polled for"]
//#[derive(Debug)]
pub struct Call<T> {
    // Where the result currently lives: pending future, ready value, or gone.
    state: CallState<T>,
    // Readiness flag flipped by the `Unpark` impl when the future signals
    // readiness; lets `is_available()` answer without consuming a poll.
    notify: Arc<Notify>,
}
// Tracks where the call's result currently lives.
enum CallState<T> {
    // Still pending on the spawned oneshot receiver.
    Waiting(CallFuture<T>),
    // Result was produced up-front (see `immediate()` below), no executor involved.
    Immediate(Result<T>),
    // Result has already been handed to the caller.
    Taken
}
// The receiving half of the result channel, wrapped in `Spawn` so it can be
// polled with an explicit `Unpark` (no ambient futures task required).
type CallFuture<T> = Spawn<Oneshot<Result<T>>>;
impl<T> Call<T> {
    /// Ignore the result of this call.
    ///
    /// Equivalent to `let _ = self` but more friendly for method-chaining.
    pub fn ignore(self) {}
    /// Ignore the result of this call, returning `Ok(())` so it can be used
    /// in a `try!()`.
    pub fn ignore_ok(self) -> Result<()> { Ok(()) }
    /// Block on this call until a result is available.
    ///
    /// Depending on the stage of the request, this may return immediately.
    /// Call `is_immediate()` check for this if you want.
    // NOTE(review): no `is_immediate()` method exists on this type in this
    // file — the doc likely means `is_available()`; confirm and fix the doc.
    pub fn block(self) -> Result<T> {
        self.wait()
    }
    /// Poll this call for a result.
    ///
    /// Convenience wrapper for `poll_no_task()` which doesn't use types from `futures`.
    ///
    /// Returns `None` in two cases:
    ///
    /// * The result is not ready yet
    /// * The result has already been taken
    pub fn check(&mut self) -> Option<Result<T>> {
        match self.poll_no_task() {
            Ok(Async::Ready(val)) => Some(Ok(val)),
            // Not-ready and already-taken both map to `None`, per the doc above.
            Ok(Async::NotReady) | Err(Error::ResultTaken) => None,
            Err(e) => Some(Err(e))
        }
    }
    /// Return `true` if a result is available
    /// (a call to `check()` will return the result).
    pub fn is_available(&self) -> bool {
        // Immediate results are available by construction; otherwise consult
        // the flag set by the `Unpark` notification.
        if let CallState::Immediate(_) = self.state {
            true
        } else {
            self.notify.check()
        }
    }
    /// Returns `true` if the result has already been taken.
    pub fn result_taken(&self) -> bool {
        if let CallState::Taken = self.state {
            true
        } else {
            false
        }
    }
    /// Poll the inner future without requiring a task.
    ///
    /// You can call `is_available()` to check readiness.
    pub fn poll_no_task(&mut self) -> Poll<T, Error> {
        // Supply our own `Unpark` impl instead of relying on an ambient
        // futures task, so this is safe to call from anywhere.
        let notify = self.notify.clone();
        self.poll_by(move |fut| fut.poll_future(notify))
    }
    // Shared polling logic: `poll` drives the waiting future; the other two
    // states are resolved without touching it.
    fn poll_by<F>(&mut self, poll: F) -> Poll<T, Error>
    where F: FnOnce(&mut CallFuture<T>) -> Poll<Result<T>, Canceled> {
        match self.state {
            CallState::Waiting(ref mut future) => return map_poll(poll(future)),
            CallState::Taken => return Err(Error::ResultTaken),
            _ => (),
        }
        // Only `Immediate` reaches this point: swap `Taken` into place and
        // hand the stored result out by value.
        if let CallState::Immediate(res) = mem::replace(&mut self.state, CallState::Taken) {
            res.map(Async::Ready)
        } else {
            unreachable!();
        }
    }
}
impl<T> Future for Call<T> {
    type Item = T;
    type Error = Error;
    /// ### Panics
    /// If the current thread is not running a futures task.
    ///
    /// Use `poll_no_task()` instead if you want to poll outside of a task.
    fn poll(&mut self) -> Poll<T, Error> {
        // `get_mut()` bypasses `Spawn`'s unpark plumbing and polls the inner
        // oneshot directly, which requires an ambient futures task (see Panics).
        self.poll_by(|fut| fut.get_mut().poll())
    }
}
// One-way readiness flag shared between `Call` and the polling machinery.
#[derive(Default)]
struct Notify(AtomicBool);
impl Notify {
    // True once `unpark()` has been called at least once.
    fn check(&self) -> bool {
        self.0.load(Ordering::Relaxed)
    }
}
impl Unpark for Notify {
    fn unpark(&self) {
        // Relaxed on both store and load: the flag carries no data besides
        // its own value, so no additional ordering is established here.
        self.0.store(true, Ordering::Relaxed);
    }
}
/// Implementation detail
pub fn oneshot<T>(head: Option<RequestHead>) -> (PanicGuard<T>, Call<T>) {
    // Pair the sending half (wrapped in the panic guard so the head is
    // reported if the request code panics) with a `Call` polling the
    // receiving half of the channel.
    let (tx, rx) = ::futures::oneshot();
    let call = Call {
        state: CallState::Waiting(executor::spawn(rx)),
        notify: Default::default(),
    };
    let guard = PanicGuard {
        head: head,
        tx: Some(tx),
    };
    (guard, call)
}
/// Implementation detail
pub fn immediate<T>(res: Result<T>) -> Call<T> {
    // The result is already known: store it directly, bypassing the
    // channel/executor machinery entirely.
    let state = CallState::Immediate(res);
    Call {
        state: state,
        notify: Default::default(),
    }
}
/// Sends the request head on panic.
pub struct PanicGuard<T> {
    // Taken by `drop()` to build the `RequestPanicked` error.
    head: Option<RequestHead>,
    // Consumed by the first `complete()`; later calls are no-ops.
    tx: Option<Complete<Result<T>>>,
}
impl<T> PanicGuard<T> {
    /// Get a mutable reference to the request head.
    pub fn head_mut(&mut self) -> &mut RequestHead {
        self.head.as_mut().expect("PanicGuard::head was None")
    }
    /// Send a result, which will prevent the head being sent on-panic.
    pub fn complete(&mut self, res: Result<T>) {
        if let Some(tx) = self.tx.take() {
            // The receiver may already be gone; a failed send is fine.
            let _ = tx.send(res);
        }
    }
}
impl<T> Drop for PanicGuard<T> {
    fn drop(&mut self) {
        // On drop (including unwinding after a panic), try to deliver a
        // `RequestPanicked` error carrying the head; this is a no-op if a
        // result was already sent, since `complete()` consumed `tx`.
        if let Some(head) = self.head.take() {
            self.complete(Err(RequestPanicked(head).into()));
        }
    }
}
// Flatten the two error layers: cancellation of the oneshot and the error
// result carried inside it both become `::Error`.
fn map_poll<T>(poll: Poll<Result<T>, Canceled>) -> Poll<T, Error> {
    match poll {
        Ok(Async::Ready(Ok(val))) => Ok(Async::Ready(val)),
        Ok(Async::Ready(Err(e))) => Err(e),
        Ok(Async::NotReady) => Ok(Async::NotReady),
        Err(cancel) => Err(cancel.into()),
    }
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/response.rs | src/net/response.rs | //! Types concerning the responses from REST calls.
pub use hyper::client::Response;
use std::io::{self, Read};
use serialize::{Deserialize, Deserializer};
use ::Result;
/// A trait describing types which can be converted from raw response bodies.
///
/// Implemented for `T: Deserialize + Send + 'static`.
///
/// Use `response::Raw` if you just want the response body, or `WithRaw` or `TryWithRaw`
/// if you want the response body and the deserialized value.
pub trait FromResponse: Send + Sized + 'static {
    /// Deserialize or otherwise convert an instance of `Self` from `response`.
    // The response is taken by value so implementors may keep it (see
    // `Raw` / `WithRaw` / `TryWithRaw` below).
    fn from_response<D>(des: &D, response: Response) -> Result<Self>
        where D: Deserializer;
}
// Blanket impl: any deserializable, sendable type can be produced straight
// from the response body via the adapter's deserializer.
impl<T> FromResponse for T where T: Deserialize + Send + 'static {
    fn from_response<D>(des: &D, mut response: Response) -> Result<Self>
        where D: Deserializer {
        des.deserialize(&mut response)
    }
}
/// Wrapper for `hyper::client::Response`.
///
/// Use this as a service method return type when you want to just get the raw response body from
/// a REST call.
///
/// Implements `Read` and `Into<hyper::client::Response>`.
pub struct Raw(pub Response);
// Unwrap the raw response. Implemented as `From<Raw> for Response` (rather
// than a manual `Into` impl) so the standard library's blanket impl provides
// `Into<Response> for Raw` automatically — existing `raw.into()` call sites
// keep working, and the idiomatic direction is implemented.
impl From<Raw> for Response {
    fn from(raw: Raw) -> Response {
        raw.0
    }
}
impl Read for Raw {
    // Delegates straight to the underlying hyper response stream.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.0.read(buf)
    }
}
impl FromResponse for Raw {
    /// Simple wrapping operation; infallible.
    fn from_response<D>(_des: &D, response: Response) -> Result<Self>
        where D: Deserializer {
        // The deserializer is deliberately unused: the caller gets the raw body.
        Ok(Raw(response))
    }
}
/// Wrapper for the parsed response value along with the raw response.
///
/// Use this as a service method return type when you want to inspect the response
/// after the true return value has been deserialized.
pub struct WithRaw<T> {
    /// The raw `hyper::client::Response` instance.
    ///
    /// ### Note
    /// The deserializer will likely have already read to the end of the HTTP stream. Use `Raw`
    /// if you want to read the response yourself.
    pub raw: Response,
    /// The deserialized value.
    pub value: T,
}
impl<T> FromResponse for WithRaw<T> where T: Deserialize + Send + 'static {
    fn from_response<D>(des: &D, mut response: Response) -> Result<Self>
        where D: Deserializer {
        // Deserialization errors propagate: the raw response is only returned
        // when the value parsed successfully (contrast with `TryWithRaw`).
        let val = try!(des.deserialize(&mut response));
        Ok(WithRaw {
            raw: response,
            value: val
        })
    }
}
/// Wrapper for the deserialization result along with the raw response.
///
/// Use this as a service method return type if you want the raw response whether
/// or not deserialization of the true return type succeeded.
pub struct TryWithRaw<T> {
    /// The raw `hyper::client::Response` instance.
    ///
    /// ### Note
    /// The deserializer will likely have already read to the end of the HTTP stream. Use `Raw`
    /// if you want to read the response yourself.
    pub raw: Response,
    /// The result of attempting to deserialize the value.
    pub result: Result<T>,
}
impl<T> FromResponse for TryWithRaw<T> where T: Deserialize + Send + 'static {
    fn from_response<D>(des: &D, mut response: Response) -> Result<Self>
        where D: Deserializer {
        // Unlike `WithRaw`, a failed deserialization is captured in `result`
        // instead of being propagated, so the caller always gets the response.
        let res = des.deserialize(&mut response);
        Ok(TryWithRaw {
            raw: response,
            result: res,
        })
    }
}
impl<T> Into<Result<T>> for TryWithRaw<T> {
fn into(self) -> Result<T> {
self.result
}
} | rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/mod.rs | src/net/mod.rs | //! Anterofit's HTTP client framework, built on Hyper.
//!
//! Works standalone, but designed to be used with the `service!{}` macro.
pub use hyper::method::Method;
pub use hyper::header::Headers;
pub use hyper::header;
pub use self::intercept::{Interceptor, Chain};
pub use self::call::Call;
pub use self::request::{RequestHead, RequestBuilder, Request};
pub use self::response::{FromResponse, Raw as RawResponse};
pub mod body;
mod call;
pub mod intercept;
pub mod method;
pub mod request;
pub mod response; | rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/method.rs | src/net/method.rs | //! Strongly typed HTTP methods and their traits
macro_rules! method (
    // Generates a unit struct named after the HTTP verb plus a `Method` impl
    // mapping it to the matching `hyper` enum variant of the same name.
    ($(#[$meta:meta])* pub struct $method:ident) => (
        $(#[$meta])*
        #[derive(Debug)]
        pub struct $method;
        impl Method for $method {
            fn to_hyper(&self) -> ::hyper::method::Method {
                ::hyper::method::Method::$method
            }
        }
    );
    // Batch form: one declaration per semicolon-separated item.
    ($($(#[$meta:meta])* pub struct $method:ident);+;) =>(
        $(method!($(#[$meta])* pub struct $method);)+
    )
);
macro_rules! takes_body (
    // Marks a method type as allowed to carry a request body.
    ($method:ident) => (
        impl TakesBody for $method {}
    );
    ($($method:ident),+) => (
        $(takes_body!($method);)+
    );
);
method! {
    // Each declaration below expands to a unit struct plus a `Method` impl
    // (see the `method!` macro above).
    /// Method for `GET` requests.
    ///
    /// ### Note: Body
    /// While `GET` requests are not forbidden to have a body by the HTTP spec,
    /// it is not meaningful to provide a body with a `GET` request and any endpoint
    /// that expects a body with such a request is considered non-conformant.
    pub struct Get;
    /// Method for `POST` requests, can take a body.
    pub struct Post;
    /// Method for `PUT` requests, can take a body.
    pub struct Put;
    /// Method for `PATCH` requests, can take a body.
    pub struct Patch;
    /// Method for `DELETE` requests.
    ///
    /// ### Note: Body
    /// While `DELETE` requests are not forbidden to have a body by the HTTP spec,
    /// it is not meaningful to provide a body with a `DELETE` request and any endpoint
    /// that expects a body with such a request is considered non-conformant.
    pub struct Delete;
}
#[doc(hidden)]
pub struct ForceBody;
impl Method for ForceBody {
    fn to_hyper(&self) -> ::hyper::method::Method {
        // `ForceBody` is a placeholder used by `force_body!()` to satisfy the
        // `TakesBody` bound; it must be swapped back for the real method
        // before the request is converted for sending.
        panic!("`ForceBody` is not an actual HTTP method; you must have caught a panic \
        during a `force_body!()` invocation and continued outside.")
    }
}
// `ForceBody` joins the real body-capable verbs here — that is what lets
// `force_body!()` bypass the `GET`/`DELETE` restriction.
takes_body! { Post, Put, Patch, ForceBody }
/// The HTTP method of a request in Anterofit.
// Implemented for the unit structs generated by the `method!` macro above,
// plus the internal `ForceBody` placeholder.
pub trait Method {
    /// Convert to Hyper's `Method` enum.
    fn to_hyper(&self) -> ::hyper::method::Method;
}
/// Trait describing an HTTP method which is allowed to have a body.
///
/// ### Motivation
/// Though not forbidden in the HTTP spec, `GET` and `DELETE` requests are generally not
/// meant to have bodies, and it is recommended for servers to ignore bodies on such
/// requests ([IETF RFC 2616 (HTTP 1.1 Spec), Section 4.3][rfc2616-4.3]).
///
/// Thus, this trait acts as a strongly typed anti-footgun for when you specified a
/// `GET` or `DELETE` request when you actually meant `POST` or `PUT` or `PATCH`.
///
/// If you must have a body on a `GET` or `DELETE` request, you can use the
/// `force_body!()` macro to override this protection.
///
/// [rfc2616-4.3]: https://tools.ietf.org/html/rfc2616#section-4.3
pub trait TakesBody {} | rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/body.rs | src/net/body.rs | //! Types that can be serialized to bodies of HTTP requests.
use serialize::{Serialize, Serializer};
use mime::{self, Mime};
use serialize::PairMap;
type Multipart = ::multipart::client::lazy::Multipart<'static, 'static>;
type PreparedFields = ::multipart::client::lazy::PreparedFields<'static>;
use url::form_urlencoded::Serializer as FormUrlEncoder;
use std::borrow::Borrow;
use std::fmt;
use std::io::{self, Cursor, Read};
use std::path::PathBuf;
use ::Result;
/// The result type for `Body::into_readable()`.
pub type ReadableResult<T> = Result<Readable<T>>;
/// The result of serializing the request body, ready to be sent over the network.
#[derive(Debug)]
pub struct Readable<R> {
    /// The inner `Read` impl which will be copied into the request body.
    pub readable: R,
    /// The MIME type of the request body, if applicable.
    pub content_type: Option<Mime>,
    // Throwaway private field for backwards compatibility.
    // (Also prevents struct-literal construction outside this module.)
    _private: (),
}
impl<R: Read> Readable<R> {
    /// Create a new `Readable` wrapped in `::Result::Ok` for convenience.
    pub fn new_ok<C: Into<Option<Mime>>>(readable: R, content_type: C) -> Result<Self> {
        Ok(Self::new(readable, content_type))
    }
    /// Create a new `Readable` with the given `Read` and MIME type (can be an `Option` or a bare
    /// `Mime` value).
    pub fn new<C: Into<Option<Mime>>>(readable: R, content_type: C) -> Self {
        Readable {
            readable: readable,
            content_type: content_type.into(),
            _private: (),
        }
    }
}
/// A trait describing a type which can be serialized into a request body.
///
/// Implemented for `T: Serialize + Send + 'static`.
pub trait Body: Send + 'static {
    /// The readable request body.
    type Readable: Read + 'static;
    /// Serialize `self` with the given adapter into a request body.
    fn into_readable<S>(self, ser: &S) -> ReadableResult<Self::Readable>
        where S: Serializer;
}
// Any `EagerBody` that is also sendable works as a deferred `Body`; the
// serialization logic itself is shared.
impl<B: EagerBody + Send + 'static> Body for B {
    type Readable = <B as EagerBody>::Readable;
    fn into_readable<S>(self, ser: &S) -> ReadableResult<Self::Readable> where S: Serializer {
        <B as EagerBody>::into_readable(self, ser)
    }
}
/// A trait describing a type which can be serialized into a request body.
///
/// Implemented for `T: Serialize + Send + 'static`.
pub trait EagerBody {
    /// The readable request body.
    type Readable: Read + Send + 'static;
    /// Serialize `self` with the given adapter into a request body.
    fn into_readable<S>(self, ser: &S) -> ReadableResult<Self::Readable>
        where S: Serializer;
}
// Serializable values are buffered into an in-memory cursor using the
// adapter's serializer and its advertised content type.
impl<B: Serialize> EagerBody for B {
    type Readable = Cursor<Vec<u8>>;
    fn into_readable<S>(self, ser: &S) -> ReadableResult<Self::Readable> where S: Serializer {
        let mut buf = Vec::new();
        try!(ser.serialize(&self, &mut buf));
        Readable::new_ok(Cursor::new(buf), ser.content_type())
    }
}
/// A wrapper around a type that is intended to be read directly as the request body,
/// instead of being serialized.
#[derive(Debug)]
pub struct RawBody<R>(Readable<R>);
impl<R: Read> RawBody<R> {
    /// Wrap a `Read` type and a content-type
    pub fn new<C: Into<Option<Mime>>>(readable: R, content_type: C) -> Self {
        RawBody(Readable::new(readable, content_type))
    }
}
impl<T: AsRef<[u8]>> RawBody<Cursor<T>> {
    /// Wrap anything `Cursor` can work with (such as `String` or `Vec<u8>`) as a raw request body.
    ///
    /// Assumes `application/octet-stream` as the content-type.
    pub fn bytes(bytes: T) -> Self {
        RawBody::new(Cursor::new(bytes), mime::octet_stream())
    }
    /// Wrap anything `Send + 'static` that can deref to `str`
    /// (`String`, `&'static str`, `Box<str>`, etc)
    /// as a plain text body.
    ///
    /// Assumes `text/plain; charset=utf8` as the content-type.
    pub fn text(text: T) -> Self where T: Borrow<str> {
        RawBody::new(Cursor::new(text), mime::text_plain_utf8())
    }
}
impl RawBody<Cursor<String>> {
    /// Convert the `ToString` value to a `String` and wrap it.
    ///
    /// Assumes `text/plain; charset=utf8` as the content-type.
    pub fn display<T: ToString>(text: &T) -> Self {
        RawBody::text(text.to_string())
    }
}
impl RawBody<Cursor<Vec<u8>>> {
    /// Use the serializer in `adapter` to serialize `val` as a raw body immediately.
    pub fn serialize_now<S, T>(ser: &S, val: &T) -> Result<Self>
        where S: Serializer, T: Serialize {
        let mut buf: Vec<u8> = Vec::new();
        try!(ser.serialize(val, &mut buf));
        Ok(RawBody::new(Cursor::new(buf), ser.content_type()))
    }
}
// `RawBody` skips serialization entirely: the wrapped reader *is* the body.
impl<R: Read + Send + 'static> EagerBody for RawBody<R> {
    type Readable = R;
    fn into_readable<S>(self, _ser: &S) -> ReadableResult<Self::Readable> where S: Serializer {
        Ok(self.0)
    }
}
impl<R> From<Readable<R>> for RawBody<R> {
    fn from(readable: Readable<R>) -> Self {
        RawBody(readable)
    }
}
/// Helps save some imports and typing.
pub type RawBytesBody = RawBody<Cursor<Vec<u8>>>;
/// A builder trait describing collections of key-value pairs to be serialized into a request body.
pub trait Fields {
    /// The type that results from adding a text field; may or may not change depending on the
    /// initial type.
    type WithText: Fields;
    /// Add a key/text-value pair to this fields collection, returning the resulting type.
    fn with_text<K: ToString, V: ToString>(self, key: K, val: V) -> Self::WithText;
    /// Add a key/file-value pair to this fields collection, returning the resulting type.
    // Adding a file always yields `MultipartFields`: file fields require a
    // multipart request.
    fn with_file<K: ToString>(self, key: K, file: FileField) -> MultipartFields;
}
/// An empty fields collection, will serialize to nothing.
#[derive(Debug)]
pub struct EmptyFields;
impl Fields for EmptyFields {
    type WithText = TextFields;
    fn with_text<K: ToString, V: ToString>(self, key: K, val: V) -> TextFields {
        TextFields::new().with_text(key, val)
    }
    fn with_file<K: ToString>(self, key: K, file: FileField) -> MultipartFields {
        MultipartFields::new().with_file(key, file)
    }
}
impl Body for EmptyFields {
    type Readable = io::Empty;
    fn into_readable<S>(self, _ser: &S) -> ReadableResult<Self::Readable>
        where S: Serializer {
        // No fields: empty body and no content type.
        Readable::new_ok(io::empty(), None)
    }
}
/// A collection of key-string value pairs to be serialized as fields in the request.
///
/// Will be serialized as form/percent-encoded pairs.
#[derive(Debug)]
pub struct TextFields(PairMap<String, String>);
impl TextFields {
    fn new() -> TextFields {
        TextFields(PairMap::new())
    }
    fn push<K: ToString, V: ToString>(&mut self, key: K, val: V) {
        self.0.insert(key.to_string(), val.to_string());
    }
}
impl Fields for TextFields {
    type WithText = Self;
    // Text-only collections stay text-only until a file field is added.
    fn with_text<K: ToString, V: ToString>(mut self, key: K, val: V) -> Self {
        self.push(key, val);
        self
    }
    fn with_file<K: ToString>(self, key: K, file: FileField) -> MultipartFields {
        MultipartFields::from_text(self).with_file(key, file)
    }
}
impl Body for TextFields {
    type Readable = Cursor<String>;
    fn into_readable<S>(self, _ser: &S) -> ReadableResult<Self::Readable> where S: Serializer {
        // URL-encode all pairs into a single in-memory string.
        let readable = Cursor::new(
            FormUrlEncoder::new(String::new())
                .extend_pairs(self.0.into_pairs())
                .finish()
        );
        Readable::new_ok(readable, mime::form_urlencoded())
    }
}
/// A collection of key-value pairs where the values may be string fields or file fields.
///
/// Will be serialized as a `multipart/form-data` request.
#[derive(Debug)]
pub struct MultipartFields {
    text: PairMap<String, String>,
    files: PairMap<String, FileField>,
}
impl MultipartFields {
    fn new() -> Self {
        Self::from_text(TextFields::new())
    }
    // Upgrade a text-only collection, preserving its accumulated pairs.
    fn from_text(text: TextFields) -> Self {
        MultipartFields {
            text: text.0,
            files: PairMap::new(),
        }
    }
}
impl Fields for MultipartFields {
    type WithText = Self;
    fn with_text<K: ToString, V: ToString>(mut self, key: K, val: V) -> Self::WithText {
        self.text.insert(key.to_string(), val.to_string());
        self
    }
    fn with_file<K: ToString>(mut self, key: K, file: FileField) -> MultipartFields {
        self.files.insert(key.to_string(), file);
        self
    }
}
impl Body for MultipartFields {
    type Readable = PreparedFields;
    fn into_readable<S>(self, _ser: &S) -> ReadableResult<Self::Readable> where S: Serializer {
        use self::FileField_::*;
        let mut multipart = Multipart::new();
        // Text fields are added first, then file fields.
        for (key, val) in self.text.into_pairs() {
            multipart.add_text(key, val);
        }
        for (key, file) in self.files.into_pairs() {
            match file.0 {
                Stream {
                    stream,
                    filename,
                    content_type
                } => {
                    // Dispatch through `StreamField` so the boxed reader can
                    // add itself by value.
                    stream.add_self(key, filename, content_type, &mut multipart);
                },
                Path(path) => {
                    multipart.add_file(key, path);
                }
            }
        }
        let prepared = try!(multipart.prepare());
        // The multipart boundary becomes part of the content type.
        let content_type = mime::formdata(prepared.boundary());
        Readable::new_ok(prepared, content_type)
    }
}
/// A file field, can be a generic `Read` impl or a `Path`.
pub struct FileField(FileField_);
impl FileField {
    /// Wrap a `Read` impl with an optional filename and MIME type to be serialized as a file field.
    pub fn from_stream<S: Read + Send + 'static>(stream: S, filename: Option<String>, content_type: Option<Mime>) -> Self {
        FileField(FileField_::Stream {
            stream: Box::new(stream),
            filename: filename,
            content_type: content_type
        })
    }
    /// Wrap a `Path` to be serialized as a file field, inferring its filename and MIME type.
    pub fn from_path<P: Into<PathBuf>>(path: P) -> Self {
        FileField(FileField_::Path(path.into()))
    }
}
impl fmt::Debug for FileField {
    // Manual impl because the boxed stream cannot derive `Debug`; a
    // placeholder string stands in for the stream field.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.0 {
            FileField_::Stream {
                ref content_type, ref filename, ..
            } => f.debug_struct("FileField::Stream")
                .field("stream", &"Box<Read + Send + 'static>")
                .field("content_type", &content_type)
                .field("filename", filename)
                .finish(),
            FileField_::Path(ref path) =>
                f.debug_tuple("FileField::Path").field(path).finish()
        }
    }
}
// Private representation, kept out of the public API.
enum FileField_ {
    Stream {
        stream: Box<StreamField>,
        filename: Option<String>,
        content_type: Option<Mime>,
    },
    Path(PathBuf),
}
// Helper trait so a boxed reader can add itself to the multipart writer
// by value.
trait StreamField: Read + Send + 'static {
    fn add_self(self: Self, name: String, filename: Option<String>, content_type: Option<Mime>, to: &mut Multipart);
}
impl<T> StreamField for T where T: Read + Send + 'static {
    fn add_self(self: Self, name: String, filename: Option<String>, content_type: Option<Mime>, to: &mut Multipart) {
        to.add_stream(name, self, filename, content_type);
    }
}
#[doc(hidden)]
// Used by the `field!()` macro to add a value of either kind to a fields
// collection.
pub trait AddField<F> {
    type Output: Fields;
    fn add_to<K: ToString>(self, key: K, to: F) -> Self::Output;
}
// Text-convertible values are added as text fields.
impl<F: Fields, T: ToString> AddField<F> for T {
    type Output = <F as Fields>::WithText;
    fn add_to<K: ToString>(self, key: K, to: F) -> F::WithText {
        to.with_text(key, self)
    }
}
// `FileField` values force the collection to multipart.
impl<F: Fields> AddField<F> for FileField {
    type Output = MultipartFields;
    fn add_to<K: ToString>(self, key: K, to: F) -> MultipartFields {
        to.with_file(key, self)
    }
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/request.rs | src/net/request.rs | //! Types for constructing and issuing HTTP requests.
use hyper::client::{Client, Response, RequestBuilder as NetRequestBuilder};
use hyper::header::{Headers, Header, HeaderFormat, ContentType};
use hyper::method::Method as HyperMethod;
use url::Url;
use url::form_urlencoded::Serializer as FormUrlEncoded;
use url::percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
use std::borrow::{Borrow, Cow};
use std::fmt::{self, Write};
use std::mem;
use adapter::{AbsAdapter, AdapterConsts};
use mpmc::Sender;
use net::body::{Body, EmptyFields, EagerBody, RawBody};
use net::call::Call;
use net::intercept::Interceptor;
use net::method::{Method, TakesBody};
use net::response::FromResponse;
use executor::ExecBox;
use serialize::{Serializer, Deserializer};
use ::Result;
/// The request header, containing all the information needed to initialize a request.
#[derive(Debug)]
pub struct RequestHead {
    // Request URL; the adapter's base URL (if any) is joined in `init_request()`.
    url: Cow<'static, str>,
    // URL-encoded query string, built up incrementally by `query()`.
    query: String,
    method: HyperMethod,
    headers: Headers
}
impl RequestHead {
fn new(method: HyperMethod, url: Cow<'static, str>) -> Self {
RequestHead {
url: url.into(),
query: String::new(),
method: method,
headers: Headers::new(),
}
}
/// Set an HTTP header for this request, overwriting any previous value.
///
/// ##Note
/// Some headers, such as `Content-Type`, may be overwritten by Anterofit.
pub fn header<H: Header + HeaderFormat>(&mut self, header: H) -> &mut Self {
self.headers.set(header);
self
}
/// Copy all the HTTP headers from `headers` into this request.
///
/// Duplicate headers will be overwritten.
///
/// ##Note
/// Some headers, such as `Content-Type`, may be overwritten by Anterofit.
pub fn headers(&mut self, headers: &Headers) -> &mut Self {
self.headers.extend(headers.iter());
self
}
/// Append `append` to the URL of this request.
///
/// If this causes the request's URL to be malformed, an error will immediately
/// be returned by `init_request()`.
///
/// Characters that are not allowed to appear in a URL will be percent-encoded as appropriate
/// for the path section of a URL.
///
/// ## Note
/// Adding a query segment via this method will not work as `?` and `=` will be encoded. Use
/// `query()` instead to add query pairs.
pub fn append_url<A: AsRef<str>>(&mut self, append: A) -> &mut Self {
self.url.to_mut().extend(utf8_percent_encode(append.as_ref(), DEFAULT_ENCODE_SET));
self
}
/// Prepend `prepend` to the URL of this request.
///
/// This will appear between the current request URL and the base URL supplied by the adapter,
/// if present, as the base URL is not appended until `init_request()`.
///
/// If this causes the request's URL to be malformed, an error will immediately
/// be returned by `init_request()`.
///
/// Characters that are not allowed to appear in a URL will not be automatically percent-encoded.
pub fn prepend_url<P: AsRef<str>>(&mut self, prepend: P) -> &mut Self {
prepend_str(prepend.as_ref(), self.url.to_mut());
self
}
/// Add a series of key-value pairs to this request's query. These will appear in the request
/// URL.
///
/// Characters that are not allowed to appear in a URL will automatically be percent-encoded.
///
/// It is left up to the server how to resolve duplicate keys.
///
/// Thanks to the mess of generics, this method is incredibly flexible: you can pass a reference
/// to an array of pairs (2-tuples), a vector of pairs, a `HashMap` or `BTreeMap`, or any other
/// iterator that yields pairs or references to pairs.
///
/// ```rust,no_run
/// # extern crate anterofit;
/// # use std::collections::HashMap;
/// # let head: &mut anterofit::net::RequestHead = unimplemented!();
/// // `head` is `&mut RequestHead`
/// head.query(&[("hello", "world"), ("id", "3")]);
///
/// let query_pairs: HashMap<String, String> = HashMap::new();
///
/// // Add some items to the map (...)
/// head.query(query_pairs);
/// ```
///
/// ##Panics
/// If an error is returned from `<K as Display>::fmt()` or `<V as Display>::fmt()`.
pub fn query<Q, P, K, V>(&mut self, query: Q) -> &mut Self
where Q: IntoIterator<Item=P>, P: Borrow<(K, V)>, K: fmt::Display, V: fmt::Display {
let mut query_out = FormUrlEncoded::new(mem::replace(&mut self.query, String::new()));
let mut kbuf = String::new();
let mut vbuf = String::new();
for pair in query {
let &(ref key, ref val) = pair.borrow();
kbuf.clear();
vbuf.clear();
// Errors here should be rare and usually indicate more serious problems.
write!(kbuf, "{}", key).expect("Error returned from Display::fmt()");
write!(vbuf, "{}", val).expect("Error returned from Display::fmt()");
query_out.append_pair(&kbuf, &vbuf);
}
self.query = query_out.finish();
self
}
/// Initialize a `hyper::client::RequestBuilder` with the parameters in this header.
///
/// If provided, `base_url` will be prepended to the URL associated with this request,
/// *then* the constructed query will be set to the URL.
///
/// Finally, `client` will be used to create the `RequestBuilder` and the contained headers
/// will be added.
pub fn init_request<'c>(&self, base_url: Option<&Url>, client: &'c Client) -> Result<NetRequestBuilder<'c>> {
let mut url = if let Some(base_url) = base_url {
try!(base_url.join(&self.url))
} else {
try!(Url::parse(&self.url))
};
url.set_query(Some(&self.query));
// This `.clone()` should be zero-cost, we don't expose Method::Extension at all.
Ok(client.request(self.method.clone(), url).headers(self.headers.clone()))
}
/// Get the current URL of this request.
pub fn get_url(&self) -> &str {
&self.url
}
/// Get the current query string of this request.
pub fn get_query(&self) -> &str {
&self.query
}
/// Get the HTTP method of this request.
pub fn get_method(&self) -> &HyperMethod {
&self.method
}
/// Get the headers of this request (may be modified later).
pub fn get_headers(&self) -> &Headers {
&self.headers
}
}
impl fmt::Display for RequestHead {
    // Renders as "<METHOD> <url><query>", e.g. for logging.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{m} {u}{q}", m = self.method, u = self.url, q = self.query)
    }
}
/// A container for a request header and body.
///
/// Used in the body of service methods to construct a request.
#[derive(Debug)]
pub struct RequestBuilder<'a, A: 'a + ?Sized, M, B> {
    head: RequestHead,
    // Method marker type; also gates `body()` via the `TakesBody` bound (see below).
    method: M,
    body: B,
    adapter: &'a A,
}

impl<'a, A: 'a + ?Sized, M> RequestBuilder<'a, A, M, EmptyFields> where M: Method {
    /// Create a new request builder with the given method and URL.
    ///
    /// `url` can be `String` or `&'static str`.
    pub fn new(adapter: &'a A, method: M, url: Cow<'static, str>) -> Self {
        RequestBuilder {
            adapter: adapter,
            // The HTTP verb is fixed into the head at construction time.
            head: RequestHead::new(method.to_hyper(), url),
            method: method,
            body: EmptyFields,
        }
    }
}
impl<'a, A: 'a + ?Sized, M, B> RequestBuilder<'a, A, M, B> {
    /// Borrow the header of the request to inspect it.
    pub fn head(&self) -> &RequestHead {
        &self.head
    }

    /// Mutably borrow the header of the request.
    ///
    /// Can be used to change the request URL, add GET query pairs or HTTP headers to be
    /// sent with the request.
    pub fn head_mut(&mut self) -> &mut RequestHead {
        &mut self.head
    }

    /// Hand `self` to the closure, which may mutate and transform the builder
    /// arbitrarily.
    ///
    /// `try!()` will work in this closure.
    pub fn apply<F, B_>(self, functor: F) -> Result<RequestBuilder<'a, A, M, B_>>
    where F: FnOnce(Self) -> Result<RequestBuilder<'a, A, M, B_>> {
        functor(self)
    }

    #[doc(hidden)]
    pub fn swap_method<M_>(self, method: M_) -> (RequestBuilder<'a, A, M_, B>, M) {
        // Destructure once instead of field-by-field moves.
        let RequestBuilder { head, method: old_method, body, adapter } = self;

        let rebuilt = RequestBuilder {
            head: head,
            method: method,
            body: body,
            adapter: adapter,
        };

        (rebuilt, old_method)
    }
}
impl<'a, A: 'a + ?Sized, M, B> RequestBuilder<'a, A, M, B> where A: AbsAdapter, M: TakesBody {
    /// Set a body to be sent with the request.
    ///
    /// Only available when the HTTP method supports a body (`M: TakesBody`);
    /// `GET` and `DELETE` requests are excluded at compile time by that bound.
    pub fn body<B_>(self, body: B_) -> RequestBuilder<'a, A, M, B_> {
        RequestBuilder {
            adapter: self.adapter,
            head: self.head,
            method: self.method,
            body: body,
        }
    }

    /// Immediately serialize `body` on the current thread and set the result as the body
    /// of this request.
    ///
    /// This is useful if you want to use a body type that is not `Send` or `'static`.
    ///
    /// ##Errors
    /// Returns an error if serializing `body` fails.
    // (The old `##Panics: if this is a GET request` note was stale — the `TakesBody`
    // bound already rules GET out at compile time, and failures surface as `Err`.)
    pub fn body_eager<B_>(self, body: B_)
        -> Result<RequestBuilder<'a, A, M, RawBody<<B_ as EagerBody>::Readable>>>
    where B_: EagerBody {
        // `?` instead of `try!` for consistency with the rest of the crate.
        let body = body.into_readable(&self.adapter.ref_consts().serializer)?.into();
        Ok(self.body(body))
    }
}
impl<'a, A: 'a + ?Sized, M, B> RequestBuilder<'a, A, M, B> where A: AbsAdapter {
    /// Prepare a `Request` to be executed with the parameters supplied in this builder.
    ///
    /// This request will need to be executed (using `exec()` or `exec_here()`) before anything
    /// else is done. As much work as possible will be relegated to the adapter's executor.
    pub fn build<T>(self) -> Request<'a, T> where B: Body, T: FromResponse {
        let RequestBuilder {
            adapter, head, method: _method, body
        } = self;

        let consts = adapter.consts();
        let interceptor = adapter.interceptor();

        // `guard` completes `call` with the result once the deferred job has run.
        let (mut guard, call) = super::call::oneshot(Some(head));

        let exec = ExecRequest {
            sender: &adapter.ref_consts().sender,
            // Everything captured here runs later, on whichever thread executes the job:
            // interception, serialization, the network round-trip, and deserialization.
            exec: Box::new(move || {
                let interceptor = interceptor.as_ref().map(|i| &**i);

                let res = exec_request(&consts, interceptor, guard.head_mut(), body)
                    .and_then(|response| T::from_response(&consts.deserializer, response));

                guard.complete(res);
            }),
        };

        Request {
            exec: Some(exec),
            call: call,
        }
    }
}
// A job ready to run, paired with the executor channel it would be submitted on.
struct ExecRequest<'a> {
    sender: &'a Sender,
    exec: Box<ExecBox>,
}

impl<'a> ExecRequest<'a> {
    // Hand the job off to the adapter's executor.
    fn exec(self) {
        self.sender.send(self.exec);
    }

    // Run the job synchronously on the calling thread.
    fn exec_here(self) {
        self.exec.exec();
    }
}

/// A request which is ready to be sent to the server.
///
/// Use `exec()` or `exec_here()` to send the request and receive the response.
///
/// ##Note
/// If an error occurred during initialization of the request, it will be immediately
/// returned when the request is executed; no network or disk activity will occur.
#[must_use = "Request has not been sent yet"]
pub struct Request<'a, T = ()> {
    // `None` when the result is immediate: there is no job to run.
    exec: Option<ExecRequest<'a>>,
    call: Call<T>,
}
impl<'a, T> Request<'a, T> {
    /// Construct a `Result` wrapping an immediate return of `res`.
    ///
    /// No network or disk activity will occur when this request is executed.
    pub fn immediate(res: Result<T>) -> Request<'static, T> {
        Request {
            // No job to run: the result is already available.
            exec: None,
            call: super::call::immediate(res),
        }
    }

    /// Execute this request on the current thread, **blocking** until the result is available.
    pub fn exec_here(self) -> Result<T> {
        // `if let` instead of a side-effecting `Option::map` whose result was discarded.
        if let Some(exec) = self.exec {
            exec.exec_here();
        }

        self.call.block()
    }

    /// Returns `true` if a result is immediately available (`exec_here()` will not block).
    pub fn is_immediate(&self) -> bool {
        self.call.is_available()
    }
}
impl<'a, T> Request<'a, T> where T: Send + 'static {
    /// Execute this request on the adapter's executor, returning a type which can
    /// be polled for the result.
    pub fn exec(self) -> Call<T> {
        // Immediate requests carry no job; submit to the executor only when one exists.
        // (`if let` instead of a side-effecting `Option::map` whose result was discarded.)
        if let Some(exec) = self.exec {
            exec.exec();
        }

        self.call
    }

    /// Add a callback to be executed with the request's return value when available, mapping
    /// it to another value (or `()` if no return value).
    ///
    /// `on_complete` will always be executed on the adapter's executor because the return
    /// value will not be available until the request is executed, whereas `on_result()`'s closure
    /// may be executed immediately if an immediate result is available.
    ///
    /// If a result is immediately available, `on_complete` will be discarded.
    ///
    /// ## Note
    /// `on_complete` should not be long-running in order to not block other requests waiting
    /// on the executor.
    ///
    /// ## Warning about Panics
    /// Panics in `on_complete` will cause the return value to be lost. There is no safety
    /// issue and subsequent requests shouldn't be affected, but it may be harder to debug
    /// without knowing which request caused the panic.
    pub fn on_complete<F, R>(self, on_complete: F) -> Request<'a, R>
    where F: FnOnce(T) -> R + Send + 'static, R: Send + 'static {
        // Success-only mapping is just `on_result` composed with `Result::map`.
        self.on_result(|res| res.map(on_complete))
    }

    // RFC: add `on_error()`?

    /// Add a callback to be executed with the request's result when available, mapping it to
    /// another result (which can be `::Result<()>`).
    ///
    /// If a result is immediately available, `on_result` will be executed on the current thread
    /// with the result, and the return value will be immediately available as well.
    ///
    /// ## Note
    /// `on_result` should not be long-running in order to not block other requests waiting
    /// on the executor, or block the current thread if the result is immediate.
    ///
    /// ## Warning about Panics
    /// Panics in `on_result` will cause the return value to be lost. There is no safety
    /// issue and subsequent requests shouldn't be affected, but it may be harder to debug
    /// without knowing which request caused the panic.
    ///
    /// If the result is immediately available, panics in `on_result` will occur on the
    /// current thread.
    pub fn on_result<F, R>(self, on_result: F) -> Request<'a, R>
    where F: FnOnce(Result<T>) -> Result<R> + Send + 'static, R: Send + 'static {
        let Request { exec, call } = self;

        // Immediate result: run the callback right here and stay immediate.
        if call.is_available() {
            let res = on_result(call.block());
            return Request::immediate(res);
        }

        let ExecRequest { exec, sender } = exec.expect("`self.exec` was `None` when it shouldn't be");

        let (mut guard, new_call) = super::call::oneshot(None);

        let new_exec = ExecRequest {
            sender: sender,
            // Run the original job first, then feed its result through the callback.
            exec: Box::new(move || {
                exec.exec();
                guard.complete(
                    on_result(call.block())
                );
            })
        };

        Request {
            exec: Some(new_exec),
            call: new_call,
        }
    }
}
// Run the full request pipeline on the current thread: intercept the head, serialize the
// body, attach the Content-Type, build the hyper request, and send it.
fn exec_request<S, D, B>(consts: &AdapterConsts<S, D>, interceptor: Option<&Interceptor>, head: &mut RequestHead, body: B) -> Result<Response>
where S: Serializer, D: Deserializer, B: Body {
    if let Some(interceptor) = interceptor {
        interceptor.intercept(head);
    }

    // `?` instead of `try!` — this function already used `?` below; now it is consistent.
    let mut readable = body.into_readable(&consts.serializer)?;

    if let Some(content_type) = readable.content_type {
        head.header(ContentType(content_type));
    }

    head.init_request(consts.base_url.as_ref(), &consts.client)?
        .body(&mut readable.readable).send().map_err(Into::into)
}
// FIXME: stable in 1.16
#[cfg(feature = "nightly")]
fn prepend_str(prepend: &str, to: &mut String) {
    to.insert_str(0, prepend);
}

// Workaround for 1.15: build the combined string once, then swap it in.
#[cfg(not(feature = "nightly"))]
fn prepend_str(prepend: &str, to: &mut String) {
    let mut combined = String::with_capacity(prepend.len() + to.len());
    combined.push_str(prepend);
    combined.push_str(to);
    *to = combined;
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/net/intercept.rs | src/net/intercept.rs | //! Types for modifying outgoing requests on-the-fly, e.g. to add headers or query parameters.
use hyper::header::{Header, HeaderFormat};
use super::RequestHead;
use std::borrow::Cow;
use std::fmt;
use std::sync::Arc;
impl fmt::Debug for Interceptor {
    // Debug for `Interceptor` trait objects routes through the overridable `debug` hook.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.debug(f)
    }
}

impl<I: Interceptor + ?Sized> Interceptor for Arc<I> {
    // Shared interceptors simply forward to the wrapped implementation.
    fn intercept(&self, req: &mut RequestHead) {
        (**self).intercept(req)
    }
}

/// A trait describing a type which may intercept and modify outgoing request from an adapter
/// instance.
///
/// Implemented for `Fn(&mut RequestHead) + Send + Sync + 'static`.
pub trait Interceptor: Send + Sync + 'static {
    /// Modify the request head in any way desired.
    ///
    /// Great care must be taken to not introduce logic errors in service methods
    /// (i.e. by changing their endpoints such that they receive unexpected responses).
    fn intercept(&self, req: &mut RequestHead);

    /// Chain `self` with `then`, invoking `self` then `then` for each request.
    fn chain<I>(self, then: I) -> Chain<Self, I> where Self: Sized, I: Interceptor {
        Chain(self, then)
    }

    /// Chain `self` with two more interceptors.
    ///
    /// Saves a level in debug printing, mainly.
    fn chain2<I1, I2>(self, then: I1, after: I2) -> Chain2<Self, I1, I2> where Self: Sized,
        I1: Interceptor, I2: Interceptor {
        Chain2(self, then, after)
    }

    /// Write debug output equivalent to `std::fmt::Debug`.
    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Defaults to the `InterceptDebug` shim (specialization-based on nightly).
        self.fmt_debug(f)
    }

    /// Overridden by `NoIntercept`
    #[doc(hidden)]
    fn into_opt_obj(self) -> Option<Arc<Interceptor>> where Self: Sized {
        Some(Arc::new(self))
    }
}
impl<F> Interceptor for F where F: Fn(&mut RequestHead) + Send + Sync + 'static {
    // Any suitable closure is an interceptor: just call it with the request head.
    fn intercept(&self, req: &mut RequestHead) {
        self(req)
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "<(closure) as Interceptor>")
    }
}
/// Chains one interceptor with another, invoking them in declaration order.
#[derive(Debug)]
pub struct Chain<I1, I2>(I1, I2);

impl<I1: Interceptor, I2: Interceptor> Interceptor for Chain<I1, I2> {
    fn intercept(&self, req: &mut RequestHead) {
        // Order matters: the first interceptor sees the request before the second.
        self.0.intercept(req);
        self.1.intercept(req);
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Print members through the `Interceptor::debug` hook so closures render sensibly.
        f.debug_tuple("Chain")
            .field(&(&self.0 as &Interceptor))
            .field(&(&self.1 as &Interceptor))
            .finish()
    }
}

/// Chains one interceptor with two more, invoking them in declaration order.
#[derive(Debug)]
pub struct Chain2<I1, I2, I3>(I1, I2, I3);

impl<I1: Interceptor, I2: Interceptor, I3: Interceptor> Interceptor for Chain2<I1, I2, I3> {
    fn intercept(&self, req: &mut RequestHead) {
        self.0.intercept(req);
        self.1.intercept(req);
        self.2.intercept(req);
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("Chain2")
            .field(&(&self.0 as &Interceptor))
            .field(&(&self.1 as &Interceptor))
            .field(&(&self.2 as &Interceptor))
            .finish()
    }
}

/// A no-op interceptor which does nothing when invoked.
#[derive(Debug)]
pub struct NoIntercept;

impl Interceptor for NoIntercept {
    fn intercept(&self, _req: &mut RequestHead) {}

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <Self as fmt::Debug>::fmt(self, f)
    }

    // Returning `None` lets the adapter skip interception entirely instead of
    // storing and invoking a useless trait object.
    fn into_opt_obj(self) -> Option<Arc<Interceptor>> {
        None
    }
}

/// Adds the wrapped header to every request.
///
/// To add multiple headers to one request, chain this interceptor with another.
#[derive(Debug)]
pub struct AddHeader<H: Header + HeaderFormat>(pub H);

impl<H: Header + HeaderFormat> Interceptor for AddHeader<H> {
    fn intercept(&self, req: &mut RequestHead) {
        // Overwrites any previous value for the same header (see `RequestHead::header`).
        req.header(self.0.clone());
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <Self as fmt::Debug>::fmt(self, f)
    }
}
/// Prepends the given string to every request's URL.
///
/// This is done *before* the adapter prepends the base URL. To override the base URL,
/// use a different adapter.
// NOTE(review): unlike `AppendUrl`, this type has no `#[derive(Debug)]`; the `debug()`
// method below covers Debug-printing through the `Interceptor` trait object — confirm
// whether the derive was omitted intentionally.
pub struct PrependUrl<S>(pub S);

impl<S: AsRef<str> + Send + Sync + 'static> Interceptor for PrependUrl<S> {
    fn intercept(&self, req: &mut RequestHead) {
        // `prepend_url` does NOT percent-encode (see `RequestHead::prepend_url`).
        req.prepend_url(&self.0);
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("PrependUrl")
            .field(&self.0.as_ref())
            .finish()
    }
}

/// Appends the given string to every request's URL.
///
/// This is done *before* the adapter prepends the base URL. To override the base URL,
/// use a different adapter.
#[derive(Debug)]
pub struct AppendUrl<S>(pub S);

impl<S: AsRef<str> + Send + Sync + 'static> Interceptor for AppendUrl<S> {
    fn intercept(&self, req: &mut RequestHead) {
        // `append_url` percent-encodes; `?`/`=` will be escaped (see `RequestHead::append_url`).
        req.append_url(self.0.as_ref());
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("AppendUrl")
            .field(&self.0.as_ref())
            .finish()
    }
}
/// Appends the given query pairs to every request.
///
/// Meant to be used in a builder style by calling `pair()` repeatedly.
///
/// This will not overwrite previous query pairs with the same key; it is left
/// to the server to decide which duplicate keys to keep.
#[derive(Default)]
pub struct AppendQuery(Vec<(Cow<'static, str>, Cow<'static, str>)>);

impl AppendQuery {
    /// Create an empty vector of pairs.
    ///
    /// Meant to be used in a builder style.
    pub fn new() -> Self {
        AppendQuery(Vec::new())
    }

    /// Add a query key-value pair to this interceptor. Returns `self` for builder-style usage.
    ///
    /// `key` and `val` can be any of: `String`, `&'static str` or `Cow<'static, str>`.
    pub fn pair<K, V>(mut self, key: K, val: V) -> Self
    where K: Into<Cow<'static, str>>, V: Into<Cow<'static, str>> {
        // Delegate to the by-reference variant so the push logic lives in one place.
        self.pair_mut(key, val);
        self
    }

    /// Add a query key-value pair to this interceptor. Returns `&mut self` for builder-style usage.
    ///
    /// `key` and `val` can be any of: `String`, `&'static str` or `Cow<'static, str>`.
    pub fn pair_mut<K, V>(&mut self, key: K, val: V) -> &mut Self
    where K: Into<Cow<'static, str>>, V: Into<Cow<'static, str>> {
        let entry = (key.into(), val.into());
        self.0.push(entry);
        self
    }
}
impl Interceptor for AppendQuery {
    fn intercept(&self, req: &mut RequestHead) {
        // `RequestHead::query` accepts any iterator yielding references to pairs.
        req.query(self.0.iter());
    }

    fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (&**k, &**v))).finish()
    }
}

/// Specialized version of `fmt::Debug`
trait InterceptDebug {
    fn fmt_debug(&self, f: &mut fmt::Formatter) -> fmt::Result;
}

// Without specialization we cannot tell whether `T: Debug`, so always fall back
// to a constant placeholder string.
#[cfg(not(feature = "nightly"))]
impl<T> InterceptDebug for T {
    fn fmt_debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("Interceptor")
    }
}

#[cfg(feature = "nightly")]
mod nightly {
    use std::fmt;

    use super::InterceptDebug;

    // Blanket default; specialized below for types that actually implement `Debug`.
    impl<T> InterceptDebug for T {
        default fn fmt_debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.write_str("Interceptor")
        }
    }

    impl<T: fmt::Debug> InterceptDebug for T {
        fn fmt_debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
            <Self as fmt::Debug>::fmt(self, f)
        }
    }
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/executor/threaded.rs | src/executor/threaded.rs | //! Executors using background threads
use std::thread::{self, Builder};
use super::{Executor, Receiver};
/// An executor which uses multiple threads to complete jobs.
#[derive(Debug)]
pub struct MultiThread {
    // Number of worker threads to spawn when `Executor::start()` is called.
    threads: usize,
}

impl MultiThread {
    /// Create a new multithreaded executor using the given thread count.
    ///
    /// The background threads will not be spawned until `Executor::start()` is called.
    pub fn new(threads: usize) -> Self {
        MultiThread { threads: threads }
    }
}
impl Executor for MultiThread {
    /// Spawn new worker threads to complete jobs. The threads will be named such that they
    /// can easily be associated with Anterofit.
    ///
    /// If a panic occurs on a worker thread, it will be restarted under the same name.
    ///
    /// ## Panics
    /// If a worker thread failed to spawn.
    fn start(self, recv: Receiver) {
        // Every worker gets a clone of the shared receiver and an index for its name.
        for thread in 0 .. self.threads {
            spawn_thread(thread, recv.clone());
        }
    }
}
/// An executor which uses a single thread to complete jobs.
#[derive(Debug, Default)]
pub struct SingleThread(());

impl SingleThread {
    /// Create a new single-threaded executor.
    ///
    /// The background thread will not be spawned until `Executor::start()` is called.
    pub fn new() -> Self {
        // Equivalent to `SingleThread(())`; the type carries no state.
        SingleThread::default()
    }
}
impl Executor for SingleThread {
    /// Spawn a new worker thread to complete jobs. The thread will be named such that it
    /// can easily be associated with Anterofit.
    ///
    /// If a panic occurs on the worker thread, it will be restarted under the same name.
    ///
    /// ## Panics
    /// If the worker thread failed to spawn.
    fn start(self, recv: Receiver) {
        // Single worker: index 0 is used for the thread's name.
        spawn_thread(0, recv);
    }
}
// Watches a worker thread: if the thread unwinds from a panic, `Drop` respawns a
// replacement worker under the same index (and thus the same name).
struct Sentinel {
    thread: usize,
    recv: Receiver
}

impl Drop for Sentinel {
    fn drop(&mut self) {
        // Only respawn when unwinding from a panic — not on normal shutdown
        // (i.e. when the job channel is closed and the loop below exits).
        if thread::panicking() {
            spawn_thread(self.thread, self.recv.clone());
        }
    }
}

fn spawn_thread(thread: usize, recv: Receiver) {
    let sentinel = Sentinel {
        thread: thread,
        recv: recv
    };

    // The sentinel is moved into the closure; it drops (and possibly respawns the
    // worker) whenever the closure exits, by panic or otherwise.
    let _ = Builder::new()
        .name(format!("anterofit_worker_{}", thread))
        .spawn(move ||
            for exec in &sentinel.recv {
                exec.exec();
            }
        )
        .expect("Failed to spawn Anterofit worker thread");
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/src/executor/mod.rs | src/executor/mod.rs | //! Types which can take a boxed closure and execute it, preferably in the background.
#![cfg_attr(feature="clippy", allow(boxed_local))]
pub mod threaded;
pub use mpmc::{Receiver, RecvIter, RecvIntoIter};
/// The default executor which should be suitable for most use-cases.
pub type DefaultExecutor = threaded::SingleThread;

/// A trait describing a type which can execute tasks (in the background or otherwise).
///
/// Invoking `ExecBox` *may* panic, so the executor should be prepared to recover —
/// e.g. the threaded executors in this module restart a worker thread that dies mid-job.
pub trait Executor {
    /// Initialize the executor, polling `recv` for jobs.
    ///
    /// When `Receiver::recv()` returns `None`, the job queue is closed and the executor can quit.
    fn start(self, recv: Receiver);
}
/// A wrapper for `FnOnce() + Send + 'static` which can be invoked from a `Box`.
pub trait ExecBox: Send + 'static {
    /// Invoke the contained closure.
    fn exec(self: Box<Self>);
}

// Inherent impl on the `ExecBox` trait object itself.
impl ExecBox {
    /// Create a new `ExecBox` which does nothing when called.
    ///
    /// Since it is zero-sized, this call should not allocate.
    pub fn noop() -> Box<Self> {
        Box::new(|| {})
    }
}

impl<F> ExecBox for F where F: FnOnce() + Send + 'static {
    // Move the closure out of the box and call it exactly once.
    fn exec(self: Box<Self>) {
        (*self)()
    }
}
| rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/examples/post_service.rs | examples/post_service.rs | // This example assumes the `serde` and `serde-json` features.
//
// If you are using the `rustc-serialize` feature, use `RustcDecodable` and `RustcEncodable`
// instead of `Deserialize` and `Serialize`, respectively.
#[macro_use] extern crate anterofit;
#[macro_use] extern crate serde_derive;
// The minimum imports needed to get this example working.
//
// You can glob-import if you like, but know that it will shadow `Result`
// which may cause some confusing type-mismatch errors.
use anterofit::{Adapter, Url};
// A post as returned by the test API.
#[derive(Debug, Deserialize)]
struct Post {
    // NOTE(review): presumably optional because some responses omit it — confirm against the API.
    pub userid: Option<u64>,
    pub id: u64,
    pub title: String,
    pub body: String
}

/// Used to create a new Post.
// Borrowed fields keep this allocation-free; see the `EAGER:` note in `new_post` below.
#[derive(Debug, Serialize)]
struct NewPost<'a> {
    pub userid: u64,
    pub title: &'a str,
    pub body: &'a str,
}

service! {
    trait PostService {
        /// Get a Post by id.
        fn get_post(&self, id: u64) -> Post {
            GET("/posts/{}", id)
        }

        /// Get all posts.
        fn get_posts(&self) -> Vec<Post> {
            GET("/posts")
        }

        /// Get all posts by a specific user
        fn posts_by_user(&self, userid: u64) -> Vec<Post> {
            GET("/user/{}/posts", userid)
        }

        // TODO: demonstrate `query!{}`

        /// Create a new Post under the given user ID with the given title and body.
        fn new_post(&self, userid: u64, title: &str, body: &str) -> Post {
            POST("/posts/");

            // We use the `EAGER:` keyword so we can use borrowed values in the body.
            // This serializes the body value immediately instead of waiting to serialize
            // it on the executor.
            body!(EAGER: NewPost {
                userid: userid,
                title: title,
                body: body
            })
        }
    }
}

// So we can use `PostService` with `Adapter::arc_service`.
unsizeable!(PostService);
fn main() {
    // Navigate to this URL in your browser for details. Very useful test API.
    let url = Url::parse("https://jsonplaceholder.typicode.com").unwrap();

    let adapter = Adapter::builder()
        .base_url(url)
        // When your REST API uses JSON in both requests and responses
        .serialize_json()
        .build();

    // You can also get an `Arc<YourService>` like this.
    let service = adapter.arc_service::<PostService>();

    // The adapter itself implements the service trait (blanket impl), so it can
    // be passed wherever a `PostService` is expected.
    create_post(&adapter);

    fetch_posts(&adapter);

    // have to deref-reref because coercion doesn't work as expected
    // see https://github.com/rust-lang/rust/issues/39801
    user_posts(&*service);
}

/// Create a new Post.
fn create_post<P: PostService>(post_service: &P) {
    let post = post_service.new_post(42, "Hello", "World!")
        // If you don't want to block, the return value of exec() can be used as a Future
        // to poll for the result. However, it does shadow a couple methods of Future
        // so that you don't have to import the trait to use them.
        // See the docs of Call for more info.
        .exec().block()
        .unwrap();

    println!("Created post: {:?}", post);
}
/// Fetch the top 3 posts in the database.
// Service traits can be object-safe, so a plain trait-object reference works here.
fn fetch_posts(post_service: &PostService) {
    let posts = post_service.get_posts()
        // Shorthand for .exec().wait(), but executes the request on the current thread.
        .exec_here()
        .unwrap();

    for post in posts.iter().take(3) {
        println!("Fetched post: {:?}", post);
    }
}
// Fire-and-forget: print each of user 1's posts when the request completes.
fn user_posts(post_service: &PostService) {
    post_service.posts_by_user(1)
        // This will be executed asynchronously when the request is completed
        .on_complete(|posts| for post in posts { println!("User post: {:?}", post); })
        // NOTE(review): `ignore()` appears to detach the call without waiting — confirm in `Call` docs.
        .exec().ignore();
}
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/service-attr/src/lib.rs | service-attr/src/lib.rs | #![feature(proc_macro)]
extern crate syn;
#[macro_use] extern crate quote;
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::{Tokens, ToTokens};
use syn::*;
use std::iter::Peekable;
// Proc-macro entry point for `#[service]`: parses the annotated trait and re-emits it
// together with generated implementation(s).
#[proc_macro_attribute]
pub fn service(args: TokenStream, input: TokenStream) -> TokenStream {
    let item = parse_item(&input.to_string())
        .expect("Input required to contain a trait and zero or more `delegate!()` invocations");

    let service_trait = ServiceTrait::from_item(item);

    // `#[service]` is a bare marker attribute; any arguments are a user error.
    assert!(args.to_string().is_empty(), "#[service] attribute does not take arguments");

    service_trait.output().parse().expect("Failed to parse output")
}
// Everything extracted from the annotated trait that is needed to re-emit it.
struct ServiceTrait {
    name: Ident,
    vis: Visibility,
    attrs: Vec<Attribute>,
    methods: Vec<ServiceMethod>,
    delegates: Vec<Delegate>,
}

// Reject any generics on the service trait: lifetimes, type params, and where-clauses.
fn assert_generics_empty(generics: &Generics) {
    assert!(
        generics.lifetimes.is_empty() &&
        generics.ty_params.is_empty() &&
        generics.where_clause.predicates.is_empty(),
        "Generics are (currently) not supported on service traits"
    )
}

impl ServiceTrait {
    // Validate the item is a plain, safe, non-generic, unbounded trait and split
    // its contents into methods and `delegate!()` invocations.
    fn from_item(item: Item) -> Self {
        let items = if let ItemKind::Trait(unsafety, generics, bounds, items) = item.node {
            assert_eq!(unsafety, Unsafety::Normal, "Unsafe traits are not supported");
            assert_generics_empty(&generics);
            assert!(bounds.is_empty(), "Bounds are not supported on service traits");

            items
        } else {
            panic!("Target of `#[service]` attribute must be a trait");
        };

        let (methods, delegates) = collect_items(items);

        ServiceTrait {
            name: item.ident,
            vis: item.vis,
            attrs: item.attrs,
            methods: methods,
            delegates: delegates,
        }
    }

    // Re-emit the trait declaration, then either the requested `delegate!()` impls
    // or (when none were given) a blanket impl for every `AbsAdapter`.
    fn output(&self) -> Tokens {
        let vis = &self.vis;
        let name = &self.name;
        let attrs = &self.attrs;

        let mut out = quote! {
            #(#attrs)*
            #vis trait #name
        };

        out.append("{");

        for method in &self.methods {
            method.decl(&mut out);
        }

        out.append("}");

        if !self.delegates.is_empty() {
            for delegate in &self.delegates {
                delegate.output(&self.name, &self.methods, &mut out);
            }
        } else {
            // In the blanket impl the adapter expression is simply `self`.
            let self_ = parse_token_trees("self").unwrap();

            out.append("impl<T: ::anterofit::AbsAdapter> ");
            self.name.to_tokens(&mut out);
            out.append(" for T { ");

            for method in &self.methods {
                method.method_impl(&self_, &mut out);
            }

            out.append(" } ");
        }

        out
    }
}
// Partition the trait's items: plain methods become `ServiceMethod`s and
// `delegate!()` macro invocations become `Delegate`s; anything else is rejected.
fn collect_items(items: Vec<TraitItem>) -> (Vec<ServiceMethod>, Vec<Delegate>) {
    let mut methods = vec![];
    let mut delegates = vec![];

    for item in items {
        match item.node {
            TraitItemKind::Method(..) => methods.push(ServiceMethod::from_trait_item(item)),
            TraitItemKind::Macro(mac) => delegates.push(Delegate::from_mac(mac)),
            _ => panic!("Unsupported item in service trait: {:?}", item),
        }
    }

    (methods, delegates)
}
// A single service method: its signature plus the macro-DSL statements of its body.
struct ServiceMethod {
    name: Ident,
    attrs: Vec<Attribute>,
    sig: MethodSig,
    body: Vec<Stmt>,
}

impl ServiceMethod {
    fn from_trait_item(trait_item: TraitItem) -> Self {
        let (sig, block) = if let TraitItemKind::Method(sig, block) = trait_item.node {
            // Service methods must supply a body (the request DSL), unlike ordinary trait methods.
            let block = block.expect("Every trait method must have a block.");
            (sig, block)
        } else {
            panic!("Unsupported item in service trait (only methods are allowed): {:?}", trait_item)
        };

        ServiceMethod {
            name: trait_item.ident,
            attrs: trait_item.attrs,
            sig: sig,
            body: block.stmts
        }
    }

    // Emit the method signature, rewriting the declared return type `T`
    // into `anterofit::Request<T>`.
    fn header(&self, out: &mut Tokens) {
        out.append_all(&self.attrs);

        out.append("fn");
        self.name.to_tokens(out);
        self.sig.generics.to_tokens(out);

        out.append("(");
        out.append_separated(&self.sig.decl.inputs, ",");
        out.append(")");

        out.append("-> anterofit::Request");

        // A method with no declared return type becomes a bare `anterofit::Request`.
        if let FunctionRetTy::Ty(ref ret_ty) = self.sig.decl.output {
            out.append("<");
            ret_ty.to_tokens(out);
            out.append(">");
        }
    }

    // Declaration form for the trait body: signature plus a trailing semicolon.
    fn decl(&self, out: &mut Tokens) {
        self.header(out);
        out.append(";");
    }

    // Impl form: wrap the DSL body in `request_impl!` together with the
    // adapter-access expression `get_adpt`.
    fn method_impl(&self, get_adpt: &[TokenTree], out: &mut Tokens) {
        self.header(out);

        out.append("{ request_impl! { ");

        out.append_all(get_adpt);
        out.append(";");

        out.append_all(&self.body);

        out.append(" } } ");
    }
}
/// A parsed `delegate!(impl<G> for Type where ... { adapter-expr })` invocation.
struct Delegate {
    // Tokens of the `<...>` generics list (including angle brackets); may be empty.
    generics: Vec<TokenTree>,
    // Tokens of the implementing type.
    for_type: Vec<TokenTree>,
    // Tokens of the optional `where ...` clause; may be empty.
    where_clause: Vec<TokenTree>,
    // Tokens of the braced block: the expression yielding the adapter.
    get_adpt: Vec<TokenTree>,
}
impl Delegate {
    /// Validates that `mac` is a `delegate!()` invocation and parses its tokens.
    fn from_mac(mac: Mac) -> Self {
        assert_eq!(mac.path, "delegate".into(), "Only `delegate!()` macro invocations are allowed \
        inside service traits.");
        Self::parse(mac.tts)
    }
    /// Parses the delimited contents of `delegate!( ... )`:
    /// `impl <generics>? for <type> <where-clause>? { <adapter-expr> }`.
    fn parse(mut tokens: Vec<TokenTree>) -> Self {
        // The whole invocation body arrives as a single delimited group.
        let tokens = match tokens.pop() {
            Some(TokenTree::Delimited(delimited)) => delimited.tts,
            None => panic!("Empty `delegate!()` invocation!"),
            Some(token) => {
                // Push the token back so the panic message shows everything received.
                tokens.push(token);
                panic!("Unsupported `delegate!()` invocation: {:?}", tokens)
            },
        };
        let mut parser = DelegateParser(tokens.into_iter().peekable());
        parser.expect_keyword("impl");
        let generics = parser.get_generics();
        parser.expect_keyword("for");
        let for_type = parser.get_type();
        assert!(!for_type.is_empty(), "Expected type, got nothing");
        let where_clause = parser.get_where();
        let get_adpt = parser.get_body_inner();
        Delegate {
            generics: generics,
            for_type: for_type,
            where_clause: where_clause,
            get_adpt: get_adpt,
        }
    }
    /// Emits `impl<G> TraitName for Type where ... { <method impls> }`, wiring
    /// every service method to this delegate's adapter expression.
    fn output(&self, trait_name: &Ident, methods: &[ServiceMethod], out: &mut Tokens) {
        out.append("impl");
        out.append_all(&self.generics);
        trait_name.to_tokens(out);
        out.append("for");
        out.append_all(&self.for_type);
        out.append_all(&self.where_clause);
        out.append("{");
        for method in methods {
            method.method_impl(&self.get_adpt, out);
        }
        out.append("}");
    }
}
struct DelegateParser<I: Iterator>(Peekable<I>);
impl<I: Iterator<Item = TokenTree>> DelegateParser<I> {
fn expect_keyword(&mut self, expect: &str) {
match self.0.next() {
Some(TokenTree::Token(Token::Ident(ref ident))) => assert_eq!(ident.as_ref(), expect),
Some(other) => panic!("Expected keyword {:?}, got {:?}", expect, other),
None => panic!("Expected keyword/ {:?}, found nothing"),
}
}
fn get_generics(&mut self) -> Vec<TokenTree> {
let mut depth = 0;
let ret = self.take_while(|token| {
match *token {
TokenTree::Token(Token::Lt) => depth += 1,
TokenTree::Token(Token::Gt) => {
depth -= 1;
if depth == 0 { return true; }
},
_ => (),
}
depth > 0
});
if depth != 0 {
panic!("Missing closing > on generics in delegate!() invocation: {:?}", ret);
}
ret
}
fn get_type(&mut self) -> Vec<TokenTree> {
self.take_while(|token| match *token {
TokenTree::Delimited(ref delimited) => delimited.delim != DelimToken::Brace,
TokenTree::Token(Token::Ident(ref ident)) => ident != "where",
_ => true,
})
}
fn get_where(&mut self) -> Vec<TokenTree> {
match self.0.peek() {
Some(&TokenTree::Token(Token::Ident(ref ident))) if ident == "where" => (),
_ => return vec![],
}
self.take_while(|token| match *token {
TokenTree::Delimited(ref delimited) => delimited.delim != DelimToken::Brace,
_ => true,
})
}
fn get_body_inner(&mut self) -> Vec<TokenTree> {
let delimited = match self.0.next() {
Some(TokenTree::Delimited(delimited)) => delimited,
Some(TokenTree::Token(token)) => panic!("Expected opening brace, got {:?}", token),
None => panic!("Expected opening brace, got nothing"),
};
assert_eq!(delimited.delim, DelimToken::Brace);
delimited.tts
}
fn take_while<F>(&mut self, mut predicate: F) -> Vec<TokenTree> where F: FnMut(&TokenTree) -> bool {
let mut out = vec![];
loop {
if !self.0.peek().map_or(false, &mut predicate) {
break
}
out.push(self.0.next().unwrap())
}
out
}
} | rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
abonander/anterofit | https://github.com/abonander/anterofit/blob/19fb87314b4e72a2454fd4724732a04dbac62ef3/service-attr/examples/post_service.rs | service-attr/examples/post_service.rs | // This example assumes the `serde` and `serde-json` features.
//
// If you are using the `rustc-serialize` feature, use `RustcDecodable` and `RustcEncodable`
// instead of `Deserialize` and `Serialize`, respectively.
#![feature(proc_macro)]
#[macro_use] extern crate anterofit;
extern crate anterofit_service_attr;
#[macro_use] extern crate serde_derive;
// The minimum imports needed to get this example working.
//
// You can glob-import if you like, but know that it will shadow `Result`
// which may cause some confusing type-mismatch errors.
use anterofit::{Adapter, Url};
use anterofit_service_attr::service;
/// A post as returned by the test API.
#[derive(Debug, Deserialize)]
struct Post {
    // NOTE(review): the JSONPlaceholder API spells this field `userId`; with the
    // lowercase name it will likely deserialize as `None` — confirm on the wire.
    pub userid: Option<u64>,
    pub id: u64,
    pub title: String,
    pub body: String
}
/// Used to create a new Post.
///
/// Borrows the title/body, so it must be serialized eagerly (see `new_post`).
#[derive(Debug, Serialize)]
struct NewPost<'a> {
    pub userid: u64,
    pub title: &'a str,
    pub body: &'a str,
}
/// REST service description; `#[service]` turns each method body into a
/// request builder returning `anterofit::Request<T>`.
#[service]
trait PostService {
    /// Get a Post by id.
    fn get_post(&self, id: u64) -> Post {
        GET("/posts/{}", id)
    }
    /// Get all posts.
    fn get_posts(&self) -> Vec<Post> {
        GET("/posts")
    }
    /// Get all posts by a specific user
    fn posts_by_user(&self, userid: u64) -> Vec<Post> {
        GET("/user/{}/posts", userid)
    }
    // TODO: demonstrate `query!{}`
    /// Create a new Post under the given user ID with the given title and body.
    fn new_post(&self, userid: u64, title: &str, body: &str) -> Post {
        POST("/posts/");
        // We use the `EAGER:` keyword so we can use borrowed values in the body.
        // This serializes the body value immediately instead of waiting to serialize
        // it on the executor.
        body!(EAGER: NewPost {
            userid: userid,
            title: title,
            body: body
        })
    }
    // This is the default delegate impl, this doesn't need to be here
    delegate!(impl<T: ::anterofit::AbsAdapter> for T { self });
}
fn main() {
    // Navigate to this URL in your browser for details. Very useful test API.
    let base_url = Url::parse("https://jsonplaceholder.typicode.com").unwrap();
    // JSON on the wire in both directions, hence `serialize_json()`.
    let builder = Adapter::builder().base_url(base_url).serialize_json();
    let adapter = builder.build();
    create_post(&adapter);
    fetch_posts(&adapter);
}
/// Create a new Post.
// Since the same adapter will implement all service traits, you can arbitrarily concatenate them
// in generic bounds.
fn create_post<T: PostService>(post_service: &T) {
    let pending = post_service.new_post(42, "Hello", "World!");
    // If you don't want to block, the return value of exec() can be used as a Future
    // to poll for the result. However, it does shadow a couple methods of Future
    // so that you don't have to import the trait to use them.
    // See the docs of Call for more info.
    let created = pending.exec().block().unwrap();
    println!("{:?}", created);
}
/// Fetch the top 3 posts in the database.
// Service traits are object-safe, but you can't concatenate them arbitrarily (language limitation).
// If you use multiple services in the same scope, it might help clarify your intent
// to coerce the same adapter reference into different service trait objects.
fn fetch_posts(post_service: &PostService) {
let posts = post_service.get_posts()
// Shorthand for .exec().wait(), but executes the request on the current thread.
.exec_here()
.unwrap();
for post in posts.into_iter().take(3) {
println!("{:?}", post);
}
} | rust | Apache-2.0 | 19fb87314b4e72a2454fd4724732a04dbac62ef3 | 2026-01-04T20:18:25.482930Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/file_group_item.rs | fclones-gui/src/file_group_item.rs | use adw::glib;
use adw::glib::ObjectExt;
use adw::subclass::prelude::*;
use std::cell::{Ref, RefCell};
use std::collections::BTreeSet;
use std::ops::Deref;
use crate::file_group_item::imp::Selection;
use fclones::{FileHash, FileLen, PartitionedFileGroup};
use crate::file_item::FileItem;
mod imp {
    use std::cell::Cell;
    use adw::prelude::*;
    use relm4::once_cell::sync::Lazy;
    use super::*;
    use crate::file_item::FileItem;
    /// Tri-state selection of a group: none / some / all-relevant files selected.
    #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
    pub enum Selection {
        #[default]
        None,
        Inconsistent,
        Selected,
    }
    /// GObject-internal state backing `super::FileGroupItem`.
    #[derive(Default)]
    pub struct FileGroupItem {
        pub id: Cell<usize>,
        pub file_hash: RefCell<FileHash>,
        pub file_len: RefCell<FileLen>,
        pub files: RefCell<Vec<FileItem>>,
        pub selected: Cell<Selection>,
    }
    #[glib::object_subclass]
    impl ObjectSubclass for FileGroupItem {
        const NAME: &'static str = "FileGroupItem";
        type Type = super::FileGroupItem;
    }
    impl ObjectImpl for FileGroupItem {
        // Two read-only boolean properties derived from the tri-state `selected`,
        // suitable for binding a check button's active/inconsistent states.
        fn properties() -> &'static [glib::ParamSpec] {
            static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
                vec![
                    glib::ParamSpecBoolean::builder("selected")
                        .read_only()
                        .build(),
                    glib::ParamSpecBoolean::builder("inconsistent")
                        .read_only()
                        .build(),
                ]
            });
            PROPERTIES.as_ref()
        }
        fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
            match pspec.name() {
                "selected" => (self.selected.get() == Selection::Selected).to_value(),
                "inconsistent" => (self.selected.get() == Selection::Inconsistent).to_value(),
                _ => unimplemented!(),
            }
        }
    }
}
glib::wrapper! {
    // Public GObject type; state lives in `imp::FileGroupItem`.
    pub struct FileGroupItem(ObjectSubclass<imp::FileGroupItem>);
}
impl FileGroupItem {
    /// Wraps an fclones duplicate group, converting each member to a `FileItem`.
    pub fn new(id: usize, fg: fclones::FileGroup<fclones::PathAndMetadata>) -> FileGroupItem {
        let obj: Self = glib::Object::new(&[]);
        let imp = obj.imp();
        imp.id.set(id);
        imp.file_hash.replace(fg.file_hash);
        imp.file_len.replace(fg.file_len);
        imp.files
            .replace(fg.files.into_iter().map(FileItem::new).collect());
        obj
    }
    /// Identifier assigned when the group was created.
    pub fn id(&self) -> usize {
        self.imp().id.get()
    }
    /// Content hash shared by all files in the group.
    pub fn file_hash(&self) -> Ref<'_, FileHash> {
        self.imp().file_hash.borrow()
    }
    /// Size of a single file in the group.
    pub fn file_len(&self) -> FileLen {
        *self.imp().file_len.borrow()
    }
    /// Member files of the group.
    pub fn files(&self) -> Ref<'_, Vec<FileItem>> {
        self.imp().files.borrow()
    }
    /// Combined size of all member files.
    pub fn total_size(&self) -> FileLen {
        self.imp().files.borrow().iter().map(FileItem::len).sum()
    }
    /// True when the group reads as fully selected (via the GObject property).
    pub fn selected(&self) -> bool {
        self.property("selected")
    }
    /// Marks the group fully selected / unselected and notifies both derived
    /// properties so bound widgets refresh.
    pub fn set_selected(&self, selected: bool) {
        if selected {
            self.imp().selected.set(Selection::Selected)
        } else {
            self.imp().selected.set(Selection::None)
        }
        self.notify("selected");
        self.notify("inconsistent");
    }
    /// Updates selection based on child items selection
    ///
    /// The group counts as fully selected when at most one file remains
    /// unselected (one copy is kept), inconsistent when some but not enough
    /// are selected, and unselected when none are.
    pub fn set_selection_from_files(&self) {
        let files = self.files();
        let checked_count = files.iter().filter(|f| f.is_selected()).count();
        let selection = if checked_count > 0 && files.len() - checked_count <= 1 {
            Selection::Selected
        } else if checked_count > 0 && files.len() - checked_count > 1 {
            Selection::Inconsistent
        } else {
            Selection::None
        };
        // Only notify when the state actually changed.
        if selection != self.imp().selected.get() {
            self.imp().selected.set(selection);
            self.notify("selected");
            self.notify("inconsistent");
        }
    }
    /// Drops all member files whose path occurs in `paths`, then recomputes
    /// the group selection state.
    pub fn remove_many(&self, paths: &BTreeSet<&fclones::Path>) {
        let mut self_files = self.imp().files.borrow_mut();
        self_files.retain(|f| !paths.contains(f.path().as_ref()));
        // Release the mutable borrow before set_selection_from_files re-borrows.
        drop(self_files);
        self.set_selection_from_files();
    }
    /// Splits the group for deduplication: selected files are dropped,
    /// unselected files are kept.
    pub fn to_partitioned_file_group(&self) -> PartitionedFileGroup {
        let mut to_keep = Vec::new();
        let mut to_drop = Vec::new();
        for file_item in self.files().deref() {
            let metadata = file_item.to_path_and_metadata();
            if file_item.is_selected() {
                to_drop.push(metadata)
            } else {
                to_keep.push(metadata)
            }
        }
        PartitionedFileGroup { to_keep, to_drop }
    }
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/app.rs | fclones-gui/src/app.rs | use adw::gtk::{MessageDialog, MessageType};
use adw::prelude::{DialogExt, WidgetExt};
use std::convert::identity;
use std::path::PathBuf;
use gtk::{CssProvider, StyleContext};
use relm4::gtk::prelude::GtkWindowExt;
use relm4::gtk::{gdk, ButtonsType};
use relm4::Component;
use relm4::ComponentController;
use relm4::WorkerController;
use relm4::{gtk, ComponentParts, ComponentSender, Controller, SimpleComponent};
use fclones::DedupeOp;
use crate::dedupe_worker::{DedupeWorker, DedupeWorkerMsg};
use crate::duplicates::{DuplicatesPageModel, DuplicatesPageWidgets};
use crate::group_worker::GroupWorker;
use crate::group_worker::GroupWorkerMsg;
use crate::input::{InputPageModel, InputPageWidgets};
use crate::progress::{ProgressDialog, ProgressMsg};
/// All messages handled by the top-level application component.
#[derive(Debug, Clone)]
pub enum AppMsg {
    // Page navigation / input-page edits.
    ActivateInputPage,
    AddInputPaths(Vec<PathBuf>),
    UpdateInputPath(u32, PathBuf),
    RemoveInputPath(u32),
    SetMinSize(u64),
    SetMaxSize(u64),
    // Search / dedupe lifecycle.
    FindDuplicates,
    AbortSearch,
    Deduplicate,
    SetDedupeOp(DedupeOp),
    // Duplicate-list manipulation (mostly sent by the background workers).
    ClearFiles,
    AddFiles(usize, Vec<fclones::FileGroup<fclones::PathAndMetadata>>),
    RemoveFiles(u32, Vec<fclones::Path>),
    RemoveGroup(u32),
    NoDuplicatesFound,
    Progress(ProgressMsg),
    // Selection handling on the duplicates page.
    ToggleFileSelection(u32, bool),
    SelectionPriorityChanged(fclones::Priority),
    SelectAllFiles,
    SelectFilesInDirectory(PathBuf),
    UnselectAllFiles,
    UnselectFilesInDirectory(PathBuf),
}
/// Which top-level view the main window currently shows.
pub enum AppPage {
    Input,
    Duplicates,
    // Not a real page: triggers the "no duplicates" dialog, then returns to Input.
    NoDuplicatesMsg,
}
/// Application state: page models plus background workers and the progress dialog.
pub struct AppModel {
    selected_page: AppPage,
    input: InputPageModel,
    duplicates: DuplicatesPageModel,
    // Runs the duplicate search off the UI thread.
    group_worker: WorkerController<GroupWorker>,
    // Runs the dedupe operation off the UI thread.
    dedupe_worker: WorkerController<DedupeWorker>,
    progress: Controller<ProgressDialog>,
}
/// Long-lived widget handles kept between view updates.
pub struct AppWidgets {
    window: gtk::ApplicationWindow,
    // Stack switching between the input and duplicates pages.
    container: gtk::Stack,
    input_page: InputPageWidgets,
    duplicates_page: DuplicatesPageWidgets,
    no_dupes_msg_dlg: MessageDialog,
}
impl SimpleComponent for AppModel {
    type Input = AppMsg;
    type Output = ();
    type Init = Vec<PathBuf>;
    type Root = gtk::ApplicationWindow;
    type Widgets = AppWidgets;
    /// Creates the main window (and installs the app stylesheet as a side effect).
    fn init_root() -> Self::Root {
        load_css();
        gtk::ApplicationWindow::builder()
            .title("FClones")
            .default_width(1200)
            .default_height(200)
            .build()
    }
    /// Builds both pages, the background workers and the progress dialog, and
    /// wires their outputs back into this component's input channel.
    fn init(
        paths: Self::Init,
        root: &Self::Root,
        sender: ComponentSender<Self>,
    ) -> ComponentParts<Self> {
        let input_page = InputPageWidgets::new(root);
        let duplicates_page = DuplicatesPageWidgets::new(root);
        let container = gtk::Stack::new();
        container.add_child(&input_page.root);
        container.add_child(&duplicates_page.root);
        let model = AppModel {
            selected_page: AppPage::Input,
            input: InputPageModel::new(&paths),
            duplicates: DuplicatesPageModel::new(),
            // Workers/dialog forward their AppMsg outputs straight into our input queue.
            group_worker: GroupWorker::builder()
                .detach_worker(())
                .forward(sender.input_sender(), identity),
            dedupe_worker: DedupeWorker::builder()
                .detach_worker(())
                .forward(sender.input_sender(), identity),
            progress: ProgressDialog::builder()
                .transient_for(root)
                .launch(())
                .forward(sender.input_sender(), identity),
        };
        root.set_titlebar(Some(&input_page.header));
        root.set_child(Some(&container));
        // Modal info dialog shown when a search finds nothing.
        let no_dupes_msg_dlg = MessageDialog::builder()
            .modal(true)
            .message_type(MessageType::Info)
            .transient_for(root)
            .text("No duplicates found")
            .buttons(ButtonsType::Ok)
            .build();
        no_dupes_msg_dlg.connect_response(|dlg, _| dlg.hide());
        let widgets = AppWidgets {
            window: root.clone(),
            container,
            input_page,
            duplicates_page,
            no_dupes_msg_dlg,
        };
        widgets
            .input_page
            .bind(&model.input, sender.input_sender().clone());
        widgets
            .duplicates_page
            .bind(&model.duplicates, sender.input_sender().clone());
        ComponentParts { model, widgets }
    }
    /// Routes each message to the matching model mutation or worker command.
    fn update(&mut self, msg: Self::Input, _sender: ComponentSender<Self>) {
        match msg {
            AppMsg::ActivateInputPage => {
                self.selected_page = AppPage::Input;
            }
            AppMsg::AddInputPaths(paths) => {
                for path in paths {
                    self.input.add_path(path);
                }
            }
            AppMsg::UpdateInputPath(position, path) => self.input.update_path(position, path),
            AppMsg::RemoveInputPath(index) => self.input.remove_path(index),
            AppMsg::SetMinSize(value) => self.input.set_min_size(value),
            AppMsg::SetMaxSize(value) => self.input.set_max_size(value),
            AppMsg::FindDuplicates => {
                // Show the progress dialog and hand the search off to the worker.
                let config = self.input.group_config();
                self.progress.sender().send(ProgressMsg::Show).unwrap();
                self.group_worker
                    .sender()
                    .send(GroupWorkerMsg::FindDuplicates(config))
                    .unwrap();
            }
            AppMsg::Deduplicate => {
                // Indeterminate progress while the dedupe worker runs.
                let progress_sender = self.progress.sender();
                progress_sender.send(ProgressMsg::Progress(None)).unwrap();
                progress_sender
                    .send(ProgressMsg::Message("Removing duplicates...".to_string()))
                    .unwrap();
                progress_sender.send(ProgressMsg::Show).unwrap();
                let groups = self.duplicates.partitioned_groups();
                let op = self.duplicates.dedupe_op().clone();
                self.dedupe_worker
                    .sender()
                    .send(DedupeWorkerMsg::RunDedupe(groups, op))
                    .unwrap()
            }
            AppMsg::SetDedupeOp(op) => self.duplicates.set_dedupe_op(op),
            AppMsg::ClearFiles => {
                self.duplicates.clear_files();
            }
            AppMsg::AddFiles(start_id, files) => {
                // First chunk switches to the duplicates page; then pull more.
                self.selected_page = AppPage::Duplicates;
                self.duplicates.add_files(start_id, files);
                self.group_worker
                    .sender()
                    .send(GroupWorkerMsg::GetNextChunk)
                    .unwrap();
            }
            AppMsg::NoDuplicatesFound => {
                self.selected_page = AppPage::NoDuplicatesMsg;
            }
            AppMsg::RemoveFiles(index, files) => {
                self.duplicates.remove_files(index, &files);
            }
            AppMsg::RemoveGroup(index) => self.duplicates.remove_group(index),
            AppMsg::AbortSearch => {
                self.duplicates.clear_files();
            }
            AppMsg::Progress(progress) => self.progress.sender().send(progress).unwrap(),
            AppMsg::ToggleFileSelection(position, selected) => {
                self.duplicates.select_file_at(position, selected);
            }
            AppMsg::SelectAllFiles => self.duplicates.select_all_files(),
            AppMsg::SelectFilesInDirectory(path) => self.duplicates.select_files_in(path),
            AppMsg::UnselectAllFiles => self.duplicates.unselect_all_files(),
            AppMsg::UnselectFilesInDirectory(path) => self.duplicates.unselect_files_in(path),
            AppMsg::SelectionPriorityChanged(priority) => {
                self.duplicates.set_selection_priority(priority)
            }
        }
    }
    /// Shows the page matching `selected_page` and refreshes its widgets.
    fn update_view(&self, widgets: &mut Self::Widgets, sender: ComponentSender<Self>) {
        match self.selected_page {
            AppPage::Input => {
                widgets
                    .container
                    .set_visible_child(&widgets.input_page.root);
                widgets
                    .window
                    .set_titlebar(Some(&widgets.input_page.header));
                widgets.input_page.update(&self.input);
            }
            AppPage::Duplicates => {
                widgets
                    .container
                    .set_visible_child(&widgets.duplicates_page.root);
                widgets
                    .window
                    .set_titlebar(Some(&widgets.duplicates_page.header));
                widgets.duplicates_page.update(&self.duplicates);
            }
            AppPage::NoDuplicatesMsg => {
                // Show the info dialog, then immediately navigate back to Input.
                widgets.no_dupes_msg_dlg.show();
                sender
                    .input_sender()
                    .send(AppMsg::ActivateInputPage)
                    .unwrap_or_default();
            }
        };
    }
}
/// Installs the bundled application stylesheet for the default display.
fn load_css() {
    let provider = CssProvider::new();
    provider.load_from_data(include_bytes!("style.css"));
    StyleContext::add_provider_for_display(
        &gdk::Display::default().expect("Could not connect to a display."),
        &provider,
        gtk::STYLE_PROVIDER_PRIORITY_APPLICATION,
    );
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/file_item.rs | fclones-gui/src/file_item.rs | use std::cell::{Cell, Ref};
use std::time::SystemTime;
use fclones::PathAndMetadata;
use relm4::gtk::glib;
use relm4::gtk::glib::Object;
use relm4::gtk::prelude::ObjectExt;
use relm4::gtk::subclass::prelude::ObjectSubclassIsExt;
mod imp {
    use relm4::gtk::glib::{ParamSpec, Value};
    use relm4::gtk::prelude::{ParamSpecBuilderExt, ToValue};
    use relm4::gtk::subclass::prelude::*;
    use relm4::once_cell::sync::Lazy;
    use std::cell::RefCell;
    use super::*;
    /// GObject-internal state backing `super::FileItem`.
    #[derive(Default)]
    pub struct FileItem {
        // `None` only between GObject construction and `FileItem::new` populating it.
        pub inner: RefCell<Option<PathAndMetadata>>,
        pub selected: Cell<bool>,
    }
    #[glib::object_subclass]
    impl ObjectSubclass for FileItem {
        const NAME: &'static str = "FileItem";
        type Type = super::FileItem;
    }
    impl ObjectImpl for FileItem {
        fn properties() -> &'static [ParamSpec] {
            static PROPERTIES: Lazy<Vec<ParamSpec>> = Lazy::new(|| {
                vec![
                    glib::ParamSpecString::builder("path").read_only().build(),
                    glib::ParamSpecUInt::builder("len").read_only().build(),
                    glib::ParamSpecBoolean::builder("selected").build(),
                ]
            });
            PROPERTIES.as_ref()
        }
        fn set_property(&self, _id: usize, value: &Value, pspec: &ParamSpec) {
            match pspec.name() {
                // Only "selected" is writable.
                "selected" => {
                    let selected = value.get().unwrap();
                    self.selected.replace(selected);
                }
                _ => unimplemented!(),
            }
        }
        fn property(&self, _id: usize, pspec: &ParamSpec) -> Value {
            let inner = self.inner.borrow();
            // Before initialization every property reads as a null string value.
            let Some(inner) = inner.as_ref() else {
                return None::<String>.to_value();
            };
            match pspec.name() {
                "path" => inner.path.to_escaped_string().to_value(),
                // NOTE(review): "len" is declared as a u32 ParamSpecUInt but file
                // lengths are u64 — confirm the intended range for large files.
                "len" => inner.metadata.len().0.to_value(),
                "selected" => self.selected.get().to_value(),
                _ => unimplemented!(),
            }
        }
    }
}
glib::wrapper! {
    // Public GObject type; state lives in `imp::FileItem`.
    pub struct FileItem(ObjectSubclass<imp::FileItem>);
}
impl FileItem {
    /// Wraps a resolved file (path + metadata) in a GObject.
    pub fn new(f: fclones::PathAndMetadata) -> Self {
        let obj: Self = Object::new(&[]);
        let imp = obj.imp();
        imp.inner.replace(Some(f));
        obj
    }
    /// Whether the file is marked for removal. Same as `is_selected`.
    pub fn selected(&self) -> bool {
        self.imp().selected.get()
    }
    /// File size in bytes.
    pub fn len(&self) -> fclones::FileLen {
        self.imp().inner.borrow().as_ref().unwrap().metadata.len()
    }
    /// Last modification time, if the platform reports one.
    pub fn modified_at(&self) -> Option<SystemTime> {
        self.imp()
            .inner
            .borrow()
            .as_ref()
            .unwrap()
            .metadata
            .modified()
            .ok()
    }
    /// Borrow of the file's path.
    pub fn path(&self) -> Ref<'_, fclones::Path> {
        let inner = self.imp().inner.borrow();
        Ref::map(inner, |p| &p.as_ref().unwrap().path)
    }
    /// Borrowing view combining the item with its path/metadata.
    pub fn as_ref(&self) -> FileItemRef<'_> {
        let inner = self.imp().inner.borrow();
        let path_and_metadata = Ref::map(inner, |p| p.as_ref().unwrap());
        FileItemRef {
            item: self,
            path_and_metadata,
        }
    }
    /// Whether the file is marked for removal.
    pub fn is_selected(&self) -> bool {
        self.imp().selected.get()
    }
    /// Marks/unmarks the file and notifies the bound "selected" property.
    pub fn set_selected(&self, selected: bool) {
        self.imp().selected.set(selected);
        self.notify("selected")
    }
    /// Clones out the underlying path + metadata.
    pub fn to_path_and_metadata(&self) -> PathAndMetadata {
        self.imp().inner.borrow().as_ref().unwrap().clone()
    }
}
/// A `FileItem` together with a live borrow of its path/metadata.
#[derive(Debug)]
pub struct FileItemRef<'a> {
    pub item: &'a FileItem,
    pub path_and_metadata: Ref<'a, PathAndMetadata>,
}
impl AsRef<PathAndMetadata> for FileItemRef<'_> {
    // Lets APIs taking `impl AsRef<PathAndMetadata>` accept the ref directly.
    fn as_ref(&self) -> &PathAndMetadata {
        self.path_and_metadata.as_ref()
    }
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/group_worker.rs | fclones-gui/src/group_worker.rs | use crate::app::AppMsg;
use crate::progress::{LogAdapter, ProgressMsg};
use fclones::config::GroupConfig;
use fclones::PathAndMetadata;
use relm4::{spawn, ComponentSender, Worker};
use std::collections::VecDeque;
use std::time::SystemTime;
/// Background worker that searches for duplicate groups and streams them
/// to the UI in fixed-size chunks.
pub struct GroupWorker {
    // Index of the first group in the next chunk to be sent.
    group_start_id: usize,
    // Remaining chunks awaiting delivery to the UI.
    groups: VecDeque<Vec<fclones::FileGroup<fclones::FileInfo>>>,
    // When the most recent search finished. NOTE(review): written but never
    // read in this file — confirm whether it is still needed.
    start_time: SystemTime,
}
impl GroupWorker {
    /// Runs the full duplicate search, chunks the results and sends the first
    /// chunk to the UI (further chunks are pulled on demand via `GetNextChunk`).
    fn find_duplicates(&mut self, config: GroupConfig, sender: ComponentSender<Self>) {
        sender.output(AppMsg::ClearFiles).unwrap_or_default();
        // Bridge fclones' progress reporting into AppMsg::Progress messages.
        let (tx, rx) = relm4::channel();
        let log = LogAdapter::new(tx);
        spawn(rx.forward(sender.output_sender().clone(), AppMsg::Progress));
        let groups = fclones::group_files(&config, &log);
        // NOTE(review): a search error is silently dropped here — the UI is
        // never told the search failed.
        if let Ok(groups) = groups {
            self.groups = VecDeque::new();
            self.group_start_id = 0;
            // Deliver groups in chunks of 256 to keep the UI responsive.
            for chunk in groups.chunks(256) {
                self.groups.push_back(chunk.to_vec())
            }
            sender
                .output(AppMsg::Progress(ProgressMsg::Hide))
                .unwrap_or_default();
            self.start_time = SystemTime::now();
            if groups.is_empty() {
                sender.output(AppMsg::NoDuplicatesFound).unwrap_or_default();
            } else {
                self.send_next_chunk(sender)
            }
        }
    }
    /// Sends the next pending chunk (if any) to the UI, resolving each file's
    /// metadata and dropping files that can no longer be accessed.
    fn send_next_chunk(&mut self, sender: ComponentSender<Self>) {
        if let Some(chunk) = self.groups.pop_front() {
            let chunk_len = chunk.len();
            let chunk = chunk
                .iter()
                .cloned()
                .map(|group| group.filter_map(|f| PathAndMetadata::new(f.path).ok()))
                .collect();
            sender
                .output(AppMsg::AddFiles(self.group_start_id, chunk))
                .unwrap_or_default();
            self.group_start_id += chunk_len;
        }
    }
}
/// Commands accepted by `GroupWorker`.
#[derive(Debug)]
pub enum GroupWorkerMsg {
    // Start a new search with the given configuration.
    FindDuplicates(fclones::config::GroupConfig),
    // Request delivery of the next pending chunk of groups.
    GetNextChunk,
}
impl Worker for GroupWorker {
    type Init = ();
    type Input = GroupWorkerMsg;
    type Output = AppMsg;
    /// Starts with no pending chunks.
    fn init(_init: Self::Init, _sender: ComponentSender<Self>) -> Self {
        GroupWorker {
            group_start_id: 0,
            groups: VecDeque::new(),
            start_time: SystemTime::now(),
        }
    }
    /// Dispatches incoming commands.
    fn update(&mut self, msg: GroupWorkerMsg, sender: ComponentSender<Self>) {
        match msg {
            GroupWorkerMsg::FindDuplicates(config) => self.find_duplicates(config, sender),
            GroupWorkerMsg::GetNextChunk => self.send_next_chunk(sender),
        }
    }
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/progress.rs | fclones-gui/src/progress.rs | use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use adw::prelude::*;
use relm4::gtk::prelude::GtkWindowExt;
use relm4::{gtk, view, Component, ComponentParts, ComponentSender, RelmWidgetExt, Sender};
use fclones::log::{Log, LogLevel, ProgressBarLength};
use fclones::progress::ProgressTracker;
use crate::app::AppMsg;
/// Modal progress window state.
pub struct ProgressDialog {
    visible: bool,
    // Current phase description shown above the bar.
    message: String,
    // Completed fraction; `None` means indeterminate (the bar pulses).
    progress: Option<f64>,
}
/// Widget handles for the progress window.
pub struct ProgressWidgets {
    dialog: adw::Window,
    label: gtk::Label,
    progress: gtk::ProgressBar,
}
/// Messages controlling the progress dialog.
#[derive(Clone, Debug)]
pub enum ProgressMsg {
    Show,
    Hide,
    // Sent when the user closes the window; forwarded as AppMsg::AbortSearch.
    Abort,
    Progress(Option<f64>),
    Message(String),
}
impl Component for ProgressDialog {
    type CommandOutput = ();
    type Input = ProgressMsg;
    type Output = AppMsg;
    type Init = ();
    type Root = adw::Window;
    type Widgets = ProgressWidgets;
    fn init_root() -> Self::Root {
        let dialog = adw::Window::default();
        dialog.set_modal(true);
        dialog.set_title(Some("Searching for duplicates"));
        dialog.set_resizable(false);
        dialog
    }
    fn init(
        _init: Self::Init,
        root: &Self::Root,
        sender: ComponentSender<Self>,
    ) -> ComponentParts<Self> {
        let model = ProgressDialog {
            visible: false,
            message: "Running".to_owned(),
            progress: None,
        };
        // `view!` declares the widget tree and binds `container`, `label`
        // and `progress` as local variables used below.
        view! {
            #[name = "container"]
            gtk::Box {
                set_orientation: gtk::Orientation::Vertical,
                set_margin_all: 20,
                set_spacing: 10,
                #[name = "label"]
                gtk::Label {
                    set_width_request: 600,
                    set_height_request: 20,
                    set_halign: gtk::Align::Center,
                    set_valign: gtk::Align::Center,
                },
                #[name = "progress"]
                gtk::ProgressBar {
                    set_width_request: 600,
                    set_halign: gtk::Align::Center,
                    set_valign: gtk::Align::Center,
                }
            }
        }
        root.set_content(Some(&container));
        // Closing the window aborts the search instead of merely hiding the dialog.
        root.connect_close_request(move |_| {
            sender.input(ProgressMsg::Abort);
            gtk::Inhibit(false)
        });
        let widgets = ProgressWidgets {
            dialog: root.clone(),
            progress,
            label,
        };
        ComponentParts { model, widgets }
    }
    fn update(&mut self, msg: Self::Input, sender: ComponentSender<Self>, _root: &Self::Root) {
        match msg {
            ProgressMsg::Show => self.visible = true,
            ProgressMsg::Hide => self.visible = false,
            ProgressMsg::Abort => sender.output(AppMsg::AbortSearch).unwrap(),
            ProgressMsg::Message(s) => self.message = s,
            ProgressMsg::Progress(f) => self.progress = f,
        }
    }
    // No async commands are used by this component.
    fn update_cmd(
        &mut self,
        _message: Self::CommandOutput,
        _sender: ComponentSender<Self>,
        _root: &Self::Root,
    ) {
    }
    /// Syncs the window visibility, message label and progress bar.
    fn update_view(&self, widgets: &mut Self::Widgets, _sender: ComponentSender<Self>) {
        widgets.dialog.set_visible(self.visible);
        widgets.label.set_label(self.message.as_str());
        match self.progress {
            // Known fraction: show it; unknown: pulse the bar.
            Some(fraction) => widgets.progress.set_fraction(fraction),
            None => widgets.progress.pulse(),
        }
    }
}
/// Thread-safe progress counter polled by the UI.
///
/// Tracks an atomic count of completed units against an optional total `len`;
/// `progress()` yields the completed fraction.
pub struct ProgressBar {
    count: AtomicU64,
    len: Option<u64>,
}

// The two original adjacent `impl ProgressBar` blocks are merged into one.
impl ProgressBar {
    /// Creates a counter starting at zero; `len` is the expected total,
    /// or `None` when the total is unknown (indeterminate progress).
    pub fn new(len: Option<u64>) -> ProgressBar {
        Self {
            count: AtomicU64::new(0),
            len,
        }
    }
    /// Fraction completed so far, or `None` when the total is unknown.
    pub fn progress(&self) -> Option<f64> {
        self.len
            .map(|len| (self.count.load(Ordering::Relaxed) as f64) / (len as f64))
    }
}
impl ProgressTracker for ProgressBar {
    // Called by fclones from worker threads to report completed units.
    fn inc(&self, delta: u64) {
        self.count.fetch_add(delta, Ordering::Relaxed);
    }
}
/// Adapts fclones' `Log` interface to relm4 progress messages.
pub struct LogAdapter {
    sender: Sender<ProgressMsg>,
}
impl LogAdapter {
    /// Creates an adapter forwarding progress updates to `sender`.
    pub fn new(sender: Sender<ProgressMsg>) -> Self {
        Self { sender }
    }
}
impl Log for LogAdapter {
    /// Called by fclones at the start of each phase. Sends the phase message,
    /// then spawns a thread that samples the returned tracker every 50 ms and
    /// forwards the fraction; the thread exits once fclones drops the tracker.
    fn progress_bar(&self, msg: &str, len: ProgressBarLength) -> Arc<dyn ProgressTracker> {
        self.sender
            .send(ProgressMsg::Message(msg.to_owned()))
            .unwrap_or_default();
        let progress_bar = match len {
            ProgressBarLength::Items(n) => Arc::new(ProgressBar::new(Some(n))),
            ProgressBarLength::Bytes(n) => Arc::new(ProgressBar::new(Some(n))),
            ProgressBarLength::Unknown => Arc::new(ProgressBar::new(None)),
        };
        // Hold only a weak ref on the polling thread so it terminates when the
        // phase finishes and the strong refs are dropped.
        let weak_pb = Arc::downgrade(&progress_bar);
        let sender = self.sender.clone();
        thread::spawn(move || {
            while let Some(pb) = weak_pb.upgrade() {
                sender
                    .send(ProgressMsg::Progress(pb.progress()))
                    .unwrap_or_default();
                thread::sleep(Duration::from_millis(50));
            }
        });
        progress_bar
    }
    // Plain log lines are discarded; the GUI only shows progress.
    fn log(&self, _level: LogLevel, _msg: String) {}
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/dir_chooser.rs | fclones-gui/src/dir_chooser.rs | use std::path::PathBuf;
use adw::prelude::*;
use relm4::gtk;
use relm4::gtk::gio;
use relm4::gtk::glib;
/// Displays a FileChooserDialog and lets the user select input directories.
/// If the user accepts the selection, passes the selected paths to the given `accept_fn`.
pub fn choose_dir(
    window: &impl IsA<gtk::Window>,
    current_file: Option<PathBuf>,
    create_folders: bool,
    accept_fn: impl Fn(Vec<PathBuf>) + 'static,
) {
    let file_chooser = gtk::FileChooserNative::new(
        Some("Select directory"),
        Some(&window.clone()),
        gtk::FileChooserAction::SelectFolder,
        Some("Select"),
        Some("Cancel"),
    );
    file_chooser.set_modal(true);
    file_chooser.set_create_folders(create_folders);
    file_chooser.set_transient_for(Some(window));
    // Pre-select the currently configured directory, if any.
    if let Some(file) = current_file {
        let file = gio::File::for_path(file);
        if let Err(err) = file_chooser.set_file(&file) {
            eprintln!("Failed to set FileChooser file: {}", err);
        }
    }
    // The chooser must stay alive until the user responds, hence the strong clone.
    file_chooser.connect_response(glib::clone!(@strong file_chooser => move |_, result| {
        if result == gtk::ResponseType::Accept {
            let mut paths = Vec::new();
            for f in file_chooser.files().into_iter().flatten() {
                if let Ok(f) = f.downcast::<gio::File>() {
                    if let Some(path) = f.path() {
                        paths.push(path);
                    }
                }
            }
            accept_fn(paths);
        }
        file_chooser.destroy();
    }));
    file_chooser.show();
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/bytes_entry.rs | fclones-gui/src/bytes_entry.rs | use adw::gtk;
use adw::prelude::*;
use gtk::glib;
const UNLIMITED_STR: &str = "Unlimited";
/** A text field with a slider allowing to enter bytes value. */
pub struct BytesRow {
    row: adw::ActionRow,
    // Editable text showing the human-readable byte value.
    entry: gtk::EditableLabel,
    // Text restored when the user enters an unparseable value.
    default: String,
}
impl BytesRow {
    /// Builds a row titled `title` with an editable byte-value label and a
    /// logarithmic slider. `default` is the initial text and the fallback for
    /// unparseable input; `unlimited` makes the slider's top end mean "no limit".
    pub fn new(title: &str, subtitle: &str, default: &str, unlimited: bool) -> BytesRow {
        let entry = gtk::EditableLabel::builder()
            .xalign(1.0)
            .valign(gtk::Align::Center)
            .halign(gtk::Align::End)
            .hexpand(false)
            .build();
        // Slider position is log10-based; 0..=12 spans the supported byte range.
        let adjustment = gtk::Adjustment::builder()
            .lower(0.0)
            .upper(12.0)
            .page_increment(1.0)
            .step_increment(0.05)
            .build();
        let scale = gtk::Scale::new(gtk::Orientation::Horizontal, Some(&adjustment));
        scale.set_width_request(240);
        // Keep slider and text in sync both ways.
        bind(&scale, &entry, unlimited);
        entry.set_text(default);
        let row = adw::ActionRow::builder()
            .title(title)
            .subtitle(subtitle)
            .build();
        row.add_suffix(&entry);
        row.add_suffix(&scale);
        // Clicking anywhere on the row starts editing the value.
        row.set_activatable_widget(Some(&entry));
        row.connect_activate(glib::clone!(@strong entry => move |_| {
            entry.grab_focus();
            entry.start_editing();
        }));
        BytesRow {
            row,
            entry,
            default: default.to_owned(),
        }
    }
    /// The underlying libadwaita row, for embedding in a preferences group.
    pub fn row(&self) -> &adw::ActionRow {
        &self.row
    }
    /// Programmatically sets the displayed byte value.
    pub fn set_value(&self, bytes: u64) {
        set_bytes(bytes, &self.entry);
    }
    /// Registers `action` to run with the parsed byte value on every change
    /// (`u64::MAX` for "Unlimited").
    pub fn on_change(&self, action: impl Fn(u64) + 'static) {
        let default_value = self.default.clone();
        // Rewrite the value to use nicer byte units
        self.entry.connect_editing_notify(move |entry| {
            if entry.is_editing() {
                // Select the whole text when editing starts.
                let text = entry.text();
                entry.set_text(text.as_str());
                entry.select_region(0, text.len() as i32);
            }
            // This will trigger only after the user stops editing the label entry
            if !entry.is_editing() {
                match parse_size::parse_size(entry.text()) {
                    Ok(bytes) => set_bytes(bytes, entry),
                    Err(_) => entry.set_text(default_value.as_str()),
                }
            }
        });
        // Invoke the action on every text entry change:
        self.entry.connect_changed(move |entry| {
            if entry.text() == UNLIMITED_STR {
                action(u64::MAX)
            } else if let Ok(bytes) = parse_size::parse_size(entry.text()) {
                action(bytes);
            }
        });
    }
}
fn set_bytes(bytes: u64, entry: >k::EditableLabel) {
if !entry.is_editing() {
let value_str = if bytes == u64::MAX {
UNLIMITED_STR.to_owned()
} else {
let bytes = byte_unit::Byte::from_bytes(bytes as u128);
let adjusted = bytes.get_appropriate_unit(false);
adjusted.get_value();
format!("{} {}", adjusted.get_value(), adjusted.get_unit())
};
if entry.text() != value_str.as_str() {
entry.set_text(value_str.as_str());
}
}
}
/// Two-way binds the slider `scale` and the text label `entry` so that moving
/// either widget updates the other. When `unlimited` is true, the slider's
/// upper bound maps to the special "unlimited" text value.
fn bind(scale: &gtk::Scale, entry: &gtk::EditableLabel, unlimited: bool) {
    // Slider -> label: translate the logarithmic slider position to bytes.
    scale.connect_change_value(
        glib::clone!(@weak entry => @default-panic, move |scale, _, value| {
            if unlimited && value == scale.adjustment().upper() {
                entry.set_text(UNLIMITED_STR);
            }
            else {
                let bytes = scale_to_bytes(value);
                set_bytes(bytes, &entry);
            }
            gtk::Inhibit(false)
        }),
    );
    // Label -> slider: parse the label text back into bytes and reposition
    // the slider knob accordingly. Unparseable text leaves the slider alone.
    entry.connect_changed(glib::clone!(@weak scale => @default-panic, move |value| {
        let text = value.text();
        if unlimited && text == UNLIMITED_STR {
            scale.set_value(scale.adjustment().upper());
        }
        else if let Ok(bytes) = parse_size::parse_size(text) {
            scale.set_value(bytes_to_scale(bytes));
        }
    }));
}
/// Maps a byte count onto the logarithmic slider position.
///
/// The position is `log10(bytes + 1)` (the `+ 1` keeps 0 bytes finite),
/// snapped to the slider's 0.05 step so the knob always rests on a tick.
fn bytes_to_scale(bytes: u64) -> f64 {
    const STEP: f64 = 0.05;
    let log_value = ((bytes + 1) as f64).log10();
    (log_value / STEP).round() * STEP
}
/// Inverse of `bytes_to_scale`: converts a slider position back to a byte
/// count, rounded to two significant digits.
fn scale_to_bytes(value: f64) -> u64 {
    let raw = 10.0f64.powf(value);
    // Below 10 bytes the pure exponential never reaches zero; stretch the
    // [1, 10) range linearly onto [0, 10) so the slider minimum maps to 0.
    let adjusted = if raw < 10.0 {
        (raw - 1.0) * (10.0 / 9.0)
    } else {
        raw
    };
    round(adjusted, 2)
}
/// Rounds `x` to `n` significant decimal digits and returns it as `u64`,
/// e.g. `round(123.456, 2) == 120`.
///
/// Non-positive and non-finite inputs yield 0. (The original code reached
/// the same result only via `NaN`/`inf` propagation and the saturating
/// `as u64` cast; the guard makes that intent explicit.)
fn round(x: f64, n: u32) -> u64 {
    // log10 is NaN/-inf for x <= 0; byte values here are never negative,
    // but zero is possible and must map to 0.
    if !(x > 0.0) || !x.is_finite() {
        return 0;
    }
    // Number of digits before the decimal point.
    let d = x.log10().ceil();
    // Shift so exactly `n` significant digits sit left of the point,
    // round there, then shift back.
    let power = n as f64 - d;
    let magnitude = 10f64.powf(power);
    let shifted = (x * magnitude).round();
    (shifted / magnitude).round() as u64
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/dedupe_worker.rs | fclones-gui/src/dedupe_worker.rs | use std::cmp;
use std::collections::HashMap;
use std::sync::Mutex;
use rayon::iter::ParallelBridge;
use rayon::iter::ParallelIterator;
use relm4::{spawn, ComponentSender, Worker};
use fclones::log::{Log, ProgressBarLength};
use fclones::{DedupeOp, PartitionedFileGroup};
use crate::app::AppMsg;
use crate::progress::{LogAdapter, ProgressMsg};
/// Stateless background worker that executes the chosen dedupe operation
/// (remove / move / link / reflink) on groups of duplicate files.
pub struct DedupeWorker {}
impl DedupeWorker {
    /// Runs dedupe operation `op` on all `groups` in parallel, streaming
    /// progress updates and per-group results back to the app via `sender`.
    ///
    /// For each group, successfully processed files are reported with
    /// `AppMsg::RemoveFiles`; groups that end up with fewer than two files
    /// are removed entirely with `AppMsg::RemoveGroup`.
    pub fn dedupe(
        &mut self,
        groups: Vec<PartitionedFileGroup>,
        op: DedupeOp,
        sender: ComponentSender<Self>,
    ) {
        let devices = fclones::DiskDevices::new(&HashMap::new());
        // Bridge fclones' Log-based progress reporting into AppMsg::Progress.
        let (tx, rx) = relm4::channel();
        let log = LogAdapter::new(tx);
        spawn(rx.forward(sender.output_sender().clone(), AppMsg::Progress));
        // Remembers which groups should be removed from the duplicates view.
        // We cannot remove them as we go, because removing shifts the indexes.
        let removed_groups = Mutex::new(Vec::new());
        let progress = log.progress_bar(
            "Removing duplicates...",
            ProgressBarLength::Items(groups.len() as u64),
        );
        groups
            .into_iter()
            .enumerate()
            .par_bridge()
            .for_each_with(progress, |progress, (i, g)| {
                let group_size = g.to_keep.len() + g.to_drop.len();
                let mut removed_files = Vec::new();
                let commands = g.dedupe_script(&op, &devices);
                for cmd in commands {
                    // Best-effort: commands that fail simply leave the file
                    // in place; only successes are reported as removed.
                    if cmd.execute(false, &log).is_ok() {
                        removed_files.push(cmd.file_to_remove().clone());
                    }
                }
                progress.inc(1);
                if group_size - removed_files.len() < 2 {
                    // Fewer than 2 files left: no longer a duplicate group.
                    removed_groups.lock().unwrap().push(i as u32);
                } else {
                    sender
                        .output(AppMsg::RemoveFiles(i as u32, removed_files))
                        .unwrap();
                }
            });
        let mut removed_groups = removed_groups.into_inner().unwrap();
        // Remove from highest index down so earlier removals don't shift
        // the positions of the remaining ones.
        removed_groups.sort_by_key(|&v| cmp::Reverse(v));
        for i in removed_groups {
            sender.output(AppMsg::RemoveGroup(i)).unwrap();
        }
        sender.output(AppMsg::Progress(ProgressMsg::Hide)).unwrap();
    }
}
/// Messages accepted by [`DedupeWorker`].
#[derive(Debug)]
pub enum DedupeWorkerMsg {
    /// Execute the given dedupe operation on the partitioned file groups.
    RunDedupe(Vec<PartitionedFileGroup>, DedupeOp),
}
impl Worker for DedupeWorker {
    type Init = ();
    type Input = DedupeWorkerMsg;
    type Output = AppMsg;

    /// Creates the worker; it holds no state of its own.
    fn init(_init: Self::Init, _sender: ComponentSender<Self>) -> Self {
        Self {}
    }

    /// Dispatches an incoming message to the matching handler.
    fn update(&mut self, msg: DedupeWorkerMsg, sender: ComponentSender<Self>) {
        // Single-variant enum: the pattern is irrefutable.
        let DedupeWorkerMsg::RunDedupe(groups, op) = msg;
        self.dedupe(groups, op, sender);
    }
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/main.rs | fclones-gui/src/main.rs | use std::env;
use relm4::RelmApp;
mod app;
mod bytes_entry;
mod dedupe_worker;
mod dir_chooser;
mod duplicates;
mod file_group_item;
mod file_item;
mod group_worker;
mod input;
mod progress;
/// Application entry point: initializes libadwaita, then starts the relm4
/// application with the current working directory as the initial input path.
fn main() {
    adw::init().unwrap();
    // Result::iter yields the Ok value or nothing, so a failure to read the
    // current directory simply results in an empty initial path list.
    let initial_paths = env::current_dir().iter().cloned().collect();
    RelmApp::new("io.github.pkolaczk.Fclones").run::<app::AppModel>(initial_paths);
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/duplicates.rs | fclones-gui/src/duplicates.rs | use std::cell::{Cell, RefCell};
use std::collections::BTreeSet;
use std::ops::Deref;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use fclones::{sort_by_priority, FileLen};
use fclones::{DedupeOp, FileSubGroup};
use fclones::{PartitionedFileGroup, Path};
use adw::gio::{ListStore, SimpleAction};
use adw::glib::BoxedAnyObject;
use adw::gtk::{ButtonsType, ColumnViewColumn, MessageDialog};
use adw::prelude::*;
use gtk::gio;
use gtk::glib;
use itertools::Itertools;
use relm4::gtk;
use relm4::gtk::prelude::{ActionMapExt, ButtonExt};
use relm4::gtk::MessageType;
use crate::app::AppMsg;
use crate::dir_chooser::choose_dir;
use crate::file_group_item::FileGroupItem;
use crate::file_item::FileItem;
/// State backing the duplicates page: the tree of duplicate file groups plus
/// selection bookkeeping and the currently chosen dedupe operation.
pub struct DuplicatesPageModel {
    // Tree with FileGroupItem roots and FileItem children.
    files: gtk::TreeListModel,
    file_selection: gtk::SingleSelection,
    // Totals across all listed duplicates.
    total_size: FileLen,
    total_count: usize,
    // Totals across the currently checked files.
    selected_size: Cell<FileLen>,
    selected_count: Rc<Cell<usize>>,
    // Which file in each group is kept when auto-selecting (top, oldest, ...).
    selection_priority: Cell<fclones::Priority>,
    // Remembered so directory choosers reopen at the user's last location.
    last_chosen_directory: Rc<RefCell<Option<PathBuf>>>,
    dedupe_op: DedupeOp,
    // GTK version quirk probe; see tree_model_extend_reverses_order().
    tree_model_extend_reverses_order: bool,
}
impl DuplicatesPageModel {
    /// Creates an empty model with a fresh tree model and single selection.
    pub fn new() -> DuplicatesPageModel {
        let files = create_files_model();
        let file_selection = gtk::SingleSelection::new(Some(&files));
        DuplicatesPageModel {
            files,
            file_selection,
            total_count: 0,
            total_size: FileLen(0),
            selected_count: Rc::new(Cell::new(0)),
            selected_size: Cell::new(FileLen(0)),
            selection_priority: Cell::new(fclones::Priority::Top),
            last_chosen_directory: Rc::new(RefCell::new(None)),
            dedupe_op: DedupeOp::Remove,
            tree_model_extend_reverses_order: Self::tree_model_extend_reverses_order(),
        }
    }
    /// In some versions of GTK, TreeListModel reverses the order of items
    /// added with `extend`. This method returns true if we're linked against such buggy GTK version.
    fn tree_model_extend_reverses_order() -> bool {
        // Probe at runtime: append two items and check which one the tree
        // exposes first.
        let ls = ListStore::new(BoxedAnyObject::static_type());
        let tree = gtk::TreeListModel::new(&ls, false, true, |_| None);
        let mut model: ListStore = tree.model().downcast().unwrap();
        let v1 = BoxedAnyObject::new(1);
        let v2 = BoxedAnyObject::new(2);
        model.extend(&[&v1, &v2]);
        tree.child_row(0).unwrap().item().unwrap() != v1
    }
    /// Removes all groups and resets every counter to zero.
    pub fn clear_files(&mut self) {
        self.root_store().remove_all();
        self.total_count = 0;
        self.total_size = FileLen(0);
        self.selected_count.set(0);
        self.selected_size.set(FileLen(0));
    }
    /// Appends duplicate groups to the view, numbering them from `start_id`,
    /// and updates the running totals.
    pub fn add_files(
        &mut self,
        start_id: usize,
        files: Vec<fclones::FileGroup<fclones::PathAndMetadata>>,
    ) {
        let count: usize = files.iter().map(fclones::FileGroup::file_count).sum();
        let size = files.iter().map(fclones::FileGroup::total_size).sum();
        let files = files
            .into_iter()
            .enumerate()
            .map(|(id, g)| FileGroupItem::new(start_id + id, g));
        self.total_count += count;
        self.total_size += size;
        if self.tree_model_extend_reverses_order {
            // workaround for https://gitlab.gnome.org/GNOME/gtk/-/issues/5920
            // workaround for https://gitlab.gnome.org/GNOME/gtk/-/issues/5707
            self.root_store().extend(files.rev());
        } else {
            self.root_store().extend(files);
        }
    }
    /// Removes the given `paths` from the group at table `position`,
    /// adjusting totals and selection counters for each removed file.
    pub(crate) fn remove_files(&mut self, position: u32, paths: &[Path]) {
        let paths = paths.iter().collect::<BTreeSet<_>>();
        let row = self.files.child_row(position).unwrap();
        row.item()
            .unwrap()
            .downcast::<FileGroupItem>()
            .unwrap()
            .remove_many(&paths);
        // If the row was expanded, let's remove items from the children model as well.
        // I guess there is a better way to do that automatically with GTK bindings,
        // but I haven't found it yet.
        if let Some(children) = row.children() {
            let children: ListStore = children.downcast().unwrap();
            // Iterate backwards so removals don't shift pending indexes.
            for i in (0..children.n_items()).rev() {
                if let Some(child) = children.item(i) {
                    let file_item = child.downcast::<FileItem>().unwrap();
                    if paths.contains(file_item.path().as_ref()) {
                        self.total_count -= 1;
                        self.total_size -= file_item.len();
                        self.select_file(&file_item, false);
                        children.remove(i);
                    }
                }
            }
        }
    }
    /// Removes group of files at given position
    pub fn remove_group(&mut self, position: u32) {
        let Some(row) = self.files.child_row(position) else {
            return;
        };
        let group = row.item().unwrap().downcast::<FileGroupItem>().unwrap();
        // Unselect first so the selection counters stay consistent.
        for file in group.files().iter() {
            self.select_file(file, false);
        }
        self.total_count -= group.files().len();
        self.total_size -= group.total_size();
        self.root_store().remove(position);
    }
    /// Selects or unselects the file at given table row
    pub fn select_file_at(&self, position: u32, selected: bool) {
        let Some(row) = self.files.item(position) else {
            return;
        };
        let row: gtk::TreeListRow = row.downcast().unwrap();
        if let Ok(file) = row.item().unwrap().downcast::<FileItem>() {
            if file.selected() != selected {
                self.last_chosen_directory
                    .replace(Some(file.path().parent().unwrap().to_path_buf()));
                let group: FileGroupItem =
                    row.parent().unwrap().item().unwrap().downcast().unwrap();
                self.select_file(&file, selected);
                // Never allow every file of a group to be selected.
                self.unselect_at_least_one(&group, &[&file]);
                group.set_selection_from_files();
            }
        } else if let Ok(group) = row.item().unwrap().downcast::<FileGroupItem>() {
            // Toggling a group header row (un)selects the whole group.
            if group.selected() != selected {
                if selected {
                    self.select_group(&group);
                } else {
                    self.unselect_group(&group);
                }
            }
        }
    }
    /// Selects every duplicate located under `path`, keeping at least one
    /// file unselected in each group.
    pub fn select_files_in(&self, path: PathBuf) {
        self.last_chosen_directory.replace(Some(path.clone()));
        let path = Path::from(path);
        for row in self.files.iter::<gtk::TreeListRow>().unwrap() {
            if let Ok(group) = row.unwrap().item().unwrap().downcast::<FileGroupItem>() {
                let files = group.files();
                let files = self.sorted_by_priority(&files);
                let mut selected_files = vec![];
                // Walk lowest-priority first so survivors are high priority.
                for &file in files.iter().rev() {
                    if path.is_prefix_of(file.path().as_ref())
                        && selected_files.len() + 1 < files.len()
                    {
                        self.select_file(file, true);
                        selected_files.push(file);
                    }
                }
                self.unselect_at_least_one(&group, &selected_files);
                group.set_selection_from_files();
            }
        }
    }
    /// Unselects every duplicate located under `path`.
    pub fn unselect_files_in(&self, path: PathBuf) {
        self.last_chosen_directory.replace(Some(path.clone()));
        let path = Path::from(path);
        for row in self.files.iter::<gtk::TreeListRow>().unwrap() {
            if let Ok(group) = row.unwrap().item().unwrap().downcast::<FileGroupItem>() {
                let files = group.files();
                for file in files.iter() {
                    if path.is_prefix_of(file.path().as_ref()) {
                        self.select_file(file, false);
                    }
                }
                group.set_selection_from_files();
            }
        }
    }
    /// Makes sure at least one file in the group is unselected
    fn unselect_at_least_one(&self, group: &FileGroupItem, ignore_files: &[&FileItem]) {
        let files = group.files();
        let files = self.sorted_by_priority(&files);
        let selected_count = files.iter().filter(|f| f.is_selected()).count();
        if selected_count == files.len() && !files.is_empty() {
            // Prefer to spare the highest-priority file not just toggled.
            let to_unselect = files.iter().find(|&f| !ignore_files.contains(f));
            if let Some(to_unselect) = to_unselect {
                self.select_file(to_unselect, false);
            }
        }
    }
    /// Changes the auto-selection priority and re-applies it to every group
    /// that is currently selected.
    pub fn set_selection_priority(&self, priority: fclones::Priority) {
        self.selection_priority.set(priority);
        // Re-select all already selected groups so they match the new priority
        for row in self.files.iter::<gtk::TreeListRow>().unwrap() {
            if let Ok(group) = row.unwrap().item().unwrap().downcast::<FileGroupItem>() {
                if group.selected() {
                    self.select_group(&group);
                }
            }
        }
    }
    /// Selects every group (each group keeps its highest-priority file).
    pub fn select_all_files(&self) {
        for row in self.files.iter::<gtk::TreeListRow>().unwrap() {
            if let Ok(group) = row.unwrap().item().unwrap().downcast() {
                self.select_group(&group);
            }
        }
    }
    /// Clears the selection in every group.
    pub fn unselect_all_files(&self) {
        for row in self.files.iter::<gtk::TreeListRow>().unwrap() {
            if let Ok(group) = row.unwrap().item().unwrap().downcast::<FileGroupItem>() {
                self.unselect_group(&group);
            }
        }
    }
    /// Unselects every file of `group` and refreshes its header checkbox.
    fn unselect_group(&self, group: &FileGroupItem) {
        group.set_selected(false);
        for file in group.files().iter() {
            self.select_file(file, false);
        }
        group.set_selection_from_files();
    }
    /// Selects all files of `group` except the highest-priority one.
    fn select_group(&self, group: &FileGroupItem) {
        group.set_selected(true);
        let files = group.files();
        let files = self.sorted_by_priority(&files);
        let mut first = true;
        for sg in files.iter() {
            // The first (highest-priority) file stays unselected; it is kept.
            self.select_file(sg, !first);
            first = false;
        }
    }
    /// Returns the group's files ordered by the current selection priority
    /// (highest priority, i.e. the file to keep, first).
    fn sorted_by_priority<'a>(&self, files: &'a [FileItem]) -> Vec<&'a FileItem> {
        let mut files = files
            .iter()
            .map(FileItem::as_ref)
            .map(FileSubGroup::single)
            .collect_vec();
        sort_by_priority(&mut files, &self.selection_priority.get());
        files
            .into_iter()
            .map(|f| f.files.into_iter().next().unwrap().item)
            .collect_vec()
    }
    /// Sets a single file's selected flag, updating the selected count/size
    /// counters only on actual state transitions.
    fn select_file(&self, file: &FileItem, selected: bool) {
        match (file.is_selected(), selected) {
            (false, true) => {
                self.selected_count.set(self.selected_count.get() + 1);
                self.selected_size
                    .set(self.selected_size.get() + file.len());
                file.set_selected(selected);
            }
            (true, false) => {
                self.selected_count.set(self.selected_count.get() - 1);
                self.selected_size
                    .set(self.selected_size.get() - file.len());
                file.set_selected(selected);
            }
            (_, _) => {}
        }
    }
    /// Returns information on selected and not-selected files in each group.
    /// Used for performing a deduplicate action on selected files.
    pub fn partitioned_groups(&self) -> Vec<PartitionedFileGroup> {
        let mut groups = Vec::new();
        for row in self.files.iter::<gtk::TreeListRow>().unwrap() {
            if let Ok(group) = row.unwrap().item().unwrap().downcast::<FileGroupItem>() {
                groups.push(group.to_partitioned_file_group());
            }
        }
        groups
    }
    /// The flat root store of FileGroupItem objects behind the tree model.
    fn root_store(&self) -> gio::ListStore {
        self.files.model().downcast().unwrap()
    }
    /// The dedupe operation currently chosen in the UI.
    pub fn dedupe_op(&self) -> &DedupeOp {
        &self.dedupe_op
    }
    /// Stores the dedupe operation chosen in the UI.
    pub fn set_dedupe_op(&mut self, op: DedupeOp) {
        self.dedupe_op = op;
    }
}
/// Builds the tree model for the duplicates view: FileGroupItem roots whose
/// children (materialized lazily on expansion) are the group's FileItems.
fn create_files_model() -> gtk::TreeListModel {
    let files = gio::ListStore::new(FileGroupItem::static_type());
    gtk::TreeListModel::new(&files, false, true, |parent| {
        if let Some(group) = parent.downcast_ref::<FileGroupItem>() {
            let mut store = gio::ListStore::new(FileItem::static_type());
            store.extend(group.files().iter());
            Some(store.upcast())
        } else {
            // FileItem leaves have no children.
            None
        }
    })
}
/// Widget handles for the duplicates page, kept so `bind`/`update` can wire
/// signals and refresh the status bars later.
pub struct DuplicatesPageWidgets {
    pub root: gtk::Box,
    pub header: gtk::HeaderBar,
    // Needed as transient parent for dialogs and as the action map.
    window: gtk::ApplicationWindow,
    files: gtk::ColumnView,
    name_col: gtk::ColumnViewColumn,
    modified_col: gtk::ColumnViewColumn,
    size_col: gtk::ColumnViewColumn,
    select_col: gtk::ColumnViewColumn,
    file_stats: gtk::Statusbar,
    selection_stats: gtk::Statusbar,
    prev_page_btn: gtk::Button,
    deduplicate_btn: adw::SplitButton,
    selection_priority: gtk::DropDown,
}
// Display label and fclones priority for the selection-priority drop-down;
// the drop-down index maps 1:1 onto this array.
const SELECTION_PRIORITIES: [(&str, fclones::Priority); 10] = [
    ("Top", fclones::Priority::Top),
    ("Bottom", fclones::Priority::Bottom),
    ("Newest", fclones::Priority::Newest),
    ("Oldest", fclones::Priority::Oldest),
    (
        "Most recently modified",
        fclones::Priority::MostRecentlyModified,
    ),
    (
        "Least recently modified",
        fclones::Priority::LeastRecentlyModified,
    ),
    (
        "Most recently accessed",
        fclones::Priority::MostRecentlyAccessed,
    ),
    (
        "Least recently accessed",
        fclones::Priority::LeastRecentlyAccessed,
    ),
    ("Most nested", fclones::Priority::MostNested),
    ("Least nested", fclones::Priority::LeastNested),
];
// Entries of the dedupe split-button menu.
// Tuple fields: (menu label, button label, tooltip, action name).
const DEDUPE_MENU_ITEMS: [(&str, &str, &str, &str); 5] = [
    (
        "Remove",
        "Remove",
        "Remove selected duplicates",
        "win.remove",
    ),
    (
        "Move to...",
        "Move",
        "Move selected duplicates to",
        "win.move",
    ),
    (
        "Link",
        "Link",
        "Replace selected duplicates with hard links",
        "win.hard_link",
    ),
    (
        "Symbolic link",
        "Symbolic link",
        "Replace selected duplicates with symbolic links",
        "win.symbolic_link",
    ),
    (
        "Deduplicate",
        "Deduplicate",
        "Make duplicates share data using reflink",
        "win.dedupe",
    ),
];
impl DuplicatesPageWidgets {
    /// Constructs the whole widget tree of the duplicates page (header bar,
    /// duplicates table, status bars). Signal wiring happens later in `bind`.
    pub fn new(window: &gtk::ApplicationWindow) -> DuplicatesPageWidgets {
        // --------------------
        // Header bar
        // --------------------
        let prev_page_btn = gtk::Button::builder()
            .icon_name("go-previous-symbolic")
            .tooltip_text("Back to input page")
            .build();
        let deduplicate_btn_menu = gio::Menu::new();
        for (label, _, _, action) in DEDUPE_MENU_ITEMS {
            deduplicate_btn_menu.append(Some(label), Some(action));
        }
        // Split button defaults to the first menu entry ("Remove").
        let deduplicate_btn = adw::SplitButton::builder()
            .label(DEDUPE_MENU_ITEMS[0].1)
            .css_classes(vec!["destructive-action".to_string()])
            .menu_model(&deduplicate_btn_menu)
            .tooltip_text(DEDUPE_MENU_ITEMS[0].2)
            .build();
        let selection_model = gtk::StringList::new(&SELECTION_PRIORITIES.map(|p| p.0));
        let selection_priority = gtk::DropDown::builder()
            .model(&selection_model)
            .tooltip_text("Selection priority")
            .build();
        let select_btn_menu = gio::Menu::new();
        select_btn_menu.append(Some("Select all duplicates"), Some("win.select"));
        select_btn_menu.append(
            Some("Select all duplicates in directory..."),
            Some("win.select_in_directory"),
        );
        select_btn_menu.append(
            Some("Select all duplicates matching pattern..."),
            Some("win.select_by_name"),
        );
        let select_btn = gtk::MenuButton::builder()
            .icon_name("checkbox-checked-symbolic")
            .tooltip_text("Select files")
            .menu_model(&select_btn_menu)
            .build();
        let unselect_btn_menu = gio::Menu::new();
        unselect_btn_menu.append(Some("Unselect all"), Some("win.unselect"));
        unselect_btn_menu.append(
            Some("Unselect all in directory..."),
            Some("win.unselect_in_directory"),
        );
        unselect_btn_menu.append(
            Some("Unselect all matching pattern..."),
            Some("win.unselect_by_name"),
        );
        let unselect_btn = gtk::MenuButton::builder()
            .icon_name("checkbox-symbolic")
            .menu_model(&unselect_btn_menu)
            .tooltip_text("Unselect files")
            .build();
        let select_box = gtk::Box::new(gtk::Orientation::Horizontal, 0);
        select_box.add_css_class("linked");
        select_box.append(&selection_priority);
        select_box.append(&select_btn);
        select_box.append(&unselect_btn);
        let header = gtk::HeaderBar::default();
        header.pack_start(&prev_page_btn);
        header.pack_start(&deduplicate_btn);
        header.pack_end(&select_box);
        // --------------------
        // Window content
        // --------------------
        let name_column = ColumnViewColumn::builder()
            .title("Name / Hash")
            .expand(true)
            .build();
        let size_column = ColumnViewColumn::builder().title("Size").build();
        let modified_column = ColumnViewColumn::builder().title("Modified").build();
        let select_column = ColumnViewColumn::default();
        select_column.set_fixed_width(50);
        let duplicates_view = gtk::ColumnView::builder()
            .vexpand(true)
            .hexpand(true)
            .single_click_activate(true)
            .build();
        duplicates_view.add_css_class("data-table");
        duplicates_view.append_column(&name_column);
        duplicates_view.append_column(&modified_column);
        duplicates_view.append_column(&size_column);
        duplicates_view.append_column(&select_column);
        // Right-align the "Modified" and "Size" column headers.
        column_header(&duplicates_view, 1).set_halign(gtk::Align::End);
        column_header(&duplicates_view, 2).set_halign(gtk::Align::End);
        let duplicates_scroll = gtk::ScrolledWindow::default();
        duplicates_scroll.set_hscrollbar_policy(gtk::PolicyType::Never);
        duplicates_scroll.set_child(Some(&duplicates_view));
        let status_bar = gtk::Statusbar::new();
        status_bar.set_hexpand(true);
        let file_stats = gtk::Statusbar::new();
        let selection_stats = gtk::Statusbar::new();
        let status_box = gtk::Box::new(gtk::Orientation::Horizontal, 0);
        status_box.set_hexpand(true);
        status_box.append(&status_bar);
        status_box.append(&file_stats);
        status_box.append(&gtk::Separator::default());
        status_box.append(&selection_stats);
        let root = gtk::Box::builder()
            .orientation(gtk::Orientation::Vertical)
            .build();
        root.append(&duplicates_scroll);
        root.append(&gtk::Separator::default());
        root.append(&status_box);
        DuplicatesPageWidgets {
            window: window.clone(),
            root,
            files: duplicates_view,
            name_col: name_column,
            size_col: size_column,
            select_col: select_column,
            header,
            prev_page_btn,
            deduplicate_btn,
            file_stats,
            selection_stats,
            modified_col: modified_column,
            selection_priority,
        }
    }
    /// Wires the widgets to the model and to the application message loop:
    /// cell factories, row activation, window actions, and the dedupe button.
    pub fn bind(&self, model: &DuplicatesPageModel, sender: relm4::Sender<AppMsg>) {
        self.name_col.set_factory(Some(&name_factory()));
        self.modified_col
            .set_factory(Some(&modification_time_factory()));
        self.size_col.set_factory(Some(&size_factory()));
        self.select_col
            .set_factory(Some(&checkbox_factory(sender.clone())));
        self.files.set_model(Some(&model.file_selection));
        // Clicking a row expands/collapses it; clicking a file row also
        // toggles its selection checkbox.
        self.files
            .connect_activate(glib::clone!(@strong sender => move |list_view, position| {
                let row = list_view
                    .model()
                    .unwrap()
                    .item(position)
                    .unwrap()
                    .downcast::<gtk::TreeListRow>()
                    .unwrap();
                row.set_expanded(!row.is_expanded());
                if let Ok(file) = row.item().unwrap().downcast::<FileItem>() {
                    sender.send(AppMsg::ToggleFileSelection(
                        row.position(),
                        !file.selected(),
                    )).unwrap();
                };
            }));
        self.prev_page_btn
            .connect_clicked(glib::clone!(@strong sender => move |_|
                sender.send(AppMsg::ActivateInputPage).unwrap_or_default()));
        self.bind_select_action("unselect", AppMsg::UnselectAllFiles, &sender);
        self.bind_select_action("select", AppMsg::SelectAllFiles, &sender);
        self.bind_select_files_in_directory(model, &sender);
        self.bind_unselect_files_in_directory(model, &sender);
        self.selection_priority
            .connect_selected_notify(glib::clone!(@strong sender => move |dd| {
                let priority = SELECTION_PRIORITIES[dd.selected() as usize].1;
                sender.send(AppMsg::SelectionPriorityChanged(priority)).unwrap_or_default();
            }));
        // One window action per dedupe mode; indexes refer to DEDUPE_MENU_ITEMS.
        self.bind_dedupe_op_select_action(
            "remove",
            0,
            AppMsg::SetDedupeOp(DedupeOp::Remove),
            &sender,
        );
        self.bind_dedupe_op_select_move_action(1, model, &sender);
        self.bind_dedupe_op_select_action(
            "hard_link",
            2,
            AppMsg::SetDedupeOp(DedupeOp::HardLink),
            &sender,
        );
        self.bind_dedupe_op_select_action(
            "symbolic_link",
            3,
            AppMsg::SetDedupeOp(DedupeOp::SymbolicLink),
            &sender,
        );
        self.bind_dedupe_op_select_action(
            "dedupe",
            4,
            AppMsg::SetDedupeOp(DedupeOp::RefLink),
            &sender,
        );
        let selected_count = model.selected_count.clone();
        let window = &self.window;
        // Guard against running a dedupe with nothing selected.
        self.deduplicate_btn.connect_clicked(
            glib::clone!(@strong sender, @weak window => move |_| {
                if selected_count.get() == 0 {
                    MessageDialog::builder()
                        .text("No files selected")
                        .secondary_text("Please select some duplicates first")
                        .decorated(true)
                        .modal(true)
                        .message_type(MessageType::Error)
                        .transient_for(&window)
                        .buttons(ButtonsType::Ok)
                        .build()
                        .run_async(|dlg, _| { dlg.destroy() });
                } else {
                    sender.send(AppMsg::Deduplicate).unwrap_or_default();
                }
            }),
        );
    }
    /// Registers a parameterless window action that sends `msg` when activated.
    fn bind_select_action(&self, action_name: &str, msg: AppMsg, sender: &relm4::Sender<AppMsg>) {
        let select_action = SimpleAction::new(action_name, None);
        let sender = sender.clone();
        select_action.connect_activate(move |_, _| sender.send(msg.clone()).unwrap());
        self.window.add_action(&select_action);
    }
    /// Registers a window action that switches the dedupe mode: updates the
    /// split-button label/tooltip from DEDUPE_MENU_ITEMS[menu_index] and
    /// sends `msg` to the app.
    fn bind_dedupe_op_select_action(
        &self,
        action_name: &str,
        menu_index: usize,
        msg: AppMsg,
        sender: &relm4::Sender<AppMsg>,
    ) {
        let dedupe_action = SimpleAction::new(action_name, None);
        let sender = sender.clone();
        let deduplicate_btn = self.deduplicate_btn.clone();
        dedupe_action.connect_activate(glib::clone!(@weak deduplicate_btn => move |_, _| {
            deduplicate_btn.set_label(DEDUPE_MENU_ITEMS[menu_index].1);
            deduplicate_btn.set_tooltip_text(Some(DEDUPE_MENU_ITEMS[menu_index].2));
            sender.send(msg.clone()).unwrap()
        }));
        self.window.add_action(&dedupe_action);
    }
    /// Registers the "move" dedupe action: opens a directory chooser and,
    /// once a target is picked, switches the dedupe mode to Move(target).
    fn bind_dedupe_op_select_move_action(
        &self,
        menu_index: usize,
        model: &DuplicatesPageModel,
        sender: &relm4::Sender<AppMsg>,
    ) {
        let dedupe_action = SimpleAction::new("move", None);
        let sender = sender.clone();
        let deduplicate_btn = self.deduplicate_btn.clone();
        let window = self.window.clone();
        let last_selection = model.last_chosen_directory.clone();
        dedupe_action.connect_activate(glib::clone!(@weak deduplicate_btn => move |_, _| {
            let sender = sender.clone();
            // Start the chooser at the last directory the user touched.
            let start_dir = last_selection
                .deref()
                .borrow()
                .as_ref()
                .map(|f| f.to_path_buf());
            choose_dir(&window, start_dir, true, move |paths| {
                let path = paths.into_iter().next().unwrap();
                let path = Arc::new(fclones::Path::from(path));
                deduplicate_btn.set_label(
                    format!("Move to {}", path.file_name().unwrap_or_default().to_string_lossy())
                        .as_str());
                deduplicate_btn.set_tooltip_text(Some(
                    format!("{} {}", DEDUPE_MENU_ITEMS[menu_index].2, path.display()).as_str()));
                let msg = AppMsg::SetDedupeOp(DedupeOp::Move(path));
                sender.send(msg).unwrap();
            });
        }));
        self.window.add_action(&dedupe_action);
    }
    /// Registers the "select in directory" action: the chosen directory is
    /// forwarded to the app as SelectFilesInDirectory.
    fn bind_select_files_in_directory(
        &self,
        model: &DuplicatesPageModel,
        sender: &relm4::Sender<AppMsg>,
    ) {
        let select_action = SimpleAction::new("select_in_directory", None);
        let sender = sender.clone();
        let window = self.window.clone();
        let last_selection = model.last_chosen_directory.clone();
        select_action.connect_activate(move |_, _| {
            let sender = sender.clone();
            let start_dir = last_selection
                .deref()
                .borrow()
                .as_ref()
                .map(|f| f.to_path_buf());
            choose_dir(&window, start_dir, false, move |paths| {
                sender
                    .send(AppMsg::SelectFilesInDirectory(
                        paths.into_iter().next().unwrap(),
                    ))
                    .unwrap()
            });
        });
        self.window.add_action(&select_action);
    }
    /// Registers the "unselect in directory" action: the chosen directory is
    /// forwarded to the app as UnselectFilesInDirectory.
    fn bind_unselect_files_in_directory(
        &self,
        model: &DuplicatesPageModel,
        sender: &relm4::Sender<AppMsg>,
    ) {
        let select_action = SimpleAction::new("unselect_in_directory", None);
        let sender = sender.clone();
        let window = self.window.clone();
        let last_selection = model.last_chosen_directory.clone();
        select_action.connect_activate(move |_, _| {
            let sender = sender.clone();
            let start_dir = last_selection
                .deref()
                .borrow()
                .as_ref()
                .map(|f| f.to_path_buf());
            choose_dir(&window, start_dir, false, move |paths| {
                sender
                    .send(AppMsg::UnselectFilesInDirectory(
                        paths.into_iter().next().unwrap(),
                    ))
                    .unwrap()
            });
        });
        self.window.add_action(&select_action);
    }
    /// Refreshes the totals and selection status bars from the model.
    pub fn update(&self, model: &DuplicatesPageModel) {
        let file_stats = format!(
            "Total {} in {} {}",
            model.total_size,
            model.total_count,
            if model.total_count == 1 {
                "file"
            } else {
                "files"
            }
        );
        let selection_stats = format!(
            "Selected {} in {} {}",
            model.selected_size.get(),
            model.selected_count.get(),
            if model.selected_count.get() == 1 {
                "file"
            } else {
                "files"
            }
        );
        let id = self.file_stats.context_id("stats");
        self.file_stats.remove_all(id);
        self.file_stats.push(id, file_stats.as_str());
        let id = self.selection_stats.context_id("stats");
        self.selection_stats.remove_all(id);
        self.selection_stats.push(id, selection_stats.as_str());
    }
}
/// Returns the widget box of the N-th column header of a ColumnView.
// NOTE(review): walks the internal widget tree, so this is tied to the GTK
// widget layout of the linked version — verify on GTK upgrades.
fn column_header(view: &gtk::ColumnView, n: usize) -> gtk::Box {
    // A hack to get N-th ColumnView header. A pity Gtk4 doesn't support this directly.
    let mut column = view.first_child().unwrap().first_child().unwrap();
    for _ in 0..n {
        column = column.next_sibling().unwrap();
    }
    column.first_child().unwrap().downcast().unwrap()
}
/// Cell factory for the "Name / Hash" column: renders a tree expander with
/// either the file path (leaf rows) or a bold group heading (group rows).
fn name_factory() -> gtk::SignalListItemFactory {
    let factory = gtk::SignalListItemFactory::new();
    factory.connect_setup(move |_, list_item| {
        // We must wrap the TreeExpander in a Box, due to a weird scrolling bug
        // in GTK+ 4.6.6. If the TreeExpander is not wrapped in a Box, sometimes clicking a row
        // will cause the view to be scrolled up so that the row appears as the first row.
        let expander = gtk::TreeExpander::new();
        let b = gtk::Box::default();
        b.append(&expander);
        list_item.set_child(Some(&b));
    });
    factory.connect_bind(move |_, list_item| {
        let row = list_item
            .item()
            .unwrap()
            .downcast::<gtk::TreeListRow>()
            .unwrap();
        let expander = list_item
            .child()
            .unwrap()
            .first_child()
            .unwrap()
            .downcast::<gtk::TreeExpander>()
            .unwrap();
        expander.set_list_row(Some(&row));
        // Leaf row: show the (middle-ellipsized) file path.
        if let Ok(file) = row.item().unwrap().downcast::<FileItem>() {
            let label = gtk::Label::builder()
                .ellipsize(gtk::pango::EllipsizeMode::Middle)
                .label(file.path().to_escaped_string().as_str())
                .build();
            expander.set_child(Some(&label));
        };
        // Group row: show a numbered heading (ids are 0-based internally).
        if let Ok(group) = row.item().unwrap().downcast::<FileGroupItem>() {
            let group_name = format!("Group {}", group.id() + 1);
            let label = gtk::Label::new(Some(group_name.as_str()));
            label.add_css_class("heading");
            expander.set_child(Some(&label));
        };
    });
    factory
}
/// Cell factory for the "Size" column: per-file length for leaf rows,
/// total group size for group rows, right-aligned.
fn size_factory() -> gtk::SignalListItemFactory {
    let factory = gtk::SignalListItemFactory::new();
    factory.connect_setup(move |_, list_item| {
        let label = gtk::Label::new(None);
        label.set_xalign(1.0);
        label.add_css_class("light");
        label.add_css_class("numeric");
        label.set_margin_start(5);
        list_item.set_child(Some(&label));
    });
    factory.connect_bind(move |_, list_item| {
        let row = list_item
            .item()
            .unwrap()
            .downcast::<gtk::TreeListRow>()
            .unwrap();
        let label = list_item.child().unwrap().downcast::<gtk::Label>().unwrap();
        if let Ok(file) = row.item().unwrap().downcast::<FileItem>() {
            label.set_label(file.len().to_string().as_str());
        }
        if let Ok(group) = row.item().unwrap().downcast::<FileGroupItem>() {
            label.set_label(group.total_size().to_string().as_str());
        }
    });
    factory
}
/// Cell factory for the "Modified" column: shows the file's modification
/// date (local time, YYYY-MM-DD) for leaf rows; group rows stay empty.
fn modification_time_factory() -> gtk::SignalListItemFactory {
    let factory = gtk::SignalListItemFactory::new();
    factory.connect_setup(move |_, list_item| {
        let label = gtk::Label::new(None);
        label.set_xalign(1.0);
        label.add_css_class("light");
        label.add_css_class("numeric");
        label.set_margin_start(5);
        list_item.set_child(Some(&label));
    });
    factory.connect_bind(move |_, list_item| {
        let row = list_item
            .item()
            .unwrap()
            .downcast::<gtk::TreeListRow>()
            .unwrap();
        let label = list_item.child().unwrap().downcast::<gtk::Label>().unwrap();
        // Clear first: recycled cells may carry a stale date.
        label.set_label("");
        if let Ok(file) = row.item().unwrap().downcast::<FileItem>() {
            if let Some(modified_at) = file.modified_at() {
                let datetime: chrono::DateTime<chrono::Local> = modified_at.into();
                label.set_label(format!("{}", datetime.format("%Y-%m-%d")).as_str());
            }
        }
    });
    factory
}
fn checkbox_factory(sender: relm4::Sender<AppMsg>) -> gtk::SignalListItemFactory {
let factory = gtk::SignalListItemFactory::new();
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | true |
pkolaczk/fclones-gui | https://github.com/pkolaczk/fclones-gui/blob/f70e70ea43ea4c6858780307df112922ab550332/fclones-gui/src/input.rs | fclones-gui/src/input.rs | use relm4::gtk;
use relm4::RelmWidgetExt;
use std::cell::Cell;
use std::cell::RefCell;
use std::path::PathBuf;
use std::rc::Rc;
use adw::prelude::*;
use gtk::gio;
use gtk::glib;
use crate::app::AppMsg;
use crate::bytes_entry;
use crate::bytes_entry::*;
use crate::dir_chooser::choose_dir;
/// State backing the input page: the list of directories to scan and the
/// search options, all shared (`Rc`) with the widget callbacks that set them.
pub struct InputPageModel {
    // Input paths panel:
    current_dir: Rc<RefCell<Option<PathBuf>>>,
    input_paths: gio::ListStore,
    input_paths_selection: gtk::SingleSelection,
    // Search options:
    hidden_files: Rc<Cell<bool>>,
    respect_ignore: Rc<Cell<bool>>,
    follow_links: Rc<Cell<bool>>,
    match_links: Rc<Cell<bool>>,
    isolate: Rc<Cell<bool>>,
    // Minimum number of identical files to report a group.
    min_rf: Rc<Cell<usize>>,
    // File size bounds in bytes; u64::MAX means "unlimited".
    min_size: Rc<Cell<u64>>,
    max_size: Rc<Cell<u64>>,
    // symbolic_links: Rc<Cell<bool>>,
    name_pattern: Rc<RefCell<String>>,
}
impl InputPageModel {
    /// Creates the model with the given initial input paths and default
    /// search options.
    pub fn new(paths: &[PathBuf]) -> InputPageModel {
        let input_paths = gio::ListStore::new(gtk::StringObject::static_type());
        for p in paths {
            input_paths.append(&gtk::StringObject::new(&p.to_string_lossy()))
        }
        let input_paths_selection = gtk::SingleSelection::new(Some(&input_paths));
        InputPageModel {
            current_dir: Rc::new(RefCell::new(None)),
            input_paths,
            input_paths_selection,
            hidden_files: Rc::new(Cell::new(false)),
            respect_ignore: Rc::new(Cell::new(false)),
            follow_links: Rc::new(Cell::new(false)),
            match_links: Rc::new(Cell::new(false)),
            isolate: Rc::new(Cell::new(false)),
            min_rf: Rc::new(Cell::new(2)),
            min_size: Rc::new(Cell::new(0)),
            max_size: Rc::new(Cell::new(u64::MAX)),
            // symbolic_links: Rc::new(Cell::new(false)),
            name_pattern: Rc::new(RefCell::new(String::from(""))),
        }
    }
    /// Appends `path` to the input list, remembers it as the most recent
    /// directory, and selects the new entry.
    pub fn add_path(&self, path: PathBuf) {
        self.current_dir.replace(Some(path.clone()));
        self.input_paths
            .append(&gtk::StringObject::new(&path.to_string_lossy()));
        self.input_paths_selection
            .select_item(self.input_paths.n_items() - 1, true);
    }
    /// Replaces the path at `position` with `path` and keeps it selected.
    pub fn update_path(&self, position: u32, path: PathBuf) {
        self.current_dir.replace(Some(path.clone()));
        self.input_paths.remove(position);
        self.input_paths
            .insert(position, &gtk::StringObject::new(&path.to_string_lossy()));
        self.input_paths_selection.select_item(position, true);
    }
    /// Removes the path at `position` from the input list.
    pub fn remove_path(&self, position: u32) {
        self.input_paths.remove(position)
    }
    /// Sets the minimum file size, raising the maximum if needed to keep
    /// the invariant min_size <= max_size.
    pub fn set_min_size(&self, value: u64) {
        self.min_size.set(value);
        if self.max_size.get() < value {
            self.max_size.set(value)
        }
    }
    /// Sets the maximum file size, lowering the minimum if needed to keep
    /// the invariant min_size <= max_size.
    pub fn set_max_size(&self, value: u64) {
        self.max_size.set(value);
        if self.min_size.get() > value {
            self.min_size.set(value)
        }
    }
    /// Builds the fclones group configuration from the current UI state.
    pub(crate) fn group_config(&self) -> fclones::config::GroupConfig {
        fclones::config::GroupConfig {
            paths: self.paths(),
            follow_links: self.follow_links.get(),
            hidden: self.hidden_files.get(),
            isolate: self.isolate.get(),
            match_links: self.match_links.get(),
            min_size: fclones::FileLen(self.min_size.get()),
            max_size: Some(fclones::FileLen(self.max_size.get())),
            name_patterns: self.name_patterns(),
            // The switch stores "respect ignore files"; fclones expects the negation.
            no_ignore: !self.respect_ignore.get(),
            regex: false,
            ..Default::default()
        }
    }
    /// The input paths converted to fclones paths.
    fn paths(&self) -> Vec<fclones::Path> {
        self.input_paths
            .iter::<gtk::StringObject>()
            .unwrap()
            .map(|p| fclones::Path::from(p.unwrap().string().as_str()))
            .collect()
    }
    /// The name pattern as a list; empty (or the match-all "*") yields no filter.
    fn name_patterns(&self) -> Vec<String> {
        let name_pattern = self.name_pattern.borrow().clone();
        if name_pattern.is_empty() || name_pattern == "*" {
            vec![]
        } else {
            vec![name_pattern]
        }
    }
}
/// Widgets of the input page. Created by [`InputPageWidgets::new`] and wired
/// to an [`InputPageModel`] by [`InputPageWidgets::bind`].
pub struct InputPageWidgets {
    pub root: gtk::Box,
    pub header: gtk::HeaderBar,
    // Kept so dialogs opened from signal handlers can be made transient.
    window: gtk::ApplicationWindow,
    find_duplicates: gtk::Button,
    // List of directories to scan.
    input_list: gtk::ListView,
    add_input_path: gtk::Button,
    // Scan option switches/inputs:
    hidden_files: gtk::Switch,
    respect_ignore: gtk::Switch,
    follow_links: gtk::Switch,
    match_links: gtk::Switch,
    isolate: gtk::Switch,
    min_rf: gtk::SpinButton,
    min_size: bytes_entry::BytesRow,
    max_size: bytes_entry::BytesRow,
    // symbolic_links: gtk::Switch,
    name_pattern: gtk::EditableLabel,
}
impl InputPageWidgets {
    /// Builds the whole input-page widget tree: header with the
    /// "Find duplicates" button, the directories-to-scan list, and the two
    /// preference columns ("Scan options", "Duplicate match options").
    /// Nothing is connected to a model here — that happens in [`Self::bind`].
    pub fn new(window: &gtk::ApplicationWindow) -> Self {
        // ------------------------------------
        // Header
        // ------------------------------------
        let find_duplicates = gtk::Button::builder()
            .label("Find duplicates")
            .css_classes(vec!["suggested-action".to_string()])
            .build();
        let header = gtk::HeaderBar::new();
        header.pack_start(&find_duplicates);
        // ------------------------------------
        // Content
        // ------------------------------------
        let input_list = gtk::ListView::builder().hexpand(true).build();
        input_list.set_single_click_activate(true);
        input_list.add_css_class("rich-list");
        input_list.add_css_class("frame");
        let add_input_path = gtk::Button::builder()
            .child(
                &adw::ButtonContent::builder()
                    .label("Add")
                    .icon_name("list-add-symbolic")
                    .build(),
            )
            .has_frame(false)
            .build();
        let input_list_group = adw::PreferencesGroup::builder()
            .title("Directories to scan")
            .header_suffix(&add_input_path)
            .build();
        input_list_group.add(&input_list);
        // Left column: options that control the file scan itself.
        let scan_preferences_group = adw::PreferencesGroup::builder()
            .title("Scan options")
            .build();
        let scan_preferences_list = gtk::ListBox::default();
        scan_preferences_list.add_css_class("boxed-list");
        scan_preferences_group.add(&scan_preferences_list);
        let hidden_files = add_bool_preference(
            "Hidden",
            "Searches files whose names start with a dot",
            &scan_preferences_list,
        );
        let no_ignore = add_bool_preference(
            "Respect ignore lists",
            "Respects .gitignore and .fdignore",
            &scan_preferences_list,
        );
        no_ignore.set_active(true);
        let follow_links = add_bool_preference(
            "Follow links",
            "Follows symbolic links to directories",
            &scan_preferences_list,
        );
        // TODO: Uncomment once symbolic links are handled by the dedupe panel safely
        // let symbolic_links = add_bool_preference(
        //     "Symbolic links",
        //     "Includes symbolic links to files",
        //     &scan_preferences_list,
        // );
        let min_size = BytesRow::new("Min size", "", "0 B", false);
        scan_preferences_list.append(min_size.row());
        let max_size = BytesRow::new("Max size", "", "Unlimited", true);
        scan_preferences_list.append(max_size.row());
        let name_pattern = add_text_preference(
            "Name pattern",
            "Includes only files whose name matches a glob pattern",
            &scan_preferences_list,
        );
        // Right column: options that control how duplicates are matched.
        let match_preferences_group = adw::PreferencesGroup::builder()
            .title("Duplicate match options")
            .build();
        let match_preferences_list = gtk::ListBox::default();
        match_preferences_list.add_css_class("boxed-list");
        match_preferences_group.add(&match_preferences_list);
        let min_rf = add_spin_btn_preference(
            "Match count",
            "Minimum number of identical files to report",
            &match_preferences_list,
        );
        // Match count ranges 2..=100 with unit steps; default 2.
        let adjustment = gtk::Adjustment::builder()
            .lower(2.0)
            .upper(100.0)
            .page_increment(1.0)
            .step_increment(1.0)
            .build();
        min_rf.configure(Some(&adjustment), 1.0, 0);
        min_rf.set_value(2.0);
        let match_links = add_bool_preference(
            "Match links",
            "Treats hard and symbolic links as ordinary files",
            &match_preferences_list,
        );
        let isolate = add_bool_preference(
            "Isolate",
            "Counts duplicates contained within a single input directory as one file",
            &match_preferences_list,
        );
        let preferences_columns = gtk::Box::new(gtk::Orientation::Horizontal, 20);
        preferences_columns.append(&scan_preferences_group);
        preferences_columns.append(&match_preferences_group);
        let root = gtk::Box::new(gtk::Orientation::Vertical, 20);
        root.set_margin_all(20);
        root.append(&input_list_group);
        root.append(&preferences_columns);
        InputPageWidgets {
            window: window.clone(),
            root,
            input_list,
            header,
            find_duplicates,
            add_input_path,
            hidden_files,
            respect_ignore: no_ignore,
            follow_links,
            // symbolic_links,
            name_pattern,
            match_links,
            isolate,
            min_rf,
            min_size,
            max_size,
        }
    }

    /// Wires all widgets to `model` and the application message channel.
    /// UI events are forwarded as [`AppMsg`]s; option widgets are bound to
    /// the model's shared cells.
    pub fn bind(&self, model: &InputPageModel, sender: relm4::Sender<AppMsg>) {
        self.find_duplicates
            .connect_clicked(glib::clone!(@strong sender => move |_|
                sender.send(AppMsg::FindDuplicates).unwrap()
            ));
        // Each list row shows the path label plus edit/remove buttons.
        let factory = gtk::SignalListItemFactory::new();
        factory.connect_setup(glib::clone!(
            @strong sender,
            @strong self.input_list as list,
            @strong self.window as window => move |_, item|
        {
            let row = gtk::Box::builder()
                .orientation(gtk::Orientation::Horizontal)
                .build();
            let label = gtk::Label::builder()
                .halign(gtk::Align::Start)
                .hexpand(true)
                .ellipsize(gtk::pango::EllipsizeMode::Middle)
                .build();
            let remove_btn = gtk::Button::builder()
                .has_frame(false)
                .tooltip_text("Remove")
                .icon_name("list-remove-symbolic")
                .build();
            let edit_btn = gtk::Button::builder()
                .has_frame(false)
                .tooltip_text("Edit")
                .icon_name("edit-symbolic")
                .build();
            let row_buttons = gtk::Box::new(gtk::Orientation::Horizontal, 0);
            row_buttons.append(&edit_btn);
            row_buttons.append(&remove_btn);
            row.append(&label);
            row.append(&row_buttons);
            item.set_child(Some(&row));
            // Resolve the row's position lazily at click time, since rows
            // are recycled and their position can change.
            let pos_expression = item.property_expression("position");
            remove_btn.connect_clicked(glib::clone!(@strong sender, @strong pos_expression => move |_| {
                if let Some(pos) = pos_expression.evaluate(None::<&glib::Object>) {
                    let pos = pos.get::<u32>().unwrap();
                    sender.send(AppMsg::RemoveInputPath(pos)).unwrap()
                }
            }));
            edit_btn.connect_clicked(glib::clone!(
                @strong sender,
                @strong window,
                @strong list => move |_|
            {
                if let Some(pos) = pos_expression.evaluate(None::<&glib::Object>) {
                    let pos = pos.get::<u32>().unwrap();
                    edit_input_path(&window, &list, pos, &sender)
                }
            }));
            // Keep the label in sync with the row's StringObject.
            item.property_expression("item")
                .chain_property::<gtk::StringObject>("string")
                .bind(&label, "label", gtk::Widget::NONE);
        }));
        self.input_list.set_factory(Some(&factory));
        self.input_list
            .set_model(Some(&model.input_paths_selection));
        // Single-click activation opens the directory chooser for the row.
        self.input_list.connect_activate(
            glib::clone!(@strong sender, @strong self.window as window =>
                move |list, pos| edit_input_path(&window, list, pos, &sender)
            ),
        );
        let current_dir = model.current_dir.clone();
        self.add_input_path.connect_clicked(glib::clone!(
            @strong sender,
            @strong current_dir,
            @strong self.window as window => move |_|
        {
            let sender = sender.clone();
            choose_dir(
                &window,
                current_dir.borrow().clone(),
                false,
                move |paths| {
                    sender.send(AppMsg::AddInputPaths(paths)).unwrap();
                }
            );
        }));
        // Size rows report changes as messages (they need clamping in the
        // model, see set_min_size/set_max_size), other options bind directly.
        self.min_size
            .on_change(glib::clone!(@strong sender => move |value|
                sender.send(AppMsg::SetMinSize(value)).unwrap()
            ));
        self.max_size
            .on_change(glib::clone!(@strong sender => move |value|
                sender.send(AppMsg::SetMaxSize(value)).unwrap()
            ));
        bind_bool(&self.hidden_files, &model.hidden_files);
        bind_bool(&self.follow_links, &model.follow_links);
        // bind_bool(&self.symbolic_links, &model.symbolic_links);
        bind_bool(&self.isolate, &model.isolate);
        bind_bool(&self.match_links, &model.match_links);
        bind_bool(&self.respect_ignore, &model.respect_ignore);
        bind_string(&self.name_pattern, &model.name_pattern);
        bind_spin_btn(&self.min_rf, &model.min_rf);
    }

    /// Refreshes widgets from the model: the (possibly clamped) size values
    /// and the sensitivity of the find-duplicates button (needs >= 1 path).
    pub fn update(&self, model: &InputPageModel) {
        self.min_size.set_value(model.min_size.get());
        self.max_size.set_value(model.max_size.get());
        self.find_duplicates
            .set_sensitive(model.input_paths.n_items() > 0);
    }
}
fn add_bool_preference(title: &str, subtitle: &str, list_box: >k::ListBox) -> gtk::Switch {
let button = gtk::Switch::builder()
.valign(gtk::Align::Center)
.hexpand(false)
.build();
let row = adw::ActionRow::builder()
.title(title)
.subtitle(subtitle)
.build();
row.add_suffix(&button);
row.set_activatable_widget(Some(&button));
list_box.append(&row);
button
}
fn add_spin_btn_preference(
title: &str,
subtitle: &str,
list_box: >k::ListBox,
) -> gtk::SpinButton {
let button = gtk::SpinButton::builder()
.valign(gtk::Align::Center)
.hexpand(false)
.build();
let row = adw::ActionRow::builder()
.title(title)
.subtitle(subtitle)
.build();
row.add_suffix(&button);
list_box.append(&row);
button
}
fn add_text_preference(title: &str, subtitle: &str, list_box: >k::ListBox) -> gtk::EditableLabel {
let entry = gtk::EditableLabel::builder()
.xalign(1.0)
.valign(gtk::Align::Center)
.halign(gtk::Align::End)
.hexpand(false)
.text("foo")
.build();
let edit_btn = gtk::Button::builder()
.icon_name("edit-symbolic")
.vexpand(false)
.valign(gtk::Align::Center)
.has_frame(false)
.build();
let row = adw::ActionRow::builder()
.title(title)
.subtitle(subtitle)
.build();
row.add_suffix(&entry);
row.add_suffix(&edit_btn);
row.set_activatable_widget(Some(&entry));
row.connect_activated(glib::clone!(@strong entry => move |_| {
entry.grab_focus();
entry.start_editing();
}));
edit_btn.connect_clicked(glib::clone!(@strong entry => move |_| {
entry.grab_focus();
entry.start_editing();
}));
list_box.append(&row);
entry
}
fn bind_bool(switch: >k::Switch, model: &Rc<Cell<bool>>) {
let model = model.clone();
switch.set_active(model.get());
switch.connect_active_notify(move |switch| {
model.set(switch.is_active());
});
}
fn bind_string(entry: >k::EditableLabel, model: &Rc<RefCell<String>>) {
let model = model.clone();
entry.set_text(model.borrow().as_str());
entry.connect_changed(move |entry| {
*model.borrow_mut() = entry.text().to_string();
});
}
fn bind_spin_btn(btn: >k::SpinButton, model: &Rc<Cell<usize>>) {
let model = model.clone();
btn.set_value(model.get() as f64);
btn.connect_changed(move |btn| {
model.set(btn.value() as usize);
});
}
/// Opens a directory chooser pre-seeded with the path currently stored at
/// `position` in the list model and sends `AppMsg::UpdateInputPath` with the
/// chosen directory.
fn edit_input_path(
    window: &impl IsA<gtk::Window>,
    list: &gtk::ListView,
    position: u32,
    sender: &relm4::Sender<AppMsg>,
) {
    // `position` comes from an activated/clicked row, so the model and the
    // item at that position are expected to exist and be StringObjects.
    let current_item = list
        .model()
        .unwrap()
        .item(position)
        .unwrap()
        .downcast::<gtk::StringObject>()
        .unwrap();
    let current_path = PathBuf::from(current_item.string().as_str());
    let sender = sender.clone();
    // NOTE(review): assumes the `false` flag means single-selection so
    // `paths` has exactly one entry — confirm against `choose_dir`.
    choose_dir(window, Some(current_path), false, move |paths| {
        sender
            .send(AppMsg::UpdateInputPath(
                position,
                paths.into_iter().next().unwrap(),
            ))
            .unwrap()
    });
}
| rust | MIT | f70e70ea43ea4c6858780307df112922ab550332 | 2026-01-04T20:18:25.948130Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/build.rs | webapp/build.rs | use std::fs;
use std::process::Command;
/// Build script: makes sure the embedded frontend directory exists and bakes
/// the current git commit hash and branch name into the binary via
/// `cargo:rustc-env` (read back with `env!("COMMIT_HASH")` etc.).
fn main() {
    // Ensure the directory that gets embedded in the binary exists; abort
    // the build (non-zero exit) if it cannot be created, and say why.
    let directory_path = "./frontend/build/web";
    if let Err(e) = fs::create_dir_all(directory_path) {
        eprintln!("Failed to create {directory_path}: {e}");
        std::process::exit(1);
    }

    let output = Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .expect("To be able to get commit hash");
    // `git rev-parse` output ends with a newline; trim it so the env var
    // does not carry a stray line break into the binary.
    let git_hash = String::from_utf8(output.stdout)
        .expect("To be a valid string")
        .trim()
        .to_string();

    let output = Command::new("git")
        .args(["rev-parse", "--abbrev-ref", "HEAD"])
        .output()
        .expect("To be able to get branch name");
    let branch_name = String::from_utf8(output.stdout)
        .expect("To be a valid string")
        .trim()
        .to_string();

    println!("cargo:rustc-env=COMMIT_HASH={}", git_hash);
    println!("cargo:rustc-env=BRANCH_NAME={}", branch_name);
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/session.rs | webapp/src/session.rs | use axum::async_trait;
use axum_login::tower_sessions::session::Id;
use axum_login::tower_sessions::session::Record;
use axum_login::tower_sessions::session_store;
use axum_login::tower_sessions::ExpiredDeletion;
use axum_login::tower_sessions::SessionStore;
use parking_lot::RwLock;
use std::collections::HashMap;
use std::sync::Arc;
use time::OffsetDateTime;
/// A session store that keeps all session records in process memory.
///
/// Clones are cheap and share the same underlying map via `Arc`; all data is
/// lost on restart.
#[derive(Debug, Clone)]
pub struct InMemorySessionStore {
    // Session records keyed by session id, guarded by a parking_lot RwLock.
    sessions: Arc<RwLock<HashMap<Id, Record>>>,
}
#[async_trait]
impl SessionStore for InMemorySessionStore {
    /// Stores (or overwrites) the record under its session id.
    async fn save(&self, session_record: &Record) -> session_store::Result<()> {
        let mut sessions = self.sessions.write();
        sessions.insert(session_record.id, session_record.clone());
        Ok(())
    }

    /// Looks up a session by id, returning a clone when present.
    async fn load(&self, session_id: &Id) -> session_store::Result<Option<Record>> {
        let record = self.sessions.read().get(session_id).cloned();
        Ok(record)
    }

    /// Removes the session; deleting an unknown id is a no-op.
    async fn delete(&self, session_id: &Id) -> session_store::Result<()> {
        let mut sessions = self.sessions.write();
        sessions.remove(session_id);
        Ok(())
    }
}
#[async_trait]
impl ExpiredDeletion for InMemorySessionStore {
    /// Deletes every session whose expiry date is not in the future.
    ///
    /// Improved: the original scanned under a read lock and then re-acquired
    /// the write lock once per expired session; a single `retain` pass under
    /// one write lock does the same with less locking.
    async fn delete_expired(&self) -> session_store::Result<()> {
        let now = OffsetDateTime::now_utc();
        self.sessions
            .write()
            // Keep only sessions that have not yet expired (the original
            // treated `now >= expiry_date` as expired).
            .retain(|_, record| now < record.expiry_date);
        Ok(())
    }
}
impl InMemorySessionStore {
    /// Creates an empty, thread-safe in-memory session store.
    pub(crate) fn new() -> Self {
        let sessions = Arc::new(RwLock::new(HashMap::new()));
        Self { sessions }
    }

    /// Purges expired sessions once per `period`, forever (only returns on
    /// error).
    pub(crate) async fn continuously_delete_expired(
        self,
        period: tokio::time::Duration,
    ) -> session_store::Result<()> {
        let mut interval = tokio::time::interval(period);
        loop {
            // Purge first, then wait; the first `tick` of a tokio interval
            // completes immediately, preserving the original cadence.
            self.delete_expired().await?;
            interval.tick().await;
        }
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/logger.rs | webapp/src/logger.rs | use anyhow::Context;
use anyhow::Result;
use time::macros::format_description;
use tracing::metadata::LevelFilter;
use tracing_subscriber::filter::Directive;
use tracing_subscriber::fmt::time::UtcTime;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::Layer;
// Name of the env variable whose directives are appended to the filter.
const RUST_LOG_ENV: &str = "RUST_LOG";

// Configure and initialise tracing subsystem
/// Installs the global tracing subscriber.
///
/// * `level` — base log level; `LevelFilter::OFF` disables logging entirely.
/// * `json_format` — emit JSON lines (RFC 3339 timestamps) instead of the
///   human-readable format.
/// * `tokio_console` — additionally enable `tokio`/`runtime` trace directives
///   and spawn a `console_subscriber` listener on 0.0.0.0:6669.
///
/// Extra directives from `RUST_LOG` are appended on top of the built-in
/// per-crate overrides. Fails if a directive cannot be parsed or if a global
/// subscriber is already set.
pub fn init_tracing(level: LevelFilter, json_format: bool, tokio_console: bool) -> Result<()> {
    if level == LevelFilter::OFF {
        return Ok(());
    }

    // ANSI colours only when stderr is a terminal.
    let is_terminal = atty::is(atty::Stream::Stderr);

    // Base filter: the requested level plus per-crate noise reduction.
    let filter = EnvFilter::new("")
        .add_directive(Directive::from(level))
        .add_directive("hyper=warn".parse()?)
        .add_directive("rustls=warn".parse()?)
        .add_directive("sled=warn".parse()?)
        .add_directive("h2=warn".parse()?)
        .add_directive("tower_sessions_core=warn".parse()?)
        .add_directive("axum_login=warn".parse()?)
        .add_directive("bdk=warn".parse()?) // bdk is quite spamy on debug
        .add_directive("lightning_transaction_sync=warn".parse()?)
        .add_directive("lightning::ln::peer_handler=debug".parse()?)
        .add_directive("lightning=trace".parse()?)
        .add_directive("ureq=info".parse()?);

    let mut filter = if tokio_console {
        filter
            .add_directive("tokio=trace".parse()?)
            .add_directive("runtime=trace".parse()?)
    } else {
        filter
    };

    let console_layer = if tokio_console {
        Some(
            console_subscriber::ConsoleLayer::builder()
                .server_addr(([0, 0, 0, 0], 6669))
                .spawn(),
        )
    } else {
        None
    };

    // Parse additional log directives from env variable
    let filter = match std::env::var_os(RUST_LOG_ENV).map(|s| s.into_string()) {
        Some(Ok(env)) => {
            for directive in env.split(',') {
                #[allow(clippy::print_stdout)]
                match directive.parse() {
                    Ok(d) => filter = filter.add_directive(d),
                    // Bad directives are skipped with a warning, not fatal.
                    Err(e) => println!("WARN ignoring log directive: `{directive}`: {e}"),
                };
            }
            filter
        }
        _ => filter,
    };

    let fmt_layer = tracing_subscriber::fmt::layer()
        .with_writer(std::io::stderr)
        .with_ansi(is_terminal);

    let fmt_layer = if json_format {
        fmt_layer.json().with_timer(UtcTime::rfc_3339()).boxed()
    } else {
        fmt_layer
            .with_timer(UtcTime::new(format_description!(
                "[year]-[month]-[day] [hour]:[minute]:[second]"
            )))
            .boxed()
    };

    tracing_subscriber::registry()
        .with(filter)
        .with(console_layer)
        .with(fmt_layer)
        .try_init()
        .context("Failed to init tracing")?;

    tracing::info!("Initialized logger");

    Ok(())
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/cli.rs | webapp/src/cli.rs | use anyhow::ensure;
use anyhow::Result;
use clap::Parser;
use sha2::digest::FixedOutput;
use sha2::Digest;
use sha2::Sha256;
use std::env::current_dir;
use std::path::PathBuf;
use std::str::FromStr;
// NOTE: plain `//` comments are used on clap fields on purpose — `///` doc
// comments would become `--help` text and change the CLI output.
#[derive(Parser)]
pub struct Opts {
    // `<node pubkey>@<host>:<p2p port>` of the coordinator; accessor methods
    // below split it into its parts.
    #[clap(
        long,
        default_value = "02dd6abec97f9a748bf76ad502b004ce05d1b2d1f43a9e76bd7d85e767ffb022c9@127.0.0.1:9045"
    )]
    pub coordinator_endpoint: String,
    #[clap(long, default_value = "8000")]
    pub coordinator_http_port: u16,
    /// Where to permanently store data, defaults to the current working directory.
    #[clap(long)]
    data_dir: Option<PathBuf>,
    #[clap(value_enum, default_value = "regtest")]
    pub network: Network,
    /// The address to connect to the Electrs API.
    #[clap(long, default_value = "http://localhost:3000", aliases = ["esplora"])]
    pub electrs: String,
    /// The endpoint of the p2p-derivatives oracle
    #[clap(
        long,
        default_value = "16f88cf7d21e6c0f46bcbc983a4e3b19726c6c98858cc31c83551a88fde171c0@http://127.0.0.1:8081"
    )]
    oracle: String,
    /// Where to find the cert and key pem files
    #[clap(long)]
    cert_dir: Option<PathBuf>,
    // Login password; exposed only as a SHA-256 hash via `Opts::password()`.
    #[clap(long, default_value = "satoshi")]
    password: String,
    #[clap(long)]
    pub secure: bool,
    #[clap(long)]
    pub whitelist_withdrawal_addresses: bool,
    /// The whitelisted bitcoin addresses the wallet should be allowed to send to. Only honoured if
    /// the [`whitelist_withdrawal_addresses`] flag is set to true.
    #[arg(num_args(0..))]
    #[clap(long)]
    pub withdrawal_address: Vec<String>,
    /// The location where our memes are hosted
    #[clap(long, default_value = "https://localhost:8080/memes/")]
    pub meme_endpoint: String,
}
// CLI-facing bitcoin network choice; mapped onto `bitcoin::Network` below.
// (`//` comments on purpose: `///` would alter clap's value help output.)
#[derive(Debug, Clone, Copy, clap::ValueEnum)]
pub enum Network {
    Regtest,
    Signet,
    Testnet,
    Mainnet,
}

impl From<Network> for bitcoin::Network {
    fn from(network: Network) -> Self {
        match network {
            Network::Regtest => bitcoin::Network::Regtest,
            Network::Signet => bitcoin::Network::Signet,
            Network::Testnet => bitcoin::Network::Testnet,
            // `Mainnet` is spelled `Bitcoin` in the bitcoin crate.
            Network::Mainnet => bitcoin::Network::Bitcoin,
        }
    }
}
impl Opts {
// use this method to parse the options from the cli.
pub fn read() -> Opts {
Opts::parse()
}
pub fn network(&self) -> bitcoin::Network {
self.network.into()
}
pub fn password(&self) -> String {
let mut hasher = Sha256::new();
hasher.update(self.password.as_bytes());
hex::encode(hasher.finalize_fixed())
}
pub fn data_dir(&self) -> Result<PathBuf> {
let data_dir = match self.data_dir.clone() {
None => current_dir()?.join("data"),
Some(path) => path,
}
.join("webapp");
Ok(data_dir)
}
pub fn cert_dir(&self) -> Result<PathBuf> {
let cert_dir = match self.cert_dir.clone() {
None => current_dir()?.join("webapp/certs"),
Some(path) => path,
};
Ok(cert_dir)
}
pub fn coordinator_pubkey(&self) -> Result<String> {
let coordinator: Vec<&str> = self.coordinator_endpoint.split('@').collect();
ensure!(coordinator.len() == 2, "invalid coordinator endpoint");
Ok(coordinator
.first()
.expect("valid coordinator endpoint")
.to_string())
}
pub fn coordinator_endpoint(&self) -> Result<String> {
let coordinator: Vec<&str> = self.coordinator_endpoint.split('@').collect();
ensure!(coordinator.len() == 2, "invalid coordinator endpoint");
let coordinator = coordinator
.get(1)
.expect("valid coordinator endpoint")
.to_string();
let coordinator: Vec<&str> = coordinator.split(':').collect();
ensure!(coordinator.len() == 2, "invalid coordinator endpoint");
Ok(coordinator
.first()
.expect("valid coordinator endpoint")
.to_string())
}
pub fn coordinator_p2p_port(&self) -> Result<u16> {
let coordinator: Vec<&str> = self.coordinator_endpoint.split('@').collect();
ensure!(coordinator.len() == 2, "invalid coordinator endpoint");
let coordinator = coordinator
.get(1)
.expect("valid coordinator endpoint")
.to_string();
let coordinator: Vec<&str> = coordinator.split(':').collect();
ensure!(coordinator.len() == 2, "invalid coordinator endpoint");
Ok(
u16::from_str(coordinator.get(1).expect("valid coordinator endpoint"))
.expect("valid coordinator endpoint"),
)
}
pub fn oracle_pubkey(&self) -> Result<String> {
let oracle: Vec<&str> = self.oracle.split('@').collect();
ensure!(oracle.len() == 2, "invalid oracle endpoint");
Ok(oracle.first().expect("valid oracle endpoint").to_string())
}
pub fn oracle_endpoint(&self) -> Result<String> {
let oracle: Vec<&str> = self.oracle.split('@').collect();
ensure!(oracle.len() == 2, "invalid oracle endpoint");
Ok(oracle.get(1).expect("valid oracle endpoint").to_string())
}
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/api.rs | webapp/src/api.rs | use crate::AppState;
use anyhow::anyhow;
use anyhow::Context;
use anyhow::Result;
use axum::extract::Path;
use axum::extract::Query;
use axum::extract::State;
use axum::http::StatusCode;
use axum::response::IntoResponse;
use axum::response::Response;
use axum::routing::get;
use axum::routing::post;
use axum::Json;
use axum::Router;
use bitcoin::Amount;
use native::api::FeeConfig;
use native::api::WalletHistoryItemType;
use native::calculations::calculate_pnl;
use native::channel_trade_constraints;
use native::dlc;
use native::state::try_get_tentenone_config;
use native::trade::order::FailureReason;
use native::trade::order::InvalidSubchannelOffer;
use rust_decimal::prelude::ToPrimitive;
use rust_decimal::Decimal;
use serde::de;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use std::fmt;
use std::str::FromStr;
use std::sync::Arc;
use time::OffsetDateTime;
use utoipa::ToSchema;
use uuid::Uuid;
use xxi_node::commons;
use xxi_node::commons::order_matching_fee;
use xxi_node::commons::ChannelOpeningParams;
/// Builds the REST API router; each route is served by the handler of the
/// same name in this module and `app_state` is shared with all handlers
/// through an `Arc`.
pub fn router(app_state: AppState) -> Router {
    Router::new()
        .route("/api/balance", get(get_balance))
        .route("/api/newaddress", get(get_unused_address))
        .route("/api/sendpayment", post(send_payment))
        .route("/api/history", get(get_onchain_payment_history))
        .route("/api/orders", get(get_orders).post(post_new_order))
        .route("/api/positions", get(get_positions))
        .route("/api/quotes/:contract_symbol", get(get_best_quote))
        .route("/api/node", get(get_node_id))
        .route("/api/sync", post(post_sync))
        .route("/api/seed", get(get_seed_phrase))
        .route("/api/channels", get(get_channels).delete(close_channel))
        .route("/api/tradeconstraints", get(get_trade_constraints))
        .with_state(Arc::new(app_state))
}
/// Newtype around `anyhow::Error` so handlers can return
/// `Result<_, AppError>` and use `?` on any error type.
#[derive(ToSchema)]
pub struct AppError(anyhow::Error);

impl IntoResponse for AppError {
    /// Renders the error as a plain-text 500 response.
    fn into_response(self) -> Response {
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Something went wrong: {}", self.0),
        )
            .into_response()
    }
}

/// Blanket conversion: anything convertible into `anyhow::Error` becomes an
/// `AppError`, which is what makes `?` work in the handlers.
impl<E> From<E> for AppError
where
    E: Into<anyhow::Error>,
{
    fn from(err: E) -> Self {
        Self(err.into())
    }
}
// Build metadata baked in at compile time (COMMIT_HASH and BRANCH_NAME are
// set by the crate's build script via `cargo:rustc-env`).
#[derive(Serialize, ToSchema)]
pub struct Version {
    // Crate version from Cargo.toml (CARGO_PKG_VERSION).
    version: String,
    commit_hash: String,
    branch: String,
}
#[utoipa::path(
    get,
    path = "/api/version",
    responses(
        (status = 200, description = "Returns the current build version", body = Version)
    )
)]
/// Reports the compile-time build metadata.
pub async fn version() -> Json<Version> {
    let version = env!("CARGO_PKG_VERSION").to_string();
    let commit_hash = env!("COMMIT_HASH").to_string();
    let branch = env!("BRANCH_NAME").to_string();
    Json(Version {
        version,
        commit_hash,
        branch,
    })
}
#[utoipa::path(
    get,
    path = "/api/newaddress",
    responses(
        (status = 200, description = "Returns an unused on-chain address", body = String)
    )
)]
/// Hands out a fresh (unused) on-chain address from the wallet.
pub async fn get_unused_address() -> Result<impl IntoResponse, AppError> {
    Ok(dlc::get_unused_address()?)
}
// Wallet balances as reported by the wallet-info subscriber.
// Units are presumably sats — TODO confirm against the wallet-info source.
#[derive(Serialize, ToSchema)]
pub struct Balance {
    on_chain: u64,
    // `None` when the off-chain balance is unknown.
    off_chain: Option<u64>,
}
#[utoipa::path(
    get,
    path = "/api/balance",
    responses(
        (status = 200, description = "Returns on-chain and off-chain balance", body = Balance)
    )
)]
/// Returns the latest cached wallet balances, or `null` when no wallet info
/// has been received yet.
pub async fn get_balance(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Option<Balance>>, AppError> {
    let balance = state.subscribers.wallet_info().map(|info| Balance {
        on_chain: info.balances.on_chain,
        off_chain: info.balances.off_chain,
    });

    Ok(Json(balance))
}
// One on-chain transaction in the payment history.
#[derive(Serialize, ToSchema)]
pub struct OnChainPayment {
    // Direction of the payment, stringified from the history item's flow.
    flow: String,
    // Amount in sats (copied from `amount_sats`).
    amount: u64,
    timestamp: u64,
    txid: String,
    confirmations: u64,
    // Fee in sats, when known (copied from `fee_sats`).
    fee: Option<u64>,
}
#[utoipa::path(
    get,
    path = "/api/history",
    responses(
        (status = 200, description = "Retrieves on-chain payment history", body = [OnChainPayment])
    )
)]
/// Lists the on-chain entries of the wallet history; off-chain items are
/// filtered out. Empty when no wallet info has been received yet.
pub async fn get_onchain_payment_history(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<OnChainPayment>>, AppError> {
    let history: Vec<OnChainPayment> = state
        .subscribers
        .wallet_info()
        .map(|info| {
            info.history
                .into_iter()
                .filter_map(|item| match item.wallet_type {
                    WalletHistoryItemType::OnChain {
                        txid,
                        fee_sats,
                        confirmations,
                    } => Some(OnChainPayment {
                        flow: item.flow.to_string(),
                        amount: item.amount_sats,
                        timestamp: item.timestamp,
                        txid,
                        confirmations,
                        fee: fee_sats,
                    }),
                    _ => None,
                })
                .collect()
        })
        .unwrap_or_default();

    Ok(Json(history))
}
// Request body for an on-chain withdrawal.
#[derive(Deserialize, ToSchema)]
pub struct Payment {
    // Destination bitcoin address.
    address: String,
    // Amount to send — presumably sats; TODO confirm with `dlc::send_payment`.
    amount: u64,
    // Fee rate in sats/vbyte (fed into `FeeConfig::FeeRate`).
    fee_rate: f32,
}
#[utoipa::path(
    post,
    path = "/api/sendpayment",
    request_body = Payment,
    responses(
        (status = 200, description = "On-chain payment sent successfully", body = ())
    )
)]
/// Sends an on-chain payment and refreshes the wallet info afterwards.
///
/// When address whitelisting is enabled, the destination must either be on
/// the configured whitelist or belong to our own wallet.
pub async fn send_payment(
    State(state): State<Arc<AppState>>,
    Json(params): Json<Payment>,
) -> Result<(), AppError> {
    // Improved: check the whitelist flag first. Previously the flag was the
    // last conjunct, so `dlc::is_address_mine` was queried — and could fail
    // the request — even when whitelisting was disabled.
    if state.whitelist_withdrawal_addresses
        && !state.withdrawal_addresses.contains(&params.address)
        && !dlc::is_address_mine(&params.address)?
    {
        return Err(anyhow!("Withdrawal address is not whitelisted!").into());
    }

    dlc::send_payment(
        params.amount,
        params.address,
        FeeConfig::FeeRate {
            sats_per_vbyte: params.fee_rate,
        },
    )
    .await?;

    // Make the new transaction visible in the cached wallet info.
    dlc::refresh_wallet_info().await?;

    Ok(())
}
#[utoipa::path(
    get,
    path = "/api/node",
    responses(
        (status = 200, description = "Get node id", body = String)
    )
)]
/// Returns this node's public key as a string.
pub async fn get_node_id() -> impl IntoResponse {
    let pubkey = dlc::get_node_pubkey();
    pubkey.to_string()
}
// Wallet seed phrase, one word per element.
#[derive(Serialize, ToSchema)]
pub struct Seed {
    seed: Vec<String>,
}

#[utoipa::path(
    get,
    path = "/api/seed",
    responses(
        (status = 200, description = "Return seed phrase", body = Seed)
    )
)]
/// Returns the wallet's seed phrase.
pub async fn get_seed_phrase() -> Json<Seed> {
    let seed = dlc::get_seed_phrase();
    Json(Seed { seed })
}
// Response body carrying the id of a newly created order.
#[derive(Serialize, ToSchema)]
pub struct OrderId {
    id: Uuid,
}

// Trade direction, mirrored from `commons::Direction` for the REST schema.
#[derive(Serialize, Deserialize, ToSchema, Clone, Copy, Debug)]
pub enum Direction {
    Long,
    Short,
}

impl From<commons::Direction> for Direction {
    fn from(value: commons::Direction) -> Self {
        match value {
            commons::Direction::Long => Direction::Long,
            commons::Direction::Short => Direction::Short,
        }
    }
}

impl From<Direction> for commons::Direction {
    fn from(value: Direction) -> Self {
        match value {
            Direction::Long => commons::Direction::Long,
            Direction::Short => commons::Direction::Short,
        }
    }
}
// Request body for creating a new (market) order.
#[derive(Deserialize, Clone, ToSchema)]
pub struct NewOrderParams {
    // Leverage and quantity arrive as JSON floats and are parsed as decimals.
    #[serde(with = "rust_decimal::serde::float")]
    pub leverage: Decimal,
    #[serde(with = "rust_decimal::serde::float")]
    pub quantity: Decimal,
    pub direction: Direction,
    /// Coordinator reserve in sats
    pub coordinator_reserve: Option<u64>,
    /// Trader reserve in sats
    pub trader_reserve: Option<u64>,
}

// Contract symbol for the REST schema, mirrored from `commons::ContractSymbol`.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash, ToSchema)]
pub enum ContractSymbol {
    BtcUsd,
}

impl From<ContractSymbol> for commons::ContractSymbol {
    fn from(value: ContractSymbol) -> Self {
        match value {
            ContractSymbol::BtcUsd => commons::ContractSymbol::BtcUsd,
        }
    }
}

impl From<commons::ContractSymbol> for ContractSymbol {
    fn from(value: commons::ContractSymbol) -> Self {
        match value {
            commons::ContractSymbol::BtcUsd => ContractSymbol::BtcUsd,
        }
    }
}
impl TryFrom<NewOrderParams> for native::trade::order::Order {
    type Error = anyhow::Error;

    /// Builds a fresh BTC/USD market order (1-minute expiry, manual reason)
    /// from the API request parameters.
    ///
    /// # Errors
    ///
    /// Fails when `leverage` or `quantity` cannot be represented as `f32`.
    fn try_from(value: NewOrderParams) -> Result<Self> {
        Ok(native::trade::order::Order {
            id: Uuid::new_v4(),
            leverage: value
                .leverage
                .to_f32()
                .context("To be able to parse leverage into f32")?,
            quantity: value
                .quantity
                .to_f32()
                // Fixed copy-paste: this context previously said "leverage".
                .context("To be able to parse quantity into f32")?,
            contract_symbol: ContractSymbol::BtcUsd.into(),
            direction: value.direction.into(),
            // We only support market orders for now
            order_type: OrderType::Market.into(),
            state: native::trade::order::OrderState::Initial,
            creation_timestamp: OffsetDateTime::now_utc(),
            // We do not support setting order expiry from the frontend for now
            order_expiry_timestamp: OffsetDateTime::now_utc() + time::Duration::minutes(1),
            reason: native::trade::order::OrderReason::Manual,
            stable: false,
            failure_reason: None,
        })
    }
}
#[utoipa::path(
    post,
    path = "/api/orders",
    request_body = NewOrderParams,
    responses(
        (status = 200, description = "Returns order id of successfully created order", body = OrderId)
    )
)]
/// Creates a new market order and returns its id.
///
/// When no signed DLC channel is confirmed yet, channel-opening parameters
/// (the optional coordinator/trader reserves, defaulting to 0 sats) are
/// attached so the channel can be opened together with the trade.
pub async fn post_new_order(params: Json<NewOrderParams>) -> Result<Json<OrderId>, AppError> {
    let order: native::trade::order::Order = params
        .clone()
        .0
        .try_into()
        .context("Could not parse order request")?;

    let is_dlc_channel_confirmed = dlc::check_if_signed_channel_is_confirmed().await?;

    // Channel params are only needed when the channel is not confirmed yet.
    let channel_opening_params = if is_dlc_channel_confirmed {
        None
    } else {
        Some(ChannelOpeningParams {
            coordinator_reserve: Amount::from_sat(params.coordinator_reserve.unwrap_or_default()),
            trader_reserve: Amount::from_sat(params.trader_reserve.unwrap_or_default()),
            pre_image: None,
        })
    };

    let order_id =
        native::trade::order::handler::submit_order(order, channel_opening_params).await?;

    Ok(Json(OrderId { id: order_id }))
}
// REST representation of an open position, built from the internal position
// plus an optional market quote (see the `From` impl below).
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct Position {
    pub leverage: f32,
    pub quantity: f32,
    pub contract_symbol: ContractSymbol,
    pub direction: Direction,
    pub average_entry_price: f32,
    pub liquidation_price: f32,
    pub position_state: PositionState,
    pub collateral: u64,
    #[serde(with = "time::serde::rfc3339")]
    pub expiry: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub updated: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub created: OffsetDateTime,
    pub stable: bool,
    // Unrealized PnL in sats; `None` when no full quote was available.
    pub pnl_sats: Option<i64>,
    /// Closing fee in sats
    pub closing_fee: Option<u64>,
}
impl From<(native::trade::position::Position, Option<Price>)> for Position {
    /// Converts an internal position plus an optional quote into the REST
    /// representation, computing unrealized PnL and the estimated closing
    /// fee when both sides of the quote are known.
    fn from((position, price): (native::trade::position::Position, Option<Price>)) -> Self {
        // `res` is `Some((pnl, fee))` only when a quote with both ask and
        // bid was supplied; each element may still be `None` individually.
        let res = price.map(|price| match (price.ask, price.bid) {
            (Some(ask), Some(bid)) => {
                // The fee is based on the side a close would trade at:
                // longs use the bid, shorts use the ask.
                let price = match position.direction {
                    commons::Direction::Long => price.bid,
                    commons::Direction::Short => price.ask,
                };
                // FIXME: A from implementation should not contain this kind of logic.
                let fee_rate = dlc::get_order_matching_fee_rate(true);
                (
                    calculate_pnl(
                        position.average_entry_price,
                        commons::Price { bid, ask },
                        position.quantity,
                        position.leverage,
                        position.direction,
                    )
                    .ok(),
                    // `.map(Some).and_then(identity)` — effectively a flatten
                    // over the optional close-side price.
                    price
                        .map(|price| Some(order_matching_fee(position.quantity, price, fee_rate)))
                        .and_then(|price| price),
                )
            }
            _ => (None, None),
        });
        let (pnl_sats, closing_fee) = match res {
            None => (None, None),
            Some((pnl_sats, closing_fee)) => (pnl_sats, closing_fee),
        };
        Position {
            leverage: position.leverage,
            quantity: position.quantity,
            contract_symbol: position.contract_symbol.into(),
            direction: position.direction.into(),
            average_entry_price: position.average_entry_price,
            liquidation_price: position.liquidation_price,
            position_state: position.position_state.into(),
            collateral: position.collateral,
            expiry: position.expiry,
            updated: position.updated,
            created: position.created,
            stable: position.stable,
            pnl_sats,
            closing_fee: closing_fee.map(|amount| amount.to_sat()),
        }
    }
}
#[utoipa::path(
    get,
    path = "/api/positions",
    responses(
        (status = 200, description = "Returns open positions (if any)", body = [Position])
    )
)]
/// Return all open positions, each annotated with PnL/fees derived from the
/// current best quote (when available).
pub async fn get_positions(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<Position>>, AppError> {
    let subscribers = &state.subscribers;
    let ask_price = subscribers.ask_price();
    // Bug fix: the bid must come from `bid_price()`. Previously both sides
    // were read from `ask_price()`, which skewed the PnL computation.
    let bid_price = subscribers.bid_price();
    let positions = native::trade::position::handler::get_positions()?
        .into_iter()
        .map(|position| {
            // Only attach a quote when both sides of the book are known.
            let quotes = ask_price.zip(bid_price).map(|(ask, bid)| Price {
                bid: Some(bid),
                ask: Some(ask),
            });
            // TODO: we should clean this annoying into up sometimes
            (position, quotes).into()
        })
        .collect::<Vec<Position>>();
    Ok(Json(positions))
}
/// Kind of order: immediate execution at market, or resting at a limit price.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, ToSchema)]
pub enum OrderType {
    Market,
    Limit { price: f32 },
}
impl From<native::trade::order::OrderType> for OrderType {
fn from(value: native::trade::order::OrderType) -> Self {
match value {
native::trade::order::OrderType::Market => OrderType::Market,
native::trade::order::OrderType::Limit { price } => OrderType::Limit { price },
}
}
}
/// Convert the API order type back into the native representation.
impl From<OrderType> for native::trade::order::OrderType {
    fn from(value: OrderType) -> Self {
        use native::trade::order::OrderType as Native;
        match value {
            OrderType::Market => Native::Market,
            OrderType::Limit { price } => Native::Limit { price },
        }
    }
}
/// Serializable view of an order for the frontend.
#[derive(Serialize, Debug, ToSchema)]
pub struct Order {
    pub id: Uuid,
    pub leverage: f32,
    pub quantity: f32,
    /// An order only has a price if it either was filled or if it was a limit order (which is not
    /// implemented yet).
    pub price: Option<f32>,
    pub contract_symbol: ContractSymbol,
    pub direction: Direction,
    pub order_type: OrderType,
    pub state: OrderState,
    #[serde(with = "time::serde::rfc3339")]
    pub creation_timestamp: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub order_expiry_timestamp: OffsetDateTime,
    /// Stable string identifier for why a failed order failed, if any.
    pub failure_reason: Option<String>,
}
/// Lifecycle of an order as exposed to the frontend.
#[derive(Serialize, Debug, Clone, ToSchema)]
pub enum OrderState {
    /// Not submitted to orderbook yet
    Initial,
    /// Rejected by the orderbook upon submission
    Rejected,
    /// Successfully submit to orderbook
    Open,
    /// The orderbook has matched the order and it is being filled
    Filling,
    /// The order failed to be filled
    Failed,
    /// Successfully set up trade
    Filled,
}
/// Map the native order state onto the API enum, dropping any per-state
/// payload (e.g. execution price) in the process.
impl From<native::trade::order::OrderState> for OrderState {
    fn from(value: native::trade::order::OrderState) -> Self {
        use native::trade::order::OrderState as Native;
        match value {
            Native::Initial => Self::Initial,
            Native::Rejected => Self::Rejected,
            Native::Open => Self::Open,
            Native::Filling { .. } => Self::Filling,
            Native::Failed { .. } => Self::Failed,
            Native::Filled { .. } => Self::Filled,
        }
    }
}
/// Build the API [`Order`] from a native order, flattening the failure reason
/// into a stable string identifier and picking the most meaningful price.
impl From<&native::trade::order::Order> for Order {
    fn from(value: &native::trade::order::Order) -> Self {
        // Map the failure-reason enum onto the string identifiers the
        // frontend understands.
        let failure_reason = value.failure_reason.as_ref().map(|reason| {
            match reason {
                FailureReason::FailedToSetToFilling => "FailedToSetToFilling",
                FailureReason::TradeRequest => "TradeRequestFailed",
                FailureReason::TradeResponse(error) => error.as_str(),
                FailureReason::CollabRevert => "CollabRevert",
                FailureReason::OrderNotAcceptable => "OrderNotAcceptable",
                FailureReason::TimedOut => "TimedOut",
                FailureReason::InvalidDlcOffer(error) => match error {
                    InvalidSubchannelOffer::Outdated => "OfferOutdated",
                    InvalidSubchannelOffer::UndeterminedMaturityDate => {
                        "OfferUndeterminedMaturityDate"
                    }
                    InvalidSubchannelOffer::Unacceptable => "OfferUnacceptable",
                },
                FailureReason::OrderRejected(_) => "OrderRejected",
                FailureReason::Unknown => "Unknown",
            }
            .to_string()
        });

        // Prefer the execution price of a filled order and fall back to the
        // limit price for (not yet filled) limit orders. Note: we might
        // overwrite a limit price here but this is not an issue because if a
        // limit order has been filled the limit price will be filled price
        // and vice versa.
        let price = match value.state {
            native::trade::order::OrderState::Filled {
                execution_price, ..
            } => Some(execution_price),
            _ => match value.order_type {
                native::trade::order::OrderType::Limit { price } => Some(price),
                native::trade::order::OrderType::Market => None,
            },
        };

        Order {
            id: value.id,
            leverage: value.leverage,
            quantity: value.quantity,
            price,
            contract_symbol: value.contract_symbol.into(),
            direction: value.direction.into(),
            order_type: value.order_type.into(),
            state: value.state.clone().into(),
            creation_timestamp: value.creation_timestamp,
            order_expiry_timestamp: value.order_expiry_timestamp,
            failure_reason,
        }
    }
}
#[utoipa::path(
    post,
    path = "/api/sync",
    responses(
        (status = 200, description = "On-chain sync triggered", body = ())
    )
)]
/// Trigger a refresh of the on-chain wallet info.
pub async fn post_sync() -> Result<(), AppError> {
    dlc::refresh_wallet_info().await?;
    Ok(())
}
#[utoipa::path(
get,
path = "/api/orders",
responses(
(status = 200, description = "Returns personal orders", body = [Order])
)
)]
pub async fn get_orders() -> Result<Json<Vec<Order>>, AppError> {
let orders = native::trade::order::handler::get_orders_for_ui()
.await?
.iter()
.map(|order| order.into())
.collect();
Ok(Json(orders))
}
/// Best bid/ask pair plus the current order-matching fee rate.
#[derive(Serialize, ToSchema)]
pub struct BestQuote {
    // Flattened so `bid`/`ask` appear as top-level JSON fields.
    #[serde(flatten)]
    price: Price,
    /// Order-matching fee rate, serialized as a plain float.
    #[serde(with = "rust_decimal::serde::float")]
    fee: Decimal,
}
/// A (possibly one-sided) market quote; either side may be unknown.
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, ToSchema)]
pub struct Price {
    pub bid: Option<Decimal>,
    pub ask: Option<Decimal>,
}
#[utoipa::path(
    get,
    path = "/api/quotes/{contract_symbol}",
    params(
        ("contract_symbol" = String, Path, description = "Contract symbol, e.g. BtcUsd")
    ),
    responses(
        (status = 200, description = "Returns the best quotes for both bids and asks", body = BestQuote)
    )
)]
/// Return the current best bid/ask together with the order-matching fee rate.
pub async fn get_best_quote(
    State(state): State<Arc<AppState>>,
    // todo: once we support multiple pairs we should use this
    Path(_contract_symbol): Path<ContractSymbol>,
) -> Result<Json<Option<BestQuote>>, AppError> {
    let price = Price {
        bid: state.subscribers.bid_price(),
        ask: state.subscribers.ask_price(),
    };
    let fee = dlc::get_order_matching_fee_rate(true);
    Ok(Json(Some(BestQuote { price, fee })))
}
/// Flat, serializable snapshot of a DLC channel. Which fields are populated
/// depends on the channel's state; see the `From<&Channel>` impl below.
#[derive(Serialize, Default, ToSchema)]
pub struct DlcChannel {
    pub dlc_channel_id: Option<String>,
    pub contract_id: Option<String>,
    pub channel_state: Option<ChannelState>,
    pub buffer_txid: Option<String>,
    pub settle_txid: Option<String>,
    pub claim_txid: Option<String>,
    pub close_txid: Option<String>,
    pub punish_txid: Option<String>,
    pub fund_txid: Option<String>,
    pub fund_txout: Option<usize>,
    pub fee_rate: Option<u64>,
    /// Sub-state; only present while the channel is in the `Signed` state.
    pub signed_channel_state: Option<SignedChannelState>,
}
/// Top-level DLC channel state, mirroring `dlc_manager::channel::Channel`.
#[derive(Serialize, ToSchema)]
pub enum ChannelState {
    Offered,
    Accepted,
    Signed,
    Closing,
    SettledClosing,
    Closed,
    CounterClosed,
    ClosedPunished,
    CollaborativelyClosed,
    FailedAccept,
    FailedSign,
    Cancelled,
}
/// Sub-state of a signed DLC channel.
// NOTE(review): there is no `SettledClosing` variant here; that native state
// is mapped to `Closing` in the `From<&Channel>` impl — confirm intentional.
#[derive(Serialize, ToSchema)]
pub enum SignedChannelState {
    Established,
    SettledOffered,
    SettledReceived,
    SettledAccepted,
    SettledConfirmed,
    Settled,
    RenewOffered,
    RenewAccepted,
    RenewConfirmed,
    RenewFinalized,
    Closing,
    CollaborativeCloseOffered,
}
#[utoipa::path(
    get,
    path = "/api/channels",
    responses(
        (status = 200, description = "A list of your dlc channels and their states", body = [DlcChannel])
    )
)]
/// List all DLC channels in their API representation.
pub async fn get_channels() -> Result<Json<Vec<DlcChannel>>, AppError> {
    let mut channels = Vec::new();
    for channel in dlc::list_dlc_channels()? {
        channels.push(DlcChannel::from(&channel));
    }
    Ok(Json(channels))
}
/// Flattens a `rust-dlc` channel (with its deeply nested state machine) into
/// the flat, serializable [`DlcChannel`] exposed by the API. Fields that do
/// not apply to a given state are left at their `Default` (i.e. `None`).
impl From<&dlc_manager::channel::Channel> for DlcChannel {
    fn from(value: &dlc_manager::channel::Channel) -> Self {
        match value {
            dlc_manager::channel::Channel::Offered(o) => DlcChannel {
                contract_id: Some(hex::encode(o.offered_contract_id)),
                channel_state: Some(ChannelState::Offered),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::Accepted(a) => DlcChannel {
                dlc_channel_id: Some(hex::encode(a.channel_id)),
                contract_id: Some(hex::encode(a.accepted_contract_id)),
                channel_state: Some(ChannelState::Accepted),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::Signed(s) => {
                // A signed channel carries a sub-state; extract the relevant
                // settle/buffer/close transactions per sub-state so their
                // txids can be reported below.
                let (signed_channel_state, settle_tx, buffer_tx, close_tx) = match &s.state {
                    dlc_manager::channel::signed_channel::SignedChannelState::Established {
                        buffer_transaction,
                        ..
                    } => (
                        SignedChannelState::Established,
                        None,
                        Some(buffer_transaction),
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::SettledOffered {
                        ..
                    } => (SignedChannelState::SettledOffered, None, None, None),
                    dlc_manager::channel::signed_channel::SignedChannelState::SettledReceived {
                        ..
                    } => (SignedChannelState::SettledReceived, None, None, None),
                    dlc_manager::channel::signed_channel::SignedChannelState::SettledAccepted {
                        settle_tx,
                        ..
                    } => (
                        SignedChannelState::SettledAccepted,
                        Some(settle_tx),
                        None,
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::SettledConfirmed { settle_tx, .. } => (
                        SignedChannelState::SettledConfirmed,
                        Some(settle_tx),
                        None,
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::Settled { settle_tx, .. } => {
                        (SignedChannelState::Settled, Some(settle_tx), None, None)
                    }
                    dlc_manager::channel::signed_channel::SignedChannelState::RenewOffered { .. } => {
                        (SignedChannelState::RenewOffered, None, None, None)
                    }
                    dlc_manager::channel::signed_channel::SignedChannelState::RenewAccepted {
                        buffer_transaction, ..
                    } => (
                        SignedChannelState::RenewAccepted,
                        None,
                        Some(buffer_transaction),
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::RenewConfirmed {
                        buffer_transaction, ..
                    } => (
                        SignedChannelState::RenewConfirmed,
                        None,
                        Some(buffer_transaction),
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::RenewFinalized {
                        buffer_transaction, ..
                    } => (
                        SignedChannelState::RenewFinalized,
                        None,
                        Some(buffer_transaction),
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::Closing {
                        buffer_transaction, ..
                    } => (
                        SignedChannelState::Closing,
                        None,
                        Some(buffer_transaction),
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::SettledClosing {
                        settle_transaction, ..
                    } => (
                        // NOTE(review): mapped to `Closing` because the API
                        // `SignedChannelState` has no `SettledClosing`
                        // variant — confirm this is intentional.
                        SignedChannelState::Closing,
                        Some(settle_transaction),
                        None,
                        None,
                    ),
                    dlc_manager::channel::signed_channel::SignedChannelState::CollaborativeCloseOffered { close_tx, .. } => (
                        SignedChannelState::CollaborativeCloseOffered,
                        None,
                        None,
                        Some(close_tx),
                    ),
                };
                DlcChannel {
                    dlc_channel_id: Some(hex::encode(s.channel_id)),
                    contract_id: s.get_contract_id().map(hex::encode),
                    channel_state: Some(ChannelState::Signed),
                    fund_txid: Some(s.fund_tx.txid().to_string()),
                    fund_txout: Some(s.fund_output_index),
                    fee_rate: Some(s.fee_rate_per_vb),
                    buffer_txid: buffer_tx.map(|tx| tx.txid().to_string()),
                    settle_txid: settle_tx.map(|tx| tx.txid().to_string()),
                    close_txid: close_tx.map(|tx| tx.txid().to_string()),
                    signed_channel_state: Some(signed_channel_state),
                    ..DlcChannel::default()
                }
            }
            dlc_manager::channel::Channel::Closing(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                contract_id: Some(hex::encode(c.contract_id)),
                channel_state: Some(ChannelState::Closing),
                buffer_txid: Some(c.buffer_transaction.txid().to_string()),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::SettledClosing(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                channel_state: Some(ChannelState::SettledClosing),
                settle_txid: Some(c.settle_transaction.txid().to_string()),
                claim_txid: Some(c.claim_transaction.txid().to_string()),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::Closed(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                channel_state: Some(ChannelState::Closed),
                close_txid: Some(c.closing_txid.to_string()),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::CounterClosed(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                channel_state: Some(ChannelState::CounterClosed),
                close_txid: Some(c.closing_txid.to_string()),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::ClosedPunished(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                channel_state: Some(ChannelState::ClosedPunished),
                punish_txid: Some(c.punish_txid.to_string()),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::CollaborativelyClosed(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                channel_state: Some(ChannelState::CollaborativelyClosed),
                close_txid: Some(c.closing_txid.to_string()),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::FailedAccept(_) => DlcChannel {
                channel_state: Some(ChannelState::FailedAccept),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::FailedSign(c) => DlcChannel {
                dlc_channel_id: Some(hex::encode(c.channel_id)),
                channel_state: Some(ChannelState::FailedSign),
                ..DlcChannel::default()
            },
            dlc_manager::channel::Channel::Cancelled(o) => DlcChannel {
                contract_id: Some(hex::encode(o.offered_contract_id)),
                channel_state: Some(ChannelState::Cancelled),
                ..DlcChannel::default()
            },
        }
    }
}
/// Query parameters for the channel-close endpoint.
#[derive(Debug, Deserialize, ToSchema)]
pub struct DeleteChannel {
    // NOTE(review): assumed to trigger a force-close in `dlc::close_channel`
    // when `true` — confirm against that function's documentation.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    force: Option<bool>,
}
#[utoipa::path(
    delete,
    path = "/api/channels",
    request_body = DeleteChannel,
    responses(
        (status = 200, description = "Channel successfully closed", body = ())
    )
)]
/// Close the DLC channel; `force` defaults to `false` when absent.
pub async fn close_channel(Query(params): Query<DeleteChannel>) -> Result<(), AppError> {
    let force = params.force.unwrap_or(false);
    dlc::close_channel(force).await?;
    Ok(())
}
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromStr,
T::Err: fmt::Display,
{
let opt = Option::<String>::deserialize(de)?;
match opt.as_deref() {
None | Some("") => Ok(None),
Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
}
}
/// Limits and parameters the frontend must respect when building a trade.
#[derive(Serialize, Copy, Clone, Debug, ToSchema)]
pub struct TradeConstraints {
    pub max_local_balance_sats: u64,
    pub max_counterparty_balance_sats: u64,
    pub coordinator_leverage: f32,
    pub min_quantity: u64,
    /// Whether the constraints are derived from an existing channel balance.
    pub is_channel_balance: bool,
    pub min_margin_sats: u64,
    pub estimated_funding_tx_fee_sats: u64,
    pub channel_fee_reserve_sats: u64,
    pub max_leverage: u8,
}
#[utoipa::path(
get,
path = "/api/tradeconstraints",
responses(
(status = 200, description = "Returns trade constraints", body = TradeConstraints)
)
)]
pub async fn get_trade_constraints() -> Result<Json<TradeConstraints>, AppError> {
let trade_constraints = channel_trade_constraints::channel_trade_constraints()?;
let ten_one_config = try_get_tentenone_config().context("Could not read 10101 config")?;
let fee = dlc::estimated_funding_tx_fee()?;
let channel_fee_reserve = dlc::estimated_fee_reserve()?;
Ok(Json(TradeConstraints {
max_local_balance_sats: trade_constraints.max_local_balance_sats,
max_counterparty_balance_sats: trade_constraints.max_counterparty_balance_sats,
coordinator_leverage: trade_constraints.coordinator_leverage,
min_quantity: trade_constraints.min_quantity,
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | true |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/auth.rs | webapp/src/auth.rs | use axum::async_trait;
use axum::routing::get;
use axum::routing::post;
use axum::Router;
use axum_login::AuthUser;
use axum_login::AuthnBackend;
use axum_login::UserId;
use serde::Deserialize;
use sha2::digest::FixedOutput;
use sha2::Digest;
use sha2::Sha256;
use std::error::Error;
use std::fmt::Display;
use std::fmt::Formatter;
use utoipa::ToSchema;
/// Authentication backend holding the single expected password hash.
#[derive(Clone)]
pub struct Backend {
    // Hex-encoded SHA-256 digest of the configured password (see
    // `AuthnBackend::authenticate`).
    pub(crate) hashed_password: String,
}
/// The single application user; identity is derived from the password hash.
#[derive(Clone, Debug)]
pub struct User {
    // Stores the *hashed* password (copied from `Backend.hashed_password`).
    password: String,
}
/// Login request body: just the plaintext password.
#[derive(Clone, Deserialize, ToSchema)]
pub struct Credentials {
    pub password: String,
}
/// Error type returned by the authentication [`Backend`].
#[derive(Debug)]
pub struct BackendError(String);

impl Display for BackendError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Write the message directly; the previous implementation allocated
        // an intermediate `String` via `to_string` for no benefit.
        write!(f, "{}", self.0)
    }
}

impl Error for BackendError {}
#[async_trait]
impl AuthnBackend for Backend {
    type User = User;
    type Credentials = Credentials;
    type Error = BackendError;

    /// Check the submitted password by comparing its hex-encoded SHA-256
    /// digest against the configured hash; a mismatch yields `Ok(None)`.
    async fn authenticate(
        &self,
        creds: Self::Credentials,
    ) -> Result<Option<Self::User>, Self::Error> {
        let mut hasher = Sha256::new();
        hasher.update(creds.password.as_bytes());
        let submitted = hex::encode(hasher.finalize_fixed());

        if submitted == self.hashed_password {
            Ok(Some(User {
                password: self.hashed_password.clone(),
            }))
        } else {
            Ok(None)
        }
    }

    /// There is only a single user, so any id resolves to it.
    async fn get_user(&self, _user_id: &UserId<Self>) -> Result<Option<Self::User>, Self::Error> {
        Ok(Some(User {
            password: self.hashed_password.clone(),
        }))
    }
}
impl AuthUser for User {
    type Id = u64;
    // There is only ever one user, so a constant id suffices.
    fn id(&self) -> Self::Id {
        0
    }
    // Existing sessions are invalidated whenever the password hash changes.
    fn session_auth_hash(&self) -> &[u8] {
        self.password.as_bytes()
    }
}
/// Routes for session login/logout; mounted without a login requirement.
pub fn router() -> Router {
    Router::new()
        .route("/api/login", post(post::login))
        .route("/api/logout", get(get::logout))
}
pub mod post {
    use super::*;
    use axum::http::StatusCode;
    use axum::response::IntoResponse;
    use axum::Json;
    use axum_login::AuthSession;
    #[utoipa::path(
        post,
        path = "/api/login",
        request_body = Credentials,
        responses(
            (status = 200, description = "If login was successful", body = ())
        )
    )]
    /// Authenticate the supplied credentials and establish a session.
    pub async fn login(
        mut auth_session: AuthSession<Backend>,
        creds: Json<Credentials>,
    ) -> impl IntoResponse {
        // `Ok(None)` means the password did not match the configured hash.
        let user = match auth_session.authenticate(creds.0).await {
            Ok(Some(user)) => user,
            Ok(None) => {
                return StatusCode::UNAUTHORIZED.into_response();
            }
            Err(_) => return StatusCode::INTERNAL_SERVER_ERROR.into_response(),
        };
        // Persist the authenticated user in the session store.
        if auth_session.login(&user).await.is_err() {
            return StatusCode::INTERNAL_SERVER_ERROR.into_response();
        }
        StatusCode::OK.into_response()
    }
}
pub mod get {
    use crate::api::AppError;
    use crate::auth::Backend;
    use axum_login::AuthSession;
    #[utoipa::path(
        get,
        path = "/api/logout",
        responses(
            (status = 200, description = "If logout was successful", body = ())
        )
    )]
    /// Terminate the caller's session.
    pub async fn logout(mut auth_session: AuthSession<Backend>) -> Result<(), AppError> {
        auth_session.logout().await?;
        Ok(())
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/subscribers.rs | webapp/src/subscribers.rs | use native::event::api::WalletInfo;
use native::event::subscriber::Subscriber;
use native::event::EventInternal;
use native::event::EventType;
use parking_lot::Mutex;
use rust_decimal::Decimal;
use std::sync::Arc;
use tokio::sync::watch;
/// Sender halves of the watch channels, fed from the native event loop.
pub struct Senders {
    wallet_info: watch::Sender<Option<WalletInfo>>,
    ask_price_info: watch::Sender<Option<Decimal>>,
    bid_price_info: watch::Sender<Option<Decimal>>,
}
/// Subscribes to events destined for the frontend (typically Flutter app) and
/// provides a convenient way to access the current state.
pub struct AppSubscribers {
    // Each receiver holds `None` until the first corresponding event arrives.
    wallet_info: watch::Receiver<Option<WalletInfo>>,
    ask_price_info: watch::Receiver<Option<Decimal>>,
    bid_price_info: watch::Receiver<Option<Decimal>>,
}
impl AppSubscribers {
    /// Create the receiver half ([`AppSubscribers`]) and the thread-safe
    /// sender half ([`ThreadSafeSenders`]) of the app-event watch channels.
    pub async fn new() -> (Self, ThreadSafeSenders) {
        let (wallet_info_tx, wallet_info_rx) = watch::channel(None);
        let (ask_price_info_tx, ask_price_info_rx) = watch::channel(None);
        let (bid_price_info_tx, bid_price_info_rx) = watch::channel(None);
        let senders = Senders {
            wallet_info: wallet_info_tx,
            ask_price_info: ask_price_info_tx,
            bid_price_info: bid_price_info_tx,
        };
        let subscriber = Self {
            wallet_info: wallet_info_rx,
            ask_price_info: ask_price_info_rx,
            bid_price_info: bid_price_info_rx,
        };
        (subscriber, ThreadSafeSenders(Arc::new(Mutex::new(senders))))
    }
    /// Latest wallet info observed, if any.
    pub fn wallet_info(&self) -> Option<WalletInfo> {
        // `Option::clone` through the watch guard; `.as_ref().cloned()` was a
        // more roundabout spelling of the same thing.
        self.wallet_info.borrow().clone()
    }
    /// Latest ask price observed, if any.
    pub fn ask_price(&self) -> Option<Decimal> {
        self.ask_price_info.borrow().clone()
    }
    /// Latest bid price observed, if any.
    pub fn bid_price(&self) -> Option<Decimal> {
        self.bid_price_info.borrow().clone()
    }
}
impl Subscriber for Senders {
    fn notify(&self, event: &EventInternal) {
        // Log-and-continue: a failed send must not take down the event loop.
        if let Err(e) = self.handle_event(event) {
            tracing::error!(?e, ?event, "Failed to handle event");
        }
    }
    /// Event types this subscriber registers for.
    // NOTE(review): more event types are registered than `handle_event`
    // actually processes (order/position/health updates are silently
    // ignored) — confirm whether the extra registrations are intentional.
    fn events(&self) -> Vec<EventType> {
        vec![
            EventType::Init,
            EventType::WalletInfoUpdateNotification,
            EventType::OrderUpdateNotification,
            EventType::PositionUpdateNotification,
            EventType::PositionClosedNotification,
            EventType::AskPriceUpdateNotification,
            EventType::BidPriceUpdateNotification,
            EventType::ServiceHealthUpdate,
            EventType::ChannelStatusUpdate,
        ]
    }
}
impl Senders {
    /// Forward the subset of events we care about into the corresponding
    /// watch channels; all other event kinds are ignored.
    fn handle_event(&self, event: &EventInternal) -> anyhow::Result<()> {
        tracing::trace!(?event, "Received event");
        match event {
            EventInternal::WalletInfoUpdateNotification(wallet_info) => {
                self.wallet_info.send(Some(wallet_info.clone()))?;
            }
            EventInternal::AskPriceUpdateNotification(price) => {
                self.ask_price_info.send(Some(*price))?;
            }
            EventInternal::BidPriceUpdateNotification(price) => {
                self.bid_price_info.send(Some(*price))?;
            }
            _ => (),
        }
        Ok(())
    }
}
/// Cloneable, thread-safe wrapper around [`Senders`].
#[derive(Clone)]
pub struct ThreadSafeSenders(Arc<Mutex<Senders>>);
/// Delegates to the inner [`Senders`], taking the mutex per call so the
/// subscriber can be shared across threads.
impl Subscriber for ThreadSafeSenders {
    fn notify(&self, event: &EventInternal) {
        self.0.lock().notify(event)
    }
    fn events(&self) -> Vec<EventType> {
        self.0.lock().events()
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/webapp/src/main.rs | webapp/src/main.rs | mod api;
mod auth;
mod cli;
mod logger;
mod session;
mod subscribers;
use crate::api::version;
use crate::auth::Backend;
use crate::cli::Opts;
use crate::session::InMemorySessionStore;
use crate::subscribers::AppSubscribers;
use anyhow::Context;
use anyhow::Result;
use axum::http::header;
use axum::http::Request;
use axum::http::StatusCode;
use axum::http::Uri;
use axum::response::Html;
use axum::response::IntoResponse;
use axum::response::Response;
use axum::routing::get;
use axum::Router;
use axum_login::login_required;
use axum_login::tower_sessions::Expiry;
use axum_login::tower_sessions::SessionManagerLayer;
use axum_login::AuthManagerLayerBuilder;
use bitcoin::Network;
use hyper::body::Incoming;
use hyper_util::rt::TokioExecutor;
use hyper_util::rt::TokioIo;
use rust_embed::RustEmbed;
use rustls_pemfile::certs;
use rustls_pemfile::pkcs8_private_keys;
use std::fs::File;
use std::io::BufReader;
use std::net::SocketAddr;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use tokio_rustls::rustls::Certificate;
use tokio_rustls::rustls::PrivateKey;
use tokio_rustls::rustls::ServerConfig;
use tokio_rustls::TlsAcceptor;
use tower::Service;
use tower_http::classify::ServerErrorsFailureClass;
use tower_http::cors::CorsLayer;
use tower_http::trace::TraceLayer;
use tracing::level_filters::LevelFilter;
use tracing::Span;
use utoipa::OpenApi;
use utoipa_rapidoc::RapiDoc;
use utoipa_redoc::Redoc;
use utoipa_redoc::Servable;
use utoipa_swagger_ui::SwaggerUi;
/// Entry point: configures and boots the embedded 10101 native backend, wires
/// up the event subscribers and session/auth layers, then serves the web app
/// over plain HTTP or, with `--secure`, over TLS via a manual accept loop.
#[tokio::main]
async fn main() -> Result<()> {
    logger::init_tracing(LevelFilter::DEBUG, false, false)?;
    let opts = Opts::read();
    let network = opts.network();
    let data_dir = opts.data_dir()?;
    // Keep per-network state isolated in a network-named subdirectory.
    let data_dir = data_dir.join(network.to_string());
    if !data_dir.exists() {
        std::fs::create_dir_all(&data_dir)
            .context(format!("Could not create data dir for {network}"))?;
    }
    let data_dir = data_dir.clone().to_string_lossy().to_string();
    tracing::info!("Data-dir: {data_dir:?}");
    let cert_dir = opts.cert_dir()?;
    tracing::info!("Cert-dir: {cert_dir:?}");
    let coordinator_endpoint = opts.coordinator_endpoint()?;
    let coordinator_p2p_port = opts.coordinator_p2p_port()?;
    let coordinator_pubkey = opts.coordinator_pubkey()?;
    let oracle_endpoint = opts.oracle_endpoint()?;
    let oracle_pubkey = opts.oracle_pubkey()?;
    let password = opts.password();
    let coordinator_http_port = opts.coordinator_http_port;
    let electrs_endpoint = opts.electrs;
    let secure = opts.secure;
    let meme_endpoint = opts.meme_endpoint;
    let config = native::config::api::Config {
        coordinator_pubkey,
        electrs_endpoint,
        host: coordinator_endpoint,
        p2p_port: coordinator_p2p_port,
        http_port: coordinator_http_port,
        network: network.to_string(),
        oracle_endpoint,
        oracle_pubkey,
        health_check_interval_secs: 60,
        meme_endpoint,
    };
    let seed_dir = data_dir.clone();
    native::api::set_config(config, data_dir.clone(), seed_dir.clone()).expect("to set config");
    // Boot the native backend off the async runtime's blocking pool.
    let _handle = tokio::task::spawn_blocking({
        let seed_dir = seed_dir.clone();
        move || native::api::run_in_test(seed_dir).expect("to start backend")
    })
    .await;
    // NOTE(review): `rx` is the receiver half (`AppSubscribers`) and `tx` the
    // sender half — the names are easy to misread; consider renaming.
    let (rx, tx) = AppSubscribers::new().await;
    native::event::subscribe(tx);
    let session_store = InMemorySessionStore::new();
    // Periodically purge expired sessions in the background.
    let deletion_task = tokio::task::spawn(
        session_store
            .clone()
            .continuously_delete_expired(Duration::from_secs(60)),
    );
    let session_layer = SessionManagerLayer::new(session_store.clone())
        .with_secure(matches!(network, Network::Bitcoin))
        .with_expiry(Expiry::OnInactivity(time::Duration::hours(1)));
    let auth_layer = AuthManagerLayerBuilder::new(
        Backend {
            hashed_password: password,
        },
        session_layer,
    )
    .build();
    let app_state = AppState {
        whitelist_withdrawal_addresses: opts.whitelist_withdrawal_addresses,
        withdrawal_addresses: opts.withdrawal_address,
        subscribers: Arc::new(rx),
    };
    // API routes require login; auth and static/doc routes do not.
    let app = api::router(app_state)
        .route_layer(login_required!(Backend))
        .merge(auth::router())
        .merge(router(network))
        .layer(auth_layer);
    // run https server
    let addr = SocketAddr::from(([0, 0, 0, 0], 3001));
    tracing::debug!("listening on {}", addr);
    match secure {
        false => {
            let listener = tokio::net::TcpListener::bind(addr).await?;
            axum::serve(listener, app.into_make_service()).await
        }
        true => {
            // configure certificate and private key used by https
            let rustls_config =
                rustls_server_config(cert_dir.join("key.pem"), cert_dir.join("cert.pem"))?;
            let tls_acceptor = TlsAcceptor::from(rustls_config);
            let listener = tokio::net::TcpListener::bind(addr).await?;
            // Manual accept loop: one spawned task per TLS connection. This
            // arm never exits normally; only `?` on accept errors leaves it.
            loop {
                let tower_service = app.clone();
                let tls_acceptor = tls_acceptor.clone();
                // Wait for new tcp connection
                let (cnx, addr) = listener.accept().await?;
                tokio::spawn(async move {
                    // Wait for tls handshake to happen
                    let Ok(stream) = tls_acceptor.accept(cnx).await else {
                        tracing::error!("error during tls handshake connection from {}", addr);
                        return;
                    };
                    // Hyper has its own `AsyncRead` and `AsyncWrite` traits and doesn't use tokio.
                    // `TokioIo` converts between them.
                    let stream = TokioIo::new(stream);
                    // Hyper also has its own `Service` trait and doesn't use tower. We can use
                    // `hyper::service::service_fn` to create a hyper `Service` that calls our app
                    // through `tower::Service::call`.
                    let hyper_service =
                        hyper::service::service_fn(move |request: Request<Incoming>| {
                            // We have to clone `tower_service` because hyper's `Service` uses
                            // `&self` whereas tower's `Service`
                            // requires `&mut self`.
                            //
                            // We don't need to call `poll_ready` since `Router` is always ready.
                            tower_service.clone().call(request)
                        });
                    let ret = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new())
                        .serve_connection_with_upgrades(stream, hyper_service)
                        .await;
                    if let Err(err) = ret {
                        tracing::warn!("error serving connection from {}: {}", addr, err);
                    }
                });
            }
        }
    }?;
    deletion_task.await??;
    Ok(())
}
/// Shared state injected into the API handlers.
pub struct AppState {
    /// If enabled, withdrawals are restricted to `withdrawal_addresses`.
    pub whitelist_withdrawal_addresses: bool,
    pub withdrawal_addresses: Vec<String>,
    /// Read access to the latest wallet/price events.
    pub subscribers: Arc<AppSubscribers>,
}
/// Builds the unauthenticated part of the router: the embedded Flutter web
/// assets, `/api/version`, and the generated OpenAPI docs (Swagger UI, Redoc,
/// RapiDoc). CORS is relaxed on non-mainnet networks.
fn router(network: Network) -> Router {
    // OpenAPI schema assembled at compile time from the annotated handlers
    // and component schemas.
    #[derive(OpenApi)]
    #[openapi(
        paths(
            auth::post::login,
            auth::get::logout,
            api::version,
            api::get_balance,
            api::get_unused_address,
            api::send_payment,
            api::get_onchain_payment_history,
            api::get_orders,
            api::post_new_order,
            api::get_positions,
            api::get_best_quote,
            api::get_node_id,
            api::post_sync,
            api::get_seed_phrase,
            api::get_channels,
            api::close_channel,
            api::get_trade_constraints,
        ),
        components(schemas(
            auth::Credentials,
            api::AppError,
            api::Version,
            api::Balance,
            api::OnChainPayment,
            api::Payment,
            api::Seed,
            api::OrderId,
            api::NewOrderParams,
            api::Position,
            api::Order,
            api::BestQuote,
            api::DlcChannel,
            api::DeleteChannel,
            api::TradeConstraints,
            api::Price,
            api::ContractSymbol,
            api::Direction,
            api::PositionState,
            api::OrderType,
            api::OrderState,
            api::ChannelState,
            api::SignedChannelState,
        ))
    )]
    struct ApiDoc;
    let router = Router::new()
        .route("/", get(index_handler))
        .route("/main.dart.js", get(main_dart_handler))
        .route("/flutter.js", get(flutter_js))
        .route("/index.html", get(index_handler))
        .route("/assets/*file", get(static_handler))
        .route("/api/version", get(version))
        .merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", ApiDoc::openapi()))
        .merge(Redoc::with_url("/redoc", ApiDoc::openapi()))
        .merge(RapiDoc::new("/api-docs/openapi.json").path("/rapidoc"))
        .fallback_service(get(not_found))
        .layer(
            TraceLayer::new_for_http()
                .on_request(|request: &Request<axum::body::Body>, _span: &Span| {
                    tracing::debug!(
                        method = request.method().to_string(),
                        uri = request.uri().path(),
                        "request"
                    )
                })
                .on_response(())
                .on_body_chunk(())
                .on_eos(())
                .on_failure(
                    |error: ServerErrorsFailureClass, _latency: Duration, _span: &Span| {
                        tracing::error!("something went wrong : {error:#}")
                    },
                ),
        );
    if matches!(network, Network::Bitcoin) {
        router
    } else {
        // Anything but mainnet is assumed to be a dev setup, so allow
        // cross-origin requests (e.g. from a locally served frontend).
        router.layer(CorsLayer::very_permissive())
    }
}
// We use static route matchers ("/" and "/index.html") to serve our home
// page.
async fn index_handler() -> impl IntoResponse {
    let index: Uri = "/index.html".parse().expect("to be a valid uri");
    static_handler(index).await
}
// We use static route matchers ("/main_dart.js") to serve our js library
async fn main_dart_handler() -> impl IntoResponse {
    let uri: Uri = "/main.dart.js".parse().expect("to be a valid uri");
    static_handler(uri).await
}
// We use static route matchers ("/flutter.js") to serve our js library
async fn flutter_js() -> impl IntoResponse {
    let uri: Uri = "/flutter.js".parse().expect("to be a valid uri");
    static_handler(uri).await
}
// We use a wildcard matcher ("/dist/*file") to match against everything
// within our defined assets directory. This is the directory on our Asset
// struct below, where folder = "examples/public/".
async fn static_handler(uri: Uri) -> impl IntoResponse {
    // Embedded asset paths are stored without a leading slash.
    StaticFile(uri.path().trim_start_matches('/').to_owned())
}
// Finally, we use a fallback route for anything that didn't match.
// (Registered via `fallback_service` in `router`.)
async fn not_found() -> Html<&'static str> {
    Html("<h1>404</h1><p>Not Found</p>")
}
/// Flutter web build output, embedded into the binary at compile time.
#[derive(RustEmbed)]
#[folder = "frontend/build/web"]
struct Asset;
/// Response wrapper that serves an embedded [`Asset`] by path, answering 404
/// for unknown paths.
pub struct StaticFile<T>(pub T);
impl<T> IntoResponse for StaticFile<T>
where
    T: Into<String>,
{
    fn into_response(self) -> Response {
        let path: String = self.0.into();
        if let Some(content) = Asset::get(&path) {
            // Guess the MIME type from the file extension; fall back to
            // application/octet-stream.
            let mime = mime_guess::from_path(&path).first_or_octet_stream();
            ([(header::CONTENT_TYPE, mime.as_ref())], content.data).into_response()
        } else {
            (StatusCode::NOT_FOUND, "404 Not Found").into_response()
        }
    }
}
fn rustls_server_config(
key: impl AsRef<Path>,
cert: impl AsRef<Path>,
) -> Result<Arc<ServerConfig>> {
let mut key_reader = BufReader::new(File::open(key)?);
let mut cert_reader = BufReader::new(File::open(cert)?);
let key = PrivateKey(pkcs8_private_keys(&mut key_reader)?.remove(0));
let certs = certs(&mut cert_reader)?
.into_iter()
.map(Certificate)
.collect();
let mut config = ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certs, key)
.expect("bad certificate/key");
config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
Ok(Arc::new(config))
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/lib.rs | mobile/native/src/lib.rs | // These modules need to be define at the top so that FRB doesn't try to import from them.
pub mod api;
pub mod calculations;
pub mod channel_trade_constraints;
pub mod commons;
pub mod config;
pub mod db;
pub mod dlc;
pub mod event;
pub mod health;
pub mod logger;
pub mod schema;
pub mod state;
pub mod trade;
pub mod watcher;
mod backup;
mod cipher;
mod destination;
mod dlc_channel;
mod emergency_kit;
mod max_quantity;
mod names;
mod orderbook;
mod polls;
mod report_error;
mod storage;
pub use dlc::get_maintenance_margin_rate;
pub use report_error::report_error_to_coordinator;
#[allow(
clippy::all,
clippy::unwrap_used,
unused_import_braces,
unused_qualifications
)]
mod bridge_generated;
mod hodl_invoice;
mod position;
mod unfunded_channel_opening_order;
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/names.rs | mobile/native/src/names.rs | pub fn get_new_name() -> String {
let names = petname::Petnames::default();
let input = names.generate_one(2, " ");
return uppercase_first_characters(input.as_str(), ' ');
}
/// Uppercases the first character of each of the (at most two) parts of
/// `input` split on `separator`, rejoining them with that same separator.
///
/// If `separator` does not occur in `input`, the input is returned unchanged.
fn uppercase_first_characters(input: &str, separator: char) -> String {
    let mut parts = input.splitn(2, separator);
    if let (Some(first), Some(second)) = (parts.next(), parts.next()) {
        // Fix: rejoin with the caller-supplied separator rather than a
        // hard-coded space, so non-space separators round-trip correctly.
        return format!(
            "{}{}{}",
            uppercase_first_character(first),
            separator,
            uppercase_first_character(second)
        );
    }
    input.to_string()
}

/// Returns `s` with its first character uppercased (Unicode-aware, so one
/// character may expand to several). Empty input yields an empty string.
fn uppercase_first_character(s: &str) -> String {
    let mut c = s.chars();
    match c.next() {
        None => String::new(),
        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/cipher.rs | mobile/native/src/cipher.rs | use aes_gcm_siv::AeadInPlace;
use aes_gcm_siv::Aes256GcmSiv;
use aes_gcm_siv::KeyInit;
use aes_gcm_siv::Nonce;
use anyhow::anyhow;
use anyhow::Result;
use bitcoin::secp256k1::ecdsa::Signature;
use bitcoin::secp256k1::rand;
use bitcoin::secp256k1::rand::Rng;
use bitcoin::secp256k1::PublicKey;
use bitcoin::secp256k1::SecretKey;
use bitcoin::secp256k1::SECP256K1;
use xxi_node::commons;
/// AES-256-GCM-SIV cipher keyed from a secp256k1 secret key.
///
/// The same 32-byte secret doubles as the symmetric encryption key and as
/// the ECDSA key used to sign and identify backup requests.
#[derive(Clone)]
pub struct AesCipher {
    // Retained so the cipher can also produce signatures and the pubkey.
    secret_key: SecretKey,
    // AES-256-GCM-SIV state derived from `secret_key`'s raw bytes.
    inner: Aes256GcmSiv,
}
impl AesCipher {
    /// Builds a cipher from a 32-byte secp256k1 secret key; the key bytes
    /// are used directly as the AES-256-GCM-SIV key.
    pub fn new(secret_key: SecretKey) -> Self {
        let cipher = Aes256GcmSiv::new_from_slice(secret_key.secret_bytes().as_slice())
            .expect("secret key to have correct key size");
        Self {
            secret_key,
            inner: cipher,
        }
    }

    /// Encrypts `value`, returning `nonce (12 bytes) || ciphertext`.
    pub fn encrypt(&self, value: Vec<u8>) -> Result<Vec<u8>> {
        let nonce = generate_nonce();
        let nonce = Nonce::from_slice(&nonce);

        let mut buffer: Vec<u8> = vec![];
        buffer.extend_from_slice(value.as_slice());

        // Encrypt `buffer` in-place, replacing the plaintext contents with ciphertext
        self.inner
            .encrypt_in_place(nonce, b"", &mut buffer)
            .map_err(|e| anyhow!("{e:#}"))?;

        // Prepend the nonce so `decrypt` can recover it.
        let mut cipher_text = nonce.to_vec();
        cipher_text.extend_from_slice(buffer.as_slice());

        Ok(cipher_text)
    }

    /// Decrypts a `nonce || ciphertext` payload produced by [`Self::encrypt`].
    pub fn decrypt(&self, value: Vec<u8>) -> Result<Vec<u8>> {
        // Guard against truncated input: slicing `value[0..12]` below would
        // panic if fewer than 12 nonce bytes are present.
        if value.len() < 12 {
            return Err(anyhow!(
                "ciphertext too short: expected at least 12 nonce bytes, got {}",
                value.len()
            ));
        }

        let nonce = Nonce::from_slice(&value[0..12]);
        let mut buffer: Vec<u8> = Vec::new();
        buffer.extend_from_slice(&value[12..]);

        // Decrypt `buffer` in-place, replacing its ciphertext context with the original plaintext
        self.inner
            .decrypt_in_place(nonce, b"", &mut buffer)
            .map_err(|e| anyhow!("{e:#}"))?;

        Ok(buffer.to_vec())
    }

    /// ECDSA-signs `value` after hashing it via `commons::create_sign_message`.
    pub fn sign(&self, value: Vec<u8>) -> Result<Signature> {
        let message = commons::create_sign_message(value);
        Ok(self.secret_key.sign_ecdsa(message))
    }

    /// Public key corresponding to the signing/encryption secret key.
    pub fn public_key(&self) -> PublicKey {
        self.secret_key.public_key(SECP256K1)
    }
}
/// Produces 12 random bytes for use as an AES-GCM-SIV nonce.
fn generate_nonce() -> [u8; 12] {
    let mut nonce = [0u8; 12];
    rand::thread_rng().fill(&mut nonce);
    nonce
}
#[cfg(test)]
mod tests {
    use crate::cipher::AesCipher;
    use bitcoin::secp256k1;
    use bitcoin::secp256k1::SecretKey;
    use bitcoin::secp256k1::SECP256K1;
    use xxi_node::commons;

    // Round-trip: decrypt(encrypt(m)) == m, and the ciphertext differs from
    // the plaintext.
    #[test]
    fn cipher_backup_value() {
        let secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng());
        let cipher = AesCipher::new(secret_key);

        let message = b"10101";

        let encrypted_message = cipher.encrypt(message.to_vec()).unwrap();
        assert_ne!(encrypted_message, message);

        let decrypted_message = cipher.decrypt(encrypted_message).unwrap();
        assert_eq!(decrypted_message, message);
    }

    // A signature produced by `sign` verifies against the derived pubkey.
    #[test]
    fn sign_backup_value() {
        let secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng());
        let cipher = AesCipher::new(secret_key);

        let message = b"10101";

        let signature = cipher.sign(message.to_vec()).unwrap();

        let message = commons::create_sign_message(message.to_vec());
        signature
            .verify(&message, &secret_key.public_key(SECP256K1))
            .unwrap()
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/logger.rs | mobile/native/src/logger.rs | use anyhow::Context;
use anyhow::Result;
use flutter_rust_bridge::StreamSink;
use std::collections::BTreeMap;
use std::sync::Arc;
use std::sync::Once;
use tracing_log::LogTracer;
use tracing_subscriber::filter::Directive;
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::time;
use tracing_subscriber::fmt::time::UtcTime;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::Layer;
/// Environment variable carrying extra, comma-separated log directives.
const RUST_LOG_ENV: &str = "RUST_LOG";

/// Guards one-time initialisation of the tracing subsystem.
static INIT_LOGGER_ONCE: Once = Once::new();
/// Extends `env` with the app-wide default `level` plus per-crate overrides
/// that quieten noisy dependencies.
pub fn log_base_directives(env: EnvFilter, level: LevelFilter) -> Result<EnvFilter> {
    let filter = env
        .add_directive(Directive::from(level))
        .add_directive("hyper=warn".parse()?)
        .add_directive("sqlx=warn".parse()?) // sqlx logs all queries on INFO
        .add_directive("reqwest=warn".parse()?)
        .add_directive("rustls=warn".parse()?)
        // set to debug to show ldk logs (they're also in logs.txt)
        .add_directive("sled=warn".parse()?)
        .add_directive("bdk=warn".parse()?) // bdk is quite spammy on debug
        .add_directive("lightning_transaction_sync=warn".parse()?)
        .add_directive("lightning::ln::peer_handler=debug".parse()?)
        .add_directive("lightning=trace".parse()?)
        .add_directive("ureq=info".parse()?);
    Ok(filter)
}
/// Struct to expose logs from Rust to Flutter
pub struct LogEntry {
    // Human-readable log message.
    pub msg: String,
    // Log target (usually the emitting module).
    pub target: String,
    // Severity level rendered as text (e.g. "INFO").
    pub level: String,
    // Source file that emitted the event; empty when unknown.
    pub file: String,
    // Source line, as a string; empty when unknown.
    pub line: String,
    // Rust module path of the emitting code; empty when unknown.
    pub module_path: String,
    // Remaining structured fields serialised as comma-joined "key: value".
    pub data: String,
}
/// Registers the Flutter-side sink that receives [`LogEntry`] values and
/// initialises the tracing subsystem exactly once; later calls only replace
/// the sink.
pub fn create_log_stream(sink: StreamSink<LogEntry>) {
    crate::state::set_log_stream_sink(Arc::new(sink));

    INIT_LOGGER_ONCE.call_once(|| {
        init_tracing(LevelFilter::DEBUG, false).expect("Logger to initialise");
    });
}
/// Tracing layer responsible for sending tracing events into the Dart/Flutter
/// log stream sink registered via [`create_log_stream`].
struct DartSendLayer;
impl<S> Layer<S> for DartSendLayer
where
    S: tracing::Subscriber,
{
    /// Forwards one tracing event to Flutter: flattens its fields into
    /// strings, extracts the well-known `log.*` metadata keys, and pushes a
    /// [`LogEntry`] into the registered stream sink.
    fn on_event(
        &self,
        event: &tracing::Event<'_>,
        _ctx: tracing_subscriber::layer::Context<'_, S>,
    ) {
        // Collect all event fields into a string map via `Visitor`.
        let mut fields = BTreeMap::new();
        let mut visitor = Visitor(&mut fields);
        event.record(&mut visitor);

        // `log.*` keys come via the `log` bridge (see `LogTracer::init` in
        // `init_tracing`); missing fields default to empty strings.
        let target = fields.remove("log.target").unwrap_or("".to_string());
        let msg = fields.remove("message").unwrap_or("".to_string());
        let file = fields.remove("log.file").unwrap_or("".to_string());
        let line = fields.remove("log.line").unwrap_or("".to_string());
        let module_path = fields.remove("log.module_path").unwrap_or("".to_string());

        // Whatever remains is arbitrary structured data; serialise it as
        // comma-joined "key: value" pairs.
        let data = fields
            .iter()
            .map(|field| format!("{}: {}", field.0, field.1))
            .collect::<Vec<String>>()
            .join(",");

        crate::state::try_get_log_stream_sink()
            .expect("StreamSink from Flutter to be initialised")
            .add(LogEntry {
                msg,
                target,
                level: event.metadata().level().to_string(),
                file,
                line,
                module_path,
                data,
            });
    }
}
/// Field visitor that collects a tracing event's fields into a string map.
struct Visitor<'a>(&'a mut BTreeMap<String, String>);
/// Every primitive field type is stringified the same way: stored under the
/// field's name with `to_string()` of its value.
impl<'a> tracing::field::Visit for Visitor<'a> {
    fn record_f64(&mut self, field: &tracing::field::Field, value: f64) {
        self.0.insert(field.name().to_owned(), value.to_string());
    }

    fn record_i64(&mut self, field: &tracing::field::Field, value: i64) {
        self.0.insert(field.name().to_owned(), value.to_string());
    }

    fn record_u64(&mut self, field: &tracing::field::Field, value: u64) {
        self.0.insert(field.name().to_owned(), value.to_string());
    }

    fn record_bool(&mut self, field: &tracing::field::Field, value: bool) {
        self.0.insert(field.name().to_owned(), value.to_string());
    }

    fn record_str(&mut self, field: &tracing::field::Field, value: &str) {
        self.0.insert(field.name().to_owned(), value.to_string());
    }

    fn record_error(
        &mut self,
        field: &tracing::field::Field,
        value: &(dyn std::error::Error + 'static),
    ) {
        self.0.insert(field.name().to_owned(), value.to_string());
    }

    // Fallback for anything without a more specific `record_*`: use Debug.
    fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) {
        self.0.insert(field.name().to_owned(), format!("{value:?}"));
    }
}
/// Configures and installs the global tracing subscriber.
///
/// The effective filter is `level` plus the base directives from
/// [`log_base_directives`], optionally extended by comma-separated directives
/// in the `RUST_LOG` environment variable. Events flow both to stderr (JSON
/// or plain text depending on `json_format`) and to Flutter via
/// [`DartSendLayer`]. `LevelFilter::OFF` installs nothing and returns Ok.
///
/// Errors if a global subscriber (or log tracer) was already installed.
pub fn init_tracing(level: LevelFilter, json_format: bool) -> Result<()> {
    if level == LevelFilter::OFF {
        return Ok(());
    }

    // Parse additional log directives from env variable
    let filter = match std::env::var_os(RUST_LOG_ENV).map(|s| s.into_string()) {
        Some(Ok(env)) => {
            let mut filter = log_base_directives(EnvFilter::new(""), level)?;
            for directive in env.split(',') {
                #[allow(clippy::print_stdout)]
                match directive.parse() {
                    Ok(d) => filter = filter.add_directive(d),
                    // Can't use tracing here: the subscriber isn't up yet.
                    Err(e) => println!("WARN ignoring log directive: `{directive}`: {e}"),
                };
            }
            filter
        }
        _ => log_base_directives(EnvFilter::from_env(RUST_LOG_ENV), level)?,
    };

    let fmt_layer = tracing_subscriber::fmt::layer().with_writer(std::io::stderr);

    let fmt_layer = if json_format {
        fmt_layer.json().with_timer(UtcTime::rfc_3339()).boxed()
    } else {
        fmt_layer.with_timer(time::UtcTime::rfc_3339()).boxed()
    };

    tracing_subscriber::registry()
        .with(filter)
        .with(DartSendLayer)
        .with(fmt_layer)
        .try_init()
        .context("Failed to init tracing")?;

    // Bridge `log` macro calls from dependencies into tracing.
    LogTracer::init()?;

    tracing::info!("Initialized logger");

    Ok(())
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/emergency_kit.rs | mobile/native/src/emergency_kit.rs | use crate::calculations::calculate_liquidation_price;
use crate::config;
use crate::db;
use crate::db::connection;
use crate::dlc;
use crate::event;
use crate::event::EventInternal;
use crate::get_maintenance_margin_rate;
use crate::state::get_node;
use crate::trade::position::Position;
use crate::trade::position::PositionState;
use anyhow::bail;
use anyhow::ensure;
use anyhow::Context;
use anyhow::Result;
use bitcoin::secp256k1::SecretKey;
use bitcoin::Amount;
use dlc_manager::channel::signed_channel::SignedChannelState;
use dlc_manager::contract::Contract;
use dlc_manager::DlcChannelId;
use dlc_manager::Signer;
use dlc_messages::channel::SettleFinalize;
use hex::FromHex;
use lightning::ln::chan_utils::build_commitment_secret;
use time::OffsetDateTime;
use xxi_node::bitcoin_conversion::to_secp_sk_29;
use xxi_node::commons::ContractSymbol;
use xxi_node::commons::OrderReason;
use xxi_node::message_handler::TenTenOneMessage;
use xxi_node::message_handler::TenTenOneSettleFinalize;
use xxi_node::node::event::NodeEvent;
/// Emergency kit action: marks every order currently in state `Filling` as
/// failed.
pub fn set_filling_orders_to_failed() -> Result<()> {
    tracing::warn!("Executing emergency kit! Setting orders in state Filling to Failed!");

    db::models::Order::set_all_filling_orders_to_failed(&mut connection()?)
}
/// Emergency kit action: removes the DLC channel with the given hex-encoded
/// id from local storage.
pub fn delete_dlc_channel(dlc_channel_id: String) -> Result<()> {
    tracing::warn!(
        dlc_channel_id,
        "Executing emergency kit! Deleting dlc channel"
    );

    let channel_id = DlcChannelId::from_hex(dlc_channel_id)?;
    dlc::delete_dlc_channel(&channel_id)
}
/// Emergency kit action: wipes all locally stored positions and notifies the
/// UI that the BTCUSD position is gone.
pub fn delete_position() -> Result<()> {
    tracing::warn!("Executing emergency kit! Deleting position!");
    db::delete_positions()?;

    let notification = EventInternal::PositionCloseNotification(ContractSymbol::BtcUsd);
    event::publish(&notification);

    Ok(())
}
/// Emergency kit action: rebuilds the local `Position` row from the current
/// established DLC channel plus the most recent failed order.
///
/// Preconditions (checked): the signed channel with the coordinator is in
/// state `Established`, no position exists locally, and a failed order with
/// an execution price is available.
pub fn recreate_position() -> Result<()> {
    tracing::warn!("Executing emergency kit! Recreating position!");
    let node = get_node();
    let counterparty = config::get_coordinator_info().pubkey;
    let channel = node.inner.get_signed_channel_by_trader_id(counterparty)?;

    ensure!(
        matches!(channel.state, SignedChannelState::Established { .. }),
        "A position can only be recreated from an established signed channel state"
    );

    let positions = db::get_positions()?;
    let position = positions.first();
    ensure!(
        position.is_none(),
        "Can't recreate a position if there is already a position"
    );

    let order = db::get_last_failed_order()?.context("Couldn't find last failed order!")?;
    let average_entry_price = order.execution_price().context("Missing execution price")?;

    tracing::debug!("Creating position from established signed dlc channel and last failed order");
    let contract_id = channel.get_contract_id().context("Missing contract id")?;
    let contract = node
        .inner
        .get_contract_by_id(&contract_id)?
        .context("Missing contract")?;

    // Derive collateral and expiry from the contract: collateral is the
    // accept-side contract collateral minus the channel's usable reserve;
    // expiry is the maturity of the first oracle announcement.
    let (collateral, expiry) = match contract {
        Contract::Signed(contract) | Contract::Confirmed(contract) => {
            let trader_reserve = node
                .inner
                .get_dlc_channel_usable_balance(&channel.channel_id)?;
            let oracle_event = &contract
                .accepted_contract
                .offered_contract
                .contract_info
                .first()
                .context("missing contract info")?
                .oracle_announcements
                .first()
                .context("missing oracle info")?
                .oracle_event;
            let expiry_timestamp =
                OffsetDateTime::from_unix_timestamp(oracle_event.event_maturity_epoch as i64)?;
            (
                contract.accepted_contract.accept_params.collateral - trader_reserve.to_sat(),
                expiry_timestamp,
            )
        }
        _ => {
            bail!("Contract in unexpected state: {:?}", contract);
        }
    };

    let maintenance_margin_rate = get_maintenance_margin_rate();
    let liquidation_price = calculate_liquidation_price(
        average_entry_price,
        order.leverage,
        order.direction,
        maintenance_margin_rate,
    );

    let position = Position {
        leverage: order.leverage,
        quantity: order.quantity,
        contract_symbol: order.contract_symbol,
        direction: order.direction,
        average_entry_price,
        liquidation_price,
        position_state: PositionState::Open,
        collateral,
        expiry,
        updated: OffsetDateTime::now_utc(),
        created: OffsetDateTime::now_utc(),
        stable: false,
        order_matching_fees: order.matching_fee().unwrap_or(Amount::ZERO),
    };
    db::insert_position(position)?;
    event::publish(&EventInternal::PositionUpdateNotification(position));

    Ok(())
}
/// Emergency kit action: re-sends the `SettleFinalize` message for a settled
/// DLC channel whose original finalize message got lost.
///
/// Preconditions (checked): the signed channel with the coordinator is in
/// state `Settled`, and the corresponding order is still in `Filling`.
pub fn resend_settle_finalize_message() -> Result<()> {
    tracing::warn!("Executing emergency kit! Resending settle finalize message");
    let coordinator_pubkey = config::get_coordinator_info().pubkey;
    let node = get_node();
    let signed_channel = node
        .inner
        .get_signed_channel_by_trader_id(coordinator_pubkey)?;

    ensure!(
        matches!(signed_channel.state, SignedChannelState::Settled { .. }),
        "Signed channel state must be settled to resend settle finalize message!"
    );

    // Re-derive the previous per-update secret that the original finalize
    // message would have revealed.
    let per_update_seed_pk = signed_channel.own_per_update_seed;
    let per_update_seed = node
        .inner
        .dlc_wallet
        .get_secret_key_for_pubkey(&per_update_seed_pk)?;

    let prev_per_update_secret = SecretKey::from_slice(&build_commitment_secret(
        per_update_seed.as_ref(),
        signed_channel.update_idx + 1,
    ))?;

    // We assume the relevant order to be in filling.
    let order = db::get_order_in_filling()?.context("Couldn't find order in filling")?;

    let msg = TenTenOneMessage::SettleFinalize(TenTenOneSettleFinalize {
        order_id: order.id,
        order_reason: OrderReason::Manual,
        settle_finalize: SettleFinalize {
            channel_id: signed_channel.channel_id,
            prev_per_update_secret: to_secp_sk_29(prev_per_update_secret),
            reference_id: signed_channel.reference_id,
        },
    });

    // `msg` is not used after this point, so move it instead of cloning it.
    node.inner.event_handler.publish(NodeEvent::SendDlcMessage {
        peer: coordinator_pubkey,
        msg,
    });

    Ok(())
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/report_error.rs | mobile/native/src/report_error.rs | use crate::commons::reqwest_client;
use crate::config;
use crate::state::get_node;
use crate::state::get_or_create_tokio_runtime;
use reqwest::Url;
/// Fire-and-forget: reports `error` (plus the app version and this node's
/// pubkey) to the coordinator's `/api/report-error` endpoint.
///
/// The HTTP call is spawned on the shared tokio runtime; failures to send
/// are only logged, never propagated to the caller.
pub fn report_error_to_coordinator<E: ToString>(error: &E) {
    let version = env!("CARGO_PKG_VERSION").to_string();
    let client = reqwest_client();
    let pk = get_node().inner.info.pubkey;
    let url = Url::parse(&format!("http://{}", config::get_http_endpoint()))
        .expect("valid URL")
        .join("/api/report-error")
        .expect("valid URL");
    // Stringify up front so only an owned String moves into the async task.
    let error_string = error.to_string();
    match get_or_create_tokio_runtime() {
        Ok(runtime) => {
            runtime.spawn(async move {
                if let Err(e) = client
                    .post(url)
                    .json(&xxi_node::commons::ReportedError {
                        trader_pk: pk,
                        msg: error_string,
                        version: Some(version),
                    })
                    .send()
                    .await
                {
                    tracing::error!("Failed to report error to coordinator: {e}");
                }
            });
        }
        Err(e) => {
            tracing::error!("Failed to report error to coordinator, missing runtime: {e}");
        }
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/state.rs | mobile/native/src/state.rs | use crate::config::ConfigInternal;
use crate::dlc::node::Node;
use crate::logger::LogEntry;
use crate::storage::TenTenOneNodeStorage;
use anyhow::Result;
use flutter_rust_bridge::StreamSink;
use parking_lot::RwLock;
use state::Storage;
use std::sync::Arc;
use tokio::runtime::Runtime;
use tokio::sync::broadcast::Sender;
use xxi_node::commons::OrderbookRequest;
use xxi_node::commons::TenTenOneConfig;
use xxi_node::seed::Bip39Seed;
/// For testing we need the state to be mutable as otherwise we can't start another app after
/// stopping the first one. Note, running two apps at the same time will not work as the states
/// below are static and will be used for both apps.
/// TODO(holzeis): Check if there is a way to bind the state to the lifetime of the app (node).
static CONFIG: Storage<RwLock<ConfigInternal>> = Storage::new();
static NODE: Storage<RwLock<Arc<Node>>> = Storage::new();
static SEED: Storage<RwLock<Bip39Seed>> = Storage::new();
static STORAGE: Storage<RwLock<TenTenOneNodeStorage>> = Storage::new();
// Not wrapped in a RwLock: the runtime is set once and only handed out by
// shared reference.
static RUNTIME: Storage<Runtime> = Storage::new();
static WEBSOCKET: Storage<RwLock<Sender<OrderbookRequest>>> = Storage::new();
static LOG_STREAM_SINK: Storage<RwLock<Arc<StreamSink<LogEntry>>>> = Storage::new();
static TENTENONE_CONFIG: Storage<RwLock<TenTenOneConfig>> = Storage::new();
static LN_PAYMENT_WATCHER: Storage<RwLock<Sender<String>>> = Storage::new();
/// Stores the app configuration, replacing any previously stored one.
pub fn set_config(config: ConfigInternal) {
    match CONFIG.try_get() {
        Some(c) => *c.write() = config,
        None => {
            CONFIG.set(RwLock::new(config));
        }
    }
}

/// Returns a clone of the stored configuration.
/// Panics if [`set_config`] has not been called yet.
pub fn get_config() -> ConfigInternal {
    CONFIG.get().read().clone()
}

/// Stores the node handle, replacing any previously stored one.
pub fn set_node(node: Arc<Node>) {
    match NODE.try_get() {
        Some(n) => *n.write() = node,
        None => {
            NODE.set(RwLock::new(node));
        }
    }
}

/// Returns the stored node handle.
/// Panics if [`set_node`] has not been called yet.
pub fn get_node() -> Arc<Node> {
    NODE.get().read().clone()
}

/// Non-panicking variant of [`get_node`].
pub fn try_get_node() -> Option<Arc<Node>> {
    NODE.try_get().map(|n| n.read().clone())
}

/// Stores the wallet seed, replacing any previously stored one.
pub fn set_seed(seed: Bip39Seed) {
    match SEED.try_get() {
        Some(s) => *s.write() = seed,
        None => {
            SEED.set(RwLock::new(seed));
        }
    }
}

/// Returns a clone of the stored seed.
/// Panics if [`set_seed`] has not been called yet.
pub fn get_seed() -> Bip39Seed {
    SEED.get().read().clone()
}

/// Non-panicking variant of [`get_seed`].
pub fn try_get_seed() -> Option<Bip39Seed> {
    SEED.try_get().map(|s| s.read().clone())
}
/// Stores the node storage handle, replacing any previously stored one.
pub fn set_storage(storage: TenTenOneNodeStorage) {
    match STORAGE.try_get() {
        Some(s) => *s.write() = storage,
        None => {
            STORAGE.set(RwLock::new(storage));
        }
    }
}

/// Returns the stored storage handle.
/// Panics if [`set_storage`] has not been called yet.
pub fn get_storage() -> TenTenOneNodeStorage {
    STORAGE.get().read().clone()
}

/// Non-panicking variant of [`get_storage`].
pub fn try_get_storage() -> Option<TenTenOneNodeStorage> {
    STORAGE.try_get().map(|s| s.read().clone())
}

/// Lazily creates a multi threaded runtime with the number of worker threads
/// corresponding to the number of available cores.
///
/// NOTE(review): two threads racing through the `is_none` check could each
/// build a `Runtime`; `Storage::set` keeps only one — confirm the loser is
/// dropped harmlessly.
pub fn get_or_create_tokio_runtime() -> Result<&'static Runtime> {
    if RUNTIME.try_get().is_none() {
        let runtime = Runtime::new()?;
        RUNTIME.set(runtime);
    }

    Ok(RUNTIME.get())
}
/// Stores the orderbook websocket sender, replacing any previous one.
pub fn set_websocket(websocket: Sender<OrderbookRequest>) {
    match WEBSOCKET.try_get() {
        Some(s) => *s.write() = websocket,
        None => {
            WEBSOCKET.set(RwLock::new(websocket));
        }
    }
}

/// Returns the stored websocket sender.
/// Panics if [`set_websocket`] has not been called yet.
pub fn get_websocket() -> Sender<OrderbookRequest> {
    WEBSOCKET.get().read().clone()
}

/// Non-panicking variant of [`get_websocket`].
pub fn try_get_websocket() -> Option<Sender<OrderbookRequest>> {
    WEBSOCKET.try_get().map(|w| w.read().clone())
}

/// Stores the Flutter log stream sink, replacing any previous one.
pub fn set_log_stream_sink(sink: Arc<StreamSink<LogEntry>>) {
    match LOG_STREAM_SINK.try_get() {
        Some(l) => *l.write() = sink,
        None => {
            LOG_STREAM_SINK.set(RwLock::new(sink));
        }
    }
}

/// Returns the Flutter log stream sink, if one was registered.
pub fn try_get_log_stream_sink() -> Option<Arc<StreamSink<LogEntry>>> {
    LOG_STREAM_SINK.try_get().map(|l| l.read().clone())
}

/// Stores the 10101 configuration, replacing any previous one.
pub fn set_tentenone_config(config: TenTenOneConfig) {
    match TENTENONE_CONFIG.try_get() {
        None => {
            TENTENONE_CONFIG.set(RwLock::new(config));
        }
        Some(s) => {
            *s.write() = config;
        }
    }
}

/// Returns the stored 10101 configuration, if one was set.
pub fn try_get_tentenone_config() -> Option<TenTenOneConfig> {
    TENTENONE_CONFIG.try_get().map(|w| w.read().clone())
}

/// Stores the lightning payment watcher sender, replacing any previous one.
pub fn set_ln_payment_watcher(ln_payment_watcher: Sender<String>) {
    match LN_PAYMENT_WATCHER.try_get() {
        None => {
            LN_PAYMENT_WATCHER.set(RwLock::new(ln_payment_watcher));
        }
        Some(s) => {
            *s.write() = ln_payment_watcher;
        }
    }
}

/// Returns the lightning payment watcher sender.
/// Panics if [`set_ln_payment_watcher`] has not been called yet.
pub fn get_ln_payment_watcher() -> Sender<String> {
    LN_PAYMENT_WATCHER.get().read().clone()
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/watcher.rs | mobile/native/src/watcher.rs | use crate::event::subscriber::Subscriber;
use crate::event::EventInternal;
use crate::event::EventType;
use crate::state;
use anyhow::Result;
use bitcoin::Address;
use bitcoin::Amount;
use std::time::Duration;
use tokio::sync::broadcast::Sender;
/// Event subscriber that forwards `LnPaymentReceived` events onto a broadcast
/// channel so individual invoice watchers can pick them up.
#[derive(Clone)]
pub struct InvoiceWatcher {
    // Broadcast sender carrying the payment hash (`r_hash`) of each payment.
    pub sender: Sender<String>,
}
impl Subscriber for InvoiceWatcher {
    /// Re-broadcasts the payment hash of every `LnPaymentReceived` event on
    /// `self.sender`; all other events are ignored.
    fn notify(&self, event: &EventInternal) {
        let runtime = match state::get_or_create_tokio_runtime() {
            Ok(runtime) => runtime,
            Err(e) => {
                tracing::error!("Failed to get tokio runtime. Error: {e:#}");
                return;
            }
        };

        let r_hash = match event {
            EventInternal::LnPaymentReceived { r_hash } => r_hash,
            _ => return,
        };

        runtime.spawn({
            let r_hash = r_hash.clone();
            let sender = self.sender.clone();
            async move {
                // NOTE(review): a broadcast send error usually just means no
                // active receiver — confirm logging at error level is wanted.
                if let Err(e) = sender.send(r_hash.clone()) {
                    tracing::error!(%r_hash, "Failed to send accepted invoice event. Error: {e:#}");
                }
            }
        });
    }

    fn events(&self) -> Vec<EventType> {
        vec![EventType::LnPaymentReceived]
    }
}
/// Waits until a lightning payment event for `watched_r_hash` arrives on the
/// global payment watcher channel; events for other invoices are skipped.
pub(crate) async fn watch_lightning_payment(watched_r_hash: String) -> Result<()> {
    tracing::debug!(%watched_r_hash, "Watching for lightning payment.");
    let mut subscriber = state::get_ln_payment_watcher().subscribe();
    loop {
        match subscriber.recv().await {
            Ok(r_hash) => {
                if watched_r_hash.eq(&r_hash) {
                    tracing::debug!(%watched_r_hash, "Received a watched lightning payment event.");
                    return Ok(());
                }
                tracing::debug!(%r_hash, %watched_r_hash, "Received a lightning payment event for an unknown lightning invoice.");
            }
            Err(e) => {
                // NOTE(review): receive errors end the watch with Ok(())
                // rather than an Err — confirm callers expect that.
                tracing::error!("Failed to receive lighting payment received event. Error: {e:#}");
                break;
            }
        }
    }
    tracing::debug!(%watched_r_hash, "Stopping lightning payment watch.");
    Ok(())
}
/// Watches for the funding address to receive the given amount.
///
/// Polls the wallet every 10 seconds, summing the unspent outputs on
/// `funding_address`, and returns once the total reaches `funding_amount`.
///
/// NOTE(review): a wallet query error also returns `Ok(())`, silently ending
/// the watch — confirm callers treat the return value accordingly.
pub(crate) async fn watch_funding_address(
    funding_address: Address,
    funding_amount: Amount,
) -> Result<()> {
    let node = state::get_node().clone();
    let bdk_node = node.inner.clone();
    loop {
        match bdk_node.get_unspent_txs(&funding_address).await {
            Ok(ref v) if v.is_empty() => {
                tracing::debug!(%funding_address, %funding_amount, "No tx found for address");
            }
            Ok(txs) => {
                // we sum up the total value in this output and check if it is big enough
                // for the order
                let total_unspent_amount_received = txs
                    .into_iter()
                    .map(|(_, amount)| amount.to_sat())
                    .sum::<u64>();
                if total_unspent_amount_received >= funding_amount.to_sat() {
                    tracing::info!(
                        amount = total_unspent_amount_received.to_string(),
                        address = funding_address.to_string(),
                        "Address has been funded enough"
                    );

                    return Ok(());
                }
                tracing::debug!(
                    amount = total_unspent_amount_received.to_string(),
                    address = funding_address.to_string(),
                    "Address has not enough funds yet"
                );
            }
            Err(err) => {
                tracing::error!("Could not get utxo for address {err:?}");
                return Ok(());
            }
        }
        tokio::time::sleep(Duration::from_secs(10)).await;
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/destination.rs | mobile/native/src/destination.rs | use crate::api::Destination;
use anyhow::anyhow;
use anyhow::ensure;
use anyhow::Context;
use anyhow::Result;
use bitcoin::address::NetworkUnchecked;
use bitcoin::Address;
use bitcoin::Amount;
use bitcoin::Network;
use std::str::FromStr;
/// Parses a user-supplied destination string as either a BIP-21 URI or a
/// plain on-chain address (tried in that order, against the node's network).
pub fn decode_destination(destination: String) -> Result<Destination> {
    let node = crate::state::get_node();
    let network = node.inner.network;

    // `or_else` keeps the on-chain fallback lazy: it only runs when BIP-21
    // parsing failed (`.or(...)` would evaluate it unconditionally).
    decode_bip21(&destination, network)
        .or_else(|_| decode_address(destination))
        .context("Failed to parse destination as Bolt11 invoice, Bip21 URI, or on chain address")
}
/// Parses `request` as a BIP-21 payment URI (without extras) and checks the
/// embedded address belongs to `network`.
///
/// Label and message are best-effort decoded, defaulting to empty strings;
/// the amount, if present, is converted to satoshis.
fn decode_bip21(request: &str, network: Network) -> Result<Destination> {
    let uri: bip21::Uri<'_, NetworkUnchecked, bip21::NoExtras> = request
        .try_into()
        .map_err(|_| anyhow!("request is not valid BIP-21 URI"))?;

    let uri = uri
        .require_network(network)
        .map_err(|e| anyhow!("Invalid network: {e:?}"))?;

    Ok(Destination::Bip21 {
        address: uri.address.to_string(),
        label: uri
            .label
            .and_then(|l| l.try_into().ok())
            .unwrap_or_default(),
        message: uri
            .message
            .and_then(|m| m.try_into().ok())
            .unwrap_or_default(),
        amount_sats: uri.amount.map(Amount::to_sat),
    })
}
fn decode_address(request: String) -> Result<Destination> {
ensure!(
Address::from_str(&request).is_ok(),
"request is not valid on-chain address"
);
Ok(Destination::OnChainAddress(request))
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/storage.rs | mobile/native/src/storage.rs | use crate::backup::RemoteBackupClient;
use crate::backup::DB_BACKUP_KEY;
use crate::backup::DB_BACKUP_NAME;
use crate::backup::DLC_BACKUP_KEY;
use crate::cipher::AesCipher;
use crate::db;
use anyhow::Result;
use bitcoin::secp256k1::SecretKey;
use bitcoin::Network;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use xxi_node::storage::sled::SledStorageProvider;
use xxi_node::storage::DlcStoreProvider;
use xxi_node::storage::KeyValue;
/// Node storage that mirrors every local DLC write to an encrypted remote
/// backup.
#[derive(Clone)]
pub struct TenTenOneNodeStorage {
    // Client used to push encrypted backups to the remote backup service.
    pub client: RemoteBackupClient,
    // Local sled-backed DLC store; the source of truth for reads.
    pub dlc_storage: Arc<SledStorageProvider>,
    // `<base>/<network>` directory holding all node data.
    pub data_dir: String,
    // `<data_dir>/backup` directory.
    pub backup_dir: String,
    pub network: Network,
}
impl TenTenOneNodeStorage {
    /// Sets up `<data_dir>/<network>` and its `backup` subdirectory
    /// (creating both if needed), opens the sled DLC store there, and builds
    /// a backup client keyed by `secret_key`.
    ///
    /// Panics if the directories cannot be created.
    pub fn new(data_dir: String, network: Network, secret_key: SecretKey) -> TenTenOneNodeStorage {
        let mut data_dir = PathBuf::from(data_dir);
        data_dir.push(network.to_string());

        if !data_dir.exists() {
            fs::create_dir_all(data_dir.as_path()).expect("Failed to create data dir");
        }

        let backup_dir = data_dir.join(Path::new("backup"));
        if !backup_dir.exists() {
            fs::create_dir_all(backup_dir.as_path()).expect("Failed to create backup dir");
        }

        let backup_dir = backup_dir.to_string_lossy().to_string();
        tracing::info!("Created backup dir at {backup_dir}");
        let data_dir = data_dir.to_string_lossy().to_string();

        let dlc_storage = Arc::new(SledStorageProvider::new(&data_dir));

        let client = RemoteBackupClient::new(AesCipher::new(secret_key));

        TenTenOneNodeStorage {
            dlc_storage,
            data_dir,
            backup_dir,
            network,
            client,
        }
    }

    /// Creates a full backup of the lightning and dlc data.
    ///
    /// Uploads a fresh database dump plus every exported DLC record,
    /// awaiting all uploads before returning.
    pub async fn full_backup(&self) -> Result<()> {
        tracing::info!("Running full backup");
        let mut handles = vec![];

        let db_backup = db::back_up()?;
        let value = fs::read(db_backup)?;
        let handle = self
            .client
            .backup(format!("{DB_BACKUP_KEY}/{DB_BACKUP_NAME}"), value);
        handles.push(handle);

        // Each DLC record is backed up under
        // `<DLC_BACKUP_KEY>/<kind-hex>/<key-hex>`.
        for dlc_backup in self.dlc_storage.export().into_iter() {
            let key = [
                DLC_BACKUP_KEY,
                &hex::encode([dlc_backup.kind]),
                &hex::encode(dlc_backup.key),
            ]
            .join("/");
            let handle = self.client.backup(key, dlc_backup.value);
            handles.push(handle);
        }

        futures::future::join_all(handles).await;
        tracing::info!("Successfully created a full backup!");
        Ok(())
    }
}
impl DlcStoreProvider for TenTenOneNodeStorage {
    /// Reads straight from the local sled store; the remote backup is
    /// write-only.
    fn read(&self, kind: u8, key: Option<Vec<u8>>) -> Result<Vec<KeyValue>> {
        self.dlc_storage.read(kind, key)
    }

    /// Writes locally first, then schedules an asynchronous remote backup of
    /// the same value under `<DLC_BACKUP_KEY>/<kind-hex>/<key-hex>`.
    fn write(&self, kind: u8, key: Vec<u8>, value: Vec<u8>) -> Result<()> {
        self.dlc_storage.write(kind, key.clone(), value.clone())?;

        let key = [DLC_BACKUP_KEY, &hex::encode([kind]), &hex::encode(key)].join("/");

        // Let the backup run asynchronously we don't really care if it is successful or not as the
        // next write may fix the issue. Note, if we want to handle failed backup attempts we
        // would need to remember those remote handles and handle a failure accordingly.
        self.client.backup(key, value).forget();

        Ok(())
    }

    /// Deletes locally first, then schedules an asynchronous remote delete of
    /// the matching backup key (or the whole `kind` prefix when `key` is
    /// `None`).
    fn delete(&self, kind: u8, key: Option<Vec<u8>>) -> Result<()> {
        self.dlc_storage.delete(kind, key.clone())?;

        let key = match key {
            Some(key) => [DLC_BACKUP_KEY, &hex::encode([kind]), &hex::encode(key)].join("/"),
            None => [DLC_BACKUP_KEY, &hex::encode([kind])].join("/"),
        };

        // Let the delete backup run asynchronously we don't really care if it is successful or not.
        // We may end up with a key that should have been deleted. That should hopefully not
        // be a problem. Note, if we want to handle failed backup attempts we would need to
        // remember those remote handles and handle a failure accordingly.
        self.client.delete(key).forget();

        Ok(())
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/orderbook.rs | mobile/native/src/orderbook.rs | use crate::config;
use crate::dlc;
use crate::event;
use crate::event::BackgroundTask;
use crate::event::EventInternal;
use crate::event::TaskStatus;
use crate::health::ServiceStatus;
use crate::state;
use crate::trade::funding_fee_event;
use crate::trade::funding_fee_event::FundingFeeEvent;
use crate::trade::order;
use crate::trade::order::FailureReason;
use crate::trade::position;
use anyhow::Context;
use anyhow::Result;
use bitcoin::secp256k1::SecretKey;
use bitcoin::secp256k1::SECP256K1;
use futures::SinkExt;
use futures::TryStreamExt;
use itertools::Itertools;
use parking_lot::Mutex;
use rust_decimal::Decimal;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use tokio::runtime::Runtime;
use tokio::sync::broadcast;
use tokio::sync::broadcast::error::RecvError;
use tokio::sync::watch;
use tokio_tungstenite_wasm as tungstenite;
use xxi_node::commons::best_ask_price;
use xxi_node::commons::best_bid_price;
use xxi_node::commons::ContractSymbol;
use xxi_node::commons::Direction;
use xxi_node::commons::Message;
use xxi_node::commons::Order;
use xxi_node::commons::OrderState;
use xxi_node::commons::OrderbookRequest;
use xxi_node::commons::Signature;
// Set to the same timeout as the p2p connection reconnect
const WS_RECONNECT_TIMEOUT: Duration = Duration::from_secs(1);
/// Spawn a long-running task on `runtime` that keeps a websocket connection to
/// the coordinator's orderbook alive.
///
/// On every (re-)connection the client authenticates by signing a challenge
/// with `secret_key`. Outgoing [`OrderbookRequest`]s received on `tx_websocket`
/// are forwarded to the websocket sink, while incoming messages are processed
/// by [`handle_orderbook_message`]. Online/offline transitions are reported
/// through `orderbook_status`; after a disconnect the task retries after
/// [`WS_RECONNECT_TIMEOUT`].
pub fn subscribe(
    secret_key: SecretKey,
    runtime: &Runtime,
    orderbook_status: watch::Sender<ServiceStatus>,
    fcm_token: String,
    tx_websocket: broadcast::Sender<OrderbookRequest>,
) -> Result<()> {
    runtime.spawn(async move {
        let url = format!(
            "ws://{}/api/orderbook/websocket",
            config::get_http_endpoint()
        );

        let pubkey = secret_key.public_key(SECP256K1);
        // Closure handed to the orderbook client to sign the authentication
        // message on every (re-)connect.
        let authenticate = move |msg| {
            let signature = secret_key.sign_ecdsa(msg);
            Signature { pubkey, signature }
        };

        // Need a Mutex as it's being accessed from websocket stream and pruning task
        let orders = Arc::new(Mutex::new(Vec::<Order>::new()));

        // An empty FCM token is treated as "no token".
        let fcm_token = if fcm_token.is_empty() {
            None
        } else {
            Some(fcm_token)
        };

        loop {
            let url = url.clone();
            let fcm_token = fcm_token.clone();
            let version = env!("CARGO_PKG_VERSION").to_string();
            let os = std::env::consts::OS.to_string();
            match orderbook_client::subscribe_with_authentication(url, authenticate, fcm_token, Some(version), Some(os))
                .await
            {
                Ok((mut sink, mut stream)) => {
                    if let Err(e) = orderbook_status.send(ServiceStatus::Online) {
                        tracing::warn!("Cannot update orderbook status: {e:#}");
                    };

                    // Forward requests from the in-app broadcast channel to the
                    // websocket sink until the channel closes or this task is
                    // aborted below.
                    let handle = tokio::spawn({
                        let tx_websocket = tx_websocket.clone();
                        async move {
                            let mut receiver = tx_websocket.subscribe();
                            loop {
                                match receiver.recv().await {
                                    Ok(message) => {
                                        let message = tungstenite::Message::try_from(message).expect("to fit into message");
                                        if let Err(e) = sink.send(message).await {
                                            tracing::error!("Failed to send message on websocket. {e:#}");
                                        }
                                    }
                                    Err(RecvError::Lagged(skip)) => {
                                        tracing::warn!(%skip, "Lagging behind on orderbook requests.");
                                    }
                                    Err(RecvError::Closed) => {
                                        tracing::error!(
                                            "Orderbook requests sender died! Channel closed."
                                        );
                                        break;
                                    }
                                }
                            }
                        }
                    });

                    // Best bid/ask per direction, used to de-duplicate price
                    // update events; reset on every reconnect.
                    let mut cached_best_price: HashMap<Direction, Decimal> = HashMap::new();
                    loop {
                        let msg = match stream.try_next().await {
                            Ok(Some(msg)) => msg,
                            Ok(None) => {
                                tracing::warn!("Orderbook WS stream closed");
                                break;
                            }
                            Err(error) => {
                                tracing::warn!(%error, "Orderbook WS stream closed with error");
                                break;
                            }
                        };

                        if let Err(e) =
                            handle_orderbook_message(orders.clone(), &mut cached_best_price, msg)
                                .await
                        {
                            tracing::error!("Failed to handle event: {e:#}");
                        }
                    }

                    // abort handler on sending messages over a lost websocket connection.
                    handle.abort();
                }
                Err(e) => {
                    tracing::error!("Could not start up orderbook client: {e:#}");
                }
            };

            if let Err(e) = orderbook_status.send(ServiceStatus::Offline) {
                tracing::warn!("Cannot update orderbook status: {e:#}");
            };

            // Retry at least every second. We do this as it the p2p connection is not debouncing,
            // thus it could happen after a restart that the p2p connection is established, but the
            // websocket connection is still waiting to retry. This could have implications when the
            // coordinator returns an error on the websocket which the app is not ready to process.
            //
            // Note, this is the same issue for why we originally moved to 10101 Messages, we should
            // think about a similar way to return protocol errors via the p2p connection.
            tracing::debug!(
                ?WS_RECONNECT_TIMEOUT,
                "Reconnecting to orderbook WS after timeout"
            );
            tokio::time::sleep(WS_RECONNECT_TIMEOUT).await;
        }
    });

    Ok(())
}
/// Handle a single raw text message received from the orderbook websocket.
///
/// Deserializes `msg` into a [`Message`] and dispatches on its variant:
/// maintains the local copy of open orders in `orders`, publishes app events
/// (prices, authentication, funding fees), and reacts to coordinator-initiated
/// flows (collaborative revert, trade/rollover errors, received payments).
/// `cached_best_price` is used to avoid re-publishing unchanged best bid/ask
/// prices.
///
/// Returns an error if the message cannot be deserialized or a handler fails;
/// the caller logs and continues with the next message.
async fn handle_orderbook_message(
    orders: Arc<Mutex<Vec<Order>>>,
    cached_best_price: &mut HashMap<Direction, Decimal>,
    msg: String,
) -> Result<()> {
    let msg =
        serde_json::from_str::<Message>(&msg).context("Could not deserialize orderbook message")?;

    tracing::trace!(%msg, "New orderbook message");

    match msg {
        Message::Authenticated(config) => {
            tracing::info!(
                referral_status = ?config.referral_status,
                "Successfully logged in to 10101 websocket api!");
            state::set_tentenone_config(config.clone());
            event::publish(&EventInternal::Authenticated(config));
        }
        Message::AllOrders(initial_orders) => {
            // Full snapshot: replaces whatever we held locally.
            let mut orders = orders.lock();
            if !orders.is_empty() {
                tracing::debug!(
                    "Received new set of initial orders from orderbook, \
                     replacing the previously stored orders"
                );
            } else {
                tracing::debug!(?orders, "Received all orders from orderbook");
            }

            *orders = initial_orders;

            // if we receive a full set of new orders, we can clear the cached best price as it is
            // outdated information.
            cached_best_price.clear();
            update_both_prices_if_needed(cached_best_price, &orders);
        }
        Message::NewOrder(order) => {
            let mut orders = orders.lock();
            let direction = order.direction;
            orders.push(order);

            // A new order can only move the best price on its own side of the
            // book: long orders affect the bid, short orders the ask.
            match direction {
                Direction::Long => update_bid_price_if_needed(cached_best_price, orders.as_slice()),
                Direction::Short => {
                    update_ask_price_if_needed(cached_best_price, orders.as_slice())
                }
            }
        }
        Message::DeleteOrder(order_id) => {
            let mut orders = orders.lock();
            let old_length = orders.len();
            orders.retain(|order| order.id != order_id);
            let new_length = orders.len();

            if old_length == new_length {
                tracing::warn!(%order_id, "Could not remove non-existing order");
            }

            update_both_prices_if_needed(cached_best_price, &orders);
        }
        Message::Update(updated_order) => {
            // Treated as remove-then-(re)insert; only orders that are still
            // `Open` are kept in the local book.
            let mut orders = orders.lock();
            let old_length = orders.len();
            orders.retain(|order| order.id != updated_order.id);
            let new_length = orders.len();

            if old_length == new_length {
                tracing::warn!(?updated_order, "Update without prior knowledge of order");
            }

            if updated_order.order_state == OrderState::Open {
                orders.push(updated_order);
            }

            update_both_prices_if_needed(cached_best_price, &orders);
        }
        Message::AllFundingFeeEvents(funding_fee_events) => {
            let funding_fee_events = funding_fee_events
                .into_iter()
                .map(FundingFeeEvent::from)
                .collect_vec();

            let new_funding_fee_events =
                funding_fee_event::handler::handle_unpaid_funding_fee_events(&funding_fee_events)
                    .context("Failed to handle funding fee events from coordinator")?;

            position::handler::handle_funding_fee_events(&new_funding_fee_events)
                .context("Failed to apply all funding fee events from coordinator")?;
        }
        Message::NextFundingRate(funding_rate) => {
            tracing::info!(?funding_rate, "Got next funding rate");

            event::publish(&EventInternal::NextFundingRate(funding_rate));
        }
        Message::FundingFeeEvent(funding_fee_event) => {
            let new_funding_fee_events =
                funding_fee_event::handler::handle_unpaid_funding_fee_events(&[
                    funding_fee_event.into()
                ])
                .context("Failed to handle funding fee event from coordinator")?;

            position::handler::handle_funding_fee_events(&new_funding_fee_events)
                .context("Failed to apply new funding fee event from coordinator")?;
        }
        Message::DlcChannelCollaborativeRevert {
            channel_id,
            coordinator_address,
            coordinator_amount,
            trader_amount,
            execution_price,
        } => {
            tracing::debug!(
                channel_id = %hex::encode(channel_id),
                "Received request to revert channel"
            );

            // Report progress to the UI while the revert is attempted.
            event::publish(&EventInternal::BackgroundNotification(
                BackgroundTask::CollabRevert(TaskStatus::Pending),
            ));

            if let Err(err) = dlc::collaborative_revert_channel(
                channel_id,
                coordinator_address,
                coordinator_amount,
                trader_amount,
                execution_price,
            ) {
                event::publish(&EventInternal::BackgroundNotification(
                    BackgroundTask::CollabRevert(TaskStatus::Failed(format!("{err:#}"))),
                ));
                tracing::error!("Could not collaboratively revert channel: {err:#}");
            } else {
                event::publish(&EventInternal::BackgroundNotification(
                    BackgroundTask::CollabRevert(TaskStatus::Success),
                ));
            }
        }
        msg @ Message::InvalidAuthentication(_) => {
            tracing::debug!(?msg, "Skipping message from orderbook");
        }
        Message::TradeError { order_id, error } => {
            order::handler::order_failed(
                Some(order_id),
                FailureReason::TradeResponse(error.to_string()),
                error.into(),
            )
            .context("Could not set order to failed")?;
        }
        Message::RolloverError { error } => {
            tracing::error!("Failed to rollover position: {error:#}");

            event::publish(&EventInternal::BackgroundNotification(
                BackgroundTask::Rollover(TaskStatus::Failed(format!("{error:#}"))),
            ));
        }
        Message::LnPaymentReceived { r_hash, amount } => {
            tracing::info!(r_hash, %amount, "Received a payment received event.");
            event::publish(&EventInternal::LnPaymentReceived { r_hash })
        }
    };

    Ok(())
}
fn update_both_prices_if_needed(
cached_best_price: &mut HashMap<Direction, Decimal>,
orders: &[Order],
) {
update_bid_price_if_needed(cached_best_price, orders);
update_ask_price_if_needed(cached_best_price, orders);
}
/// Publish a new best bid price if the bid derived from `orders` differs from
/// the cached value for [`Direction::Long`].
fn update_bid_price_if_needed(
    cached_best_price: &mut HashMap<Direction, Decimal>,
    orders: &[Order],
) {
    update_price_if_needed(
        cached_best_price,
        best_bid_price(orders, ContractSymbol::BtcUsd),
        Direction::Long,
    );
}
/// Publish a new best ask price if the ask derived from `orders` differs from
/// the cached value for [`Direction::Short`].
fn update_ask_price_if_needed(
    cached_best_price: &mut HashMap<Direction, Decimal>,
    orders: &[Order],
) {
    update_price_if_needed(
        cached_best_price,
        best_ask_price(orders, ContractSymbol::BtcUsd),
        Direction::Short,
    );
}
/// Publish `new_price` for `direction` if it differs from the last published
/// price, and remember it in `cached_best_price`.
///
/// A `new_price` of `None` (empty side of the book) is ignored; the cache is
/// deliberately left untouched so the last known price is not re-published
/// when liquidity returns at the same level.
fn update_price_if_needed(
    cached_best_price: &mut HashMap<Direction, Decimal>,
    new_price: Option<Decimal>,
    direction: Direction,
) {
    if let Some(new_price) = new_price {
        match cached_best_price.get(&direction) {
            // Unchanged best price: nothing to publish.
            Some(cached_price) if *cached_price == new_price => {}
            _ => {
                update_price(direction, new_price);
                // Bug fix: previously the price was only cached when a cached
                // value already existed, so the very first price for a
                // direction was re-published on every subsequent, identical
                // update.
                cached_best_price.insert(direction, new_price);
            }
        }
    }
}
/// Publish a price update event: the bid for [`Direction::Long`], the ask for
/// [`Direction::Short`].
fn update_price(direction: Direction, new_price: Decimal) {
    tracing::trace!(%new_price, direction = %direction, "New price");

    let notification = match direction {
        Direction::Long => EventInternal::BidPriceUpdateNotification(new_price),
        Direction::Short => EventInternal::AskPriceUpdateNotification(new_price),
    };
    event::publish(&notification);
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/hodl_invoice.rs | mobile/native/src/hodl_invoice.rs | use crate::commons::reqwest_client;
use crate::config;
use crate::dlc::get_node_key;
use crate::dlc::get_node_pubkey;
use anyhow::Result;
use bitcoin::Amount;
use reqwest::Url;
use xxi_node::commons;
/// A hodl invoice issued by the coordinator, together with the locally
/// generated pre-image that can later be revealed to settle it.
#[derive(Clone)]
pub struct HodlInvoice {
    /// Payment request (invoice) string as returned by the coordinator.
    pub payment_request: String,
    /// Base64-encoded pre-image; kept on this side until settlement.
    pub pre_image: String,
    /// Hash of the pre-image, identifying the payment.
    pub r_hash: String,
    /// Invoice amount.
    pub amount: Amount,
}
/// Request a hodl invoice over `amount` from the coordinator's `/api/invoice`
/// endpoint.
///
/// Generates a fresh pre-image locally, signs the invoice parameters with the
/// node key and sends them to the coordinator. Returns the coordinator's
/// payment request together with the local pre-image and its hash.
///
/// # Errors
///
/// Fails if signing the parameters fails, the HTTP request fails, or the
/// coordinator responds with a non-success status.
pub async fn get_hodl_invoice_from_coordinator(amount: Amount) -> Result<HodlInvoice> {
    // TODO(bonomat): we might want to store this in the db — otherwise the
    // pre-image is lost on restart before settlement.
    let pre_image = commons::create_pre_image();

    let client = reqwest_client();
    let url = format!("http://{}", config::get_http_endpoint());
    let url = Url::parse(&url).expect("correct URL");
    let url = url.join("/api/invoice")?;

    let invoice_params = commons::HodlInvoiceParams {
        trader_pubkey: get_node_pubkey(),
        amt_sats: amount.to_sat(),
        r_hash: pre_image.hash.clone(),
    };
    // Sign the parameters so the coordinator can verify they come from us.
    let invoice_params = commons::SignedValue::new(invoice_params, get_node_key())?;

    let response = client
        .post(url)
        .json(&invoice_params)
        .send()
        .await?
        .error_for_status()?;

    let payment_request = response.json::<String>().await?;

    let hodl_invoice = HodlInvoice {
        payment_request,
        pre_image: pre_image.get_base64_encoded_pre_image(),
        r_hash: pre_image.hash,
        amount,
    };

    Ok(hodl_invoice)
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/api.rs | mobile/native/src/api.rs | use crate::calculations;
use crate::channel_trade_constraints;
use crate::channel_trade_constraints::TradeConstraints;
use crate::commons::api::Price;
use crate::config;
use crate::config::api::Config;
use crate::config::api::Directories;
use crate::config::get_network;
use crate::db;
use crate::destination;
use crate::dlc;
use crate::dlc::get_storage;
pub use crate::dlc_channel::ChannelState;
pub use crate::dlc_channel::DlcChannel;
pub use crate::dlc_channel::SignedChannelState;
use crate::emergency_kit;
use crate::event;
use crate::event::api::FlutterSubscriber;
use crate::event::BackgroundTask;
use crate::event::EventInternal;
use crate::event::TaskStatus;
use crate::health;
use crate::logger;
use crate::max_quantity::max_quantity;
use crate::polls;
use crate::trade::funding_fee_event::handler::get_funding_fee_events;
use crate::trade::order;
use crate::trade::order::api::NewOrder;
use crate::trade::order::api::Order;
use crate::trade::position;
use crate::trade::position::api::Position;
use crate::trade::trades::api::Trade;
use crate::trade::users;
use crate::unfunded_channel_opening_order;
use crate::unfunded_channel_opening_order::ExternalFunding;
use anyhow::ensure;
use anyhow::Context;
use anyhow::Result;
use bdk::FeeRate;
use bitcoin::Amount;
use flutter_rust_bridge::StreamSink;
use flutter_rust_bridge::SyncReturn;
use lightning::chain::chaininterface::ConfirmationTarget as LnConfirmationTarget;
use rust_decimal::prelude::FromPrimitive;
use rust_decimal::prelude::ToPrimitive;
use rust_decimal::Decimal;
use std::backtrace::Backtrace;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use time::OffsetDateTime;
use tokio::sync::broadcast;
use tokio::sync::broadcast::channel;
use xxi_node::commons::ChannelOpeningParams;
pub use xxi_node::commons::ContractSymbol;
pub use xxi_node::commons::Direction;
use xxi_node::commons::OrderbookRequest;
use xxi_node::seed::Bip39Seed;
/// Initialise logging infrastructure for Rust.
///
/// Every [`logger::LogEntry`] produced on the Rust side is forwarded to the
/// given Flutter `StreamSink`.
pub fn init_logging(sink: StreamSink<logger::LogEntry>) {
    logger::create_log_stream(sink)
}
/// App-side mirror of the coordinator configuration
/// ([`xxi_node::commons::TenTenOneConfig`]) received on login.
#[derive(Clone, Debug, Default)]
pub struct TenTenOneConfig {
    pub liquidity_options: Vec<LiquidityOption>,
    /// Minimum tradable quantity.
    pub min_quantity: u64,
    pub maintenance_margin_rate: f32,
    pub referral_status: ReferralStatus,
    /// Maximum leverage the coordinator allows.
    pub max_leverage: u8,
}

impl From<xxi_node::commons::TenTenOneConfig> for TenTenOneConfig {
    fn from(value: xxi_node::commons::TenTenOneConfig) -> Self {
        Self {
            liquidity_options: value
                .liquidity_options
                .into_iter()
                .map(|lo| lo.into())
                .collect(),
            min_quantity: value.min_quantity,
            maintenance_margin_rate: value.maintenance_margin_rate,
            referral_status: value.referral_status.into(),
            max_leverage: value.max_leverage,
        }
    }
}
/// Assembles the wallet info and publishes wallet info update event.
#[tokio::main(flavor = "current_thread")]
pub async fn refresh_wallet_info() -> Result<()> {
    dlc::refresh_wallet_info().await?;
    Ok(())
}

/// Run a full on-chain wallet sync with the given address `stop_gap`.
#[tokio::main(flavor = "current_thread")]
pub async fn full_sync(stop_gap: usize) -> Result<()> {
    dlc::full_sync(stop_gap).await?;
    Ok(())
}
/// Flutter-facing mirror of [`xxi_node::commons::Poll`].
#[derive(Debug, Clone)]
pub struct Poll {
    pub id: i32,
    pub poll_type: PollType,
    pub question: String,
    pub choices: Vec<Choice>,
}

/// A single selectable answer of a [`Poll`].
#[derive(Debug, Clone)]
pub struct Choice {
    pub id: i32,
    pub value: String,
    /// Whether the user may edit the answer text.
    pub editable: bool,
}

/// Kind of poll; currently only single-choice polls exist.
#[derive(Debug, Clone)]
pub enum PollType {
    SingleChoice,
}

impl From<xxi_node::commons::Poll> for Poll {
    fn from(value: xxi_node::commons::Poll) -> Self {
        Poll {
            id: value.id,
            poll_type: value.poll_type.into(),
            question: value.question,
            choices: value
                .choices
                .into_iter()
                .map(|choice| choice.into())
                .collect(),
        }
    }
}

impl From<xxi_node::commons::PollType> for PollType {
    fn from(value: xxi_node::commons::PollType) -> Self {
        match value {
            xxi_node::commons::PollType::SingleChoice => PollType::SingleChoice,
        }
    }
}

impl From<xxi_node::commons::Choice> for Choice {
    fn from(value: xxi_node::commons::Choice) -> Self {
        Choice {
            id: value.id,
            value: value.value,
            editable: value.editable,
        }
    }
}

// Reverse conversion, used when sending a selected choice back to the
// coordinator.
impl From<Choice> for xxi_node::commons::Choice {
    fn from(value: Choice) -> Self {
        xxi_node::commons::Choice {
            id: value.id,
            value: value.value,
            editable: value.editable,
        }
    }
}
/// Fetch unanswered polls from the coordinator and return the first one, if
/// any.
#[tokio::main(flavor = "current_thread")]
pub async fn fetch_poll() -> Result<Option<Poll>> {
    let polls: Vec<Poll> = polls::get_new_polls()
        .await?
        .into_iter()
        .map(|poll| poll.into())
        .collect();
    // For now we just return the first poll
    Ok(polls.first().cloned())
}

/// Submit the user's answer for poll `poll_id` to the coordinator.
#[tokio::main(flavor = "current_thread")]
pub async fn post_selected_choice(selected_choice: Choice, poll_id: i32) -> Result<()> {
    let trader_pk = dlc::get_node_pubkey();
    polls::answer_poll(selected_choice.into(), poll_id, trader_pk).await?;
    Ok(())
}

/// Clear the local cache of answered polls so they may be shown again.
pub fn reset_all_answered_polls() -> Result<SyncReturn<()>> {
    db::delete_answered_poll_cache()?;
    Ok(SyncReturn(()))
}

/// Mark a poll as ignored locally so it is not shown again.
pub fn ignore_poll(poll_id: i32) -> Result<SyncReturn<()>> {
    polls::ignore_poll(poll_id)?;
    Ok(SyncReturn(()))
}
/// One entry of the wallet history shown in the UI.
#[derive(Clone, Debug)]
pub struct WalletHistoryItem {
    pub flow: PaymentFlow,
    pub amount_sats: u64,
    pub timestamp: u64,
    pub status: Status,
    pub wallet_type: WalletHistoryItemType,
}

/// Type-specific details of a [`WalletHistoryItem`].
#[derive(Clone, Debug)]
pub enum WalletHistoryItemType {
    /// A plain on-chain transaction.
    OnChain {
        txid: String,
        fee_sats: Option<u64>,
        confirmations: u64,
    },
    /// A Lightning payment.
    Lightning {
        payment_hash: String,
        description: String,
        payment_preimage: Option<String>,
        invoice: Option<String>,
        fee_msat: Option<u64>,
        expiry_timestamp: Option<u64>,
        funding_txid: Option<String>,
    },
    /// A trade executed inside the DLC channel.
    Trade {
        order_id: String,
        fee_sat: u64,
        pnl: Option<i64>,
        contracts: u64,
        direction: String,
    },
    /// The funding transaction of a DLC channel.
    DlcChannelFunding {
        funding_txid: String,
        // This fee represents the total fee reserved for all off-chain transactions, i.e. for the
        // fund/buffer/cet/refund. Only the part for the fund tx has been paid for now
        funding_tx_fee_sats: Option<u64>,
        confirmations: u64,
        // The amount we hold in the channel
        our_channel_input_amount_sats: u64,
    },
}
/// Direction of funds for a wallet history entry.
#[derive(Clone, Debug, Default, Copy)]
pub enum PaymentFlow {
    #[default]
    Inbound,
    Outbound,
}

impl fmt::Display for PaymentFlow {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Lowercase labels, matching what the UI and logs expect.
        let label = match self {
            PaymentFlow::Inbound => "inbound",
            PaymentFlow::Outbound => "outbound",
        };
        f.write_str(label)
    }
}
/// Settlement status of a wallet history entry.
#[derive(Clone, Debug, Default)]
pub enum Status {
    #[default]
    Pending,
    Confirmed,
    Expired,
    Failed,
}
/// Compute the margin in sats required for `quantity` at `price` with
/// `leverage`.
pub fn calculate_margin(price: f32, quantity: f32, leverage: f32) -> SyncReturn<u64> {
    SyncReturn(calculations::calculate_margin(price, quantity, leverage))
}

/// Compute the quantity that can be traded with `margin` sats at `price` with
/// `leverage`.
pub fn calculate_quantity(price: f32, margin: u64, leverage: f32) -> SyncReturn<f32> {
    SyncReturn(calculations::calculate_quantity(price, margin, leverage))
}

/// Compute the liquidation price for a position opened at `price` with
/// `leverage` in the given `direction`, using the maintenance margin rate
/// currently configured by the coordinator.
pub fn calculate_liquidation_price(
    price: f32,
    leverage: f32,
    direction: Direction,
) -> SyncReturn<f32> {
    let maintenance_margin_rate = dlc::get_maintenance_margin_rate();
    SyncReturn(calculations::calculate_liquidation_price(
        price,
        leverage,
        direction,
        maintenance_margin_rate,
    ))
}
/// Compute the profit/loss in sats for closing a position opened at
/// `opening_price` at `closing_price`.
pub fn calculate_pnl(
    opening_price: f32,
    closing_price: Price,
    quantity: f32,
    leverage: f32,
    direction: Direction,
) -> SyncReturn<i64> {
    // TODO: Handle the result and don't just return 0
    SyncReturn(
        calculations::calculate_pnl(
            opening_price,
            closing_price.into(),
            quantity,
            leverage,
            direction,
        )
        .unwrap_or(0),
    )
}

/// Calculate the order matching fee that the app user will have to pay for if the corresponding
/// trade gets executed.
///
/// This is only an estimate as the price may change slightly. Also, the coordinator could choose to
/// change the fee structure independently.
pub fn order_matching_fee(quantity: f32, price: f32) -> SyncReturn<u64> {
    let price = Decimal::from_f32(price).expect("price to fit in Decimal");

    let fee_rate = dlc::get_order_matching_fee_rate(false);

    let order_matching_fee =
        xxi_node::commons::order_matching_fee(quantity, price, fee_rate).to_sat();

    SyncReturn(order_matching_fee)
}
/// Calculates the max quantity the user is able to trade considering the trader and the coordinator
/// balances and constraints. Note, this is not an exact maximum, but a very close approximation.
///
/// Returns 0 if the maximum cannot be computed.
pub fn calculate_max_quantity(
    price: f32,
    trader_leverage: f32,
    trader_direction: Direction,
) -> SyncReturn<u64> {
    let price = Decimal::from_f32(price).expect("to fit");
    let max_quantity =
        max_quantity(price, trader_leverage, trader_direction).unwrap_or(Decimal::ZERO);

    let max_quantity = max_quantity.floor().to_u64().expect("to fit");
    SyncReturn(max_quantity)
}
/// Submit a new order to the coordinator; returns the order id as a string.
#[tokio::main(flavor = "current_thread")]
pub async fn submit_order(order: NewOrder) -> Result<String> {
    order::handler::submit_order(order.into(), None)
        .await
        .map_err(anyhow::Error::new)
        .map(|id| id.to_string())
}

/// Submit an order that also opens a new DLC channel, reserving the given
/// amounts (in sats) for coordinator and trader.
#[tokio::main(flavor = "current_thread")]
pub async fn submit_channel_opening_order(
    order: NewOrder,
    coordinator_reserve: u64,
    trader_reserve: u64,
) -> Result<String> {
    order::handler::submit_order(
        order.into(),
        Some(ChannelOpeningParams {
            coordinator_reserve: Amount::from_sat(coordinator_reserve),
            trader_reserve: Amount::from_sat(trader_reserve),
            pre_image: None,
        }),
    )
    .await
    .map_err(anyhow::Error::new)
    .map(|id| id.to_string())
}

/// Fetch all orders to be displayed in the UI.
#[tokio::main(flavor = "current_thread")]
pub async fn get_orders() -> Result<Vec<Order>> {
    let orders = order::handler::get_orders_for_ui()
        .await?
        .into_iter()
        .map(|order| order.into())
        .collect::<Vec<Order>>();

    Ok(orders)
}

/// Fetch all open positions.
#[tokio::main(flavor = "current_thread")]
pub async fn get_positions() -> Result<Vec<Position>> {
    let positions = position::handler::get_positions()?
        .into_iter()
        .map(|order| order.into())
        .collect::<Vec<Position>>();

    Ok(positions)
}

/// Fetch the trade history, merging executed trades with funding fee events.
#[tokio::main(flavor = "current_thread")]
pub async fn get_trades() -> Result<Vec<Trade>> {
    let trades = crate::trade::trades::handler::get_trades()?
        .into_iter()
        .map(|trade| trade.into());

    let funding_fee_events = get_funding_fee_events()?.into_iter().map(|e| e.into());

    let trades = trades.chain(funding_fee_events).collect();

    Ok(trades)
}
// Emergency kit actions: manual recovery tools exposed to the UI.

/// Emergency kit: mark all orders stuck in `Filling` as failed.
pub fn set_filling_orders_to_failed() -> Result<()> {
    emergency_kit::set_filling_orders_to_failed()
}

/// Emergency kit: delete the locally stored position.
pub fn delete_position() -> Result<()> {
    emergency_kit::delete_position()
}

/// Emergency kit: recreate the locally stored position.
pub fn recreate_position() -> Result<()> {
    emergency_kit::recreate_position()
}

/// Emergency kit: re-send the settle-finalize message to the coordinator.
pub fn resend_settle_finalize_message() -> Result<()> {
    emergency_kit::resend_settle_finalize_message()
}

/// Subscribe the Flutter side to internal events via the event hub.
pub fn subscribe(stream: StreamSink<event::api::Event>) {
    tracing::debug!("Subscribing flutter to event hub");
    event::subscribe(FlutterSubscriber::new(stream))
}
/// Wrapper for Flutter purposes - can throw an exception.
///
/// Starts the backend on first invocation; on a Flutter hot-restart (when the
/// websocket sender already exists in global state) it only re-sends the
/// authentication message instead of starting a second node.
pub fn run_in_flutter(seed_dir: String, fcm_token: String) -> Result<()> {
    match crate::state::try_get_websocket() {
        None => {
            let (tx_websocket, _rx) = channel::<OrderbookRequest>(10);
            run_internal(
                seed_dir,
                fcm_token,
                tx_websocket.clone(),
                IncludeBacktraceOnPanic::Yes,
            )
            .context("Failed to start the backend")?;

            crate::state::set_websocket(tx_websocket);
        }
        Some(tx_websocket) => {
            // In case of a hot-restart we do not start the node again as it is already running.
            // However, we need to re-send the authentication message to get the initial data from
            // the coordinator and trigger a new user login event.
            tracing::info!("Re-sending authentication message");
            let signature = orderbook_client::create_auth_message_signature(move |msg| {
                xxi_node::commons::Signature {
                    pubkey: dlc::get_node_pubkey(),
                    signature: dlc::get_node_key().sign_ecdsa(msg),
                }
            });

            let version = env!("CARGO_PKG_VERSION").to_string();
            let os = std::env::consts::OS.to_string();
            let runtime = crate::state::get_or_create_tokio_runtime()?;
            runtime.block_on(async {
                tx_websocket.send(OrderbookRequest::Authenticate {
                    fcm_token: Some(fcm_token),
                    version: Some(version),
                    os: Some(os),
                    signature,
                })
            })?;
        }
    };

    Ok(())
}
/// Wrapper for the tests.
///
/// Needed as we do not want to have a hot restart handling in the tests and also can't expose a
/// channel::Sender through frb.
pub fn run_in_test(seed_dir: String) -> Result<()> {
    let (tx_websocket, _rx) = channel::<OrderbookRequest>(10);
    run_internal(
        seed_dir,
        "".to_string(),
        tx_websocket,
        IncludeBacktraceOnPanic::No,
    )
}

/// Whether a global panic hook printing a backtrace should be installed.
#[derive(PartialEq)]
pub enum IncludeBacktraceOnPanic {
    Yes,
    No,
}

/// Store the app configuration and directories in global state.
pub fn set_config(config: Config, app_dir: String, seed_dir: String) -> Result<()> {
    crate::state::set_config((config, Directories { app_dir, seed_dir }).into());
    Ok(())
}

/// Run a full remote backup of the local database and node data.
#[tokio::main(flavor = "current_thread")]
pub async fn full_backup() -> Result<()> {
    db::init_db(&config::get_data_dir(), get_network())?;
    get_storage().full_backup().await
}
/// Shared startup path for [`run_in_flutter`] and [`run_in_test`].
///
/// Installs an abort-on-panic hook (optionally), initializes the database,
/// loads or creates the BIP 39 seed for the configured network, and starts the
/// DLC node.
fn run_internal(
    seed_dir: String,
    fcm_token: String,
    tx_websocket: broadcast::Sender<OrderbookRequest>,
    backtrace_on_panic: IncludeBacktraceOnPanic,
) -> Result<()> {
    if backtrace_on_panic == IncludeBacktraceOnPanic::Yes {
        // Any panic in a task is considered fatal: log, print the backtrace
        // and abort the whole process.
        std::panic::set_hook(
            #[allow(clippy::print_stderr)]
            Box::new(|info| {
                let backtrace = Backtrace::force_capture();

                tracing::error!(%info, "Aborting after panic in task");
                eprintln!("{backtrace}");

                std::process::abort()
            }),
        );
    }

    db::init_db(&config::get_data_dir(), get_network())?;

    let runtime = crate::state::get_or_create_tokio_runtime()?;

    // Seeds are namespaced per network to avoid mixing mainnet/testnet keys.
    let seed_dir = Path::new(&seed_dir).join(get_network().to_string());
    let seed_path = seed_dir.join("seed");
    let seed = Bip39Seed::initialize(&seed_path)?;
    crate::state::set_seed(seed.clone());

    let (_health, tx) = health::Health::new(runtime);

    dlc::run(runtime, tx, fcm_token, tx_websocket)
}
/// Derive a brand-new on-chain receive address.
pub fn get_new_address() -> Result<String> {
    dlc::get_new_address()
}

/// Return an on-chain receive address that has not been used yet.
pub fn get_unused_address() -> Result<String> {
    dlc::get_unused_address()
}
/// Collaboratively close the DLC channel, reporting progress to the UI via
/// background-task notifications.
#[tokio::main(flavor = "current_thread")]
pub async fn close_channel() -> Result<()> {
    event::publish(&EventInternal::BackgroundNotification(
        BackgroundTask::CloseChannel(TaskStatus::Pending),
    ));
    // On any error, surface a failure notification before propagating.
    let fail = |e: &anyhow::Error| {
        event::publish(&EventInternal::BackgroundNotification(
            BackgroundTask::CloseChannel(TaskStatus::Failed(format!("{e:#}"))),
        ))
    };

    dlc::close_channel(false).await.inspect_err(fail)?;

    // wait a bit so that the sync can find the broadcasted transaction.
    tokio::time::sleep(Duration::from_millis(500)).await;
    dlc::refresh_wallet_info().await.inspect_err(fail)?;

    event::publish(&EventInternal::BackgroundNotification(
        BackgroundTask::CloseChannel(TaskStatus::Success),
    ));

    Ok(())
}

/// Force-close the DLC channel (unilateral close).
#[tokio::main(flavor = "current_thread")]
pub async fn force_close_channel() -> Result<()> {
    dlc::close_channel(true).await
}

/// Return the current trade constraints derived from the channel state.
pub fn channel_trade_constraints() -> Result<SyncReturn<TradeConstraints>> {
    let trade_constraints = channel_trade_constraints::channel_trade_constraints()?;
    Ok(SyncReturn(trade_constraints))
}
/// Flutter-facing mirror of [`xxi_node::commons::LiquidityOption`].
#[derive(Debug, Clone)]
pub struct LiquidityOption {
    pub id: i32,
    /// Display order among the offered options.
    pub rank: usize,
    pub title: String,
    /// the amount the trader can trade up to
    pub trade_up_to_sats: u64,
    /// min deposit in sats
    pub min_deposit_sats: u64,
    /// max deposit in sats
    pub max_deposit_sats: u64,
    /// min fee in sats
    pub min_fee_sats: u64,
    pub fee_percentage: f64,
    pub coordinator_leverage: f32,
    pub active: bool,
}

impl From<xxi_node::commons::LiquidityOption> for LiquidityOption {
    fn from(value: xxi_node::commons::LiquidityOption) -> Self {
        LiquidityOption {
            id: value.id,
            rank: value.rank,
            title: value.title,
            trade_up_to_sats: value.trade_up_to_sats,
            min_deposit_sats: value.min_deposit_sats,
            max_deposit_sats: value.max_deposit_sats,
            min_fee_sats: value.min_fee_sats,
            fee_percentage: value.fee_percentage,
            coordinator_leverage: value.coordinator_leverage,
            active: value.active,
        }
    }
}
/// An on-chain payment request: a plain address plus its BIP 21 URI form.
pub struct PaymentRequest {
    /// The bare Bitcoin address.
    pub address: String,
    /// BIP 21 URI (`bitcoin:<address>[?amount=...]`).
    pub bip21: String,
}
/// Build an on-chain [`PaymentRequest`] for a fresh, unused address.
///
/// When `amount_sats` is given it is encoded (in BTC) as the BIP 21 `amount`
/// query parameter. `_description` is currently unused.
pub fn create_payment_request(
    amount_sats: Option<u64>,
    _description: String,
) -> Result<PaymentRequest> {
    let address = dlc::get_unused_address()?;

    let mut bip21 = format!("bitcoin:{address}");
    if let Some(amount) = amount_sats {
        bip21.push_str(&format!("?amount={}", Amount::from_sat(amount).to_btc()));
    }

    Ok(PaymentRequest { address, bip21 })
}
/// Fee configuration for an on-chain transaction.
pub enum FeeConfig {
    /// The fee rate is derived from the configured priority.
    Priority(ConfirmationTarget),
    /// The fee rate is explicitly configured.
    FeeRate { sats_per_vbyte: f32 },
}

impl From<FeeConfig> for xxi_node::FeeConfig {
    fn from(value: FeeConfig) -> Self {
        match value {
            FeeConfig::Priority(target) => xxi_node::FeeConfig::Priority(target.into()),
            FeeConfig::FeeRate { sats_per_vbyte } => {
                xxi_node::FeeConfig::FeeRate(FeeRate::from_sat_per_vb(sats_per_vbyte))
            }
        }
    }
}
/// Analogous to [`lightning::chain::chaininterface::ConfirmationTarget`] but for the Flutter API
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ConfirmationTarget {
    Minimum,
    Background,
    Normal,
    HighPriority,
}

impl From<ConfirmationTarget> for LnConfirmationTarget {
    fn from(value: ConfirmationTarget) -> Self {
        match value {
            // `Minimum` maps to LDK's `MempoolMinimum` target.
            ConfirmationTarget::Minimum => LnConfirmationTarget::MempoolMinimum,
            ConfirmationTarget::Background => LnConfirmationTarget::Background,
            ConfirmationTarget::Normal => LnConfirmationTarget::Normal,
            ConfirmationTarget::HighPriority => LnConfirmationTarget::HighPriority,
        }
    }
}
/// Result of a fee estimate: the rate used and the resulting absolute fee.
pub struct FeeEstimation {
    pub sats_per_vbyte: f32,
    pub total_sats: u64,
}
/// Calculate the fees for an on-chain transaction, using the 3 default fee rates (background,
/// normal, and high priority). This both estimates the fee rate and calculates the TX size to get
/// the overall fee for a given TX.
///
/// Returns one [`FeeEstimation`] per target, in the order of `TARGETS`.
pub fn calculate_all_fees_for_on_chain(address: String) -> Result<Vec<FeeEstimation>> {
    const TARGETS: [ConfirmationTarget; 4] = [
        ConfirmationTarget::Minimum,
        ConfirmationTarget::Background,
        ConfirmationTarget::Normal,
        ConfirmationTarget::HighPriority,
    ];

    let runtime = crate::state::get_or_create_tokio_runtime()?;
    runtime.block_on(async {
        let mut fees = Vec::with_capacity(TARGETS.len());

        for confirmation_target in TARGETS {
            let fee_rate_sats_per_vb = fee_rate(confirmation_target);
            let fee_config = FeeConfig::Priority(confirmation_target);
            let absolute_fee = dlc::estimate_payment_fee(&address, fee_config).await?;

            fees.push(FeeEstimation {
                sats_per_vbyte: fee_rate_sats_per_vb,
                total_sats: absolute_fee.to_sat(),
            })
        }

        Ok(fees)
    })
}
/// Estimate the absolute fee for paying to `address` at an explicit fee rate.
#[tokio::main(flavor = "current_thread")]
pub async fn calculate_fee_estimate(
    address: String,
    fee_rate_sats_per_vb: f32,
) -> Result<FeeEstimation> {
    let estimate = dlc::estimate_payment_fee(
        &address,
        FeeConfig::FeeRate {
            sats_per_vbyte: fee_rate_sats_per_vb,
        },
    )
    .await?;

    Ok(FeeEstimation {
        sats_per_vbyte: fee_rate_sats_per_vb,
        total_sats: estimate.to_sat(),
    })
}

/// Return the current fee rate (sats/vbyte) for the given confirmation target.
pub fn fee_rate(confirmation_target: ConfirmationTarget) -> f32 {
    dlc::get_fee_rate_for_target(confirmation_target.into()).as_sat_per_vb()
}
/// Send `amount` sats on-chain to `address` with the given fee configuration;
/// returns the transaction id as a string.
#[tokio::main(flavor = "current_thread")]
pub async fn send_payment(amount: u64, address: String, fee: FeeConfig) -> Result<String> {
    let txid = dlc::send_payment(amount, address, fee).await?;

    Ok(txid.to_string())
}

/// A recorded login (id plus date string).
pub struct LastLogin {
    pub id: i32,
    pub date: String,
}

/// Return the wallet's BIP 39 seed phrase as a list of words.
pub fn get_seed_phrase() -> SyncReturn<Vec<String>> {
    SyncReturn(dlc::get_seed_phrase())
}
/// Restore the wallet from a BIP 39 seed phrase, writing the seed to
/// `target_seed_file_path`.
#[tokio::main(flavor = "current_thread")]
pub async fn restore_from_seed_phrase(
    seed_phrase: String,
    target_seed_file_path: String,
) -> Result<()> {
    let file_path = PathBuf::from(target_seed_file_path);
    tracing::info!("Restoring seed from phrase to {:?}", file_path);

    dlc::restore_from_mnemonic(&seed_phrase, file_path.as_path()).await?;
    Ok(())
}

/// Create a brand-new mnemonic seed at `target_seed_file_path`.
pub fn init_new_mnemonic(target_seed_file_path: String) -> Result<()> {
    let file_path = PathBuf::from(target_seed_file_path);
    tracing::info!("Creating a new seed in {:?}", file_path);

    dlc::init_new_mnemonic(file_path.as_path())
}

/// Enroll or update a user in the beta program
#[tokio::main(flavor = "current_thread")]
pub async fn register_beta(contact: String, referral_code: Option<String>) -> Result<()> {
    let version = env!("CARGO_PKG_VERSION").to_string();
    users::register_beta(contact, version, referral_code).await
}
/// Flutter-facing mirror of [`xxi_node::commons::User`].
#[derive(Debug)]
pub struct User {
    pub pubkey: String,
    pub contact: Option<String>,
    pub nickname: Option<String>,
}

impl From<xxi_node::commons::User> for User {
    fn from(value: xxi_node::commons::User) -> Self {
        User {
            pubkey: value.pubkey.to_string(),
            contact: value.contact,
            nickname: value.nickname,
        }
    }
}

/// Fetch this user's registration details from the coordinator.
#[tokio::main(flavor = "current_thread")]
pub async fn get_user_details() -> Result<User> {
    users::get_user_details().await.map(|user| user.into())
}
/// A decoded payment destination entered or scanned by the user.
pub enum Destination {
    /// A BOLT 11 Lightning invoice.
    Bolt11 {
        description: String,
        amount_sats: u64,
        timestamp: u64,
        payee: String,
        expiry: u64,
    },
    /// A plain on-chain address.
    OnChainAddress(String),
    /// A BIP 21 URI, possibly carrying label/message/amount parameters.
    Bip21 {
        address: String,
        label: String,
        message: String,
        amount_sats: Option<u64>,
    },
}

/// Decode a user-provided destination string into a [`Destination`].
///
/// # Errors
///
/// Fails if `destination` is empty or cannot be decoded.
pub fn decode_destination(destination: String) -> Result<Destination> {
    ensure!(!destination.is_empty(), "Destination must be set");

    destination::decode_destination(destination)
}
/// Return this node's public key as a string.
pub fn get_node_id() -> SyncReturn<String> {
    SyncReturn(dlc::get_node_pubkey().to_string())
}

/// Estimated fee reserve (sats) needed for the channel's off-chain
/// transactions.
pub fn get_estimated_channel_fee_reserve() -> Result<SyncReturn<u64>> {
    let reserve = dlc::estimated_fee_reserve()?;

    Ok(SyncReturn(reserve.to_sat()))
}

/// Estimated on-chain fee (sats) for the channel funding transaction.
pub fn get_estimated_funding_tx_fee() -> Result<SyncReturn<u64>> {
    let fee = dlc::estimated_funding_tx_fee()?;

    Ok(SyncReturn(fee.to_sat()))
}

/// Unix timestamp of the next contract expiry for the given network name.
pub fn get_expiry_timestamp(network: String) -> SyncReturn<i64> {
    let network = config::api::parse_network(&network);
    SyncReturn(
        xxi_node::commons::calculate_next_expiry(OffsetDateTime::now_utc(), network)
            .unix_timestamp(),
    )
}

/// Hex-encoded id of the signed DLC channel, if one exists.
pub fn get_dlc_channel_id() -> Result<Option<String>> {
    let dlc_channel_id =
        dlc::get_signed_dlc_channel()?.map(|channel| hex::encode(channel.channel_id));

    Ok(dlc_channel_id)
}
/// List all known DLC channels, converted into the bridge representation.
pub fn list_dlc_channels() -> Result<Vec<DlcChannel>> {
    let mut channels = Vec::new();
    // Two-step conversion: node channel -> dlc::DlcChannel -> bridge DlcChannel.
    for channel in dlc::list_dlc_channels()?.iter() {
        channels.push(DlcChannel::from(dlc::DlcChannel::from(channel)));
    }
    Ok(channels)
}
/// Emergency kit: delete the DLC channel with the given id.
/// (Presumably the hex id as returned by `get_dlc_channel_id` — confirm.)
pub fn delete_dlc_channel(dlc_channel_id: String) -> Result<()> {
    emergency_kit::delete_dlc_channel(dlc_channel_id)
}
/// Generate a fresh random display name.
pub fn get_new_random_name() -> SyncReturn<String> {
    let name = crate::names::get_new_name();
    SyncReturn(name)
}
/// Update the user's nickname.
#[tokio::main(flavor = "current_thread")]
pub async fn update_nickname(nickname: String) -> Result<()> {
    users::update_username(nickname).await
}
/// Emergency kit: roll the channel state back to the last stable state.
/// Logs a warning first, since this is a destructive recovery action.
pub fn roll_back_channel_state() -> Result<()> {
    tracing::warn!(
        "Executing emergency kit! Attempting to rollback channel state to last stable state"
    );
    dlc::roll_back_channel_state()
}
/// Bridge-side referral status, mirroring `xxi_node::commons::ReferralStatus`.
#[derive(Clone, Debug, Default)]
pub struct ReferralStatus {
    // The user's own referral code.
    pub referral_code: String,
    pub number_of_activated_referrals: usize,
    pub number_of_total_referrals: usize,
    pub referral_tier: usize,
    // Fee bonus factor; converted from the node-side decimal representation.
    pub referral_fee_bonus: f32,
    /// The type of this referral status
    pub bonus_status_type: BonusStatusType,
}
/// Why (if at all) the user receives a referral bonus.
#[derive(Clone, Debug, Default)]
pub enum BonusStatusType {
    // No bonus; also used when the node-side status is absent.
    #[default]
    None,
    /// The bonus is because he referred enough users
    Referral,
    /// The user has been referred and gets a bonus
    Referent,
}
impl From<xxi_node::commons::BonusStatusType> for BonusStatusType {
    /// Map the node-side bonus status onto the bridge enum.
    /// Note: the node-side type has no `None` variant; absence is modelled
    /// by `Option` at the call site.
    fn from(value: xxi_node::commons::BonusStatusType) -> Self {
        use xxi_node::commons::BonusStatusType as Upstream;
        match value {
            Upstream::Referral => Self::Referral,
            Upstream::Referent => Self::Referent,
        }
    }
}
impl From<xxi_node::commons::ReferralStatus> for ReferralStatus {
fn from(value: xxi_node::commons::ReferralStatus) -> Self {
ReferralStatus {
referral_code: value.referral_code,
referral_tier: value.referral_tier,
number_of_activated_referrals: value.number_of_activated_referrals,
number_of_total_referrals: value.number_of_total_referrals,
referral_fee_bonus: value.referral_fee_bonus.to_f32().expect("to fit into f32"),
bonus_status_type: value
.bonus_status_type
.map(|status| status.into())
.unwrap_or_default(),
}
}
}
/// Returns true if the user has at least a single trade in his db
pub fn has_traded_once() -> Result<SyncReturn<bool>> {
    let trades = db::get_all_trades()?;
    Ok(SyncReturn(!trades.is_empty()))
}
/// Submit an order that opens a channel whose funding arrives externally.
///
/// All amount parameters appear to be denominated in satoshis, judging by the
/// sibling `*_sats` APIs — TODO confirm. Returns the `ExternalFunding` details
/// the caller needs to complete the funding.
#[tokio::main(flavor = "current_thread")]
pub async fn submit_unfunded_channel_opening_order(
    order: NewOrder,
    coordinator_reserve: u64,
    trader_reserve: u64,
    estimated_margin: u64,
    order_matching_fees: u64,
) -> Result<ExternalFunding> {
    // Thin forwarding wrapper; all logic lives in the module below.
    unfunded_channel_opening_order::submit_unfunded_channel_opening_order(
        order,
        coordinator_reserve,
        trader_reserve,
        estimated_margin,
        order_matching_fees,
    )
    .await
}
/// Abort a pending unfunded channel-opening order by stopping its watcher.
#[tokio::main(flavor = "current_thread")]
pub async fn abort_unfunded_channel_opening_order() -> Result<()> {
    unfunded_channel_opening_order::abort_watcher().await
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/schema.rs | mobile/native/src/schema.rs | // @generated automatically by Diesel CLI.
// NOTE(review): this file is generated by Diesel CLI (see header); comments
// added here will be lost on the next regeneration.

// Polls the user has answered (timestamps here and below are BigInt —
// presumably unix epoch; confirm against the insert sites).
diesel::table! {
    answered_polls (id) {
        id -> Integer,
        poll_id -> Integer,
        timestamp -> BigInt,
    }
}
// Lightning/DLC channels keyed by the locally generated user_channel_id.
diesel::table! {
    channels (user_channel_id) {
        user_channel_id -> Text,
        channel_id -> Nullable<Text>,
        inbound -> BigInt,
        outbound -> BigInt,
        funding_txid -> Nullable<Text>,
        channel_state -> Text,
        counterparty_pubkey -> Text,
        created_at -> BigInt,
        updated_at -> BigInt,
        liquidity_option_id -> Nullable<Integer>,
        fee_sats -> Nullable<BigInt>,
        open_channel_payment_hash -> Nullable<Text>,
    }
}
// DLC protocol messages, deduplicated by their hash.
diesel::table! {
    dlc_messages (message_hash) {
        message_hash -> Text,
        inbound -> Bool,
        peer_id -> Text,
        message_type -> Text,
        timestamp -> BigInt,
    }
}
// Funding fee events per contract; paid_date is NULL until settled.
diesel::table! {
    funding_fee_events (id) {
        id -> Integer,
        contract_symbol -> Text,
        contracts -> Float,
        direction -> Text,
        price -> Float,
        fee -> BigInt,
        due_date -> BigInt,
        paid_date -> Nullable<BigInt>,
    }
}
// Polls the user dismissed without answering.
diesel::table! {
    ignored_polls (id) {
        id -> Integer,
        poll_id -> Integer,
        timestamp -> BigInt,
    }
}
// Last DLC message sent per peer; message_hash references dlc_messages.
diesel::table! {
    last_outbound_dlc_messages (peer_id) {
        peer_id -> Text,
        message_hash -> Text,
        message -> Text,
        timestamp -> BigInt,
    }
}
// Trading orders and their lifecycle state.
diesel::table! {
    orders (id) {
        id -> Text,
        leverage -> Float,
        quantity -> Float,
        contract_symbol -> Text,
        direction -> Text,
        order_type -> Text,
        state -> Text,
        creation_timestamp -> BigInt,
        limit_price -> Nullable<Float>,
        execution_price -> Nullable<Float>,
        failure_reason -> Nullable<Text>,
        order_expiry_timestamp -> BigInt,
        reason -> Text,
        stable -> Bool,
        matching_fee_sats -> Nullable<BigInt>,
    }
}
// Lightning payments (amounts in msat where the column name says so).
diesel::table! {
    payments (id) {
        id -> Integer,
        payment_hash -> Text,
        preimage -> Nullable<Text>,
        secret -> Nullable<Text>,
        htlc_status -> Text,
        amount_msat -> Nullable<BigInt>,
        flow -> Text,
        created_at -> BigInt,
        updated_at -> BigInt,
        description -> Text,
        invoice -> Nullable<Text>,
        fee_msat -> Nullable<BigInt>,
        funding_txid -> Nullable<Text>,
    }
}
// Open positions — at most one per contract symbol (it is the primary key).
diesel::table! {
    positions (contract_symbol) {
        contract_symbol -> Text,
        leverage -> Float,
        quantity -> Float,
        direction -> Text,
        average_entry_price -> Float,
        liquidation_price -> Float,
        state -> Text,
        collateral -> BigInt,
        creation_timestamp -> BigInt,
        expiry_timestamp -> BigInt,
        updated_timestamp -> BigInt,
        stable -> Bool,
        order_matching_fees -> BigInt,
    }
}
// Parameters of in-flight rollover protocols, keyed by protocol id.
diesel::table! {
    rollover_params (protocol_id) {
        protocol_id -> Text,
        contract_symbol -> Text,
        funding_fee_sat -> BigInt,
        expiry -> BigInt,
    }
}
// On-chain outputs the wallet can sweep, stored as outpoint + descriptor.
diesel::table! {
    spendable_outputs (id) {
        id -> Integer,
        outpoint -> Text,
        descriptor -> Text,
    }
}
// Executed trades; pnl_sat is NULL when not (yet) realized.
diesel::table! {
    trades (id) {
        id -> Integer,
        order_id -> Text,
        contract_symbol -> Text,
        contracts -> Float,
        direction -> Text,
        trade_cost_sat -> BigInt,
        fee_sat -> BigInt,
        pnl_sat -> Nullable<BigInt>,
        price -> Float,
        timestamp -> BigInt,
    }
}
// Raw on-chain transactions tracked by the wallet.
diesel::table! {
    transactions (txid) {
        txid -> Text,
        fee -> BigInt,
        created_at -> BigInt,
        updated_at -> BigInt,
        raw -> Text,
    }
}
diesel::joinable!(last_outbound_dlc_messages -> dlc_messages (message_hash));
diesel::allow_tables_to_appear_in_same_query!(
    answered_polls,
    channels,
    dlc_messages,
    funding_fee_events,
    ignored_polls,
    last_outbound_dlc_messages,
    orders,
    payments,
    positions,
    rollover_params,
    spendable_outputs,
    trades,
    transactions,
);
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/polls.rs | mobile/native/src/polls.rs | use crate::commons::reqwest_client;
use crate::config;
use crate::db;
use anyhow::Result;
use bitcoin::secp256k1::PublicKey;
use reqwest::Url;
use xxi_node::commons::Answer;
use xxi_node::commons::Choice;
use xxi_node::commons::Poll;
use xxi_node::commons::PollAnswers;
/// Fetch the polls for this node and drop any the user already answered
/// or ignored locally.
pub(crate) async fn get_new_polls() -> Result<Vec<Poll>> {
    let node = crate::state::get_node();
    let new_polls = fetch_polls(&node.inner.info.pubkey).await?;
    tracing::debug!(new_polls = new_polls.len(), "Fetched new polls");
    let answered_polls = db::load_ignored_or_answered_polls()?;
    let mut unanswered_polls = Vec::new();
    for poll in new_polls {
        let already_handled = answered_polls
            .iter()
            .any(|answered_poll| answered_poll.poll_id == poll.id);
        if !already_handled {
            unanswered_polls.push(poll);
        }
    }
    tracing::debug!(unanswered_polls = unanswered_polls.len(), "Polls to answer");
    for poll in &unanswered_polls {
        tracing::debug!(poll_id = poll.id, "Unanswered polls");
    }
    Ok(unanswered_polls)
}
/// Submit the user's `choice` for `poll_id` to the coordinator, then mark the
/// poll as handled locally so it is not shown again.
pub(crate) async fn answer_poll(choice: Choice, poll_id: i32, trader_pk: PublicKey) -> Result<()> {
    // Post first: the poll is only marked locally once the coordinator accepted it.
    post_selected_choice(choice.clone(), poll_id, trader_pk).await?;
    db::set_poll_to_ignored_or_answered(poll_id)?;
    tracing::debug!(poll_id, choice = ?choice, "Answered poll");
    Ok(())
}
/// Mark a poll as ignored locally so it is never presented to the user again.
/// (Nothing is sent to the coordinator.)
pub(crate) fn ignore_poll(poll_id: i32) -> Result<()> {
    db::set_poll_to_ignored_or_answered(poll_id)?;
    tracing::debug!(poll_id, "Poll won't be shown again");
    Ok(())
}
/// Download the polls available for `node_id` from the coordinator.
///
/// # Errors
/// Fails if the request cannot be sent, the server returns a non-success
/// status, or the body cannot be decoded as a list of polls.
async fn fetch_polls(node_id: &PublicKey) -> Result<Vec<Poll>> {
    let client = reqwest_client();
    let url = format!("http://{}", config::get_http_endpoint());
    let url = Url::parse(&url).expect("correct URL");
    let url = url.join(format!("/api/polls/{node_id}").as_str())?;
    let response = client.get(url).send().await?;
    // Surface HTTP error statuses directly instead of failing later with a
    // confusing JSON-decode error; mirrors the handling in
    // `post_selected_choice`.
    let response = response.error_for_status()?;
    let polls = response.json().await?;
    Ok(polls)
}
/// Send the selected poll choice to the coordinator's polls endpoint.
async fn post_selected_choice(choice: Choice, poll_id: i32, trader_pk: PublicKey) -> Result<()> {
    let client = reqwest_client();
    let base = Url::parse(&format!("http://{}", config::get_http_endpoint())).expect("correct URL");
    let url = base.join("/api/polls")?;
    // A single answer is submitted per request.
    let payload = PollAnswers {
        poll_id,
        trader_pk,
        answers: vec![Answer {
            choice_id: choice.id,
            value: choice.value,
        }],
    };
    let response = client.post(url).json(&payload).send().await?;
    response.error_for_status()?;
    Ok(())
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/backup.rs | mobile/native/src/backup.rs | use crate::cipher::AesCipher;
use crate::config;
use crate::db;
use crate::event::subscriber::Subscriber;
use crate::event::EventInternal;
use crate::event::EventType;
use anyhow::bail;
use anyhow::ensure;
use anyhow::Result;
use futures::future::RemoteHandle;
use futures::FutureExt;
use reqwest::Client;
use reqwest::StatusCode;
use std::fs;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use xxi_node::commons::Backup;
use xxi_node::commons::DeleteBackup;
use xxi_node::commons::Restore;
use xxi_node::storage::sled::SledStorageProvider;
use xxi_node::storage::DlcStoreProvider;
// Keys that are never uploaded; checked in `RemoteBackupClient::backup`.
const BLACKLIST: [&str; 1] = ["ln/network_graph"];
// Key-prefixes that partition the remote backup namespace: the sqlite
// database, Lightning node files, and DLC storage respectively.
pub const DB_BACKUP_KEY: &str = "10101";
pub const LN_BACKUP_KEY: &str = "ln";
pub const DLC_BACKUP_KEY: &str = "dlc";
// File name under DB_BACKUP_KEY used for the database snapshot.
pub const DB_BACKUP_NAME: &str = "db";
/// Event subscriber that snapshots the local database and uploads it via the
/// remote backup client whenever a relevant event fires.
#[derive(Clone)]
pub struct DBBackupSubscriber {
    client: RemoteBackupClient,
}
impl DBBackupSubscriber {
    pub fn new(client: RemoteBackupClient) -> Self {
        Self { client }
    }
    /// Snapshot the database and upload it in the background.
    ///
    /// The actual work runs on a blocking task; this function only fails if
    /// the runtime cannot be obtained. Errors inside the task are not
    /// surfaced to the caller (the JoinHandle is dropped), and the upload
    /// itself is fire-and-forget via `RemoteHandle::forget`.
    pub fn back_up(&self) -> Result<()> {
        let runtime = crate::state::get_or_create_tokio_runtime()?;
        runtime.spawn_blocking({
            let client = self.client.clone();
            move || {
                // Write the sqlite snapshot to disk, then read it back for upload.
                let db_backup = db::back_up()?;
                tracing::debug!("Successfully created backup of database! Uploading snapshot!");
                let value = fs::read(db_backup)?;
                client
                    .backup(format!("{DB_BACKUP_KEY}/{DB_BACKUP_NAME}"), value)
                    .forget();
                anyhow::Ok(())
            }
        });
        Ok(())
    }
}
impl Subscriber for DBBackupSubscriber {
fn notify(&self, _event: &EventInternal) {
if let Err(e) = self.back_up() {
tracing::error!("Failed to backup db. {e:#}");
}
}
fn events(&self) -> Vec<EventType> {
vec![
EventType::PositionUpdateNotification,
EventType::PositionClosedNotification,
EventType::OrderUpdateNotification,
EventType::OrderFilledWith,
EventType::SpendableOutputs,
]
}
}
/// HTTP client for the coordinator's encrypted backup API.
#[derive(Clone)]
pub struct RemoteBackupClient {
    inner: Client,
    // Base URL of the backup API, e.g. "http://host:port/api".
    endpoint: String,
    // Encrypts/signs payloads; its public key identifies this node remotely.
    cipher: AesCipher,
}
impl RemoteBackupClient {
pub fn new(cipher: AesCipher) -> RemoteBackupClient {
let inner = Client::builder()
.timeout(Duration::from_secs(30))
.build()
.expect("Could not build reqwest client");
Self {
inner,
endpoint: format!("http://{}/api", config::get_http_endpoint()),
cipher,
}
}
}
impl RemoteBackupClient {
    /// Delete the remote backup stored under `key`, in the background.
    ///
    /// The request is authenticated by signing this node's id with the
    /// cipher. Failures are only logged, never returned; the returned
    /// [`RemoteHandle`] lets the caller await or `forget()` the task.
    pub fn delete(&self, key: String) -> RemoteHandle<()> {
        let (fut, remote_handle) = {
            let client = self.inner.clone();
            let node_id = self.cipher.public_key();
            let endpoint = format!("{}/backup/{}", self.endpoint.clone(), node_id);
            let cipher = self.cipher.clone();
            // For deletes the signed message is the node id itself.
            let message = node_id.to_string().as_bytes().to_vec();
            async move {
                let signature = match cipher.sign(message) {
                    Ok(signature) => signature,
                    Err(e) => {
                        tracing::error!(%key, "{e:#}");
                        return;
                    }
                };
                let backup = DeleteBackup {
                    key: key.clone(),
                    signature,
                };
                if let Err(e) = client.delete(endpoint).json(&backup).send().await {
                    tracing::error!("Failed to delete backup of {key}. {e:#}")
                } else {
                    tracing::debug!("Successfully deleted backup of {key}");
                }
            }
        }
        .remote_handle();
        let runtime =
            crate::state::get_or_create_tokio_runtime().expect("To be able to get a tokio runtime");
        runtime.spawn(fut);
        remote_handle
    }
    /// Encrypt `value` and upload it under `key`, in the background.
    ///
    /// Blacklisted keys are skipped. The upload is signed over the
    /// *encrypted* payload. Failures are only logged, never returned.
    pub fn backup(&self, key: String, value: Vec<u8>) -> RemoteHandle<()> {
        let size_mb = value.len() as f64 / (1024.0 * 1024.0);
        tracing::trace!(%size_mb, "Creating backup for {key}");
        let (fut, remote_handle) = {
            let client = self.inner.clone();
            let cipher = self.cipher.clone();
            let node_id = cipher.public_key();
            let endpoint = format!("{}/backup/{}", self.endpoint.clone(), node_id);
            async move {
                // Some keys (e.g. the LN network graph) are too big/ephemeral
                // to be worth backing up.
                if BLACKLIST.contains(&key.as_str()) {
                    tracing::debug!(key, "Skipping blacklisted backup");
                    return;
                }
                let encrypted_value = match cipher.encrypt(value) {
                    Ok(encrypted_value) => encrypted_value,
                    Err(e) => {
                        tracing::error!(%key, "{e:#}");
                        return;
                    }
                };
                let signature = match cipher.sign(encrypted_value.clone()) {
                    Ok(signature) => signature,
                    Err(e) => {
                        tracing::error!(%key, "{e:#}");
                        return;
                    }
                };
                let backup = Backup {
                    key: key.clone(),
                    value: encrypted_value,
                    signature,
                };
                match client.post(endpoint).json(&backup).send().await {
                    Ok(response) => {
                        tracing::debug!("Response status code {}", response.status());
                        if response.status() != StatusCode::OK {
                            match response.text().await {
                                Ok(response) => {
                                    tracing::error!("Failed to upload backup. {response}")
                                }
                                Err(e) => tracing::error!("Failed to upload backup. {e}"),
                            }
                        } else {
                            tracing::debug!("Successfully uploaded backup of {key}.");
                        }
                    }
                    Err(e) => tracing::error!("Failed to create a backup of {key}. {e:#}"),
                }
            }
        }
        .remote_handle();
        let runtime =
            crate::state::get_or_create_tokio_runtime().expect("To be able to get a tokio runtime");
        runtime.spawn(fut);
        remote_handle
    }
    /// Download all remote backups, decrypt them, and write each one back to
    /// its local destination based on the first segment of its key:
    /// `ln/…` -> node data dir, `dlc/…` -> sled DLC storage,
    /// `10101/…` -> the sqlite database file. Unknown prefixes are logged
    /// and skipped.
    pub async fn restore(&self, dlc_storage: Arc<SledStorageProvider>) -> Result<()> {
        let runtime = crate::state::get_or_create_tokio_runtime()?;
        runtime
            .spawn({
                let client = self.inner.clone();
                let cipher = self.cipher.clone();
                let node_id = cipher.public_key();
                let endpoint = format!("{}/restore/{}", self.endpoint.clone(), node_id);
                let data_dir = config::get_data_dir();
                let network = config::get_network();
                let message = node_id.to_string().as_bytes().to_vec();
                async move {
                    let signature = cipher.sign(message)?;
                    // NOTE: the authentication payload is sent as the JSON
                    // body of a GET request.
                    match client.get(endpoint).json(&signature).send().await {
                        Ok(response) => {
                            tracing::debug!("Response status code {}", response.status());
                            if response.status() != StatusCode::OK {
                                let response = response.text().await?;
                                bail!("Failed to download backup. {response}");
                            }
                            let backup: Vec<Restore> = response.json().await?;
                            tracing::debug!("Successfully downloaded backup.");
                            for restore in backup.into_iter() {
                                let decrypted_value = cipher.decrypt(restore.value)?;
                                // Split "prefix/rest..." into the namespace
                                // prefix and the remainder of the key.
                                let keys = restore
                                    .key
                                    .split('/')
                                    .map(|key| key.to_string())
                                    .collect::<Vec<String>>();
                                let (backup_key, key) =
                                    keys.split_first().expect("keys to be long enough");
                                let key = key.join("/");
                                let backup_key = backup_key.as_str();
                                match backup_key {
                                    x if x == LN_BACKUP_KEY => {
                                        tracing::debug!("Restoring {}", key);
                                        // LN files live under <data_dir>/<network>/<key>.
                                        let dest_file = Path::new(&data_dir)
                                            .join(network.to_string())
                                            .join(key.clone());
                                        fs::create_dir_all(dest_file.parent().expect("parent"))?;
                                        fs::write(dest_file.as_path(), decrypted_value)?;
                                    }
                                    x if x == DLC_BACKUP_KEY => {
                                        tracing::debug!("Restoring {}", key);
                                        // DLC keys are "<hex kind byte>/<hex key>".
                                        let keys = key.split('/').collect::<Vec<&str>>();
                                        ensure!(keys.len() == 2, "dlc key is too short");
                                        let kind = *hex::decode(keys.first().expect("to exist"))?
                                            .first()
                                            .expect("to exist");
                                        let key = hex::decode(keys.get(1).expect("to exist"))?;
                                        dlc_storage.write(kind, key, decrypted_value)?;
                                    }
                                    x if x == DB_BACKUP_KEY => {
                                        let data_dir = Path::new(&data_dir);
                                        let db_file =
                                            data_dir.join(format!("trades-{}.sqlite", network));
                                        tracing::debug!(
                                            "Restoring 10101 database backup into {}",
                                            db_file.to_string_lossy().to_string()
                                        );
                                        fs::write(db_file.as_path(), decrypted_value)?;
                                    }
                                    _ => {
                                        tracing::warn!(backup_key, "Received unknown backup key")
                                    }
                                }
                            }
                            tracing::info!("Successfully restored 10101 from backup!");
                        }
                        Err(e) => bail!("Failed to download backup. {e:#}"),
                    }
                    Ok(())
                }
            })
            .await?
    }
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
get10101/10101 | https://github.com/get10101/10101/blob/3ae135090528d64fbe2702aa03e1e3953cd57e2f/mobile/native/src/health.rs | mobile/native/src/health.rs | use crate::config;
use crate::event;
use crate::event::EventInternal;
use anyhow::Context;
use anyhow::Result;
use futures::future::RemoteHandle;
use futures::FutureExt;
use reqwest::StatusCode;
use std::time::Duration;
use tokio::runtime::Runtime;
use tokio::sync::watch;
/// Services which status is monitored
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Service {
    /// The orderbook websocket connection.
    Orderbook,
    /// The coordinator's HTTP health endpoint.
    Coordinator,
}
/// Health status of the node
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub enum ServiceStatus {
    /// No health information received yet (initial state).
    #[default]
    Unknown,
    Online,
    Offline,
}
/// A health-status change for a single monitored service,
/// published via the event hub.
#[derive(Debug, Clone)]
pub struct ServiceUpdate {
    pub service: Service,
    pub status: ServiceStatus,
}
impl From<(Service, ServiceStatus)> for ServiceUpdate {
fn from(tuple: (Service, ServiceStatus)) -> Self {
let (service, status) = tuple;
ServiceUpdate { service, status }
}
}
/// Senders for the health status updates.
///
/// Meant to be injected into the services that need to publish their health status.
pub struct Tx {
    // Watch sender the orderbook connection uses to report its status.
    pub orderbook: watch::Sender<ServiceStatus>,
}
/// Entity that gathers all the service health data and publishes notifications
pub struct Health {
    // Handles to the monitoring tasks spawned in `Health::new`; kept only to
    // tie their lifetime to this struct.
    _tasks: Vec<RemoteHandle<std::result::Result<(), tokio::task::JoinError>>>,
}
impl Health {
    /// Spawn the monitoring tasks and return the `Health` handle plus the
    /// senders (`Tx`) that services use to report their status.
    ///
    /// NOTE(review): `.remote_handle().1` keeps only the handle half and
    /// immediately drops the wrapped future; the tasks spawned on `runtime`
    /// keep running regardless — confirm the handles are intended purely as
    /// lifetime markers.
    pub fn new(runtime: &Runtime) -> (Self, Tx) {
        // Orderbook status is pushed by the websocket code via `Tx.orderbook`;
        // we only forward changes to the event hub here.
        let (orderbook_tx, orderbook_rx) = watch::channel(ServiceStatus::Unknown);
        let mut tasks = Vec::new();
        let orderbook_monitoring = runtime
            .spawn(publish_status_updates(Service::Orderbook, orderbook_rx))
            .remote_handle()
            .1;
        tasks.push(orderbook_monitoring);
        // Coordinator status is actively polled from its health endpoint.
        let (coordinator_tx, coordinator_rx) = watch::channel(ServiceStatus::Unknown);
        let check_coordinator = runtime
            .spawn(check_health_endpoint(
                config::coordinator_health_endpoint(),
                coordinator_tx,
                config::health_check_interval(),
            ))
            .remote_handle()
            .1;
        tasks.push(check_coordinator);
        let coordinator_monitoring = runtime
            .spawn(publish_status_updates(Service::Coordinator, coordinator_rx))
            .remote_handle()
            .1;
        tasks.push(coordinator_monitoring);
        (
            Self { _tasks: tasks },
            Tx {
                orderbook: orderbook_tx,
            },
        )
    }
}
/// Publishes the health status updates for a given service to the event hub
async fn publish_status_updates(service: Service, mut rx: watch::Receiver<ServiceStatus>) {
    // Forward every status change until the sender side goes away.
    while rx.changed().await.is_ok() {
        let status = *rx.borrow();
        event::publish(&EventInternal::ServiceHealthUpdate(
            (service, status).into(),
        ));
    }
    // Sender dropped: report the service as Unknown and stop.
    tracing::error!("Sender dropped");
    event::publish(&EventInternal::ServiceHealthUpdate(
        (service, ServiceStatus::Unknown).into(),
    ));
}
/// Periodically checks the health of a given service and updates the watch channel
async fn check_health_endpoint(
    endpoint: String,
    tx: watch::Sender<ServiceStatus>,
    interval: Duration,
) {
    loop {
        let status = match send_request(&endpoint).await {
            Ok(_) => ServiceStatus::Online,
            Err(_) => ServiceStatus::Offline,
        };
        tx.send(status).expect("Receiver not to be dropped");
        tokio::time::sleep(interval).await;
    }
}
// Returns the status code of the health endpoint, returning an error if the request fails
async fn send_request(endpoint: &str) -> Result<StatusCode> {
    tracing::trace!(%endpoint, "Sending request");
    let response = reqwest::get(endpoint)
        .await
        .context("could not send request")?;
    let response = response.error_for_status()?;
    Ok(response.status())
}
| rust | MIT | 3ae135090528d64fbe2702aa03e1e3953cd57e2f | 2026-01-04T20:18:11.134572Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.