repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/libs_bench/src/main.rs | xtask/libs_bench/src/main.rs | fn main() {}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/libs_bench/benches/contains_criterion.rs | xtask/libs_bench/benches/contains_criterion.rs | mod contains;
use contains::*;
use criterion::{criterion_group, criterion_main, Criterion};
use fastbloom_rs::Membership;
/// Benchmarks several "does this keyword set contain the probe string"
/// strategies (hashset, btreeset, bloom filter, trie, linear slice scan,
/// fst, binary search, memchr) against the same probes from `search_for()`.
///
/// Each closure folds the lookup results into `count` and returns it so
/// Criterion's black-boxing keeps the lookups from being optimized away.
fn criterion_benchmark(c: &mut Criterion) {
    let set = contains_hashset_setup();
    c.bench_function("contains_hashset", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                count += i32::from(set.contains(*k));
            }
            count
        })
    });
    let set = contains_btreeset_setup();
    c.bench_function("contains_btreeset", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                // FIX: was `count = ...`, which discarded all but the last
                // lookup; `+=` matches every other benchmark in this file.
                count += i32::from(set.contains(*k));
            }
            count
        })
    });
    let set = contains_bloom_setup();
    c.bench_function("contains_bloom", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                count += i32::from(set.contains(k.as_bytes()))
            }
            count
        })
    });
    let set = contains_trie_setup();
    c.bench_function("contains_trie", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                count += i32::from(set.contains_key(k.as_bytes()));
            }
            count
        })
    });
    let set = contains_slice_setup();
    c.bench_function("contains_slice", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                count += set.iter().position(|x| x == k).unwrap_or(0);
            }
            count
        })
    });
    let set = contains_fst_setup();
    c.bench_function("contains_fst", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                count += i32::from(set.contains(k));
            }
            count
        })
    });
    let mut set = keywords();
    set.sort();
    c.bench_function("contains_binary_search", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                // FIX: the comparator was reversed (`(*k).cmp(v.as_str())`),
                // probing the ascending-sorted vec as if it were descending.
                // `binary_search_by` expects `element.cmp(target)`.
                count += set.binary_search_by(|v| v.as_str().cmp(*k)).unwrap_or(1);
            }
            count
        })
    });
    let set = contains_memchr_setup();
    c.bench_function("contains_memchr", |b| {
        b.iter(|| {
            let mut count = 0;
            for k in search_for() {
                for item in set.iter() {
                    count += i32::from(
                        memchr::memmem::find(k.as_bytes(), item.as_str().as_bytes()).is_some(),
                    );
                }
            }
            // FIX: `count` was previously discarded by a stray inner block
            // and trailing `;`, so the closure returned `()`; returning it
            // keeps the result observable, consistent with the other benches.
            count
        })
    });
}
// Register the benchmark group and generate Criterion's `main`.
criterion_group!(contains, criterion_benchmark);
criterion_main!(contains);
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/libs_bench/benches/to_camel_case.rs | xtask/libs_bench/benches/to_camel_case.rs | fn to_camel_case_rome_all_lowercase() {
let _ = rome_js_analyze::utils::to_camel_case(iai::black_box("lowercase"));
}
// The `case` crate's converter on an already-lowercase identifier.
fn to_camel_case_case_all_lowercase() {
    let input = iai::black_box("lowercase");
    let _ = case::CaseExt::to_camel(input);
}
// Rome's converter on input that is already camelCase (no-op path).
fn to_camel_case_rome_already_camel_case() {
    let input = iai::black_box("camelCase");
    let _ = rome_js_analyze::utils::to_camel_case(input);
}
// The `case` crate's converter on input that is already camelCase.
fn to_camel_case_case_already_camel_case() {
    let input = iai::black_box("camelCase");
    let _ = case::CaseExt::to_camel(input);
}
// Rome's converter on PascalCase input (leading upper-case letter).
fn to_camel_case_rome_pascal_case() {
    let input = iai::black_box("CamelCase");
    let _ = rome_js_analyze::utils::to_camel_case(input);
}
// The `case` crate's converter on PascalCase input.
fn to_camel_case_case_pascal_case() {
    let input = iai::black_box("CamelCase");
    let _ = case::CaseExt::to_camel(input);
}
// Pairs each Rome implementation with the `case` crate equivalent so iai
// reports them side by side for the same input.
iai::main!(
    to_camel_case_rome_all_lowercase,
    to_camel_case_case_all_lowercase,
    to_camel_case_rome_already_camel_case,
    to_camel_case_case_already_camel_case,
    to_camel_case_rome_pascal_case,
    to_camel_case_case_pascal_case,
);
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/libs_bench/benches/contains.rs | xtask/libs_bench/benches/contains.rs | #![allow(dead_code)]
use std::collections::{BTreeSet, HashSet};
use fastbloom_rs::{BloomFilter, FilterBuilder, Membership};
use qp_trie::Trie;
/// Builds the benchmark corpus: the five base keywords repeated
/// `ROME_BENCH_CONTAINS_REPEAT` times (default 1), each suffixed with its
/// index so every entry is unique (e.g. `"undefined0"`, `"NaN1"`, ...).
///
/// Panics if the env var is set but not a valid unsigned integer.
pub fn keywords() -> Vec<String> {
    let repeat = std::env::var("ROME_BENCH_CONTAINS_REPEAT")
        .unwrap_or_else(|_| "1".to_string())
        .parse()
        .unwrap();
    // FIX: dropped the needless `&` borrow of the repeated Vec; iterate the
    // owned Vec directly instead of re-borrowing it.
    ["undefined", "NaN", "Infinity", "arguments", "eval"]
        .repeat(repeat)
        .into_iter()
        .enumerate()
        .map(|(i, x)| format!("{}{}", x, i))
        .collect()
}
/// Probe strings used by every `contains_*` benchmark: a mix of keyword-like
/// entries and deliberate misses of varying lengths.
/// NOTE(review): `keywords()` appends an index to every stored entry, so none
/// of these probes match a stored keyword exactly — confirm this is intended.
pub fn search_for() -> &'static [&'static str] {
    static NEEDLES: [&str; 8] = [
        "undefined",
        "a",
        "NaN",
        "longVariableName",
        "Infinity",
        "xxxxxxxx",
        "arguments",
        "eval",
    ];
    &NEEDLES
}
/// Unsorted keyword vec for the linear-scan (`position`) benchmark.
pub fn contains_slice_setup() -> Vec<String> {
    keywords()
}
/// Linear-scan variant: sums the index at which each probe is found in the
/// vec, counting absent probes as 0 (mirrors the criterion benchmark).
pub fn contains_slice() -> usize {
    let set = contains_slice_setup();
    search_for()
        .iter()
        .map(|k| set.iter().position(|x| x == k).unwrap_or(0))
        .sum()
}
/// Keywords sorted ascending, as `binary_search` requires.
pub fn contains_binary_search_setup() -> Vec<String> {
    let mut words = keywords();
    words.sort();
    words
}
/// Binary-search variant: sums the found index per probe, or 1 on a miss.
pub fn contains_binary_search() -> usize {
    let set = contains_binary_search_setup();
    let mut count = 0;
    for k in search_for() {
        // FIX: the comparator was reversed (`(*k).cmp(v.as_str())`), which
        // probes the ascending-sorted vec as if it were sorted descending;
        // `binary_search_by` expects the closure to return
        // `element.cmp(target)`.
        count += set.binary_search_by(|v| v.as_str().cmp(*k)).unwrap_or(1);
    }
    count
}
/// Builds a `HashSet` over the benchmark keywords.
pub fn contains_hashset_setup() -> HashSet<String> {
    // FIX: the original loop called `k.to_string()` on values that were
    // already owned `String`s, cloning every entry for nothing; collect the
    // owned iterator directly.
    keywords().into_iter().collect()
}
/// Hash-set variant: counts how many probes are members of the set.
pub fn contains_hashset() -> i32 {
    let set = contains_hashset_setup();
    search_for()
        .iter()
        .map(|k| i32::from(set.contains(*k)))
        .sum()
}
/// Builds a `BTreeSet` over the benchmark keywords.
pub fn contains_btreeset_setup() -> BTreeSet<String> {
    // FIX: dropped the redundant `k.to_string()` clone of already-owned
    // `String`s; collect the owned iterator directly.
    keywords().into_iter().collect()
}
/// B-tree-set variant: counts how many probes are members of the set.
pub fn contains_btreeset() -> i32 {
    let set = contains_btreeset_setup();
    let mut count = 0;
    for k in search_for() {
        // FIX: was `count = ...`, which overwrote the tally each iteration
        // and reported only the last lookup; `+=` matches the hashset,
        // bloom, trie and fst variants.
        count += i32::from(set.contains(*k));
    }
    count
}
/// Builds a Bloom filter over the benchmark keywords.
/// NOTE(review): 100_000_000 expected elements is vastly larger than the
/// actual keyword count — confirm the oversizing is intentional.
pub fn contains_bloom_setup() -> BloomFilter {
    // Second argument is the target false-positive rate (1%).
    let builder = FilterBuilder::new(100_000_000, 0.01);
    let mut set = BloomFilter::new(builder);
    for k in keywords() {
        set.add(k.as_bytes());
    }
    set
}
/// Bloom-filter variant: counts probes the filter reports as (possibly)
/// present.
pub fn contains_bloom() -> i32 {
    let filter = contains_bloom_setup();
    search_for()
        .iter()
        .map(|probe| i32::from(filter.contains(probe.as_bytes())))
        .sum()
}
/// Builds a qp-trie keyed by the keyword bytes; the value (0) is unused,
/// only key membership matters.
pub fn contains_trie_setup() -> Trie<Vec<u8>, i32> {
    let mut set = Trie::new();
    for k in keywords() {
        set.insert(k.into_bytes(), 0);
    }
    set
}
/// Trie variant: counts how many probes exist as keys in the trie.
pub fn contains_trie() -> i32 {
    let trie = contains_trie_setup();
    search_for()
        .iter()
        .map(|probe| i32::from(trie.contains_key(probe.as_bytes())))
        .sum()
}
/// Builds an in-memory FST set over the benchmark keywords.
pub fn contains_fst_setup() -> fst::Set<Vec<u8>> {
    let buffer = vec![];
    let mut builder = fst::SetBuilder::new(buffer).unwrap();
    // FIX: dropped the redundant `.to_vec()` — `keywords()` already returns
    // an owned `Vec<String>`, so `.to_vec()` cloned every string for nothing.
    let mut keywords = keywords();
    // FST construction requires keys to be inserted in sorted order.
    keywords.sort();
    for k in keywords {
        // Insertion only fails on out-of-order keys, which the sort rules out.
        let _ = builder.insert(k);
    }
    builder.into_set()
}
/// FST variant: counts how many probes are members of the fst set.
pub fn contains_fst() -> i32 {
    let set = contains_fst_setup();
    search_for()
        .iter()
        .map(|k| i32::from(set.contains(k)))
        .sum()
}
/// Reuses the sorted keyword vec from the binary-search setup.
pub fn contains_memchr_setup() -> Vec<String> {
    contains_binary_search_setup()
}
/// memchr variant: for every probe, scans every keyword and counts substring
/// hits.
/// NOTE(review): the call searches for `item` *inside* the probe `k`
/// (haystack = probe, needle = keyword) — confirm the argument order is the
/// intended semantics; preserved here unchanged.
pub fn contains_memchr() -> i32 {
    let haystacks = contains_memchr_setup();
    let mut total = 0;
    for probe in search_for() {
        for item in haystacks.iter() {
            let found = memchr::memmem::find(probe.as_bytes(), item.as_bytes());
            total += i32::from(found.is_some());
        }
    }
    total
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/libs_bench/benches/contains_iai.rs | xtask/libs_bench/benches/contains_iai.rs | mod contains;
use contains::*;
// iai do not support setup, so we basically run the setup and
// the whole setup + test. To see the difference.
// https://github.com/bheisler/iai/pull/24
// Each `*_setup` runs immediately before its matching benchmark so the
// setup cost can be subtracted from the combined measurement by hand.
iai::main!(
    contains_hashset_setup,
    contains_hashset,
    contains_btreeset_setup,
    contains_btreeset,
    contains_bloom_setup,
    contains_bloom,
    contains_trie_setup,
    contains_trie,
    contains_slice_setup,
    contains_slice,
    contains_fst_setup,
    contains_fst,
    contains_binary_search_setup,
    contains_binary_search,
    contains_memchr_setup,
    contains_memchr,
);
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/kinds_src.rs | xtask/codegen/src/kinds_src.rs | //! Definitions for the ECMAScript AST used for codegen
//! Based on the rust analyzer parser and ast definitions
use crate::LanguageKind;
use quote::format_ident;
use std::collections::BTreeMap;
/// Language prefixes stripped from node field names when deriving accessor
/// method names (see `Field::method_name`).
const LANGUAGE_PREFIXES: [&str; 6] = ["js_", "ts_", "jsx_", "tsx_", "css_", "json_"];
/// Borrowed, static description of one language's token and node kinds,
/// consumed by codegen to emit that language's `SyntaxKind` enum.
pub struct KindsSrc<'a> {
    /// Punctuation tokens as `(token text, SCREAMING_CASE name)` pairs.
    pub punct: &'a [(&'a str, &'a str)],
    /// Keyword texts (lowercase source spellings).
    pub keywords: &'a [&'a str],
    /// Literal kind names.
    pub literals: &'a [&'a str],
    /// Remaining token kind names (trivia, identifiers, etc.).
    pub tokens: &'a [&'a str],
    /// Syntax node kind names.
    pub nodes: &'a [&'a str],
}
/// The complete kind table for JavaScript/TypeScript/JSX: punctuation,
/// keywords (including contextual ones), literals, tokens and node kinds.
/// This is pure data — codegen derives the `SyntaxKind` enum from it.
pub const JS_KINDS_SRC: KindsSrc = KindsSrc {
    punct: &[
        (";", "SEMICOLON"),
        (",", "COMMA"),
        ("(", "L_PAREN"),
        (")", "R_PAREN"),
        ("{", "L_CURLY"),
        ("}", "R_CURLY"),
        ("[", "L_BRACK"),
        ("]", "R_BRACK"),
        ("<", "L_ANGLE"),
        (">", "R_ANGLE"),
        ("~", "TILDE"),
        ("?", "QUESTION"),
        ("??", "QUESTION2"),
        // These are *not* question AND dot tokens, they are one
        // to distinguish between `? .3134` and `?.` per ecma specs
        ("?.", "QUESTIONDOT"),
        ("&", "AMP"),
        ("|", "PIPE"),
        ("+", "PLUS"),
        ("++", "PLUS2"),
        ("*", "STAR"),
        ("**", "STAR2"),
        ("/", "SLASH"),
        ("^", "CARET"),
        ("%", "PERCENT"),
        (".", "DOT"),
        ("...", "DOT3"),
        (":", "COLON"),
        ("=", "EQ"),
        ("==", "EQ2"),
        ("===", "EQ3"),
        ("=>", "FAT_ARROW"),
        ("!", "BANG"),
        ("!=", "NEQ"),
        ("!==", "NEQ2"),
        ("-", "MINUS"),
        ("--", "MINUS2"),
        ("<=", "LTEQ"),
        (">=", "GTEQ"),
        ("+=", "PLUSEQ"),
        ("-=", "MINUSEQ"),
        ("|=", "PIPEEQ"),
        ("&=", "AMPEQ"),
        ("^=", "CARETEQ"),
        ("/=", "SLASHEQ"),
        ("*=", "STAREQ"),
        ("%=", "PERCENTEQ"),
        ("&&", "AMP2"),
        ("||", "PIPE2"),
        ("<<", "SHL"),
        (">>", "SHR"),
        (">>>", "USHR"),
        ("<<=", "SHLEQ"),
        (">>=", "SHREQ"),
        (">>>=", "USHREQ"),
        ("&&=", "AMP2EQ"),
        ("||=", "PIPE2EQ"),
        ("**=", "STAR2EQ"),
        ("??=", "QUESTION2EQ"),
        ("@", "AT"),
        ("`", "BACKTICK"),
    ],
    keywords: &[
        "break",
        "case",
        "catch",
        "class",
        "const",
        "continue",
        "debugger",
        "default",
        "delete",
        "do",
        "else",
        "enum",
        "export",
        "extends",
        "false",
        "finally",
        "for",
        "function",
        "if",
        "in",
        "instanceof",
        "import",
        "new",
        "null",
        "return",
        "super",
        "switch",
        "this",
        "throw",
        "try",
        "true",
        "typeof",
        "var",
        "void",
        "while",
        "with",
        // Strict mode contextual keywords
        "implements",
        "interface",
        "let",
        "package",
        "private",
        "protected",
        "public",
        "static",
        "yield",
        // contextual keywords
        "abstract",
        "accessor",
        "as",
        "satisfies",
        "asserts",
        "assert",
        "any",
        "async",
        "await",
        "boolean",
        "constructor",
        "declare",
        "get",
        "infer",
        "is",
        "keyof",
        "module",
        "namespace",
        "never",
        "readonly",
        "require",
        "number",
        "object",
        "set",
        "string",
        "symbol",
        "type",
        "undefined",
        "unique",
        "unknown",
        "from",
        "global",
        "bigint",
        "override",
        "of",
        "out",
        "using",
    ],
    literals: &[
        "JS_NUMBER_LITERAL",
        "JS_BIGINT_LITERAL",
        "JS_STRING_LITERAL",
        "JS_REGEX_LITERAL",
        "JSX_TEXT_LITERAL",
        "JSX_STRING_LITERAL",
    ],
    tokens: &[
        "TARGET",
        "META",
        "HASH", // #
        "TEMPLATE_CHUNK",
        "DOLLAR_CURLY", // ${
        "ERROR_TOKEN",
        "IDENT",
        "JSX_IDENT",
        "NEWLINE",
        "WHITESPACE",
        "COMMENT",
        "MULTILINE_COMMENT",
        "JS_SHEBANG",
    ],
    nodes: &[
        "JS_MODULE",
        "JS_MODULE_ITEM_LIST",
        "JS_SCRIPT",
        "JS_EXPRESSION_SNIPPED",
        "JS_DIRECTIVE",
        "JS_DIRECTIVE_LIST",
        "JS_STATEMENT_LIST",
        "JS_BLOCK_STATEMENT",
        "JS_FUNCTION_BODY",
        "JS_VARIABLE_STATEMENT",
        "JS_VARIABLE_DECLARATION",
        "JS_VARIABLE_DECLARATOR_LIST",
        "JS_VARIABLE_DECLARATOR",
        "JS_VARIABLE_DECLARATION_CLAUSE",
        "TS_DEFINITE_VARIABLE_ANNOTATION",
        "JS_INITIALIZER_CLAUSE",
        "JS_EMPTY_STATEMENT",
        "JS_EXPRESSION_STATEMENT",
        "JS_IF_STATEMENT",
        "JS_ELSE_CLAUSE",
        "JS_DO_WHILE_STATEMENT",
        "JS_WHILE_STATEMENT",
        "JS_FOR_STATEMENT",
        "JS_FOR_IN_STATEMENT",
        "JS_FOR_OF_STATEMENT",
        "JS_FOR_VARIABLE_DECLARATION",
        "JS_CONTINUE_STATEMENT",
        "JS_BREAK_STATEMENT",
        "JS_RETURN_STATEMENT",
        "JS_WITH_STATEMENT",
        "JS_SWITCH_STATEMENT",
        "JS_SWITCH_CASE_LIST",
        "JS_CASE_CLAUSE",
        "JS_DEFAULT_CLAUSE",
        "JS_LABELED_STATEMENT",
        "JS_THROW_STATEMENT",
        "JS_TRY_STATEMENT",
        "JS_TRY_FINALLY_STATEMENT",
        "JS_CATCH_CLAUSE",
        "JS_CATCH_DECLARATION",
        "JS_FINALLY_CLAUSE",
        "JS_DEBUGGER_STATEMENT",
        "JS_FUNCTION_DECLARATION",
        "JS_PARAMETERS",
        "JS_PARAMETER_LIST",
        "JS_FORMAL_PARAMETER",
        "JS_REST_PARAMETER",
        "TS_THIS_PARAMETER",
        "TS_PROPERTY_PARAMETER",
        "TS_PROPERTY_PARAMETER_MODIFIER_LIST",
        "TS_TYPE_ANNOTATION",
        "TS_RETURN_TYPE_ANNOTATION",
        "JS_IDENTIFIER_BINDING",
        "JS_IDENTIFIER_EXPRESSION",
        "JS_REFERENCE_IDENTIFIER",
        "JS_NAME",
        "JS_PRIVATE_NAME",
        "JS_THIS_EXPRESSION",
        "JS_ARRAY_EXPRESSION",
        "JS_ARRAY_ELEMENT_LIST",
        "JS_ARRAY_HOLE",
        "JS_COMPUTED_MEMBER_NAME",
        "JS_LITERAL_MEMBER_NAME",
        "JS_OBJECT_EXPRESSION",
        "JS_OBJECT_MEMBER_LIST",
        "JS_PROPERTY_OBJECT_MEMBER",
        "JS_GETTER_OBJECT_MEMBER",
        "JS_SETTER_OBJECT_MEMBER",
        "JS_METHOD_OBJECT_MEMBER",
        "JS_SUPER_EXPRESSION",
        "JS_PARENTHESIZED_EXPRESSION",
        "JS_NEW_EXPRESSION",
        "JS_FUNCTION_EXPRESSION",
        "JS_STATIC_MEMBER_EXPRESSION",
        "JS_COMPUTED_MEMBER_EXPRESSION",
        "JS_CALL_EXPRESSION",
        "JS_UNARY_EXPRESSION",
        "JS_PRE_UPDATE_EXPRESSION",
        "JS_POST_UPDATE_EXPRESSION",
        "JS_BINARY_EXPRESSION",
        "JS_INSTANCEOF_EXPRESSION",
        "JS_IN_EXPRESSION",
        "JS_LOGICAL_EXPRESSION",
        "JS_CONDITIONAL_EXPRESSION",
        "JS_ASSIGNMENT_EXPRESSION",
        "JS_SEQUENCE_EXPRESSION",
        "JS_CALL_ARGUMENTS",
        "JS_CALL_ARGUMENT_LIST",
        "JS_STRING_LITERAL_EXPRESSION",
        "JS_NUMBER_LITERAL_EXPRESSION",
        "JS_BIGINT_LITERAL_EXPRESSION",
        "JS_BOOLEAN_LITERAL_EXPRESSION",
        "JS_NULL_LITERAL_EXPRESSION",
        "JS_REGEX_LITERAL_EXPRESSION",
        "JS_TEMPLATE_EXPRESSION",
        "JS_TEMPLATE_ELEMENT",
        "JS_TEMPLATE_CHUNK_ELEMENT",
        "JS_TEMPLATE_ELEMENT_LIST",
        "JS_IMPORT_CALL_EXPRESSION",
        "JS_NEW_TARGET_EXPRESSION",
        "JS_IMPORT_META_EXPRESSION",
        "JS_SHORTHAND_PROPERTY_OBJECT_MEMBER",
        "JS_SPREAD",
        "JS_OBJECT_BINDING_PATTERN",
        "JS_ARRAY_BINDING_PATTERN",
        "JS_ARRAY_BINDING_PATTERN_ELEMENT_LIST",
        "JS_BINDING_PATTERN_WITH_DEFAULT",
        "JS_ARRAY_BINDING_PATTERN_REST_ELEMENT",
        "JS_OBJECT_BINDING_PATTERN_PROPERTY_LIST",
        "JS_OBJECT_BINDING_PATTERN_REST",
        "JS_OBJECT_BINDING_PATTERN_PROPERTY",
        "JS_OBJECT_BINDING_PATTERN_SHORTHAND_PROPERTY",
        "JS_ARROW_FUNCTION_EXPRESSION",
        "JS_YIELD_EXPRESSION",
        "JS_YIELD_ARGUMENT",
        "JS_CLASS_DECLARATION",
        "JS_CLASS_EXPRESSION",
        "JS_CLASS_MEMBER_LIST",
        "JS_STATIC_MODIFIER",
        "JS_ACCESSOR_MODIFIER",
        "TS_DECLARE_MODIFIER",
        "TS_READONLY_MODIFIER",
        "TS_ABSTRACT_MODIFIER",
        "TS_OVERRIDE_MODIFIER",
        "TS_ACCESSIBILITY_MODIFIER",
        "TS_CONST_MODIFIER",
        "TS_IN_MODIFIER",
        "TS_OUT_MODIFIER",
        "JS_EXTENDS_CLAUSE",
        "TS_IMPLEMENTS_CLAUSE",
        "JS_PRIVATE_CLASS_MEMBER_NAME",
        "JS_CONSTRUCTOR_CLASS_MEMBER",
        "TS_CONSTRUCTOR_SIGNATURE_CLASS_MEMBER",
        "JS_CONSTRUCTOR_MODIFIER_LIST",
        "JS_CONSTRUCTOR_PARAMETER_LIST",
        "JS_CONSTRUCTOR_PARAMETERS",
        "JS_PROPERTY_CLASS_MEMBER",
        "JS_PROPERTY_MODIFIER_LIST",
        "TS_OPTIONAL_PROPERTY_ANNOTATION",
        "TS_DEFINITE_PROPERTY_ANNOTATION",
        "JS_STATIC_INITIALIZATION_BLOCK_CLASS_MEMBER",
        "JS_METHOD_CLASS_MEMBER",
        "JS_METHOD_MODIFIER_LIST",
        "JS_GETTER_CLASS_MEMBER",
        "JS_SETTER_CLASS_MEMBER",
        "JS_EMPTY_CLASS_MEMBER",
        "JS_ASSIGNMENT_WITH_DEFAULT",
        "JS_PARENTHESIZED_ASSIGNMENT",
        "JS_IDENTIFIER_ASSIGNMENT",
        "JS_STATIC_MEMBER_ASSIGNMENT",
        "JS_COMPUTED_MEMBER_ASSIGNMENT",
        "TS_NON_NULL_ASSERTION_ASSIGNMENT",
        "TS_AS_ASSIGNMENT",
        "TS_SATISFIES_ASSIGNMENT",
        "TS_TYPE_ASSERTION_ASSIGNMENT",
        "JS_ARRAY_ASSIGNMENT_PATTERN",
        "JS_ARRAY_ASSIGNMENT_PATTERN_ELEMENT_LIST",
        "JS_ARRAY_ASSIGNMENT_PATTERN_REST_ELEMENT",
        "JS_OBJECT_ASSIGNMENT_PATTERN",
        "JS_OBJECT_ASSIGNMENT_PATTERN_PROPERTY_LIST",
        "JS_OBJECT_ASSIGNMENT_PATTERN_SHORTHAND_PROPERTY",
        "JS_OBJECT_ASSIGNMENT_PATTERN_PROPERTY",
        "JS_OBJECT_ASSIGNMENT_PATTERN_REST",
        "JS_IMPORT",
        "JS_IMPORT_BARE_CLAUSE",
        "JS_IMPORT_DEFAULT_CLAUSE",
        "JS_IMPORT_NAMESPACE_CLAUSE",
        "JS_IMPORT_NAMED_CLAUSE",
        "JS_NAMED_IMPORT_SPECIFIERS",
        "JS_NAMED_IMPORT_SPECIFIER_LIST",
        "JS_NAMESPACE_IMPORT_SPECIFIER",
        "JS_DEFAULT_IMPORT_SPECIFIER",
        "JS_NAMED_IMPORT_SPECIFIER",
        "JS_SHORTHAND_NAMED_IMPORT_SPECIFIER",
        "JS_IMPORT_ASSERTION",
        "JS_IMPORT_ASSERTION_ENTRY_LIST",
        "JS_IMPORT_ASSERTION_ENTRY",
        "JS_MODULE_SOURCE",
        "JS_EXPORT",
        "JS_EXPORT_NAMED_CLAUSE",
        "JS_EXPORT_NAMED_SPECIFIER_LIST",
        "JS_EXPORT_NAMED_SHORTHAND_SPECIFIER",
        "JS_EXPORT_NAMED_SPECIFIER",
        "JS_EXPORT_DEFAULT_EXPRESSION_CLAUSE",
        "JS_EXPORT_DEFAULT_DECLARATION_CLAUSE",
        "JS_CLASS_EXPORT_DEFAULT_DECLARATION",
        "JS_FUNCTION_EXPORT_DEFAULT_DECLARATION",
        "JS_EXPORT_FROM_CLAUSE",
        "JS_EXPORT_NAMED_FROM_CLAUSE",
        "JS_EXPORT_NAMED_FROM_SPECIFIER_LIST",
        "JS_EXPORT_NAMED_FROM_SPECIFIER",
        "JS_EXPORT_AS_CLAUSE",
        "TS_EXPORT_AS_NAMESPACE_CLAUSE",
        "TS_EXPORT_ASSIGNMENT_CLAUSE",
        "TS_EXPORT_DECLARE_CLAUSE",
        "JS_LITERAL_EXPORT_NAME",
        "JS_AWAIT_EXPRESSION",
        "JS_DECORATOR",
        "JS_DECORATOR_LIST",
        // TypeScript
        "TS_IDENTIFIER_BINDING",
        "TS_ANY_TYPE",
        "TS_UNKNOWN_TYPE",
        "TS_NUMBER_TYPE",
        "TS_NON_PRIMITIVE_TYPE",
        "TS_BOOLEAN_TYPE",
        "TS_BIGINT_TYPE",
        "TS_STRING_TYPE",
        "TS_SYMBOL_TYPE",
        "TS_VOID_TYPE",
        "TS_UNDEFINED_TYPE",
        "TS_NEVER_TYPE",
        "TS_THIS_TYPE",
        "TS_TYPEOF_TYPE",
        "TS_PARENTHESIZED_TYPE",
        "TS_MAPPED_TYPE",
        "TS_MAPPED_TYPE_OPTIONAL_MODIFIER_CLAUSE",
        "TS_MAPPED_TYPE_READONLY_MODIFIER_CLAUSE",
        "TS_MAPPED_TYPE_AS_CLAUSE",
        "TS_TYPE_ALIAS_DECLARATION",
        "TS_MODULE_DECLARATION",
        "TS_GLOBAL_DECLARATION",
        "TS_QUALIFIED_MODULE_NAME",
        "TS_MODULE_BLOCK",
        "TS_EXTERNAL_MODULE_DECLARATION",
        "TS_EMPTY_EXTERNAL_MODULE_DECLARATION_BODY",
        "TS_QUALIFIED_NAME",
        "TS_REFERENCE_TYPE",
        "TS_UNION_TYPE",
        "TS_UNION_TYPE_VARIANT_LIST",
        "TS_INTERSECTION_TYPE",
        "TS_INTERSECTION_TYPE_ELEMENT_LIST",
        "TS_OBJECT_TYPE",
        "TS_TYPE_MEMBER_LIST",
        "TS_INTERFACE_DECLARATION",
        "TS_EXTENDS_CLAUSE",
        "TS_PROPERTY_SIGNATURE_TYPE_MEMBER",
        "TS_METHOD_SIGNATURE_TYPE_MEMBER",
        "TS_CALL_SIGNATURE_TYPE_MEMBER",
        "TS_CONSTRUCT_SIGNATURE_TYPE_MEMBER",
        "TS_GETTER_SIGNATURE_TYPE_MEMBER",
        "TS_SETTER_SIGNATURE_TYPE_MEMBER",
        "TS_INDEX_SIGNATURE_TYPE_MEMBER",
        "TS_IMPORT_TYPE",
        "TS_IMPORT_TYPE_QUALIFIER",
        "TS_ARRAY_TYPE",
        "TS_INDEXED_ACCESS_TYPE",
        "TS_TUPLE_TYPE",
        "TS_TUPLE_TYPE_ELEMENT_LIST",
        "TS_REST_TUPLE_TYPE_ELEMENT",
        "TS_OPTIONAL_TUPLE_TYPE_ELEMENT",
        "TS_NAMED_TUPLE_TYPE_ELEMENT",
        "TS_TYPE_OPERATOR_TYPE",
        "TS_INFER_TYPE",
        "TS_CONSTRUCTOR_TYPE",
        "TS_FUNCTION_TYPE",
        "TS_PREDICATE_RETURN_TYPE",
        "TS_ASSERTS_RETURN_TYPE",
        "TS_ASSERTS_CONDITION",
        "TS_TYPE_PARAMETERS",
        "TS_TYPE_PARAMETER_LIST",
        "TS_TYPE_PARAMETER",
        "TS_TYPE_PARAMETER_MODIFIER_LIST",
        "TS_TYPE_PARAMETER_NAME",
        "TS_TYPE_CONSTRAINT_CLAUSE",
        "TS_DEFAULT_TYPE_CLAUSE",
        "TS_STRING_LITERAL_TYPE",
        "TS_NUMBER_LITERAL_TYPE",
        "TS_BIGINT_LITERAL_TYPE",
        "TS_BOOLEAN_LITERAL_TYPE",
        "TS_NULL_LITERAL_TYPE",
        "TS_TEMPLATE_LITERAL_TYPE",
        "TS_TEMPLATE_ELEMENT_LIST",
        "TS_TEMPLATE_CHUNK_ELEMENT",
        "TS_TEMPLATE_ELEMENT",
        "TS_TYPE_ARGUMENTS",
        "TS_TYPE_ARGUMENT_LIST",
        "TS_TYPE_LIST",
        "TS_EXTENDS",
        "TS_CONDITIONAL_TYPE",
        "TS_NON_NULL_ASSERTION_EXPRESSION",
        "TS_TYPE_ASSERTION_EXPRESSION",
        "TS_AS_EXPRESSION",
        "TS_SATISFIES_EXPRESSION",
        "TS_INSTANTIATION_EXPRESSION",
        "TS_ENUM_DECLARATION",
        "TS_ENUM_MEMBER_LIST",
        "TS_ENUM_MEMBER",
        "TS_IMPORT_EQUALS_DECLARATION",
        "TS_EXTERNAL_MODULE_REFERENCE",
        "TS_NAME_WITH_TYPE_ARGUMENTS",
        "TS_DECLARE_FUNCTION_DECLARATION",
        "TS_DECLARE_FUNCTION_EXPORT_DEFAULT_DECLARATION",
        "TS_DECLARE_STATEMENT",
        "TS_INDEX_SIGNATURE_PARAMETER",
        "TS_PROPERTY_SIGNATURE_CLASS_MEMBER",
        "TS_INITIALIZED_PROPERTY_SIGNATURE_CLASS_MEMBER",
        "TS_PROPERTY_SIGNATURE_MODIFIER_LIST",
        "TS_METHOD_SIGNATURE_CLASS_MEMBER",
        "TS_METHOD_SIGNATURE_MODIFIER_LIST",
        "TS_GETTER_SIGNATURE_CLASS_MEMBER",
        "TS_SETTER_SIGNATURE_CLASS_MEMBER",
        "TS_INDEX_SIGNATURE_CLASS_MEMBER",
        "TS_INDEX_SIGNATURE_MODIFIER_LIST",
        //JSX
        "JSX_NAME",
        "JSX_NAMESPACE_NAME",
        "JSX_REFERENCE_IDENTIFIER",
        "JSX_TAG_EXPRESSION",
        "JSX_ELEMENT",
        "JSX_FRAGMENT",
        "JSX_OPENING_FRAGMENT",
        "JSX_CLOSING_FRAGMENT",
        "JSX_SELF_CLOSING_ELEMENT",
        "JSX_OPENING_ELEMENT",
        "JSX_CLOSING_ELEMENT",
        "JSX_MEMBER_NAME",
        "JSX_TEXT",
        "JSX_ATTRIBUTE_LIST",
        "JSX_ATTRIBUTE",
        "JSX_SPREAD_ATTRIBUTE",
        "JSX_ATTRIBUTE_INITIALIZER_CLAUSE",
        "JSX_EXPRESSION_ATTRIBUTE_VALUE",
        "JSX_CHILD_LIST",
        "JSX_EXPRESSION_CHILD",
        "JSX_SPREAD_CHILD",
        "JSX_STRING",
        // bogus nodes JS
        "JS_BOGUS",
        "JS_BOGUS_EXPRESSION",
        "JS_BOGUS_STATEMENT",
        "JS_BOGUS_MEMBER",
        "JS_BOGUS_BINDING",
        "JS_BOGUS_PARAMETER",
        "JS_BOGUS_IMPORT_ASSERTION_ENTRY",
        "JS_BOGUS_NAMED_IMPORT_SPECIFIER",
        "JS_BOGUS_ASSIGNMENT",
        "TS_BOGUS_TYPE",
    ],
};
/// Parsed, in-memory form of one language's ungrammar: nodes, unions,
/// lists and bogus node names, as produced by `make_ast`.
#[derive(Default, Debug)]
pub struct AstSrc {
    pub nodes: Vec<AstNodeSrc>,
    pub unions: Vec<AstEnumSrc>,
    // BTreeMap keeps list iteration deterministically ordered by name.
    pub lists: BTreeMap<String, AstListSrc>,
    pub bogus: Vec<String>,
}
impl AstSrc {
    /// Registers a list node under `name`.
    pub fn push_list(&mut self, name: &str, src: AstListSrc) {
        self.lists.insert(name.to_owned(), src);
    }
    /// Iterates the list nodes in name order.
    pub fn lists(&self) -> std::collections::btree_map::Iter<String, AstListSrc> {
        self.lists.iter()
    }
    /// Whether `name` was classified as a list node.
    pub fn is_list(&self, name: &str) -> bool {
        self.lists.contains_key(name)
    }
    /// Sorts all nodes, enums, etc. for a stable code gen result
    pub fn sort(&mut self) {
        // Lists need no sorting: the BTreeMap already orders them by key.
        self.nodes.sort_unstable_by(|left, right| left.name.cmp(&right.name));
        self.unions.sort_unstable_by(|left, right| left.name.cmp(&right.name));
        self.bogus.sort_unstable();
        self.unions
            .iter_mut()
            .for_each(|union| union.variants.sort_unstable());
    }
}
/// Description of a list node: what it contains and how elements are
/// separated (if at all).
#[derive(Debug)]
pub struct AstListSrc {
    /// Name of the node type stored as each element.
    pub element_name: String,
    /// `None` for plain node lists, `Some` for separated lists.
    pub separator: Option<AstListSeparatorConfiguration>,
}
#[derive(Debug)]
pub struct AstListSeparatorConfiguration {
    /// Name of the separator token
    pub separator_token: String,
    /// Whether the list allows a trailing separator (e.g. a trailing comma)
    pub allow_trailing: bool,
}
/// A regular (non-union, non-list) AST node and its fields.
#[derive(Debug)]
pub struct AstNodeSrc {
    pub documentation: Vec<String>,
    pub name: String,
    // pub traits: Vec<String>,
    pub fields: Vec<Field>,
}
/// A token field either matches one fixed token or any of several
/// alternatives.
#[derive(Debug, Eq, PartialEq)]
pub enum TokenKind {
    Single(String),
    Many(Vec<String>),
}
/// One slot of an AST node: either a token or a child node.
#[derive(Debug, Eq, PartialEq)]
pub enum Field {
    Token {
        // Token text (e.g. ";", "=>"); mapped to a method stem by
        // `Field::method_name`.
        name: String,
        kind: TokenKind,
        optional: bool,
    },
    Node {
        name: String,
        // Name of the child node's type.
        ty: String,
        optional: bool,
    },
}
/// A union node of the form `A = B | C` and the names of its variants.
#[derive(Debug, Clone)]
pub struct AstEnumSrc {
    pub documentation: Vec<String>,
    pub name: String,
    // pub traits: Vec<String>,
    pub variants: Vec<String>,
}
impl Field {
    /// Returns the identifier used for this field's generated accessor
    /// method: token texts map to a snake_case stem suffixed with `_token`
    /// (e.g. `l_paren_token`), node fields use their name with any language
    /// prefix stripped.
    pub fn method_name(&self, language_kind: LanguageKind) -> proc_macro2::Ident {
        match self {
            Field::Token { name, .. } => {
                // A few token texts mean different things per language
                // (`|=`, `^=` are attribute matchers in CSS), hence the
                // `(text, language)` match.
                let name = match (name.as_str(), language_kind) {
                    (";", _) => "semicolon",
                    ("'{'", _) => "l_curly",
                    ("'}'", _) => "r_curly",
                    ("'('", _) => "l_paren",
                    ("')'", _) => "r_paren",
                    ("'['", _) => "l_brack",
                    ("']'", _) => "r_brack",
                    ("'`'", _) => "backtick",
                    ("<", _) => "l_angle",
                    (">", _) => "r_angle",
                    ("=", _) => "eq",
                    ("!", _) => "excl",
                    ("*", _) => "star",
                    ("&", _) => "amp",
                    (".", _) => "dot",
                    ("...", _) => "dotdotdot",
                    ("=>", _) => "fat_arrow",
                    (":", _) => "colon",
                    ("?", _) => "question_mark",
                    ("+", _) => "plus",
                    ("-", _) => "minus",
                    ("#", _) => "hash",
                    ("@", _) => "at",
                    ("+=", _) => "add_assign",
                    ("-=", _) => "subtract_assign",
                    ("*=", _) => "times_assign",
                    ("%=", _) => "remainder_assign",
                    ("**=", _) => "exponent_assign",
                    // FIX: these two stems were swapped (`>>=` mapped to
                    // "left_shift_assign" and `<<=` to "right_shift_assign"),
                    // contradicting the `<<` / `>>` mappings below.
                    (">>=", _) => "right_shift_assign",
                    ("<<=", _) => "left_shift_assign",
                    (">>>=", _) => "unsigned_right_shift_assign",
                    ("~", _) => "bitwise_not",
                    ("&=", _) => "bitwise_and_assign",
                    ("|=", LanguageKind::Js) => "bitwise_or_assign",
                    ("|=", LanguageKind::Css) => "exactly_or_hyphen",
                    ("^=", LanguageKind::Js) => "bitwise_xor_assign",
                    ("^=", LanguageKind::Css) => "prefix",
                    ("&&=", _) => "bitwise_logical_and_assign",
                    ("||=", _) => "bitwise_logical_or_assign",
                    ("??=", _) => "bitwise_nullish_coalescing_assign",
                    ("++", _) => "increment",
                    ("--", _) => "decrement",
                    ("<=", _) => "less_than_equal",
                    (">=", _) => "greater_than_equal",
                    ("==", _) => "equality",
                    ("===", _) => "strict_equality",
                    ("!=", _) => "inequality",
                    ("!==", _) => "strict_inequality",
                    ("/", _) => "slash",
                    // FIX: was the typo "reminder"; `%=` above already uses
                    // "remainder_assign".
                    ("%", _) => "remainder",
                    ("**", _) => "exponent",
                    ("<<", _) => "left_shift",
                    (">>", _) => "right_shift",
                    (">>>", _) => "unsigned_right_shift",
                    ("|", _) => "bitwise_or",
                    ("^", _) => "bitwise_xor",
                    ("??", _) => "nullish_coalescing",
                    ("||", _) => "logical_or",
                    ("&&", _) => "logical_and",
                    ("$=", _) => "suffix",
                    ("~=", _) => "whitespace_like",
                    (",", _) => "comma",
                    // Keywords and other identifier-like tokens pass through.
                    _ => name,
                };
                format_ident!("{}_token", name)
            }
            Field::Node { name, .. } => {
                let name = name;
                // Strip a known language prefix ("js_", "ts_", ...) so the
                // accessor reads `body()` instead of `js_body()`.
                let (prefix, tail) = name.split_once('_').unwrap_or(("", name));
                let final_name = if LANGUAGE_PREFIXES.contains(&prefix) {
                    tail
                } else {
                    name.as_str()
                };
                // this check here is to avoid emitting methods called "type()",
                // where "type" is a reserved word
                if final_name == "type" {
                    format_ident!("ty")
                } else {
                    format_ident!("{}", final_name)
                }
            }
        }
    }
    /// Type of the generated accessor: `SyntaxToken` for tokens, the node's
    /// own type name for child nodes.
    #[allow(dead_code)]
    pub fn ty(&self) -> proc_macro2::Ident {
        match self {
            Field::Token { .. } => format_ident!("SyntaxToken"),
            Field::Node { ty, .. } => format_ident!("{}", ty),
        }
    }
    /// Whether the field may be absent in a valid node.
    pub fn is_optional(&self) -> bool {
        match self {
            Field::Node { optional, .. } => *optional,
            Field::Token { optional, .. } => *optional,
        }
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/ast.rs | xtask/codegen/src/ast.rs | //! Generate SyntaxKind definitions as well as typed AST definitions for nodes and tokens.
//! This is derived from rust-analyzer/xtask/codegen
use std::collections::{HashMap, HashSet, VecDeque};
use std::str::FromStr;
use std::vec;
use super::{
kinds_src::{AstSrc, Field},
to_lower_snake_case, Mode,
};
use crate::css_kinds_src::CSS_KINDS_SRC;
use crate::generate_node_factory::generate_node_factory;
use crate::generate_nodes_mut::generate_nodes_mut;
use crate::generate_syntax_factory::generate_syntax_factory;
use crate::json_kinds_src::JSON_KINDS_SRC;
use crate::kinds_src::{AstListSeparatorConfiguration, AstListSrc, TokenKind};
use crate::termcolorful::{println_string_with_fg_color, Color};
use crate::ALL_LANGUAGE_KIND;
use crate::{
generate_macros::generate_macros,
generate_nodes::generate_nodes,
generate_syntax_kinds::generate_syntax_kinds,
kinds_src::{AstEnumSrc, AstNodeSrc, JS_KINDS_SRC},
update, LanguageKind,
};
use std::fmt::Write;
use ungrammar::{Grammar, Rule, Token};
use xtask::{project_root, Result};
// These nodes won't generate any code; `SyntaxElement*` productions mark
// bogus nodes instead (see `classify_node_rule`).
pub const SYNTAX_ELEMENT_TYPE: &str = "SyntaxElement";
/// Entry point for AST codegen: generates the grammar artifacts for each
/// requested language kind, or for all languages when the list is empty.
/// Unrecognized kind names are reported in red and skipped.
pub fn generate_ast(mode: Mode, language_kind_list: Vec<String>) -> Result<()> {
    let codegen_language_kinds = if language_kind_list.is_empty() {
        // FIX: was `ALL_LANGUAGE_KIND.clone().to_vec()` — `to_vec` already
        // copies the array's elements, so the extra `clone()` duplicated the
        // work for nothing.
        ALL_LANGUAGE_KIND.to_vec()
    } else {
        language_kind_list
            .iter()
            .filter_map(|kind| match LanguageKind::from_str(kind) {
                Ok(kind) => Some(kind),
                Err(err) => {
                    // Report the bad name but keep processing the rest.
                    println_string_with_fg_color(err, Color::Red);
                    None
                }
            })
            .collect::<Vec<_>>()
    };
    for kind in codegen_language_kinds {
        println_string_with_fg_color(
            format!(
                "-------------------Generating Grammar for {}-------------------",
                kind
            ),
            Color::Green,
        );
        let mut ast = load_ast(kind);
        // Sort for deterministic output across runs.
        ast.sort();
        generate_syntax(ast, &mode, kind)?;
    }
    Ok(())
}
/// Dispatches to the per-language grammar loader.
pub(crate) fn load_ast(language: LanguageKind) -> AstSrc {
    match language {
        LanguageKind::Js => load_js_ast(),
        LanguageKind::Css => load_css_ast(),
        LanguageKind::Json => load_json_ast(),
    }
}
/// Writes every generated artifact (nodes, mutable nodes, kinds, factories,
/// macros) for one language into that language's syntax and factory crates.
pub(crate) fn generate_syntax(ast: AstSrc, mode: &Mode, language_kind: LanguageKind) -> Result<()> {
    // Output directories inside the per-language crates.
    let syntax_generated_path = project_root()
        .join("crates")
        .join(language_kind.syntax_crate_name())
        .join("src/generated");
    let factory_generated_path = project_root()
        .join("crates")
        .join(language_kind.factory_crate_name())
        .join("src/generated");
    // Kind tables are static per language, unlike the grammar-derived `ast`.
    let kind_src = match language_kind {
        LanguageKind::Js => JS_KINDS_SRC,
        LanguageKind::Css => CSS_KINDS_SRC,
        LanguageKind::Json => JSON_KINDS_SRC,
    };
    let ast_nodes_file = syntax_generated_path.join("nodes.rs");
    let contents = generate_nodes(&ast, language_kind)?;
    update(ast_nodes_file.as_path(), &contents, mode)?;
    let ast_nodes_mut_file = syntax_generated_path.join("nodes_mut.rs");
    let contents = generate_nodes_mut(&ast, language_kind)?;
    update(ast_nodes_mut_file.as_path(), &contents, mode)?;
    let syntax_kinds_file = syntax_generated_path.join("kind.rs");
    let contents = generate_syntax_kinds(kind_src, language_kind)?;
    update(syntax_kinds_file.as_path(), &contents, mode)?;
    let syntax_factory_file = factory_generated_path.join("syntax_factory.rs");
    let contents = generate_syntax_factory(&ast, language_kind)?;
    update(syntax_factory_file.as_path(), &contents, mode)?;
    let node_factory_file = factory_generated_path.join("node_factory.rs");
    let contents = generate_node_factory(&ast, language_kind)?;
    update(node_factory_file.as_path(), &contents, mode)?;
    let ast_macros_file = syntax_generated_path.join("macros.rs");
    let contents = generate_macros(&ast, language_kind)?;
    update(ast_macros_file.as_path(), &contents, mode)?;
    Ok(())
}
fn check_unions(unions: &[AstEnumSrc]) {
// Setup a map to find the unions quickly
let union_map: HashMap<_, _> = unions.iter().map(|en| (&en.name, en)).collect();
// Iterate over all unions
for union in unions {
let mut stack_string = format!(
"\n******** START ERROR STACK ********\nChecking {}, variants : {:?}",
union.name, union.variants
);
let mut union_set: HashSet<_> = HashSet::from([&union.name]);
let mut union_queue: VecDeque<_> = VecDeque::new();
// Init queue for BFS
union_queue.extend(&union.variants);
// Loop over the queue getting the first variant
while let Some(variant) = union_queue.pop_front() {
if union_map.contains_key(variant) {
// The variant is a compound variant
// Get the struct from the map
let current_union = union_map[variant];
write!(
stack_string,
"\nSUB-ENUM CHECK : {}, variants : {:?}",
current_union.name, current_union.variants
)
.unwrap();
// Try to insert the current variant into the set
if union_set.insert(¤t_union.name) {
// Add all variants into the BFS queue
union_queue.extend(¤t_union.variants);
} else {
// We either have a circular dependency or 2 variants referencing the same type
println!("{}", stack_string);
panic!("Variant '{variant}' used twice or circular dependency");
}
} else {
// The variant isn't another enum
// stack_string.push_str(&format!());
write!(stack_string, "\nBASE-VAR CHECK : {}", variant).unwrap();
if !union_set.insert(variant) {
// The variant already used
println!("{}", stack_string);
panic!("Variant '{variant}' used twice");
}
}
}
}
}
/// Parses the bundled JS ungrammar and validates its unions.
pub(crate) fn load_js_ast() -> AstSrc {
    let grammar_src = include_str!("../js.ungram");
    let grammar: Grammar = grammar_src.parse().unwrap();
    let ast: AstSrc = make_ast(&grammar);
    // NOTE(review): only the JS grammar runs `check_unions`; CSS and JSON
    // loaders skip it — confirm that is deliberate.
    check_unions(&ast.unions);
    ast
}
/// Parses the bundled CSS ungrammar.
pub(crate) fn load_css_ast() -> AstSrc {
    let grammar_src = include_str!("../css.ungram");
    let grammar: Grammar = grammar_src.parse().unwrap();
    make_ast(&grammar)
}
/// Parses the bundled JSON ungrammar.
pub(crate) fn load_json_ast() -> AstSrc {
    let grammar_src = include_str!("../json.ungram");
    let grammar: Grammar = grammar_src.parse().unwrap();
    make_ast(&grammar)
}
/// Converts a parsed ungrammar into an `AstSrc` by classifying every node
/// as a union, regular node, bogus node or list.
fn make_ast(grammar: &Grammar) -> AstSrc {
    let mut ast = AstSrc::default();
    for node in grammar.iter() {
        let name = grammar[node].name.clone();
        // The `SyntaxElement` pseudo-node generates no code of its own.
        if name == SYNTAX_ELEMENT_TYPE {
            continue;
        }
        let rule = &grammar[node].rule;
        match classify_node_rule(grammar, rule) {
            NodeRuleClassification::Union(variants) => ast.unions.push(AstEnumSrc {
                documentation: vec![],
                name,
                variants,
            }),
            NodeRuleClassification::Node => {
                // Collect the node's fields by walking its rule tree.
                let mut fields = vec![];
                handle_rule(&mut fields, grammar, rule, None, false);
                ast.nodes.push(AstNodeSrc {
                    documentation: vec![],
                    name,
                    fields,
                })
            }
            NodeRuleClassification::Bogus => ast.bogus.push(name),
            NodeRuleClassification::List {
                separator,
                element_name,
            } => {
                ast.push_list(
                    name.as_str(),
                    AstListSrc {
                        element_name,
                        separator,
                    },
                );
            }
        }
    }
    ast
}
/// Classification of a node rule.
/// Determined by matching the top level production of any node.
enum NodeRuleClassification {
    /// Union of the form `A = B | C`
    Union(Vec<String>),
    /// Regular node containing tokens or sub nodes of the form `A = B 'c'`
    Node,
    /// A bogus node of the form `A = SyntaxElement*`
    Bogus,
    /// A list node of the form `A = B*` or `A = (B (',' B)*)` or `A = (B (',' B)* ','?)`
    List {
        /// Name of the nodes stored in this list (`B` in the example above)
        element_name: String,
        /// [None] if this is a node list or [Some] if this is a separated list
        separator: Option<AstListSeparatorConfiguration>,
    },
}
/// Inspects a node's top-level rule and decides whether it is a union,
/// bogus node, list or regular node (see `NodeRuleClassification`).
fn classify_node_rule(grammar: &Grammar, rule: &Rule) -> NodeRuleClassification {
    match rule {
        // this is for enums
        Rule::Alt(alternatives) => {
            let mut all_alternatives = vec![];
            for alternative in alternatives {
                match alternative {
                    Rule::Node(it) => all_alternatives.push(grammar[*it].name.clone()),
                    // A bare `;` alternative is ignored rather than treated
                    // as a variant.
                    Rule::Token(it) if grammar[*it].name == ";" => (),
                    // Any non-node alternative means this isn't a union.
                    _ => return NodeRuleClassification::Node,
                }
            }
            NodeRuleClassification::Union(all_alternatives)
        }
        // A*
        Rule::Rep(rule) => {
            let element_type = match rule.as_ref() {
                Rule::Node(node) => &grammar[*node].name,
                _ => {
                    panic!("Lists should only be over node types");
                }
            };
            // `SyntaxElement*` is the marker production for bogus nodes.
            if element_type == SYNTAX_ELEMENT_TYPE {
                NodeRuleClassification::Bogus
            } else {
                NodeRuleClassification::List {
                    separator: None,
                    element_name: element_type.to_string(),
                }
            }
        }
        Rule::Seq(rules) => {
            // (T (',' T)* ','?)
            // (T (',' T)*)
            if let Some(comma_list) = handle_comma_list(grammar, rules.as_slice()) {
                NodeRuleClassification::List {
                    separator: Some(AstListSeparatorConfiguration {
                        allow_trailing: comma_list.trailing_separator,
                        separator_token: comma_list.separator_name.to_string(),
                    }),
                    element_name: comma_list.node_name.to_string(),
                }
            } else {
                NodeRuleClassification::Node
            }
        }
        _ => NodeRuleClassification::Node,
    }
}
/// Returns the token's grammar name, wrapping it in single quotes when the
/// raw name would not round-trip through a `proc_macro2::TokenStream`.
fn clean_token_name(grammar: &Grammar, token: &Token) -> String {
    let name = &grammar[*token].name;
    // These tokens, when parsed to proc_macro2::TokenStream, generates a stream of bytes
    // that can't be recognized by [quote].
    // Hence, they need to be decorated with single quotes.
    if "[]{}()`".contains(name.as_str()) {
        format!("'{}'", name)
    } else {
        name.clone()
    }
}
/// Recursively walks `rule` and appends one [Field] per node/token reference.
///
/// * `label` - label of an enclosing `label: rule` production, used as the
///   field name instead of the derived one.
/// * `optional` - whether the current rule is wrapped in a `?` production.
fn handle_rule(
    fields: &mut Vec<Field>,
    grammar: &Grammar,
    rule: &Rule,
    label: Option<&str>,
    optional: bool,
) {
    match rule {
        Rule::Labeled { label, rule } => {
            // Some methods need to be manually implemented because they need some custom logic;
            // we use the prefix "manual__" to exclude labelled nodes.
            if handle_tokens_in_unions(fields, grammar, rule, label, optional) {
                return;
            }
            handle_rule(fields, grammar, rule, Some(label), optional)
        }
        Rule::Node(node) => {
            let ty = grammar[*node].name.clone();
            // The field name defaults to the snake_cased node type.
            let name = label
                .map(String::from)
                .unwrap_or_else(|| to_lower_snake_case(&ty));
            let field = Field::Node { name, ty, optional };
            fields.push(field);
        }
        Rule::Token(token) => {
            let name = clean_token_name(grammar, token);
            if name == "''" {
                // array hole
                return;
            }
            let field = Field::Token {
                name: label.map(String::from).unwrap_or_else(|| name.clone()),
                kind: TokenKind::Single(name),
                optional,
            };
            fields.push(field);
        }
        Rule::Rep(_) => {
            panic!("Create a list node for *many* children {:?}", label);
        }
        Rule::Opt(rule) => {
            // `rule?`: same rule, but the resulting field becomes optional.
            handle_rule(fields, grammar, rule, label, true);
        }
        Rule::Alt(rules) => {
            for rule in rules {
                handle_rule(fields, grammar, rule, label, false);
            }
        }
        Rule::Seq(rules) => {
            for rule in rules {
                handle_rule(fields, grammar, rule, label, false);
            }
        }
    };
}
/// The decomposed shape of a separated-list production.
#[derive(Debug)]
struct CommaList<'a> {
    // Name of the repeated node (`T` in `(T (',' T)*)`)
    node_name: &'a str,
    // Name of the separator token (e.g. `,`)
    separator_name: &'a str,
    // Whether a trailing separator (`','?`) is allowed
    trailing_separator: bool,
}
/// Tries to match `rules` against one of the separated-list shapes:
///
/// * `(T (',' T)* ','?)`
/// * `(T (',' T)*)`
///
/// Returns [None] when the sequence does not have that shape.
fn handle_comma_list<'a>(grammar: &'a Grammar, rules: &[Rule]) -> Option<CommaList<'a>> {
    // Destructure `T rep` with an optional trailing separator at the end.
    let (node, repeat, trailing_separator) = match rules {
        [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_separator)] => {
            (node, repeat, Some(trailing_separator))
        }
        [Rule::Node(node), Rule::Rep(repeat)] => (node, repeat, None),
        _ => return None,
    };
    // The repeated rule must itself be a sequence: `(sep T)`.
    let repeat = match &**repeat {
        Rule::Seq(it) => it,
        _ => return None,
    };
    // The sequence must be `(separator node)`, the node must be the same as
    // the leading one, and the separator must match the trailing one (if any).
    let comma = match repeat.as_slice() {
        [comma, Rule::Node(n)] => {
            let separator_matches_trailing = if let Some(trailing) = trailing_separator {
                &**trailing == comma
            } else {
                true
            };
            if n != node || !separator_matches_trailing {
                return None;
            }
            comma
        }
        _ => return None,
    };
    let separator_name = match comma {
        Rule::Token(token) => &grammar[*token].name,
        _ => panic!("The separator in rule {:?} must be a token", rules),
    };
    Some(CommaList {
        node_name: &grammar[*node].name,
        trailing_separator: trailing_separator.is_some(),
        separator_name,
    })
}
// handle cases like: `op: ('-' | '+' | '*')`
/// If `rule` is an alternation made up purely of tokens, pushes a single
/// [Field::Token] with [TokenKind::Many] and returns `true`; otherwise
/// leaves `fields` untouched and returns `false`.
fn handle_tokens_in_unions(
    fields: &mut Vec<Field>,
    grammar: &Grammar,
    rule: &Rule,
    label: &str,
    optional: bool,
) -> bool {
    // Unwrap a single optional layer: `(a | b)?`.
    let (rule, optional) = match rule {
        Rule::Opt(rule) => (&**rule, true),
        _ => (rule, optional),
    };
    let rule = match rule {
        Rule::Alt(rule) => rule,
        _ => return false,
    };
    let mut token_kinds = vec![];
    for rule in rule.iter() {
        match rule {
            Rule::Token(token) => token_kinds.push(clean_token_name(grammar, token)),
            // A non-token alternative means this isn't a token-only union.
            _ => return false,
        }
    }
    let field = Field::Token {
        name: label.to_string(),
        kind: TokenKind::Many(token_kinds),
        optional,
    };
    fields.push(field);
    true
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_website.rs | xtask/codegen/src/generate_website.rs | use rome_cli::rome_command;
use rome_service::VERSION;
use std::fs;
use xtask::{project_root, Result};
/// Frontmatter prepended to the generated VSCode extension page.
const FRONTMATTER: &str = r#"---
title: VSCode extension
emoji: 💻
category: reference
description: Notes about the Rome's VSCode extension
---
"#;
/// Frontmatter prepended to the generated CLI reference page.
const CLI_FRONTMATTER: &str = r#"---
title: CLI
emoji: ⌨️
category: reference
description: Available commands and arguments in the Rome CLI.
---
"#;
/// Template for the JS endpoint module that reads and serves
/// `configuration_schema.json` as an HTTP response.
const SCHEMA_TEMPLATE: &str = r#"// Run `ROME_VERSION=<version number> cargo codegen-website
// to generate a new schema
import {readFileSync} from "fs";
import {join, resolve} from "path"
export function get() {
const schemaPath = resolve(join("..", "npm", "rome", "configuration_schema.json"));
const schema = readFileSync(schemaPath, "utf8")
return new Response(schema, {
status: 200,
headers: {
"content-type": "application/json"
}
})
}"#;
/// Generates the website pages derived from sources in this repository:
/// the VSCode extension page, the CLI reference and, for tagged releases,
/// the versioned configuration-schema endpoint.
pub(crate) fn generate_files() -> Result<()> {
    let readme = fs::read_to_string(project_root().join("editors/vscode/README.md"))?;
    // Best-effort removal: the page may not exist on a fresh checkout.
    fs::remove_file(project_root().join("website/src/pages/vscode.mdx")).ok();
    let page = format!("{FRONTMATTER}{readme}");
    fs::write(project_root().join("website/src/pages/vscode.mdx"), page)?;
    // "0.0.0" is the placeholder version of local builds; only release
    // builds publish the CLI page and the versioned schema endpoint.
    if VERSION != "0.0.0" {
        let parser = rome_command();
        let markdown = parser.render_markdown("rome");
        fs::write(
            project_root().join("website/src/pages/cli.mdx"),
            format!("{CLI_FRONTMATTER}{markdown}"),
        )?;
        let schema_root_folder = project_root().join("website/src/pages/schemas");
        let schema_version_folder = schema_root_folder.join(VERSION);
        let schema_js_file = schema_version_folder.join("schema.json.js");
        // Recreate the per-version folder from scratch so stale files never
        // survive a regeneration. Pass paths by reference: the fs APIs take
        // `AsRef<Path>`, so cloning the PathBufs was needless work.
        if schema_version_folder.exists() {
            fs::remove_file(&schema_js_file)?;
            fs::remove_dir(&schema_version_folder)?;
        }
        fs::create_dir(&schema_version_folder)?;
        fs::write(&schema_js_file, SCHEMA_TEMPLATE)?;
    }
    Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_configuration.rs | xtask/codegen/src/generate_configuration.rs | use case::CaseExt;
use proc_macro2::{Ident, Literal, Span, TokenStream};
use pulldown_cmark::{Event, Parser, Tag};
use quote::quote;
use rome_analyze::{
GroupCategory, Queryable, RegistryVisitor, Rule, RuleCategory, RuleGroup, RuleMetadata,
};
use rome_js_syntax::JsLanguage;
use rome_json_syntax::JsonLanguage;
use std::collections::BTreeMap;
use xtask::*;
use xtask_codegen::{to_lower_snake_case, update};
/// Generates the linter configuration sources from the registered lint rules:
/// the `Rules`/per-group config structs, their JSON deserialization visitors,
/// and the glue that pushes configured rule options into the analyzer.
///
/// NOTE: the emitted token streams are compared against the checked-in files
/// (see [update]); their exact content and ordering must stay deterministic.
pub(crate) fn generate_rules_configuration(mode: Mode) -> Result<()> {
    let config_root = project_root().join("crates/rome_service/src/configuration/linter");
    let config_parsing_root =
        project_root().join("crates/rome_service/src/configuration/parse/json/");
    let push_rules_directory = project_root().join("crates/rome_service/src/configuration");
    // Collects every lint rule from the JS and JSON analyzers, grouped by
    // group name; BTreeMap keeps the output deterministically sorted.
    #[derive(Default)]
    struct LintRulesVisitor {
        groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>,
    }
    impl RegistryVisitor<JsLanguage> for LintRulesVisitor {
        fn record_category<C: GroupCategory<Language = JsLanguage>>(&mut self) {
            if matches!(C::CATEGORY, RuleCategory::Lint) {
                C::record_groups(self);
            }
        }
        fn record_rule<R>(&mut self)
        where
            R: Rule + 'static,
            R::Query: Queryable<Language = JsLanguage>,
            <R::Query as Queryable>::Output: Clone,
        {
            self.groups
                .entry(<R::Group as RuleGroup>::NAME)
                .or_insert_with(BTreeMap::new)
                .insert(R::METADATA.name, R::METADATA);
        }
    }
    impl RegistryVisitor<JsonLanguage> for LintRulesVisitor {
        fn record_category<C: GroupCategory<Language = JsonLanguage>>(&mut self) {
            if matches!(C::CATEGORY, RuleCategory::Lint) {
                C::record_groups(self);
            }
        }
        fn record_rule<R>(&mut self)
        where
            R: Rule + 'static,
            R::Query: Queryable<Language = JsonLanguage>,
            <R::Query as Queryable>::Output: Clone,
        {
            self.groups
                .entry(<R::Group as RuleGroup>::NAME)
                .or_insert_with(BTreeMap::new)
                .insert(R::METADATA.name, R::METADATA);
        }
    }
    let mut visitor = LintRulesVisitor::default();
    rome_js_analyze::visit_registry(&mut visitor);
    rome_json_analyze::visit_registry(&mut visitor);
    let LintRulesVisitor { groups } = visitor;
    // Token-stream fragments accumulated per group, interpolated into the
    // three generated files below.
    let mut struct_groups = Vec::new();
    let mut line_groups = Vec::new();
    let mut default_for_groups = Vec::new();
    let mut group_rules_union = Vec::new();
    let mut group_match_code = Vec::new();
    let mut group_get_severity = Vec::new();
    let mut group_name_list = vec!["recommended", "all"];
    let mut rule_visitor_call = Vec::new();
    let mut visitor_rule_list = Vec::new();
    let mut push_rule_list = Vec::new();
    for (group, rules) in groups {
        group_name_list.push(group);
        let property_group_name = Ident::new(&to_lower_snake_case(group), Span::call_site());
        let group_struct_name = Ident::new(&group.to_capitalized(), Span::call_site());
        let group_name_string_literal = Literal::string(group);
        struct_groups.push(generate_struct(group, &rules));
        visitor_rule_list.push(generate_visitor(group, &rules));
        push_rule_list.push(generate_push_to_analyzer_rules(group));
        line_groups.push(quote! {
            #[serde(skip_serializing_if = "Option::is_none")]
            #[bpaf(external, hide, optional)]
            pub #property_group_name: Option<#group_struct_name>
        });
        default_for_groups.push(quote! {
            #property_group_name: None
        });
        // "nursery" rules are only globally recommended on unstable builds.
        let global_recommended = if group == "nursery" {
            quote! { self.is_recommended() && rome_flags::is_unstable() }
        } else {
            quote! { self.is_recommended() }
        };
        group_rules_union.push(quote! {
            if let Some(group) = self.#property_group_name.as_ref() {
                group.collect_preset_rules(self.is_recommended(), &mut enabled_rules, &mut disabled_rules);
                enabled_rules.extend(&group.get_enabled_rules());
                disabled_rules.extend(&group.get_disabled_rules());
            } else if self.is_all() {
                enabled_rules.extend(#group_struct_name::all_rules_as_filters());
            } else if self.is_not_all() {
                disabled_rules.extend(#group_struct_name::all_rules_as_filters());
            } else if #global_recommended {
                enabled_rules.extend(#group_struct_name::recommended_rules_as_filters());
            }
        });
        group_get_severity.push(quote! {
            #group => self
                .#property_group_name
                .as_ref()
                .and_then(|#property_group_name| #property_group_name.get_rule_configuration(rule_name))
                .map(|rule_setting| rule_setting.into())
                .unwrap_or_else(|| {
                    if #group_struct_name::is_recommended_rule(rule_name) {
                        Severity::Error
                    } else {
                        Severity::Warning
                    }
                })
        });
        group_match_code.push(quote! {
            #group => #group_struct_name::has_rule(rule_name).then_some((category, rule_name))
        });
        rule_visitor_call.push(quote! {
            #group_name_string_literal => {
                let mut visitor = #group_struct_name::default();
                if are_recommended_and_all_correct(
                    &value,
                    name_text,
                    diagnostics,
                )? {
                    self.map_to_object(&value, name_text, &mut visitor, diagnostics)?;
                    self.#property_group_name = Some(visitor);
                }
            }
        });
    }
    // Contents of `configuration/linter/rules.rs`: the `Rules` struct and
    // every per-group struct.
    let groups = quote! {
        use serde::{Deserialize, Serialize};
        #[cfg(feature = "schema")]
        use schemars::JsonSchema;
        use crate::RuleConfiguration;
        use rome_analyze::RuleFilter;
        use indexmap::IndexSet;
        use bpaf::Bpaf;
        use rome_diagnostics::{Category, Severity};
        #[derive(Deserialize, Serialize, Debug, Clone, Bpaf)]
        #[cfg_attr(feature = "schema", derive(JsonSchema))]
        #[serde(rename_all = "camelCase", deny_unknown_fields)]
        pub struct Rules {
            /// It enables the lint rules recommended by Rome. `true` by default.
            #[serde(skip_serializing_if = "Option::is_none")]
            #[bpaf(hide)]
            pub recommended: Option<bool>,
            /// It enables ALL rules. The rules that belong to `nursery` won't be enabled.
            #[serde(skip_serializing_if = "Option::is_none")]
            #[bpaf(hide)]
            pub all: Option<bool>,
            #( #line_groups ),*
        }
        impl Default for Rules {
            fn default() -> Self {
                Self {
                    recommended: Some(true),
                    all: None,
                    #( #default_for_groups ),*
                }
            }
        }
        impl Rules {
            /// Checks if the code coming from [rome_diagnostics::Diagnostic] corresponds to a rule.
            /// Usually the code is built like {category}/{rule_name}
            pub fn matches_diagnostic_code<'a>(
                &self,
                category: Option<&'a str>,
                rule_name: Option<&'a str>,
            ) -> Option<(&'a str, &'a str)> {
                match (category, rule_name) {
                    (Some(category), Some(rule_name)) => match category {
                        #( #group_match_code ),*,
                        _ => None
                    },
                    _ => None
                }
            }
            /// Given a category coming from [Diagnostic](rome_diagnostics::Diagnostic), this function returns
            /// the [Severity](rome_diagnostics::Severity) associated to the rule, if the configuration changed it.
            ///
            /// If not, the function returns [None].
            pub fn get_severity_from_code(&self, category: &Category) -> Option<Severity> {
                let mut split_code = category.name().split('/');
                let _lint = split_code.next();
                debug_assert_eq!(_lint, Some("lint"));
                let group = split_code.next();
                let rule_name = split_code.next();
                if let Some((group, rule_name)) = self.matches_diagnostic_code(group, rule_name) {
                    let severity = match group {
                        #( #group_get_severity ),*,
                        _ => unreachable!("this group should not exist, found {}", group),
                    };
                    Some(severity)
                } else {
                    None
                }
            }
            pub(crate) const fn is_recommended(&self) -> bool {
                // It is only considered _not_ recommended when
                // the configuration is `"recommended": false`.
                // Hence, omission of the setting or set to `true` are considered recommended.
                !matches!(self.recommended, Some(false))
            }
            pub(crate) const fn is_all(&self) -> bool {
                matches!(self.all, Some(true))
            }
            pub(crate) const fn is_not_all(&self) -> bool {
                matches!(self.all, Some(false))
            }
            /// It returns a tuple of filters. The first element of the tuple are the enabled rules,
            /// while the second element are the disabled rules.
            ///
            /// Only one element of the tuple is [Some] at the time.
            ///
            /// The enabled rules are calculated from the difference with the disabled rules.
            pub fn as_enabled_rules(&self) -> IndexSet<RuleFilter> {
                let mut enabled_rules = IndexSet::new();
                let mut disabled_rules = IndexSet::new();
                #( #group_rules_union )*
                enabled_rules.difference(&disabled_rules).cloned().collect()
            }
        }
        #( #struct_groups )*
    };
    // Contents of `configuration/parse/json/rules.rs`: the JSON visitors.
    let visitors = quote! {
        use crate::configuration::linter::*;
        use crate::Rules;
        use rome_deserialize::json::{has_only_known_keys, VisitJsonNode};
        use rome_deserialize::{DeserializationDiagnostic, VisitNode};
        use rome_json_syntax::{AnyJsonValue, JsonLanguage};
        use rome_rowan::{AstNode, SyntaxNode};
        use crate::configuration::parse::json::linter::are_recommended_and_all_correct;
        impl VisitJsonNode for Rules {}
        impl VisitNode<JsonLanguage> for Rules {
            fn visit_member_name(
                &mut self,
                node: &SyntaxNode<JsonLanguage>,
                diagnostics: &mut Vec<DeserializationDiagnostic>,
            ) -> Option<()> {
                has_only_known_keys(node, &[#( #group_name_list ),*], diagnostics)
            }
            fn visit_map(
                &mut self,
                key: &SyntaxNode<JsonLanguage>,
                value: &SyntaxNode<JsonLanguage>,
                diagnostics: &mut Vec<DeserializationDiagnostic>,
            ) -> Option<()> {
                let (name, value) = self.get_key_and_value(key, value, diagnostics)?;
                let name_text = name.text();
                match name_text {
                    "recommended" => {
                        self.recommended = Some(self.map_to_boolean(&value, name_text, diagnostics)?);
                    }
                    "all" => {
                        self.all = Some(self.map_to_boolean(&value, name_text, diagnostics)?);
                    }
                    #( #rule_visitor_call )*
                    _ => {}
                }
                Some(())
            }
        }
        #( #visitor_rule_list )*
    };
    // Contents of `configuration/generated.rs`: option forwarding to the analyzer.
    let push_rules = quote! {
        use crate::configuration::linter::*;
        use crate::{RuleConfiguration, Rules};
        use rome_analyze::{AnalyzerRules, MetadataRegistry};
        pub(crate) fn push_to_analyzer_rules(
            rules: &Rules,
            metadata: &MetadataRegistry,
            analyzer_rules: &mut AnalyzerRules,
        ) {
            #( #push_rule_list )*
        }
    };
    let configuration = groups.to_string();
    let visitors = visitors.to_string();
    let push_rules = push_rules.to_string();
    update(
        &config_root.join("rules.rs"),
        &xtask::reformat(configuration)?,
        &mode,
    )?;
    update(
        &config_parsing_root.join("rules.rs"),
        &xtask::reformat(visitors)?,
        &mode,
    )?;
    update(
        &push_rules_directory.join("generated.rs"),
        &xtask::reformat(push_rules)?,
        &mode,
    )?;
    Ok(())
}
/// Generates the per-group configuration struct for one lint group: its rule
/// fields, the recommended/all filter tables and the lookup helpers.
fn generate_struct(group: &str, rules: &BTreeMap<&'static str, RuleMetadata>) -> TokenStream {
    let mut lines_recommended_rule = Vec::new();
    let mut lines_recommended_rule_as_filter = Vec::new();
    let mut lines_all_rule_as_filter = Vec::new();
    let mut declarations = Vec::new();
    let mut lines_rule = Vec::new();
    let mut schema_lines_rules = Vec::new();
    let mut rule_enabled_check_line = Vec::new();
    let mut rule_disabled_check_line = Vec::new();
    let mut get_rule_configuration_line = Vec::new();
    let mut number_of_recommended_rules: u8 = 0;
    let number_of_rules = Literal::u8_unsuffixed(rules.len() as u8);
    for (index, (rule, metadata)) in rules.iter().enumerate() {
        // Extract the first markdown paragraph of the rule docs as a
        // plain-text summary, used as the field's doc comment.
        let summary = {
            let mut docs = String::new();
            let parser = Parser::new(metadata.docs);
            for event in parser {
                match event {
                    Event::Text(text) => {
                        docs.push_str(text.as_ref());
                    }
                    Event::Code(text) => {
                        docs.push_str(text.as_ref());
                    }
                    Event::SoftBreak => {
                        docs.push(' ');
                    }
                    Event::Start(Tag::Paragraph) => {}
                    Event::End(Tag::Paragraph) => {
                        break;
                    }
                    Event::Start(tag) => match tag {
                        Tag::Strong | Tag::Paragraph => {
                            continue;
                        }
                        _ => panic!("Unimplemented tag {:?}", { tag }),
                    },
                    Event::End(tag) => match tag {
                        Tag::Strong | Tag::Paragraph => {
                            continue;
                        }
                        _ => panic!("Unimplemented tag {:?}", { tag }),
                    },
                    _ => {
                        panic!("Unimplemented event {:?}", { event })
                    }
                }
            }
            docs
        };
        let rule_position = Literal::u8_unsuffixed(index as u8);
        let rule_identifier = Ident::new(&to_lower_snake_case(rule), Span::call_site());
        let rule_cli_identifier = Literal::string(&to_lower_snake_case(rule).to_dashed());
        let declaration = quote! {
            #[serde(skip_serializing_if = "RuleConfiguration::is_err")]
            pub #rule_identifier: RuleConfiguration
        };
        declarations.push(declaration);
        if metadata.recommended {
            lines_recommended_rule_as_filter.push(quote! {
                RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position])
            });
            lines_recommended_rule.push(quote! {
                #rule
            });
            number_of_recommended_rules += 1;
        }
        lines_all_rule_as_filter.push(quote! {
            RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position])
        });
        lines_rule.push(quote! {
            #rule
        });
        schema_lines_rules.push(quote! {
            #[doc = #summary]
            #[bpaf(long(#rule_cli_identifier), argument("on|off|warn"), optional, hide)]
            #[serde(skip_serializing_if = "Option::is_none")]
            pub #rule_identifier: Option<RuleConfiguration>
        });
        rule_enabled_check_line.push(quote! {
            if let Some(rule) = self.#rule_identifier.as_ref() {
                if rule.is_enabled() {
                    index_set.insert(RuleFilter::Rule(
                        Self::GROUP_NAME,
                        Self::GROUP_RULES[#rule_position],
                    ));
                }
            }
        });
        rule_disabled_check_line.push(quote! {
            if let Some(rule) = self.#rule_identifier.as_ref() {
                if rule.is_disabled() {
                    index_set.insert(RuleFilter::Rule(
                        Self::GROUP_NAME,
                        Self::GROUP_RULES[#rule_position],
                    ));
                }
            }
        });
        get_rule_configuration_line.push(quote! {
            #rule => self.#rule_identifier.as_ref()
        });
    }
    let group_struct_name = Ident::new(&group.to_capitalized(), Span::call_site());
    let number_of_recommended_rules = Literal::u8_unsuffixed(number_of_recommended_rules);
    // "nursery" groups ignore the parent's recommended flag.
    let (group_recommended, parent_parameter) = if group == "nursery" {
        (
            quote! { self.is_recommended() },
            quote! { _parent_is_recommended: bool, },
        )
    } else {
        (
            quote! { parent_is_recommended || self.is_recommended() },
            quote! { parent_is_recommended: bool, },
        )
    };
    quote! {
        #[derive(Deserialize, Default, Serialize, Debug, Clone, Bpaf)]
        #[cfg_attr(feature = "schema", derive(JsonSchema))]
        #[serde(rename_all = "camelCase", default)]
        /// A list of rules that belong to this group
        pub struct #group_struct_name {
            /// It enables the recommended rules for this group
            #[serde(skip_serializing_if = "Option::is_none")]
            #[bpaf(hide)]
            pub recommended: Option<bool>,
            /// It enables ALL rules for this group.
            #[serde(skip_serializing_if = "Option::is_none")]
            #[bpaf(hide)]
            pub all: Option<bool>,
            #( #schema_lines_rules ),*
        }
        impl #group_struct_name {
            const GROUP_NAME: &'static str = #group;
            pub(crate) const GROUP_RULES: [&'static str; #number_of_rules] = [
                #( #lines_rule ),*
            ];
            const RECOMMENDED_RULES: [&'static str; #number_of_recommended_rules] = [
                #( #lines_recommended_rule ),*
            ];
            const RECOMMENDED_RULES_AS_FILTERS: [RuleFilter<'static>; #number_of_recommended_rules] = [
                #( #lines_recommended_rule_as_filter ),*
            ];
            const ALL_RULES_AS_FILTERS: [RuleFilter<'static>; #number_of_rules] = [
                #( #lines_all_rule_as_filter ),*
            ];
            /// Retrieves the recommended rules
            pub(crate) fn is_recommended(&self) -> bool {
                // we should inject recommended rules only when they are set to "true"
                matches!(self.recommended, Some(true))
            }
            pub(crate) const fn is_not_recommended(&self) -> bool {
                matches!(self.recommended, Some(false))
            }
            pub(crate) fn is_all(&self) -> bool {
                matches!(self.all, Some(true))
            }
            pub(crate) fn is_not_all(&self) -> bool {
                matches!(self.all, Some(false))
            }
            pub(crate) fn get_enabled_rules(&self) -> IndexSet<RuleFilter> {
                let mut index_set = IndexSet::new();
                #( #rule_enabled_check_line )*
                index_set
            }
            pub(crate) fn get_disabled_rules(&self) -> IndexSet<RuleFilter> {
                let mut index_set = IndexSet::new();
                #( #rule_disabled_check_line )*
                index_set
            }
            /// Checks if, given a rule name, matches one of the rules contained in this category
            pub(crate) fn has_rule(rule_name: &str) -> bool {
                Self::GROUP_RULES.contains(&rule_name)
            }
            /// Checks if, given a rule name, it is marked as recommended
            pub(crate) fn is_recommended_rule(rule_name: &str) -> bool {
                Self::RECOMMENDED_RULES.contains(&rule_name)
            }
            pub(crate) fn recommended_rules_as_filters() -> [RuleFilter<'static>; #number_of_recommended_rules] {
                Self::RECOMMENDED_RULES_AS_FILTERS
            }
            pub(crate) fn all_rules_as_filters() -> [RuleFilter<'static>; #number_of_rules] {
                Self::ALL_RULES_AS_FILTERS
            }
            /// Select preset rules
            pub(crate) fn collect_preset_rules(
                &self,
                #parent_parameter
                enabled_rules: &mut IndexSet<RuleFilter>,
                disabled_rules: &mut IndexSet<RuleFilter>,
            ) {
                if self.is_all() {
                    enabled_rules.extend(Self::all_rules_as_filters());
                } else if #group_recommended {
                    enabled_rules.extend(Self::recommended_rules_as_filters());
                }
                if self.is_not_all() {
                    disabled_rules.extend(Self::all_rules_as_filters());
                } else if self.is_not_recommended() {
                    disabled_rules.extend(Self::recommended_rules_as_filters());
                }
            }
            pub(crate) fn get_rule_configuration(&self, rule_name: &str) -> Option<&RuleConfiguration> {
                match rule_name {
                    #( #get_rule_configuration_line ),*,
                    _ => None
                }
            }
        }
    }
}
/// Generates the JSON deserialization visitor impls for one lint group
/// struct, accepting both string ("on"/"off"/...) and object rule settings.
fn generate_visitor(group: &str, rules: &BTreeMap<&'static str, RuleMetadata>) -> TokenStream {
    let group_struct_name = Ident::new(&group.to_capitalized(), Span::call_site());
    // "recommended" and "all" are always valid keys in addition to the rules.
    let mut group_rules = vec![Literal::string("recommended"), Literal::string("all")];
    let mut visitor_rule_line = Vec::new();
    for rule_name in rules.keys() {
        let rule_identifier = Ident::new(&to_lower_snake_case(rule_name), Span::call_site());
        group_rules.push(Literal::string(rule_name));
        visitor_rule_line.push(quote! {
            #rule_name => match value {
                AnyJsonValue::JsonStringValue(_) => {
                    let mut configuration = RuleConfiguration::default();
                    self.map_to_known_string(&value, name_text, &mut configuration, diagnostics)?;
                    self.#rule_identifier = Some(configuration);
                }
                AnyJsonValue::JsonObjectValue(_) => {
                    let mut rule_configuration = RuleConfiguration::default();
                    rule_configuration.map_rule_configuration(&value, name_text, #rule_name, diagnostics)?;
                    self.#rule_identifier = Some(rule_configuration);
                }
                _ => {
                    diagnostics.push(DeserializationDiagnostic::new_incorrect_type(
                        "object or string",
                        value.range(),
                    ));
                }
            }
        });
    }
    quote! {
        impl VisitJsonNode for #group_struct_name {}
        impl VisitNode<JsonLanguage> for #group_struct_name {
            fn visit_member_name(
                &mut self,
                node: &SyntaxNode<JsonLanguage>,
                diagnostics: &mut Vec<DeserializationDiagnostic>,
            ) -> Option<()> {
                has_only_known_keys(node, &[#( #group_rules ),*], diagnostics)
            }
            fn visit_map(
                &mut self,
                key: &SyntaxNode<JsonLanguage>,
                value: &SyntaxNode<JsonLanguage>,
                diagnostics: &mut Vec<DeserializationDiagnostic>,
            ) -> Option<()> {
                let (name, value) = self.get_key_and_value(key, value, diagnostics)?;
                let name_text = name.text();
                match name_text {
                    "recommended" => {
                        self.recommended = Some(self.map_to_boolean(&value, name_text, diagnostics)?);
                    }
                    "all" => {
                        self.all = Some(self.map_to_boolean(&value, name_text, diagnostics)?);
                    }
                    #( #visitor_rule_line ),*,
                    _ => {}
                }
                Some(())
            }
        }
    }
}
/// Generates the snippet that forwards configured rule options for one group
/// into the analyzer's rule registry.
fn generate_push_to_analyzer_rules(group: &str) -> TokenStream {
    let group_struct_name = Ident::new(&group.to_capitalized(), Span::call_site());
    let group_identifier = Ident::new(group, Span::call_site());
    quote! {
        if let Some(rules) = rules.#group_identifier.as_ref() {
            for rule_name in &#group_struct_name::GROUP_RULES {
                if let Some(RuleConfiguration::WithOptions(rule_options)) =
                    rules.get_rule_configuration(rule_name)
                {
                    if let Some(possible_options) = &rule_options.options {
                        if let Some(rule_key) = metadata.find_rule(#group, rule_name) {
                            let rule_options = possible_options.extract_option(&rule_key);
                            analyzer_rules.push_rule(rule_key, rule_options);
                        }
                    }
                }
            }
        }
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/lib.rs | xtask/codegen/src/lib.rs | //! Codegen tools for generating Syntax and AST definitions. Derived from Rust analyzer's codegen
//!
mod ast;
mod css_kinds_src;
mod formatter;
mod generate_analyzer;
mod generate_macros;
pub mod generate_new_lintrule;
mod generate_node_factory;
mod generate_nodes;
mod generate_nodes_mut;
mod generate_syntax_factory;
mod generate_syntax_kinds;
mod json_kinds_src;
mod kinds_src;
mod parser_tests;
pub mod promote_rule;
mod termcolorful;
mod unicode;
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
use std::path::Path;
use std::str::FromStr;
use xtask::{glue::fs2, Mode, Result};
pub use self::ast::generate_ast;
pub use self::formatter::generate_formatters;
pub use self::generate_analyzer::generate_analyzer;
pub use self::parser_tests::generate_parser_tests;
pub use self::unicode::generate_tables;
/// Outcome of [update]: whether the file on disk was rewritten.
pub enum UpdateResult {
    // The file already contained the requested contents.
    NotUpdated,
    // The file was rewritten with new contents.
    Updated,
}
/// The target language of a codegen run.
#[derive(Debug, Eq, Copy, Clone, PartialEq)]
pub enum LanguageKind {
    Js,
    Css,
    Json,
}
impl std::fmt::Display for LanguageKind {
    /// Writes the short lowercase language name ("js", "css" or "json").
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let name = match self {
            LanguageKind::Js => "js",
            LanguageKind::Css => "css",
            LanguageKind::Json => "json",
        };
        write!(f, "{}", name)
    }
}
/// All language kinds supported by the code generator.
pub const ALL_LANGUAGE_KIND: [LanguageKind; 3] =
    [LanguageKind::Js, LanguageKind::Css, LanguageKind::Json];
impl FromStr for LanguageKind {
type Err = String;
fn from_str(kind: &str) -> Result<Self, Self::Err> {
match kind {
"js" => Ok(LanguageKind::Js),
"css" => Ok(LanguageKind::Css),
"json" => Ok(LanguageKind::Json),
_ => Err(format!(
"Language {} not supported, please use: `js`, `css` or `json`",
kind
)),
}
}
}
impl LanguageKind {
    /// The language's syntax crate name as an [Ident] for generated code.
    pub(crate) fn syntax_crate_ident(&self) -> Ident {
        Ident::new(self.syntax_crate_name(), Span::call_site())
    }
    /// Token for the language's `SyntaxKind` type name.
    pub(crate) fn syntax_kind(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsSyntaxKind },
            LanguageKind::Css => quote! { CssSyntaxKind },
            LanguageKind::Json => quote! {JsonSyntaxKind},
        }
    }
    /// Token for the language's `SyntaxNode` type name.
    pub(crate) fn syntax_node(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsSyntaxNode },
            LanguageKind::Css => quote! { CssSyntaxNode },
            LanguageKind::Json => quote! { JsonSyntaxNode },
        }
    }
    /// Token for the language's `SyntaxElement` type name.
    pub(crate) fn syntax_element(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsSyntaxElement },
            LanguageKind::Css => quote! { CssSyntaxElement },
            LanguageKind::Json => quote! { JsonSyntaxElement },
        }
    }
    /// Token for the language's `SyntaxToken` type name.
    pub(crate) fn syntax_token(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsSyntaxToken },
            LanguageKind::Css => quote! { CssSyntaxToken },
            LanguageKind::Json => quote! { JsonSyntaxToken },
        }
    }
    /// Token for the language's `SyntaxElementChildren` type name.
    pub(crate) fn syntax_element_children(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsSyntaxElementChildren },
            LanguageKind::Css => quote! { CssSyntaxElementChildren },
            LanguageKind::Json => quote! { JsonSyntaxElementChildren },
        }
    }
    /// Token for the language's `SyntaxList` type name.
    pub(crate) fn syntax_list(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsSyntaxList },
            LanguageKind::Css => quote! { CssSyntaxList },
            LanguageKind::Json => quote! { JsonSyntaxList },
        }
    }
    /// Token for the language's `Language` marker type name.
    pub(crate) fn language(&self) -> TokenStream {
        match self {
            LanguageKind::Js => quote! { JsLanguage },
            LanguageKind::Css => quote! { CssLanguage },
            LanguageKind::Json => quote! { JsonLanguage },
        }
    }
    /// Name of the crate holding the language's formatter.
    pub fn formatter_crate_name(&self) -> &'static str {
        match self {
            LanguageKind::Js => "rome_js_formatter",
            LanguageKind::Css => "rome_css_formatter",
            LanguageKind::Json => "rome_json_formatter",
        }
    }
    /// Name of the crate holding the language's syntax definitions.
    pub fn syntax_crate_name(&self) -> &'static str {
        match self {
            LanguageKind::Js => "rome_js_syntax",
            LanguageKind::Css => "rome_css_syntax",
            LanguageKind::Json => "rome_json_syntax",
        }
    }
    /// Name of the crate holding the language's node factory.
    pub fn factory_crate_name(&self) -> &'static str {
        match self {
            LanguageKind::Js => "rome_js_factory",
            LanguageKind::Css => "rome_css_factory",
            LanguageKind::Json => "rome_json_factory",
        }
    }
}
/// A helper to update a file on disk only when its contents changed.
///
/// With [Mode::Verify] nothing is written; an outdated file is instead
/// reported as an error (used by CI to check that generated sources are
/// up to date). Otherwise the file is rewritten and the update is logged.
pub fn update(path: &Path, contents: &str, mode: &Mode) -> Result<UpdateResult> {
    // An unreadable file (e.g. not generated yet) falls through to the
    // write/verify path below.
    match fs2::read_to_string(path) {
        Ok(old_contents) if old_contents == contents => {
            return Ok(UpdateResult::NotUpdated);
        }
        _ => (),
    }
    if *mode == Mode::Verify {
        anyhow::bail!("`{}` is not up-to-date", path.display());
    }
    eprintln!("updating {}", path.display());
    fs2::write(path, contents)?;
    Ok(UpdateResult::Updated)
}
/// Converts a snake_case identifier to camelCase.
///
/// Underscores are dropped and the character following each underscore is
/// upper-cased; every other character passes through unchanged.
pub fn to_camel_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut capitalize_next = false;
    for ch in s.chars() {
        if ch == '_' {
            capitalize_next = true;
        } else if capitalize_next {
            capitalize_next = false;
            out.push(ch.to_ascii_uppercase());
        } else {
            out.push(ch);
        }
    }
    out
}
/// Converts a CamelCase identifier to SCREAMING_SNAKE_CASE.
///
/// An underscore is inserted before every ASCII uppercase letter except at
/// the start of the string, and the whole result is upper-cased.
pub fn to_upper_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for (index, ch) in s.chars().enumerate() {
        if index > 0 && ch.is_ascii_uppercase() {
            out.push('_');
        }
        out.push(ch.to_ascii_uppercase());
    }
    out
}
/// Converts a CamelCase identifier to lower_snake_case.
///
/// An underscore is inserted before every ASCII uppercase letter except at
/// the start of the string, and the whole result is lower-cased.
pub fn to_lower_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for (index, ch) in s.chars().enumerate() {
        if index > 0 && ch.is_ascii_uppercase() {
            out.push('_');
        }
        out.push(ch.to_ascii_lowercase());
    }
    out
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_schema.rs | xtask/codegen/src/generate_schema.rs | use rome_json_formatter::context::JsonFormatOptions;
use rome_json_parser::{parse_json, JsonParserOptions};
use rome_service::Configuration;
use schemars::schema_for;
use serde_json::to_string;
use xtask::{project_root, Mode, Result};
use xtask_codegen::update;
/// Generates `configuration_schema.json` (the JSON schema of
/// [Configuration]) for both the VSCode extension and the npm package.
pub(crate) fn generate_configuration_schema(mode: Mode) -> Result<()> {
    let schema_path_vscode = project_root().join("editors/vscode/configuration_schema.json");
    let schema_path_npm = project_root().join("npm/rome/configuration_schema.json");
    let schema = schema_for!(Configuration);
    let json_schema = to_string(&schema)?;
    // Reformat the serialized schema with the Rome JSON formatter so the
    // checked-in file is stable and diff-friendly.
    let parsed = parse_json(&json_schema, JsonParserOptions::default());
    let formatted =
        rome_json_formatter::format_node(JsonFormatOptions::default(), &parsed.syntax())
            .unwrap()
            .print()
            .unwrap();
    update(&schema_path_vscode, formatted.as_code(), &mode)?;
    update(&schema_path_npm, formatted.as_code(), &mode)?;
    Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_bindings.rs | xtask/codegen/src/generate_bindings.rs | use rome_js_factory::make;
use rome_js_formatter::{context::JsFormatOptions, format_node};
use rome_js_syntax::{
AnyJsBinding, AnyJsBindingPattern, AnyJsCallArgument, AnyJsDeclaration, AnyJsDeclarationClause,
AnyJsExportClause, AnyJsExpression, AnyJsFormalParameter, AnyJsImportClause,
AnyJsLiteralExpression, AnyJsModuleItem, AnyJsName, AnyJsNamedImport,
AnyJsNamedImportSpecifier, AnyJsObjectMember, AnyJsObjectMemberName, AnyJsParameter,
AnyJsStatement, AnyTsName, AnyTsReturnType, AnyTsType, AnyTsTypeMember, JsFileSource,
TriviaPieceKind, T,
};
use rome_rowan::AstNode;
use rome_service::workspace_types::{generate_type, methods, ModuleQueue};
use xtask::{project_root, Mode, Result};
use xtask_codegen::{to_camel_case, update};
/// Generates the TypeScript bindings for the Rome workspace JSON-RPC API and
/// writes them to `npm/backend-jsonrpc/src/workspace.ts`.
///
/// For every workspace method returned by `methods()` this emits:
/// - a method signature `name(params: Params): Promise<Result>` on the
///   exported `Workspace` interface, and
/// - a matching object-literal method (delegating to
///   `transport.request("rome/<name>", params)`) inside the value returned by
///   the generated `createWorkspace` factory function.
/// The TypeScript type declarations referenced by each method's params/result
/// are generated on demand (deduplicated through `queue`) and exported from
/// the same module. A `destroy(): void` member is appended to both the
/// interface and the factory object.
pub(crate) fn generate_workspace_bindings(mode: Mode) -> Result<()> {
    let bindings_path = project_root().join("npm/backend-jsonrpc/src/workspace.ts");
    let methods = methods();
    // Generated TS type declarations, paired with an optional doc comment.
    let mut declarations = Vec::new();
    // Members of the `Workspace` interface.
    let mut member_definitions = Vec::with_capacity(methods.len());
    // Members of the object literal returned by `createWorkspace`.
    let mut member_declarations = Vec::with_capacity(methods.len());
    let mut queue = ModuleQueue::default();
    for method in &methods {
        // Generate (and record in `declarations`) the TS types for the
        // method's request and response payloads.
        let params = generate_type(&mut declarations, &mut queue, &method.params);
        let result = generate_type(&mut declarations, &mut queue, &method.result);
        let camel_case = to_camel_case(method.name);
        // Interface member: `camelCase(params: Params): Promise<Result>;`
        member_definitions.push(AnyTsTypeMember::TsMethodSignatureTypeMember(
            make::ts_method_signature_type_member(
                AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(
                    make::ident(&camel_case),
                )),
                make::js_parameters(
                    make::token(T!['(']),
                    make::js_parameter_list(
                        Some(AnyJsParameter::AnyJsFormalParameter(
                            AnyJsFormalParameter::JsFormalParameter(
                                make::js_formal_parameter(
                                    make::js_decorator_list([]),
                                    AnyJsBindingPattern::AnyJsBinding(
                                        AnyJsBinding::JsIdentifierBinding(
                                            make::js_identifier_binding(make::ident("params")),
                                        ),
                                    ),
                                )
                                .with_type_annotation(make::ts_type_annotation(
                                    make::token(T![:]),
                                    params,
                                ))
                                .build(),
                            ),
                        )),
                        None,
                    ),
                    make::token(T![')']),
                ),
            )
            // Return type: `Promise<Result>`.
            .with_return_type_annotation(make::ts_return_type_annotation(
                make::token(T![:]),
                AnyTsReturnType::AnyTsType(AnyTsType::TsReferenceType(
                    make::ts_reference_type(AnyTsName::JsReferenceIdentifier(
                        make::js_reference_identifier(make::ident("Promise")),
                    ))
                    .with_type_arguments(make::ts_type_arguments(
                        make::token(T![<]),
                        make::ts_type_argument_list(Some(result), None),
                        make::token(T![>]),
                    ))
                    .build(),
                )),
            ))
            .build(),
        ));
        // Factory member:
        // `camelCase(params) { return transport.request("rome/<name>", params); }`
        member_declarations.push(AnyJsObjectMember::JsMethodObjectMember(
            make::js_method_object_member(
                AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(
                    make::ident(&camel_case),
                )),
                make::js_parameters(
                    make::token(T!['(']),
                    make::js_parameter_list(
                        Some(AnyJsParameter::AnyJsFormalParameter(
                            AnyJsFormalParameter::JsFormalParameter(
                                make::js_formal_parameter(
                                    make::js_decorator_list([]),
                                    AnyJsBindingPattern::AnyJsBinding(
                                        AnyJsBinding::JsIdentifierBinding(
                                            make::js_identifier_binding(make::ident("params")),
                                        ),
                                    ),
                                )
                                .build(),
                            ),
                        )),
                        None,
                    ),
                    make::token(T![')']),
                ),
                make::js_function_body(
                    make::token(T!['{']),
                    make::js_directive_list(None),
                    make::js_statement_list(Some(AnyJsStatement::JsReturnStatement(
                        make::js_return_statement(make::token(T![return]))
                            .with_argument(AnyJsExpression::JsCallExpression(
                                make::js_call_expression(
                                    // `transport.request`
                                    AnyJsExpression::JsStaticMemberExpression(
                                        make::js_static_member_expression(
                                            AnyJsExpression::JsIdentifierExpression(
                                                make::js_identifier_expression(
                                                    make::js_reference_identifier(make::ident(
                                                        "transport",
                                                    )),
                                                ),
                                            ),
                                            make::token(T![.]),
                                            AnyJsName::JsName(make::js_name(make::ident(
                                                "request",
                                            ))),
                                        ),
                                    ),
                                    // `("rome/<method name>", params,)`
                                    make::js_call_arguments(
                                        make::token(T!['(']),
                                        make::js_call_argument_list(
                                            [
                                                AnyJsCallArgument::AnyJsExpression(
                                                    AnyJsExpression::AnyJsLiteralExpression(
                                                        AnyJsLiteralExpression::JsStringLiteralExpression(make::js_string_literal_expression(make::js_string_literal(&format!("rome/{}", method.name)))),
                                                    ),
                                                ),
                                                AnyJsCallArgument::AnyJsExpression(
                                                    AnyJsExpression::JsIdentifierExpression(
                                                        make::js_identifier_expression(
                                                            make::js_reference_identifier(make::ident(
                                                                "params",
                                                            )),
                                                        ),
                                                    ),
                                                ),
                                            ],
                                            // One trailing separator after the two arguments.
                                            Some(make::token(T![,])),
                                        ),
                                        make::token(T![')']),
                                    ),
                                )
                                .build(),
                            ))
                            .build(),
                    ))),
                    make::token(T!['}']),
                ),
            )
            .build(),
        ));
    }
    // Header comment attached as leading trivia of the first `import` token.
    let leading_comment = [
        (
            TriviaPieceKind::SingleLineComment,
            "// Generated file, do not edit by hand, see `xtask/codegen`",
        ),
        (TriviaPieceKind::Newline, "\n"),
    ];
    // First module item: `import type { Transport } from "./transport";`
    let mut items = vec![AnyJsModuleItem::JsImport(
        make::js_import(
            make::token(T![import]).with_leading_trivia(leading_comment.into_iter()),
            AnyJsImportClause::JsImportNamedClause(
                make::js_import_named_clause(
                    AnyJsNamedImport::JsNamedImportSpecifiers(make::js_named_import_specifiers(
                        make::token(T!['{']),
                        make::js_named_import_specifier_list(
                            Some(AnyJsNamedImportSpecifier::JsShorthandNamedImportSpecifier(
                                make::js_shorthand_named_import_specifier(
                                    AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding(
                                        make::ident("Transport"),
                                    )),
                                )
                                .build(),
                            )),
                            None,
                        ),
                        make::token(T!['}']),
                    )),
                    make::token(T![from]),
                    make::js_module_source(make::js_string_literal("./transport")),
                )
                .with_type_token(make::token(T![type]))
                .build(),
            ),
        )
        .build(),
    )];
    // Export every generated type declaration, attaching its description (if
    // any) as a JSDoc-style block comment in the leading trivia.
    items.extend(declarations.into_iter().map(|(decl, description)| {
        let mut export = make::token(T![export]);
        if let Some(description) = description {
            let comment = format!("/**\n\t* {} \n\t */\n", description);
            let trivia = vec![
                (TriviaPieceKind::Newline, "\n"),
                (TriviaPieceKind::MultiLineComment, comment.as_str()),
                (TriviaPieceKind::Newline, "\n"),
            ];
            export = export.with_leading_trivia(trivia);
        }
        // Bridge from the declaration node to the clause variant expected by
        // `js_export`; each arm is a 1:1 mapping.
        AnyJsModuleItem::JsExport(make::js_export(
            make::js_decorator_list([]),
            export,
            AnyJsExportClause::AnyJsDeclarationClause(match decl {
                AnyJsDeclaration::JsClassDeclaration(decl) => {
                    AnyJsDeclarationClause::JsClassDeclaration(decl)
                }
                AnyJsDeclaration::JsFunctionDeclaration(decl) => {
                    AnyJsDeclarationClause::JsFunctionDeclaration(decl)
                }
                AnyJsDeclaration::JsVariableDeclaration(decl) => {
                    AnyJsDeclarationClause::JsVariableDeclarationClause(
                        make::js_variable_declaration_clause(decl).build(),
                    )
                }
                AnyJsDeclaration::TsDeclareFunctionDeclaration(decl) => {
                    AnyJsDeclarationClause::TsDeclareFunctionDeclaration(decl)
                }
                AnyJsDeclaration::TsEnumDeclaration(decl) => {
                    AnyJsDeclarationClause::TsEnumDeclaration(decl)
                }
                AnyJsDeclaration::TsExternalModuleDeclaration(decl) => {
                    AnyJsDeclarationClause::TsExternalModuleDeclaration(decl)
                }
                AnyJsDeclaration::TsGlobalDeclaration(decl) => {
                    AnyJsDeclarationClause::TsGlobalDeclaration(decl)
                }
                AnyJsDeclaration::TsImportEqualsDeclaration(decl) => {
                    AnyJsDeclarationClause::TsImportEqualsDeclaration(decl)
                }
                AnyJsDeclaration::TsInterfaceDeclaration(decl) => {
                    AnyJsDeclarationClause::TsInterfaceDeclaration(decl)
                }
                AnyJsDeclaration::TsModuleDeclaration(decl) => {
                    AnyJsDeclarationClause::TsModuleDeclaration(decl)
                }
                AnyJsDeclaration::TsTypeAliasDeclaration(decl) => {
                    AnyJsDeclarationClause::TsTypeAliasDeclaration(decl)
                }
            }),
        ))
    }));
    // Interface member: `destroy(): void;`
    member_definitions.push(AnyTsTypeMember::TsMethodSignatureTypeMember(
        make::ts_method_signature_type_member(
            AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(make::ident(
                "destroy",
            ))),
            make::js_parameters(
                make::token(T!['(']),
                make::js_parameter_list(None, None),
                make::token(T![')']),
            ),
        )
        .with_return_type_annotation(make::ts_return_type_annotation(
            make::token(T![:]),
            AnyTsReturnType::AnyTsType(AnyTsType::TsVoidType(make::ts_void_type(make::token(T![
                void
            ])))),
        ))
        .build(),
    ));
    // Factory member: `destroy() { transport.destroy(); }`
    member_declarations.push(AnyJsObjectMember::JsMethodObjectMember(
        make::js_method_object_member(
            AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(make::ident(
                "destroy",
            ))),
            make::js_parameters(
                make::token(T!['(']),
                make::js_parameter_list(None, None),
                make::token(T![')']),
            ),
            make::js_function_body(
                make::token(T!['{']),
                make::js_directive_list(None),
                make::js_statement_list(Some(AnyJsStatement::JsExpressionStatement(
                    make::js_expression_statement(AnyJsExpression::JsCallExpression(
                        make::js_call_expression(
                            AnyJsExpression::JsStaticMemberExpression(
                                make::js_static_member_expression(
                                    AnyJsExpression::JsIdentifierExpression(
                                        make::js_identifier_expression(
                                            make::js_reference_identifier(make::ident("transport")),
                                        ),
                                    ),
                                    make::token(T![.]),
                                    AnyJsName::JsName(make::js_name(make::ident("destroy"))),
                                ),
                            ),
                            make::js_call_arguments(
                                make::token(T!['(']),
                                make::js_call_argument_list(None, None),
                                make::token(T![')']),
                            ),
                        )
                        .build(),
                    ))
                    .build(),
                ))),
                make::token(T!['}']),
            ),
        )
        .build(),
    ));
    // `export interface Workspace { ...member_definitions }`
    items.push(AnyJsModuleItem::JsExport(make::js_export(
        make::js_decorator_list([]),
        make::token(T![export]),
        AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::TsInterfaceDeclaration(
            make::ts_interface_declaration(
                make::token(T![interface]),
                make::ts_identifier_binding(make::ident("Workspace")),
                make::token(T!['{']),
                make::ts_type_member_list(member_definitions),
                make::token(T!['}']),
            )
            .build(),
        )),
    )));
    // One `,` separator per object member (the list API pairs them up).
    let member_separators = (0..member_declarations.len()).map(|_| make::token(T![,]));
    // `export function createWorkspace(transport: Transport): Workspace {
    //      return { ...member_declarations };
    //  }`
    items.push(AnyJsModuleItem::JsExport(make::js_export(
        make::js_decorator_list([]),
        make::token(T![export]),
        AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::JsFunctionDeclaration(
            make::js_function_declaration(
                make::token(T![function]),
                AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding(make::ident(
                    "createWorkspace",
                ))),
                make::js_parameters(
                    make::token(T!['(']),
                    make::js_parameter_list(
                        Some(AnyJsParameter::AnyJsFormalParameter(
                            AnyJsFormalParameter::JsFormalParameter(
                                make::js_formal_parameter(
                                    make::js_decorator_list([]),
                                    AnyJsBindingPattern::AnyJsBinding(
                                        AnyJsBinding::JsIdentifierBinding(
                                            make::js_identifier_binding(make::ident("transport")),
                                        ),
                                    ),
                                )
                                .with_type_annotation(make::ts_type_annotation(
                                    make::token(T![:]),
                                    AnyTsType::TsReferenceType(
                                        make::ts_reference_type(AnyTsName::JsReferenceIdentifier(
                                            make::js_reference_identifier(make::ident("Transport")),
                                        ))
                                        .build(),
                                    ),
                                ))
                                .build(),
                            ),
                        )),
                        None,
                    ),
                    make::token(T![')']),
                ),
                make::js_function_body(
                    make::token(T!['{']),
                    make::js_directive_list(None),
                    make::js_statement_list(Some(AnyJsStatement::JsReturnStatement(
                        make::js_return_statement(make::token(T![return]))
                            .with_argument(AnyJsExpression::JsObjectExpression(
                                make::js_object_expression(
                                    make::token(T!['{']),
                                    make::js_object_member_list(
                                        member_declarations,
                                        member_separators,
                                    ),
                                    make::token(T!['}']),
                                ),
                            ))
                            .build(),
                    ))),
                    make::token(T!['}']),
                ),
            )
            .with_return_type_annotation(make::ts_return_type_annotation(
                make::token(T![:]),
                AnyTsReturnType::AnyTsType(AnyTsType::TsReferenceType(
                    make::ts_reference_type(AnyTsName::JsReferenceIdentifier(
                        make::js_reference_identifier(make::ident("Workspace")),
                    ))
                    .build(),
                )),
            ))
            .build(),
        )),
    )));
    // Assemble the module, pretty-print it as TypeScript and write the file.
    let module = make::js_module(
        make::js_directive_list(None),
        make::js_module_item_list(items),
        make::eof(),
    )
    .build();
    let formatted = format_node(JsFormatOptions::new(JsFileSource::ts()), module.syntax()).unwrap();
    let printed = formatted.print().unwrap();
    let code = printed.into_code();
    update(&bindings_path, &code, &mode)?;
    Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/promote_rule.rs | xtask/codegen/src/promote_rule.rs | use case::CaseExt;
use fs_extra::dir::{move_dir, CopyOptions};
use fs_extra::file;
use fs_extra::file::move_file;
use std::env;
use std::path::PathBuf;
// Lint groups a nursery rule may be promoted into.
// NOTE(review): presumably this list must stay in sync with the groups
// declared by the analyzer crates — confirm before editing.
const KNOWN_GROUPS: [&str; 7] = [
    "a11y",
    "suspicious",
    "correctness",
    "performance",
    "security",
    "style",
    "complexity",
];
// Analyzer source roots (relative to the repository root) that are searched
// for a `nursery/<rule>.rs` file.
const KNOWN_PATHS: [&str; 3] = [
    "crates/rome_js_analyze/src/analyzers",
    "crates/rome_js_analyze/src/semantic_analyzers",
    "crates/rome_js_analyze/src/aria_analyzers",
];
/// Promotes the lint rule `rule_name` out of the `nursery` group into
/// `new_group`.
///
/// Concretely this:
/// 1. moves `nursery/<rule>.rs` into `<new_group>/<rule>.rs` inside whichever
///    analyzer root of [`KNOWN_PATHS`] contains the rule,
/// 2. rewrites the diagnostic category `lint/nursery/<rule>` to
///    `lint/<new_group>/<rule>` in `rome_diagnostics_categories`, and
/// 3. relocates the rule's spec test directory to the new group.
///
/// Must be run from the repository root (the category/test paths are relative).
///
/// # Panics
/// Panics when `new_group` is not one of [`KNOWN_GROUPS`], when the rule
/// cannot be found under any of [`KNOWN_PATHS`], or when any filesystem
/// operation fails.
pub fn promote_rule(rule_name: &str, new_group: &str) {
    // Surface the underlying I/O error instead of discarding it through
    // `.ok().unwrap()`, which loses all error context.
    let current_dir = env::current_dir().expect("failed to read the current working directory");
    if !KNOWN_GROUPS.contains(&new_group) {
        panic!(
            "The group '{}' doesn't exist. Available groups: {}",
            new_group,
            KNOWN_GROUPS.join(", ")
        )
    }
    // Rule source files are named in snake_case, while the rule (and its
    // diagnostic category) use camelCase.
    let rule_name_snake = rule_name.to_snake();
    // Look for the rule's implementation file in each analyzer root.
    let mut rule_path = None;
    let mut analyzers_path = None;
    for known_path in KNOWN_PATHS {
        let local_rule_path = current_dir
            .join(known_path)
            .join("nursery")
            .join(format!("{}.rs", &rule_name_snake));
        if local_rule_path.exists() {
            rule_path = Some(local_rule_path);
            analyzers_path = Some(PathBuf::from(known_path));
            break;
        }
    }
    if let (Some(rule_path), Some(analyzers_path)) = (rule_path, analyzers_path) {
        // Rule found! Compute its destination inside the new group directory
        // (relative to the repo root, like `categories_path` below).
        let new_group_path = analyzers_path.join(new_group);
        let new_rule_path = new_group_path.join(format!("{}.rs", rule_name_snake));
        // Rewrite the diagnostic category to point at the new group.
        let categories_path = "crates/rome_diagnostics_categories/src/categories.rs";
        let categories = std::fs::read_to_string(categories_path).unwrap();
        let categories = categories.replace(
            &format!("lint/nursery/{}", rule_name),
            &format!("lint/{}/{}", new_group, rule_name),
        );
        move_file(rule_path, new_rule_path, &file::CopyOptions::default()).unwrap();
        std::fs::write(categories_path, categories).unwrap();
        // Relocate the rule's spec tests to the new group directory.
        let old_test_path = current_dir
            .join("crates/rome_js_analyze/tests/specs/nursery")
            .join(rule_name);
        let new_test_path = current_dir
            .join("crates/rome_js_analyze/tests/specs")
            .join(new_group)
            .join(rule_name);
        // NOTE(review): the destination directory is created before
        // `move_dir` moves the old directory into its parent — verify that
        // fs_extra merges into the pre-created directory as intended.
        std::fs::create_dir(new_test_path).unwrap();
        move_dir(
            old_test_path.display().to_string(),
            current_dir
                .join("crates/rome_js_analyze/tests/specs")
                .join(new_group)
                .display()
                .to_string(),
            &CopyOptions::new(),
        )
        .unwrap();
    } else {
        panic!("Couldn't find the rule {}", rule_name);
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_analyzer.rs | xtask/codegen/src/generate_analyzer.rs | use std::collections::BTreeMap;
use std::path::PathBuf;
use anyhow::{Context, Ok, Result};
use case::CaseExt;
use proc_macro2::{Punct, Spacing, TokenStream};
use quote::{format_ident, quote};
use xtask::{glue::fs2, project_root};
/// Regenerates the analyzer registries for every supported language
/// (JavaScript first, then JSON), stopping at the first failure.
pub fn generate_analyzer() -> Result<()> {
    generate_js_analyzer()?;
    generate_json_analyzer()
}
fn generate_js_analyzer() -> Result<()> {
let base_path = project_root().join("crates/rome_js_analyze/src");
let mut analyzers = BTreeMap::new();
generate_category("analyzers", &mut analyzers, base_path.clone())?;
let mut semantic_analyzers = BTreeMap::new();
generate_category(
"semantic_analyzers",
&mut semantic_analyzers,
base_path.clone(),
)?;
let mut aria_analyzers = BTreeMap::new();
generate_category("aria_analyzers", &mut aria_analyzers, base_path.clone())?;
let mut assists = BTreeMap::new();
generate_category("assists", &mut assists, base_path.clone())?;
let mut syntax = BTreeMap::new();
generate_category("syntax", &mut syntax, base_path)?;
update_js_registry_builder(
analyzers,
semantic_analyzers,
aria_analyzers,
assists,
syntax,
)
}
/// Regenerates the JSON analyzer: its single `analyzers` category module and
/// the registry that records it.
fn generate_json_analyzer() -> Result<()> {
    let base_path = project_root().join("crates/rome_json_analyze/src");
    let mut entries = BTreeMap::new();
    generate_category("analyzers", &mut entries, base_path)?;
    update_json_registry_builder(entries)
}
/// Generates `<base_path>/<name>.rs`, the module declaring one analyzer
/// category: it scans `<base_path>/<name>/` for group sub-directories,
/// generates each group's module, and records the category registration
/// statement into `entries` for the registry builder.
fn generate_category(
    name: &'static str,
    entries: &mut BTreeMap<&'static str, TokenStream>,
    base_path: PathBuf,
) -> Result<()> {
    let path = base_path.join(name);
    // Keyed by directory name so the generated lists are deterministically
    // ordered.
    let mut groups = BTreeMap::new();
    for entry in fs2::read_dir(path)? {
        let entry = entry?;
        // Only sub-directories are groups; skip plain files (e.g. the
        // generated `<group>.rs` modules themselves).
        if !entry.file_type()?.is_dir() {
            continue;
        }
        let entry = entry.path();
        let file_name = entry
            .file_stem()
            .context("path has no file name")?
            .to_str()
            .context("could not convert file name to string")?;
        // Generate the `<category>/<group>.rs` module for this group.
        generate_group(name, file_name, base_path.clone())?;
        let module_name = format_ident!("{}", file_name);
        let group_name = format_ident!("{}", to_camel_case(file_name)?);
        groups.insert(
            file_name.to_string(),
            (
                // Module declaration and the path used in `declare_category!`.
                quote! {
                    pub(crate) mod #module_name;
                },
                quote! {
                    self::#module_name::#group_name
                },
            ),
        );
    }
    let key = name;
    let module_name = format_ident!("{name}");
    let category_name = to_camel_case(name).unwrap();
    let category_name = format_ident!("{category_name}");
    // Map the category directory name onto the analyzer rule kind.
    let kind = match name {
        "syntax" => format_ident!("Syntax"),
        "analyzers" | "semantic_analyzers" | "aria_analyzers" => format_ident!("Lint"),
        "assists" => format_ident!("Action"),
        _ => panic!("unimplemented analyzer category {name:?}"),
    };
    // Registration statement later emitted by `update_*_registry_builder`.
    entries.insert(
        key,
        quote! {
            registry.record_category::<crate::#module_name::#category_name>();
        },
    );
    let (modules, paths): (Vec<_>, Vec<_>) = groups.into_values().unzip();
    let tokens = xtask::reformat(quote! {
        #( #modules )*
        ::rome_analyze::declare_category! {
            pub(crate) #category_name {
                kind: #kind,
                groups: [
                    #( #paths, )*
                ]
            }
        }
    })?;
    fs2::write(base_path.join(format!("{name}.rs")), tokens)?;
    Ok(())
}
/// Generates `<base_path>/<category>/<group>.rs`, the module declaring one
/// analyzer group: it scans `<base_path>/<category>/<group>/` for rule source
/// files and emits a module declaration plus a `declare_group!` invocation
/// listing every rule type.
fn generate_group(category: &'static str, group: &str, base_path: PathBuf) -> Result<()> {
    let path = base_path.join(category).join(group);
    // Keyed by the rule's camel-case type name for deterministic ordering.
    let mut rules = BTreeMap::new();
    for entry in fs2::read_dir(path)? {
        let entry = entry?.path();
        let file_name = entry
            .file_stem()
            .context("path has no file name")?
            .to_str()
            .context("could not convert file name to string")?;
        // `my_rule.rs` -> rule type `MyRule`.
        let rule_type = file_name.to_camel();
        let key = rule_type.clone();
        let module_name = format_ident!("{}", file_name);
        let rule_type = format_ident!("{}", rule_type);
        rules.insert(
            key,
            (
                // Module declaration and the path used in `declare_group!`.
                quote! {
                    pub(crate) mod #module_name;
                },
                quote! {
                    self::#module_name::#rule_type
                },
            ),
        );
    }
    let group_name = format_ident!("{}", to_camel_case(group)?);
    let (rule_imports, rule_names): (Vec<_>, Vec<_>) = rules.into_values().unzip();
    // Token streams carry no whitespace, so explicit newline/space punctuation
    // tokens are interpolated to shape the emitted `declare_group!` source.
    let nl = Punct::new('\n', Spacing::Alone);
    let sp = Punct::new(' ', Spacing::Joint);
    // One indentation level (four spaces).
    let sp4 = quote! { #sp #sp #sp #sp };
    let tokens = xtask::reformat(quote! {
        use rome_analyze::declare_group;
        #nl #nl
        #( #rule_imports )*
        #nl #nl
        declare_group! { #nl
            #sp4 pub(crate) #group_name { #nl
            #sp4 #sp4 name: #group, #nl
            #sp4 #sp4 rules: [ #nl
                #( #sp4 #sp4 #sp4 #rule_names, #nl )*
            #sp4 #sp4 ] #nl
            #sp4 } #nl
        }
    })?;
    fs2::write(base_path.join(category).join(format!("{group}.rs")), tokens)?;
    Ok(())
}
/// Converts a `snake_case` identifier to `CamelCase`.
///
/// The first character is uppercased; every underscore is dropped and the
/// character after it is uppercased instead.
///
/// # Errors
/// Fails with "iterator is empty" when the input ends in an underscore, since
/// there is no following character to capitalize.
fn to_camel_case(input: &str) -> Result<String> {
    let mut result = String::new();
    let mut iter = input.char_indices();
    while let Some((position, raw)) = iter.next() {
        // Uppercase the very first character.
        let first_pass = if position == 0 {
            raw.to_ascii_uppercase()
        } else {
            raw
        };
        // Replace `_x` with `X`.
        let emitted = if first_pass == '_' {
            let (_, following) = iter.next().context("iterator is empty")?;
            following.to_ascii_uppercase()
        } else {
            first_pass
        };
        result.push(emitted);
    }
    Ok(result)
}
/// Rewrites `crates/rome_js_analyze/src/registry.rs` so that `visit_registry`
/// records every analyzer category, in the order the arguments are declared.
fn update_js_registry_builder(
    analyzers: BTreeMap<&'static str, TokenStream>,
    semantic_analyzers: BTreeMap<&'static str, TokenStream>,
    aria_analyzers: BTreeMap<&'static str, TokenStream>,
    assists: BTreeMap<&'static str, TokenStream>,
    syntax: BTreeMap<&'static str, TokenStream>,
) -> Result<()> {
    let path = project_root().join("crates/rome_js_analyze/src/registry.rs");
    // Concatenate the registration statements of every category.
    let categories = [analyzers, semantic_analyzers, aria_analyzers, assists, syntax]
        .into_iter()
        .flat_map(BTreeMap::into_values);
    let tokens = xtask::reformat(quote! {
        use rome_analyze::RegistryVisitor;
        use rome_js_syntax::JsLanguage;
        pub fn visit_registry<V: RegistryVisitor<JsLanguage>>(registry: &mut V) {
            #( #categories )*
        }
    })?;
    fs2::write(path, tokens)?;
    Ok(())
}
/// Rewrites `crates/rome_json_analyze/src/registry.rs` so that
/// `visit_registry` records every JSON analyzer category.
fn update_json_registry_builder(analyzers: BTreeMap<&'static str, TokenStream>) -> Result<()> {
    let entries = analyzers.into_values();
    let tokens = xtask::reformat(quote! {
        use rome_analyze::RegistryVisitor;
        use rome_json_syntax::JsonLanguage;
        pub fn visit_registry<V: RegistryVisitor<JsonLanguage>>(registry: &mut V) {
            #( #entries )*
        }
    })?;
    fs2::write(
        project_root().join("crates/rome_json_analyze/src/registry.rs"),
        tokens,
    )?;
    Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_nodes.rs | xtask/codegen/src/generate_nodes.rs | use crate::css_kinds_src::CSS_KINDS_SRC;
use crate::json_kinds_src::JSON_KINDS_SRC;
use crate::kinds_src::{AstSrc, Field, TokenKind, JS_KINDS_SRC};
use crate::{to_lower_snake_case, to_upper_snake_case, LanguageKind};
use proc_macro2::Literal;
use quote::{format_ident, quote};
use std::collections::HashMap;
use xtask::Result;
pub fn generate_nodes(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {
let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = ast
.nodes
.iter()
.map(|node| {
let name = format_ident!("{}", node.name);
let node_kind = format_ident!("{}", to_upper_snake_case(node.name.as_str()));
let methods = node
.fields
.iter()
.enumerate()
.map(|(slot_index, field)| match field {
Field::Token { name, kind, .. } => {
let many = matches!(kind, TokenKind::Many(_));
let method_name = if many {
format_ident!("{}", name)
} else {
field.method_name(language_kind)
};
let is_optional = field.is_optional();
if is_optional {
quote! {
pub fn #method_name(&self) -> Option<SyntaxToken> {
support::token(&self.syntax, #slot_index)
}
}
} else {
quote! {
pub fn #method_name(&self) -> SyntaxResult<SyntaxToken> {
support::required_token(&self.syntax, #slot_index)
}
}
}
}
Field::Node { ty, optional, .. } => {
let is_list = ast.is_list(ty);
let ty = format_ident!("{}", &ty);
let method_name = field.method_name(language_kind);
if is_list {
quote! {
pub fn #method_name(&self) -> #ty {
support::list(&self.syntax, #slot_index)
}
}
} else if *optional {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::node(&self.syntax, #slot_index)
}
}
} else {
quote! {
pub fn #method_name(&self) -> SyntaxResult<#ty> {
support::required_node(&self.syntax, #slot_index)
}
}
}
}
});
let fields = node.fields.iter().map(|field| {
let name = match field {
Field::Token {
name,
kind: TokenKind::Many(_),
..
} => format_ident!("{}", name),
_ => field.method_name(language_kind),
};
let is_list = match field {
Field::Node { ty, .. } => ast.is_list(ty),
_ => false,
};
let string_name = name.to_string();
if is_list {
quote! {
.field(#string_name, &self.#name())
}
} else if field.is_optional() {
quote! {
.field(#string_name, &support::DebugOptionalElement(self.#name()))
}
} else {
quote! {
.field(#string_name, &support::DebugSyntaxResult(self.#name()))
}
}
});
let string_name = name.to_string();
let slots_name = format_ident!("{}Fields", node.name);
let (slot_fields, slot_constructors): (Vec<_>, Vec<_>) = node
.fields
.iter()
.map(|field| match field {
Field::Token { name, kind, .. } => {
let many = matches!(kind, TokenKind::Many(_));
let method_name = if many {
format_ident!("{}", name)
} else {
field.method_name(language_kind)
};
let is_optional = field.is_optional();
let field = if is_optional {
quote! { #method_name: Option<SyntaxToken> }
} else {
quote! { #method_name: SyntaxResult<SyntaxToken> }
};
(field, quote! { #method_name: self.#method_name() })
}
Field::Node { ty, optional, .. } => {
let is_list = ast.is_list(ty);
let ty = format_ident!("{}", &ty);
let method_name = field.method_name(language_kind);
let field = if is_list {
quote! { #method_name: #ty }
} else if *optional {
quote! { #method_name: Option<#ty> }
} else {
quote! { #method_name: SyntaxResult<#ty> }
};
(field, quote! { #method_name: self.#method_name() })
}
})
.unzip();
(
quote! {
// TODO: review documentation
// #[doc = #documentation]
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct #name {
pub(crate) syntax: SyntaxNode,
}
impl #name {
/// Create an AstNode from a SyntaxNode without checking its kind
///
/// # Safety
/// This function must be guarded with a call to [AstNode::can_cast]
/// or a match on [SyntaxNode::kind]
#[inline]
pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self {
Self { syntax }
}
pub fn as_fields(&self) -> #slots_name {
#slots_name {
#( #slot_constructors, )*
}
}
#(#methods)*
}
#[cfg(feature = "serde")]
impl Serialize for #name {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.as_fields().serialize(serializer)
}
}
#[cfg_attr(feature = "serde", derive(Serialize))]
pub struct #slots_name {
#( pub #slot_fields, )*
}
},
quote! {
impl AstNode for #name {
type Language = Language;
const KIND_SET: SyntaxKindSet<Language> =
SyntaxKindSet::from_raw(RawSyntaxKind(#node_kind as u16));
fn can_cast(kind: SyntaxKind) -> bool {
kind == #node_kind
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
fn into_syntax(self) -> SyntaxNode { self.syntax }
}
impl std::fmt::Debug for #name {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(#string_name)
#(#fields)*
.finish()
}
}
impl From<#name> for SyntaxNode {
fn from(n: #name) -> SyntaxNode {
n.syntax
}
}
impl From<#name> for SyntaxElement {
fn from(n: #name) -> SyntaxElement {
n.syntax.into()
}
}
},
)
})
.unzip();
// it maps enum name A and its corresponding variants
let name_to_variants: HashMap<_, _> = ast
.unions
.iter()
.map(|current_enum| (current_enum.name.clone(), current_enum.variants.clone()))
.collect();
let (union_defs, union_boilerplate_impls): (Vec<_>, Vec<_>) = ast
.unions
.iter()
.map(|union| {
let name = format_ident!("{}", union.name);
// here we collect all the variants because this will generate the enums
// so we don't care about filtered variants
let variants_for_union: Vec<_> = union
.variants
.iter()
.map(|variant| {
let variant_name = format_ident!("{}", variant);
quote! {
#variant_name(#variant_name)
}
})
.collect();
let as_method_for_variants_for_union: Vec<_> = union
.variants
.iter()
.map(|variant| {
let variant_name = format_ident!("{}", variant);
let fn_name = format_ident!("as_{}", to_lower_snake_case(variant));
quote! {
pub fn #fn_name(&self) -> Option<&#variant_name> {
match &self {
#name::#variant_name(item) => Some(item),
_ => None
}
}
}
})
.collect();
// Here we make the partition
//
// Inside an enum, we can have variants that point to a "flat" type or to another enum;
// we want to divide these variants as we will generate a different code based on these requirements
let (variant_of_variants, simple_variants): (Vec<_>, Vec<_>) =
union.variants.iter().partition(|current_enum| {
if let Some(variants) = name_to_variants.get(*current_enum) {
!variants.is_empty()
} else {
false
}
});
let variants: Vec<_> = simple_variants
.iter()
.map(|var| format_ident!("{}", var))
.collect();
let kinds: Vec<_> = variants
.iter()
.map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
.collect();
let variant_cast: Vec<_> = simple_variants
.iter()
.map(|current_enum| {
let variant_is_enum = ast.unions.iter().find(|e| &e.name == *current_enum);
let variant_name = format_ident!("{}", current_enum);
if variant_is_enum.is_some() {
quote! {
#variant_name::cast(syntax)?
}
} else {
quote! {
#variant_name { syntax }
}
}
})
.collect();
// variant of variants
let vv: Vec<_> = variant_of_variants
.iter()
.enumerate()
.map(|(i, en)| {
let variant_name = format_ident!("{}", en);
let variable_name = format_ident!("{}", to_lower_snake_case(en.as_str()));
(
// cast() code
if i != variant_of_variants.len() - 1 {
quote! {
if let Some(#variable_name) = #variant_name::cast(syntax.clone()) {
return Some(#name::#variant_name(#variable_name));
}}
} else {
// if this is the last variant, do not clone syntax
quote! {
if let Some(#variable_name) = #variant_name::cast(syntax) {
return Some(#name::#variant_name(#variable_name));
}}
},
// can_cast() code
quote! {
k if #variant_name::can_cast(k) => true,
},
// syntax() code
quote! {
#name::#variant_name(it) => it.syntax()
},
// into_syntax() code
quote! {
#name::#variant_name(it) => it.into_syntax()
},
)
})
.collect();
let vv_cast = vv.iter().map(|v| v.0.clone());
let vv_can_cast = vv.iter().map(|v| v.1.clone());
let vv_syntax = vv.iter().map(|v| v.2.clone());
let vv_into_syntax = vv.iter().map(|v| v.3.clone());
let all_kinds = if !kinds.is_empty() {
quote! {
#(#kinds)|* => true,
}
} else {
quote! {}
};
let cast_fn = if !kinds.is_empty() {
quote! {
let res = match syntax.kind() {
#(
#kinds => #name::#variants(#variant_cast),
)*
_ => {
#(
#vv_cast
)*
return None
}
};
Some(res)
}
} else {
quote! {
#(
#vv_cast
)*
None
}
};
let can_cast_fn = if union.variants.iter().any(|v| !simple_variants.contains(&v)) {
quote! {
match kind {
#all_kinds
#(#vv_can_cast)*
_ => false
}
}
} else {
quote! {
matches!(kind, #(#kinds)|*)
}
};
let kind_set: Vec<_> = union
.variants
.iter()
.enumerate()
.map(|(index, v)| {
let ident = format_ident!("{}", v);
if index == 0 {
quote!( #ident::KIND_SET )
} else {
quote!( .union(#ident::KIND_SET) )
}
})
.collect();
let (variant_syntax, variant_into_syntax): (Vec<_>, Vec<_>) = simple_variants
.iter()
.map(|_| {
(
quote! {
&it.syntax
},
quote! {
it.syntax
},
)
})
.unzip();
let all_variant_names: Vec<_> = union
.variants
.iter()
.map(|variant| format_ident!("{}", variant))
.collect();
(
quote! {
// #[doc = #doc]
#[derive(Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize))]
pub enum #name {
#(#variants_for_union),*
}
impl #name {
#(#as_method_for_variants_for_union)*
}
},
quote! {
#(
impl From<#variants> for #name {
fn from(node: #variants) -> #name {
#name::#variants(node)
}
}
)*
impl AstNode for #name {
type Language = Language;
const KIND_SET: SyntaxKindSet<Language> = #( #kind_set )*;
fn can_cast(kind: SyntaxKind) -> bool {
#can_cast_fn
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
#cast_fn
}
fn syntax(&self) -> &SyntaxNode {
match self {
#(
#name::#variants(it) => #variant_syntax,
)*
#(
#vv_syntax
),*
}
}
fn into_syntax(self) -> SyntaxNode {
match self {
#(
#name::#variants(it) => #variant_into_syntax,
)*
#(
#vv_into_syntax
),*
}
}
}
impl std::fmt::Debug for #name {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
#(
#name::#all_variant_names(it) => std::fmt::Debug::fmt(it, f),
)*
}
}
}
impl From<#name> for SyntaxNode {
fn from(n: #name) -> SyntaxNode {
match n {
#(
#name::#all_variant_names(it) => it.into(),
)*
}
}
}
impl From<#name> for SyntaxElement {
fn from(n: #name) -> SyntaxElement {
let node: SyntaxNode = n.into();
node.into()
}
}
},
)
})
.unzip();
let union_names = ast.unions.iter().map(|it| &it.name);
let node_names = ast.nodes.iter().map(|it| &it.name);
let display_impls = union_names
.chain(node_names.clone())
.map(|it| format_ident!("{}", it))
.map(|name| {
quote! {
impl std::fmt::Display for #name {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
}
});
let bogus = ast.bogus.iter().map(|bogus_name| {
let ident = format_ident!("{}", bogus_name);
let string_name = bogus_name;
let kind = format_ident!("{}", to_upper_snake_case(bogus_name));
quote! {
#[derive(Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize))]
pub struct #ident {
syntax: SyntaxNode
}
impl #ident {
/// Create an AstNode from a SyntaxNode without checking its kind
///
/// # Safety
/// This function must be guarded with a call to [AstNode::can_cast]
/// or a match on [SyntaxNode::kind]
#[inline]
pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self {
Self { syntax }
}
pub fn items(&self) -> SyntaxElementChildren {
support::elements(&self.syntax)
}
}
impl AstNode for #ident {
type Language = Language;
const KIND_SET: SyntaxKindSet<Language> =
SyntaxKindSet::from_raw(RawSyntaxKind(#kind as u16));
fn can_cast(kind: SyntaxKind) -> bool {
kind == #kind
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode {
&self.syntax
}
fn into_syntax(self) -> SyntaxNode {
self.syntax
}
}
impl std::fmt::Debug for #ident {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(#string_name)
.field("items", &DebugSyntaxElementChildren(self.items()))
.finish()
}
}
impl From<#ident> for SyntaxNode {
fn from(n: #ident) -> SyntaxNode {
n.syntax
}
}
impl From<#ident> for SyntaxElement {
fn from(n: #ident) -> SyntaxElement {
n.syntax.into()
}
}
}
});
let lists = ast.lists().map(|(name, list)| {
let list_name = format_ident!("{}", name);
let list_kind = format_ident!("{}", to_upper_snake_case(name));
let element_type = format_ident!("{}", list.element_name);
let node_impl = quote! {
impl #list_name {
/// Create an AstNode from a SyntaxNode without checking its kind
///
/// # Safety
/// This function must be guarded with a call to [AstNode::can_cast]
/// or a match on [SyntaxNode::kind]
#[inline]
pub unsafe fn new_unchecked(syntax: SyntaxNode) -> Self {
Self { syntax_list: syntax.into_list() }
}
}
impl AstNode for #list_name {
type Language = Language;
const KIND_SET: SyntaxKindSet<Language> =
SyntaxKindSet::from_raw(RawSyntaxKind(#list_kind as u16));
fn can_cast(kind: SyntaxKind) -> bool {
kind == #list_kind
}
fn cast(syntax: SyntaxNode) -> Option<#list_name> {
if Self::can_cast(syntax.kind()) {
Some(#list_name { syntax_list: syntax.into_list() })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode {
self.syntax_list.node()
}
fn into_syntax(self) -> SyntaxNode {
self.syntax_list.into_node()
}
}
};
let padded_name = format!("{} ", name);
let list_impl = if list.separator.is_some() {
quote! {
#[cfg(feature = "serde")]
impl Serialize for #list_name {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(Some(self.len()))?;
for e in self.iter() {
seq.serialize_element(&e)?;
}
seq.end()
}
}
impl AstSeparatedList for #list_name {
type Language = Language;
type Node = #element_type;
fn syntax_list(&self) -> &SyntaxList {
&self.syntax_list
}
fn into_syntax_list(self) -> SyntaxList {
self.syntax_list
}
}
impl Debug for #list_name {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(#padded_name)?;
f.debug_list().entries(self.elements()).finish()
}
}
impl IntoIterator for #list_name {
type Item = SyntaxResult<#element_type>;
type IntoIter = AstSeparatedListNodesIterator<Language, #element_type>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl IntoIterator for &#list_name {
type Item = SyntaxResult<#element_type>;
type IntoIter = AstSeparatedListNodesIterator<Language, #element_type>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
}
} else {
quote! {
#[cfg(feature = "serde")]
impl Serialize for #list_name {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(Some(self.len()))?;
for e in self.iter() {
seq.serialize_element(&e)?;
}
seq.end()
}
}
impl AstNodeList for #list_name {
type Language = Language;
type Node = #element_type;
fn syntax_list(&self) -> &SyntaxList {
&self.syntax_list
}
fn into_syntax_list(self) -> SyntaxList {
self.syntax_list
}
}
impl Debug for #list_name {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str(#padded_name)?;
f.debug_list().entries(self.iter()).finish()
}
}
impl IntoIterator for &#list_name {
type Item = #element_type;
type IntoIter = AstNodeListIterator<Language, #element_type>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl IntoIterator for #list_name {
type Item = #element_type;
type IntoIter = AstNodeListIterator<Language, #element_type>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
}
};
quote! {
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct #list_name {
syntax_list: SyntaxList,
}
#node_impl
#list_impl
}
});
let syntax_kind = language_kind.syntax_kind();
let syntax_node = language_kind.syntax_node();
let syntax_element = language_kind.syntax_element();
let syntax_element_children = language_kind.syntax_element_children();
let syntax_list = language_kind.syntax_list();
let syntax_token = language_kind.syntax_token();
let language = language_kind.language();
let serde_import = quote! {
#[cfg(feature = "serde")]
use serde::{Serialize, Serializer};
#[cfg(feature = "serde")]
use serde::ser::SerializeSeq;
};
let ast = quote! {
#![allow(clippy::enum_variant_names)]
// sometimes we generate comparison of simple tokens
#![allow(clippy::match_like_matches_macro)]
use crate::{
macros::map_syntax_node,
#language as Language, #syntax_element as SyntaxElement, #syntax_element_children as SyntaxElementChildren,
#syntax_kind::{self as SyntaxKind, *},
#syntax_list as SyntaxList, #syntax_node as SyntaxNode, #syntax_token as SyntaxToken,
};
#[allow(unused)]
use rome_rowan::{
AstNodeList, AstNodeListIterator, AstSeparatedList, AstSeparatedListNodesIterator
};
use rome_rowan::{support, AstNode, SyntaxKindSet, RawSyntaxKind, SyntaxResult};
use std::fmt::{Debug, Formatter};
#serde_import
#(#node_defs)*
#(#union_defs)*
#(#node_boilerplate_impls)*
#(#union_boilerplate_impls)*
#(#display_impls)*
#(#bogus)*
#(#lists)*
#[derive(Clone)]
pub struct DebugSyntaxElementChildren(pub SyntaxElementChildren);
impl Debug for DebugSyntaxElementChildren {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_list()
.entries(self.clone().0.map(DebugSyntaxElement))
.finish()
}
}
struct DebugSyntaxElement(SyntaxElement);
impl Debug for DebugSyntaxElement {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match &self.0 {
SyntaxElement::Node(node) => {
map_syntax_node!(node.clone(), node => std::fmt::Debug::fmt(&node, f))
}
SyntaxElement::Token(token) => Debug::fmt(token, f),
}
}
}
};
let ast = ast
.to_string()
.replace("T ! [ ", "T![")
.replace(" ] )", "])");
let pretty = xtask::reformat(ast)?;
Ok(pretty)
}
pub(crate) fn token_kind_to_code(
name: &str,
language_kind: LanguageKind,
) -> proc_macro2::TokenStream {
let kind_variant_name = to_upper_snake_case(name);
let kind_source = match language_kind {
LanguageKind::Js => JS_KINDS_SRC,
LanguageKind::Css => CSS_KINDS_SRC,
LanguageKind::Json => JSON_KINDS_SRC,
};
if kind_source.literals.contains(&kind_variant_name.as_str())
|| kind_source.tokens.contains(&kind_variant_name.as_str())
{
let ident = format_ident!("{}", kind_variant_name);
quote! { #ident }
} else {
// $ is valid syntax in rust and it's part of macros,
// so we need to decorate the tokens with quotes
if name == "$=" {
let token = Literal::string(name);
quote! { T![#token] }
} else {
let token: proc_macro2::TokenStream = name.parse().unwrap();
quote! { T![#token] }
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | true |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_node_factory.rs | xtask/codegen/src/generate_node_factory.rs | use super::kinds_src::AstSrc;
use crate::to_lower_snake_case;
use crate::{kinds_src::Field, to_upper_snake_case, LanguageKind};
use quote::{format_ident, quote};
use xtask::Result;
/// Generates the node-factory module for `language_kind`: one factory
/// function per AST node, node list, and bogus node described by `ast`.
///
/// Nodes whose fields are all required get a plain factory function whose
/// arguments map one-to-one onto the node's slots; nodes with at least one
/// optional field get a `<Node>Builder` with `with_*` setters and a
/// `build()` method. Returns the pretty-printed Rust source as a string.
pub fn generate_node_factory(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {
    let syntax_crate = language_kind.syntax_crate_ident();
    let syntax_kind = language_kind.syntax_kind();
    let syntax_token = language_kind.syntax_token();
    let syntax_node = language_kind.syntax_node();
    let syntax_element = language_kind.syntax_element();
    let nodes =
        ast.nodes.iter().map(|node| {
            let type_name = format_ident!("{}", node.name);
            let kind = format_ident!("{}", to_upper_snake_case(&node.name));
            let factory_name = format_ident!("{}", to_lower_snake_case(&node.name));
            let (optional, required): (Vec<_>, Vec<_>) =
                node.fields.iter().partition(|field| field.is_optional());
            if optional.is_empty() {
                // Every field is required: emit a plain factory function.
                let (args, slots): (Vec<_>, Vec<_>) = required
                    .into_iter()
                    .map(|field| {
                        let name = field.method_name(language_kind);
                        let type_name = field.ty();
                        let arg = quote! { #name: #type_name };
                        // Tokens are spliced directly; nodes go through
                        // their underlying syntax node.
                        let slot = match field {
                            Field::Token { .. } => {
                                quote! { Some(SyntaxElement::Token(#name)) }
                            }
                            Field::Node { .. } => {
                                quote! { Some(SyntaxElement::Node(#name.into_syntax())) }
                            }
                        };
                        (arg, slot)
                    })
                    .unzip();
                return quote! {
                    pub fn #factory_name( #( #args ),* ) -> #type_name {
                        #type_name::unwrap_cast(SyntaxNode::new_detached(
                            #syntax_kind::#kind,
                            [#( #slots ),*],
                        ))
                    }
                };
            }
            // At least one optional field: emit a builder type. Required
            // fields are taken by the factory function; optional ones are
            // set through `with_*` methods and default to `None`.
            let builder_name = format_ident!("{}Builder", node.name);
            let (required_args, required_fields): (Vec<_>, Vec<_>) = required
                .into_iter()
                .map(|field| {
                    let name = field.method_name(language_kind);
                    let type_name = field.ty();
                    let arg = quote! { #name: #type_name };
                    let field = quote! { #name };
                    (arg, field)
                })
                .unzip();
            let (optional_builder, optional_methods): (Vec<_>, Vec<_>) = optional
                .into_iter()
                .map(|field| {
                    let name = field.method_name(language_kind);
                    let method_name = format_ident!("with_{}", name);
                    let type_name = field.ty();
                    let field_type = quote! { #name: Option<#type_name> };
                    let field_init = quote! { #name: None };
                    let method = quote! {
                        pub fn #method_name(mut self, #name: #type_name) -> Self {
                            self.#name = Some(#name);
                            self
                        }
                    };
                    ((field_type, field_init), method)
                })
                .unzip();
            let (optional_fields, optional_inits): (Vec<_>, Vec<_>) =
                optional_builder.into_iter().unzip();
            // Slots must be emitted in declaration order, regardless of the
            // required/optional partition above.
            let slots: Vec<_> = node
                .fields
                .iter()
                .map(|field| {
                    let name = field.method_name(language_kind);
                    match field {
                        Field::Token { optional, .. } => if *optional {
                            quote! { self.#name.map(|token| SyntaxElement::Token(token)) }
                        } else {
                            quote! { Some(SyntaxElement::Token(self.#name)) }
                        }
                        Field::Node { optional, .. } => if *optional {
                            quote! { self.#name.map(|token| SyntaxElement::Node(token.into_syntax())) }
                        } else {
                            quote! { Some(SyntaxElement::Node(self.#name.into_syntax())) }
                        }
                    }
                })
                .collect();
            quote! {
                pub fn #factory_name( #( #required_args ),* ) -> #builder_name {
                    #builder_name {
                        #( #required_fields, )*
                        #( #optional_inits, )*
                    }
                }
                pub struct #builder_name {
                    #( #required_args, )*
                    #( #optional_fields, )*
                }
                impl #builder_name {
                    #( #optional_methods )*
                    pub fn build(self) -> #type_name {
                        #type_name::unwrap_cast(SyntaxNode::new_detached(
                            #syntax_kind::#kind,
                            [#( #slots ),*],
                        ))
                    }
                }
            }
        });
    // Separated lists interleave their slots: even indices hold items, odd
    // indices hold separator tokens.
    let lists = ast.lists().map(|(name, list)| {
        let list_name = format_ident!("{}", name);
        let kind = format_ident!("{}", to_upper_snake_case(name));
        let factory_name = format_ident!("{}", to_lower_snake_case(name));
        let item = format_ident!("{}", list.element_name);
        if list.separator.is_some() {
            quote! {
                pub fn #factory_name<I, S>(items: I, separators: S) -> #list_name
                where
                    I: IntoIterator<Item = #item>,
                    I::IntoIter: ExactSizeIterator,
                    S: IntoIterator<Item = #syntax_token>,
                    S::IntoIter: ExactSizeIterator,
                {
                    let mut items = items.into_iter();
                    let mut separators = separators.into_iter();
                    let length = items.len() + separators.len();
                    #list_name::unwrap_cast(SyntaxNode::new_detached(
                        #syntax_kind::#kind,
                        (0..length).map(|index| {
                            if index % 2 == 0 {
                                Some(items.next()?.into_syntax().into())
                            } else {
                                Some(separators.next()?.into())
                            }
                        }),
                    ))
                }
            }
        } else {
            quote! {
                pub fn #factory_name<I>(items: I) -> #list_name
                where
                    I: IntoIterator<Item = #item>,
                    I::IntoIter: ExactSizeIterator,
                {
                    #list_name::unwrap_cast(SyntaxNode::new_detached(
                        #syntax_kind::#kind,
                        items
                            .into_iter()
                            .map(|item| Some(item.into_syntax().into())),
                    ))
                }
            }
        }
    });
    // Bogus nodes accept an arbitrary sequence of optional elements.
    let bogus = ast.bogus.iter().map(|name| {
        let bogus_name = format_ident!("{}", name);
        let kind = format_ident!("{}", to_upper_snake_case(name));
        let factory_name = format_ident!("{}", to_lower_snake_case(name));
        quote! {
            pub fn #factory_name<I>(slots: I) -> #bogus_name
            where
                I: IntoIterator<Item = Option<SyntaxElement>>,
                I::IntoIter: ExactSizeIterator,
            {
                #bogus_name::unwrap_cast(SyntaxNode::new_detached(
                    #syntax_kind::#kind,
                    slots
                ))
            }
        }
    });
    let output = quote! {
        #![allow(clippy::redundant_closure)]
        #![allow(clippy::too_many_arguments)]
        use #syntax_crate::{*, #syntax_token as SyntaxToken, #syntax_node as SyntaxNode, #syntax_element as SyntaxElement};
        use rome_rowan::AstNode;
        #(#nodes)*
        #(#lists)*
        #(#bogus)*
    };
    let pretty = xtask::reformat(output)?;
    Ok(pretty)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_new_lintrule.rs | xtask/codegen/src/generate_new_lintrule.rs | use std::{path::PathBuf, str::FromStr};
use case::CaseExt;
/// Scaffolds a new lint rule named `rule_name` inside the nursery group.
///
/// Writes the rule implementation into `path`, registers the rule's
/// diagnostic category in `rome_diagnostics_categories` (keeping the
/// nursery list sorted), and creates `valid.js` / `invalid.js` test
/// fixtures when they don't exist yet. Panics if `path` is not a `nursery`
/// directory or if any of the file operations fail.
pub fn generate_new_lintrule(path: &str, rule_name: &str) {
    let rule_folder = PathBuf::from_str(path).unwrap();
    // New rules must always start out in the nursery group.
    match rule_folder.file_stem().and_then(|x| x.to_str()) {
        Some("nursery") => {}
        _ => {
            panic!("all new rules must be at a nursery folder");
        }
    }
    let rule_name_upper_camel = rule_name.to_camel();
    let rule_name_snake = rule_name.to_snake();
    let rule_name_lower_camel = rule_name_snake.to_camel_lowercase();
    // Generate rule code
    let code = format!(
        r#"use crate::semantic_services::Semantic;
use rome_analyze::{{
    context::RuleContext, declare_rule, Rule, RuleDiagnostic,
}};
use rome_console::markup;
use rome_js_semantic::{{Reference, ReferencesExtensions}};
use rome_js_syntax::JsIdentifierBinding;
declare_rule! {{
    /// Succinct description of the rule.
    ///
    /// Put context and details about the rule.
    /// As a starting point, you can take the description of the corresponding _ESLint_ rule (if any).
    ///
    /// Try to stay consistent with the descriptions of implemented rules.
    ///
    /// Add a link to the corresponding ESLint rule (if any):
    ///
    /// Source: https://eslint.org/docs/latest/rules/rule-name
    ///
    /// ## Examples
    ///
    /// ### Invalid
    ///
    /// ```js,expect_diagnostic
    /// var a = 1;
    /// a = 2;
    /// ```
    ///
    /// ## Valid
    ///
    /// ```js
    /// var a = 1;
    /// ```
    ///
    pub(crate) {rule_name_upper_camel} {{
        version: "next",
        name: "{rule_name_lower_camel}",
        recommended: false,
    }}
}}
impl Rule for {rule_name_upper_camel} {{
    type Query = Semantic<JsIdentifierBinding>;
    type State = Reference;
    type Signals = Vec<Self::State>;
    type Options = ();
    fn run(ctx: &RuleContext<Self>) -> Self::Signals {{
        let binding = ctx.query();
        let model = ctx.model();
        binding.all_references(model).collect()
    }}
    fn diagnostic(_: &RuleContext<Self>, reference: &Self::State) -> Option<RuleDiagnostic> {{
        Some(
            RuleDiagnostic::new(
                rule_category!(),
                reference.range(),
                markup! {{
                    "Variable is read here."
                }},
            )
            .note(markup! {{
                "This note will give you more information."
            }}),
        )
    }}
}}
"#
    );
    let file_name = format!("{path}/{rule_name_snake}.rs");
    std::fs::write(file_name, code).unwrap();
    // Register the rule's diagnostic category unless it is already listed.
    let categories_path = "crates/rome_diagnostics_categories/src/categories.rs";
    let categories = std::fs::read_to_string(categories_path).unwrap();
    if !categories.contains(&rule_name_lower_camel) {
        // We sort rules to reduce conflicts between contributions made in parallel.
        let rule_line = format!(
            r#"    "lint/nursery/{rule_name_lower_camel}": "https://docs.rome.tools/lint/rules/{rule_name_lower_camel}","#
        );
        // Markers delimiting the nursery section of categories.rs; the
        // debug_asserts below guard against the markers being renamed.
        let nursery_start = "    // nursery\n";
        let nursery_end = "\n    // nursery end";
        debug_assert!(categories.contains(nursery_start));
        debug_assert!(categories.contains(nursery_end));
        let nursery_start_index = categories.find(nursery_start).unwrap() + nursery_start.len();
        let nursery_end_index = categories.find(nursery_end).unwrap();
        let nursery_category = &categories[nursery_start_index..nursery_end_index];
        let mut nursery_rules: Vec<&str> = nursery_category
            .split('\n')
            .chain(Some(&rule_line[..]))
            .collect();
        nursery_rules.sort();
        let new_nursery_category = nursery_rules.join("\n");
        let categories = categories.replace(nursery_category, &new_nursery_category);
        debug_assert!(categories.contains(&rule_name_lower_camel));
        std::fs::write(categories_path, categories).unwrap();
    }
    // Generate test code
    let tests_path = format!("crates/rome_js_analyze/tests/specs/nursery/{rule_name_lower_camel}");
    let _ = std::fs::create_dir_all(tests_path);
    let test_file =
        format!("crates/rome_js_analyze/tests/specs/nursery/{rule_name_lower_camel}/valid.js");
    // Only create fixtures that don't exist yet so re-running the generator
    // never clobbers hand-written tests.
    if std::fs::File::open(&test_file).is_err() {
        let _ = std::fs::write(
            test_file,
            "/* should not generate diagnostics */\n\n var a = 1;",
        );
    }
    let test_file =
        format!("crates/rome_js_analyze/tests/specs/nursery/{rule_name_lower_camel}/invalid.js");
    if std::fs::File::open(&test_file).is_err() {
        let _ = std::fs::write(test_file, "\n\n var a = 1;\na = 2;\n a = 3;");
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/parser_tests.rs | xtask/codegen/src/parser_tests.rs | //! Takes comments from rome_js_parser and turns them into test data.
//! This code is derived from rust_analyzer/xtask/codegen/gen_parser_tests
use std::{
collections::HashMap,
fs, mem,
path::{Path, PathBuf},
};
use crate::{update, Mode};
use xtask::{project_root, Result};
/// Collects runs of `// `-prefixed comment lines from `text`.
///
/// Each returned entry is `(line_number, lines)`: `lines` are the comment
/// bodies with the `// ` prefix stripped, and `line_number` is the 1-based
/// number of the block's first comment line (it stays `0` for a block that
/// begins on the very first line of the input). When
/// `allow_blocks_with_empty_lines` is set, a bare `//` line contributes an
/// empty string instead of being treated as a non-comment line.
fn extract_comment_blocks(
    text: &str,
    allow_blocks_with_empty_lines: bool,
) -> Vec<(usize, Vec<String>)> {
    const PREFIX: &str = "// ";
    let mut blocks = Vec::new();
    let mut current: (usize, Vec<String>) = (0, Vec::new());
    for (idx, raw) in text.lines().enumerate() {
        let line = raw.trim_start();
        if allow_blocks_with_empty_lines && line == "//" {
            current.1.push(String::new());
            continue;
        }
        match line.strip_prefix(PREFIX) {
            Some(body) => current.1.push(body.to_string()),
            None => {
                // Non-comment line: flush any block in progress, then record
                // the 1-based line number a block starting on the next line
                // would have.
                if !current.1.is_empty() {
                    blocks.push(mem::take(&mut current));
                }
                current.0 = idx + 2;
            }
        }
    }
    if !current.1.is_empty() {
        blocks.push(current);
    }
    blocks
}
/// Extracts all inline tests from `crates/rome_js_parser/src` and installs
/// them as files under `test_data/inline/{ok,err}`, bumping the mtime of
/// `tests.rs` when anything changed so the test crate gets rebuilt.
pub fn generate_parser_tests(mode: Mode) -> Result<()> {
    let tests = tests_from_dir(&project_root().join(Path::new("crates/rome_js_parser/src")))?;
    // Writes each collected test (and its optional `.options.json`) into
    // `into`, reusing the path of an already-existing file when one is
    // found. Returns `true` when at least one file was created or changed.
    fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<bool> {
        let tests_dir = project_root().join(into);
        if !tests_dir.is_dir() {
            fs::create_dir_all(&tests_dir)?;
        }
        // ok is never actually read, but it needs to be specified to create a Test in existing_tests
        let existing = existing_tests(&tests_dir, true)?;
        // A test on disk that is no longer found in the sources means a
        // comment block was removed without cleaning up its files.
        for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
            panic!("Test is deleted: '{}'", t);
        }
        let mut some_file_was_updated = false;
        for (name, test) in tests {
            let path = match existing.get(name) {
                Some((path, _test)) => path.clone(),
                None => tests_dir
                    .join(name)
                    .with_extension(test.language.extension()),
            };
            if let crate::UpdateResult::Updated = update(&path, &test.text, &mode)? {
                some_file_was_updated = true;
            }
            if let Some(options) = &test.options {
                let path = tests_dir.join(name).with_extension("options.json");
                if let crate::UpdateResult::Updated = update(&path, options, &mode)? {
                    some_file_was_updated = true;
                }
            }
        }
        Ok(some_file_was_updated)
    }
    let mut some_file_was_updated = false;
    some_file_was_updated |=
        install_tests(&tests.ok, "crates/rome_js_parser/test_data/inline/ok", mode)?;
    some_file_was_updated |= install_tests(
        &tests.err,
        "crates/rome_js_parser/test_data/inline/err",
        mode,
    )?;
    if some_file_was_updated {
        // Touch tests.rs so cargo re-runs the inline suite against the
        // regenerated files; a failure to set the mtime is non-fatal.
        let _ = filetime::set_file_mtime(
            "crates/rome_js_parser/src/tests.rs",
            filetime::FileTime::now(),
        );
    }
    Ok(())
}
/// A single inline parser test extracted from a `// test …` comment block.
#[derive(Debug)]
struct Test {
    // Test name; also used as the file stem when installed on disk.
    pub name: String,
    // Source code of the test (newline-terminated).
    pub text: String,
    // `true` for `test` blocks, `false` for `test_err` blocks.
    pub ok: bool,
    pub language: Language,
    // Optional trailing options written to `<name>.options.json`.
    pub options: Option<String>,
}
/// Source language of an inline test, determining the file extension used
/// when the test is installed on disk.
#[derive(Debug)]
enum Language {
    JavaScript,
    TypeScript,
    TypeScriptDefinition,
    Jsx,
    Tsx,
}
impl Language {
    /// File extension used when writing a test of this language.
    const fn extension(&self) -> &'static str {
        match self {
            Language::JavaScript => "js",
            Language::TypeScript => "ts",
            Language::TypeScriptDefinition => "d.ts",
            Language::Jsx => "jsx",
            Language::Tsx => "tsx",
        }
    }
    /// Detects the language from a file name or from a bare extension.
    ///
    /// Accepts both full names ("foo.d.ts", "bar.js") and bare extensions
    /// ("ts", "js"). Callers that go through `Path::extension()` (see
    /// `existing_tests`) only ever see the bare form, which the previous
    /// implementation always rejected because it required a `.` to be
    /// present in the input.
    fn from_file_name(name: &str) -> Option<Language> {
        // Split off the last extension; a name without a dot is treated as
        // being the extension itself.
        let (rest, ext) = name.rsplit_once('.').unwrap_or(("", name));
        let language = match ext {
            "js" => Language::JavaScript,
            "ts" => match rest.rsplit_once('.') {
                // `foo.d.ts` is a TypeScript definition file.
                Some((_, "d")) => Language::TypeScriptDefinition,
                _ => Language::TypeScript,
            },
            "jsx" => Language::Jsx,
            "tsx" => Language::Tsx,
            _ => {
                return None;
            }
        };
        Some(language)
    }
}
/// All inline tests found in the source tree, keyed by test name.
#[derive(Default, Debug)]
struct Tests {
    // Tests expected to parse successfully (`// test …`).
    pub ok: HashMap<String, Test>,
    // Tests expected to produce parse errors (`// test_err …`).
    pub err: HashMap<String, Test>,
}
/// Parses every `// test …` / `// test_err …` comment block in `s` into a
/// `Test`. The header line is `test[_err] [language] name [options…]`;
/// the remaining comment lines form the test body.
fn collect_tests(s: &str) -> Vec<Test> {
    let mut tests = Vec::new();
    for (_, lines) in extract_comment_blocks(s, false) {
        let header = &lines[0];
        // First word decides whether this block is a test at all, and
        // whether it is expected to parse cleanly.
        let (ok, rest) = match header.split_once(' ') {
            Some(("test", rest)) => (true, rest),
            Some(("test_err", rest)) => (false, rest),
            _ => continue,
        };
        // Second word is an optional language tag; any unrecognized word is
        // consumed and the language falls back to JavaScript.
        let (language, rest) = match rest.split_once(' ') {
            Some(("jsx", r)) => (Language::Jsx, r),
            Some(("js", r)) => (Language::JavaScript, r),
            Some(("ts", r)) => (Language::TypeScript, r),
            Some(("d.ts", r)) => (Language::TypeScriptDefinition, r),
            Some(("tsx", r)) => (Language::Tsx, r),
            Some((_, r)) => (Language::JavaScript, r),
            None => panic!("wrong test configuration: {:?}", rest),
        };
        // Whatever remains is the test name, optionally followed by an
        // options blob.
        let (name, options) = match rest.split_once(' ') {
            Some((name, opts)) => (name, Some(opts.to_string())),
            None => (rest, None),
        };
        // The body is every line after the header, newline-terminated.
        let mut text = lines[1..].join("\n");
        text.push('\n');
        assert!(!text.trim().is_empty() && text.ends_with('\n'));
        tests.push(Test {
            name: name.to_string(),
            options,
            text,
            ok,
            language,
        })
    }
    tests
}
/// Walks `dir` recursively and collects the inline tests of every `.rs`
/// file, failing if two comment blocks declare the same test name within
/// the same expectation bucket (ok vs err).
fn tests_from_dir(dir: &Path) -> Result<Tests> {
    let mut res = Tests::default();
    for entry in ::walkdir::WalkDir::new(dir) {
        let entry = entry.unwrap();
        if !entry.file_type().is_file() {
            continue;
        }
        if entry.path().extension().unwrap_or_default() != "rs" {
            continue;
        }
        process_file(&mut res, entry.path())?;
    }
    return Ok(res);
    // Parses one source file and merges its tests into `res`.
    fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
        let text = fs::read_to_string(path)?;
        for test in collect_tests(&text) {
            if test.ok {
                if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
                    anyhow::bail!("Duplicate test: {}", old_test.name);
                }
            } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
                anyhow::bail!("Duplicate test: {}", old_test.name);
            }
        }
        Ok(())
    }
}
/// Scans `dir` for already-installed test files, keyed by file stem.
///
/// The `ok` flag is stored on the constructed `Test` records but is never
/// read by the caller; it exists only so a complete `Test` can be built.
fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
    let mut res = HashMap::new();
    for file in fs::read_dir(dir)? {
        let path = file?.path();
        // NOTE(review): `Path::extension` yields a bare extension such as
        // "ts" (and "ts" — not "d.ts" — for `foo.d.ts`); confirm that
        // `Language::from_file_name` accepts that bare form, otherwise no
        // existing test is ever recognized here.
        let language = path
            .extension()
            .and_then(|ext| ext.to_str())
            .and_then(Language::from_file_name);
        if let Some(language) = language {
            let name = path
                .file_stem()
                .map(|x| x.to_string_lossy().to_string())
                .unwrap();
            let text = fs::read_to_string(&path)?;
            let test = Test {
                name: name.clone(),
                options: None,
                text,
                ok,
                language,
            };
            if let Some(old) = res.insert(name, (path, test)) {
                println!("Duplicate test: {:?}", old);
            }
        }
    }
    Ok(res)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/json_kinds_src.rs | xtask/codegen/src/json_kinds_src.rs | use crate::kinds_src::KindsSrc;
/// Token, keyword, and node kind definitions for the JSON grammar, consumed
/// by the syntax-kind, node, and factory code generators.
pub const JSON_KINDS_SRC: KindsSrc = KindsSrc {
    // Surface punctuation paired with its SyntaxKind variant name.
    punct: &[
        (":", "COLON"),
        (",", "COMMA"),
        ("(", "L_PAREN"),
        (")", "R_PAREN"),
        ("{", "L_CURLY"),
        ("}", "R_CURLY"),
        ("[", "L_BRACK"),
        ("]", "R_BRACK"),
    ],
    keywords: &["null", "true", "false"],
    literals: &["JSON_STRING_LITERAL", "JSON_NUMBER_LITERAL"],
    // keep comment token for json 5 extension
    tokens: &[
        "ERROR_TOKEN",
        "NEWLINE",
        "WHITESPACE",
        "IDENT",
        "COMMENT",
        "MULTILINE_COMMENT",
    ],
    nodes: &[
        "JSON_ROOT",
        "JSON_NUMBER_VALUE",
        "JSON_STRING_VALUE",
        "JSON_BOOLEAN_VALUE",
        "JSON_NULL_VALUE",
        "JSON_ARRAY_VALUE",
        "JSON_OBJECT_VALUE",
        "JSON_MEMBER_LIST",
        "JSON_MEMBER",
        "JSON_MEMBER_NAME",
        "JSON_ARRAY_ELEMENT_LIST",
        // Bogus nodes
        "JSON_BOGUS",
        "JSON_BOGUS_VALUE",
    ],
};
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_syntax_factory.rs | xtask/codegen/src/generate_syntax_factory.rs | use super::kinds_src::AstSrc;
use crate::generate_nodes::token_kind_to_code;
use crate::kinds_src::TokenKind;
use crate::{kinds_src::Field, to_upper_snake_case, LanguageKind};
use quote::{format_ident, quote};
use xtask::Result;
/// Generates the `SyntaxFactory` implementation for `language_kind`.
///
/// The generated factory validates parsed children against the expected
/// slot layout of every node kind and demotes unexpected shapes to the
/// corresponding bogus kind. Returns the pretty-printed Rust source.
pub fn generate_syntax_factory(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {
    let (syntax_crate, syntax_kind, factory_kind) = match language_kind {
        LanguageKind::Js => (
            quote! { rome_js_syntax },
            quote! { JsSyntaxKind },
            quote! { JsSyntaxFactory },
        ),
        LanguageKind::Css => (
            quote! { rome_css_syntax },
            quote! { CssSyntaxKind },
            quote! { CssSyntaxFactory },
        ),
        LanguageKind::Json => (
            quote! { rome_json_syntax },
            quote! { JsonSyntaxKind },
            quote! { JsonSyntaxFactory },
        ),
    };
    let normal_node_arms = ast.nodes.iter().map(|node| {
        let kind = format_ident!("{}", to_upper_snake_case(&node.name));
        let expected_len = node.fields.len();
        // For each declared field, emit a check that consumes the current
        // element when it matches the slot's expected node type or token
        // kind(s); a non-matching slot is left empty.
        let fields = node.fields.iter().map(|field| {
            let field_predicate = match field {
                Field::Node { ty, .. } => {
                    let ast_type_name = format_ident!("{}", ty);
                    quote! {
                        #ast_type_name::can_cast(element.kind())
                    }
                }
                Field::Token { kind, .. } => match kind {
                    TokenKind::Single(expected) => {
                        let expected_kind = token_kind_to_code(expected, language_kind);
                        quote! { element.kind() == #expected_kind}
                    }
                    TokenKind::Many(expected) => {
                        let expected_kinds = expected
                            .iter()
                            .map(|kind| token_kind_to_code(kind, language_kind));
                        quote! {
                            matches!(element.kind(), #(#expected_kinds)|*)
                        }
                    }
                },
            };
            quote! {
                if let Some(element) = &current_element {
                    if #field_predicate {
                        slots.mark_present();
                        current_element = elements.next();
                    }
                }
                slots.next_slot();
            }
        });
        quote! {
            #kind => {
                let mut elements = (&children).into_iter();
                let mut slots: RawNodeSlots<#expected_len> = RawNodeSlots::default();
                let mut current_element = elements.next();
                #(#fields)*
                // Additional unexpected elements
                if current_element.is_some() {
                    return RawSyntaxNode::new(
                        #kind.to_bogus(),
                        children.into_iter().map(Some),
                    );
                }
                slots.into_node(#kind, children)
            }
        }
    });
    // Lists delegate to the shared helpers; separated lists additionally
    // validate the separator token and the trailing-separator policy.
    let lists = ast.lists().map(|(name, data)| {
        let element_type = format_ident!("{}", data.element_name);
        let kind = format_ident!("{}", to_upper_snake_case(name));
        if let Some(separator) = &data.separator {
            let allow_trailing = separator.allow_trailing;
            let separator_kind = token_kind_to_code(&separator.separator_token, language_kind);
            quote! {
                #kind => Self::make_separated_list_syntax(kind, children, #element_type::can_cast, #separator_kind, #allow_trailing)
            }
        } else {
            quote! {
                #kind => Self::make_node_list_syntax(kind, children, #element_type::can_cast)
            }
        }
    });
    // Bogus kinds accept whatever children were parsed, as-is.
    let bogus_kinds = ast
        .bogus
        .iter()
        .map(|node| format_ident!("{}", to_upper_snake_case(node)));
    let output = quote! {
        use #syntax_crate::{*, #syntax_kind, #syntax_kind::*, T};
        use rome_rowan::{AstNode, ParsedChildren, RawNodeSlots, RawSyntaxNode, SyntaxFactory, SyntaxKind};
        #[derive(Debug)]
        pub struct #factory_kind;
        impl SyntaxFactory for #factory_kind {
            type Kind = #syntax_kind;
            #[allow(unused_mut)]
            fn make_syntax(
                kind: Self::Kind,
                children: ParsedChildren<Self::Kind>,
            ) -> RawSyntaxNode<Self::Kind>
            {
                match kind {
                    #(#bogus_kinds)|* => {
                        RawSyntaxNode::new(kind, children.into_iter().map(Some))
                    },
                    #(#normal_node_arms),*,
                    #(#lists),*,
                    _ => unreachable!("Is {:?} a token?", kind),
                }
            }
        }
    };
    let pretty = xtask::reformat(output)?;
    Ok(pretty)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_syntax_kinds.rs | xtask/codegen/src/generate_syntax_kinds.rs | use crate::{to_upper_snake_case, LanguageKind, Result};
use proc_macro2::{Literal, Punct, Spacing};
use quote::{format_ident, quote};
use super::kinds_src::KindsSrc;
pub fn generate_syntax_kinds(grammar: KindsSrc, language_kind: LanguageKind) -> Result<String> {
let syntax_kind = language_kind.syntax_kind();
let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
// These tokens, when parsed to proc_macro2::TokenStream, generates a stream of bytes
// that can't be recognized by [quote].
// Hence, they need to be thread differently
if "{}[]()`".contains(token) {
let c = token.chars().next().unwrap();
quote! { #c }
} else if *token == "$=" {
let token = Literal::string(token);
quote! { #token }
} else {
let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
quote! { #(#cs)* }
}
});
let punctuation_strings = grammar.punct.iter().map(|(token, _name)| token);
let punctuation = grammar
.punct
.iter()
.map(|(_token, name)| format_ident!("{}", name))
.collect::<Vec<_>>();
let full_keywords_values = &grammar.keywords;
let full_keywords = full_keywords_values
.iter()
.map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)))
.collect::<Vec<_>>();
let all_keywords_values = grammar.keywords.to_vec();
let all_keywords_idents = all_keywords_values
.iter()
.map(|kw| format_ident!("{}", kw))
.collect::<Vec<_>>();
let all_keywords = all_keywords_values
.iter()
.map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
.collect::<Vec<_>>();
let all_keyword_strings = all_keywords_values.iter().map(|name| name.to_string());
let literals = grammar
.literals
.iter()
.map(|name| format_ident!("{}", name))
.collect::<Vec<_>>();
let tokens = grammar
.tokens
.iter()
.map(|name| format_ident!("{}", name))
.collect::<Vec<_>>();
let nodes = grammar
.nodes
.iter()
.map(|name| format_ident!("{}", name))
.collect::<Vec<_>>();
let lists = grammar
.nodes
.iter()
.filter_map(|name| {
if name.ends_with("_LIST") {
Some(format_ident!("{}", name))
} else {
None
}
})
.collect::<Vec<_>>();
let syntax_kind_impl = match language_kind {
LanguageKind::Js => {
quote! {
pub const fn to_string(&self) -> Option<&'static str> {
let tok = match self {
#(#punctuation => #punctuation_strings,)*
#(#all_keywords => #all_keyword_strings,)*
JS_STRING_LITERAL => "string literal",
_ => return None,
};
Some(tok)
}
}
}
LanguageKind::Css => {
quote! {
pub const fn to_string(&self) -> Option<&'static str> {
let tok = match self {
#(#punctuation => #punctuation_strings,)*
#(#all_keywords => #all_keyword_strings,)*
CSS_STRING_LITERAL => "string literal",
_ => return None,
};
Some(tok)
}
}
}
LanguageKind::Json => {
quote! {
pub const fn to_string(&self) -> Option<&'static str> {
let tok = match self {
#(#punctuation => #punctuation_strings,)*
#(#all_keywords => #all_keyword_strings,)*
JSON_STRING_LITERAL => "string literal",
_ => return None,
};
Some(tok)
}
}
}
};
let ast = quote! {
#![allow(clippy::all)]
#![allow(bad_style, missing_docs, unreachable_pub)]
/// The kind of syntax node, e.g. `IDENT`, `FUNCTION_KW`, or `FOR_STMT`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[repr(u16)]
pub enum #syntax_kind {
// Technical SyntaxKinds: they appear temporally during parsing,
// but never end up in the final tree
#[doc(hidden)]
TOMBSTONE,
/// Marks the end of the file.May have trivia attached
EOF,
#(#punctuation,)*
#(#all_keywords,)*
#(#literals,)*
#(#tokens,)*
#(#nodes,)*
// Technical kind so that we can cast from u16 safely
#[doc(hidden)]
__LAST,
}
use self::#syntax_kind::*;
impl #syntax_kind {
pub const fn is_punct(self) -> bool {
match self {
#(#punctuation)|* => true,
_ => false,
}
}
pub const fn is_literal(self) -> bool {
match self {
#(#literals)|* => true,
_ => false,
}
}
pub const fn is_list(self) -> bool {
match self {
#(#lists)|* => true,
_ => false,
}
}
pub fn from_keyword(ident: &str) -> Option<#syntax_kind> {
let kw = match ident {
#(#full_keywords_values => #full_keywords,)*
_ => return None,
};
Some(kw)
}
#syntax_kind_impl
}
/// Utility macro for creating a SyntaxKind through simple macro syntax
#[macro_export]
macro_rules! T {
#([#punctuation_values] => { $crate::#syntax_kind::#punctuation };)*
#([#all_keywords_idents] => { $crate::#syntax_kind::#all_keywords };)*
[ident] => { $crate::#syntax_kind::IDENT };
[EOF] => { $crate::#syntax_kind::EOF };
[#] => { $crate::#syntax_kind::HASH };
}
};
xtask::reformat(ast)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_nodes_mut.rs | xtask/codegen/src/generate_nodes_mut.rs | use crate::kinds_src::{AstSrc, Field};
use crate::LanguageKind;
use quote::{format_ident, quote};
use xtask::Result;
/// Generates the `with_*` mutator methods for every AST node: each method
/// replaces a single slot of the node's syntax tree and returns the
/// updated node.
pub fn generate_nodes_mut(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {
    let node_boilerplate_impls: Vec<_> = ast
        .nodes
        .iter()
        .map(|node| {
            let name = format_ident!("{}", node.name);
            let methods: Vec<_> = node
                .fields
                .iter()
                .enumerate()
                .map(|(index, field)| {
                    let method_name = format_ident!("with_{}", field.method_name(language_kind));
                    let type_name = field.ty();
                    // Tokens are spliced directly; nodes go through their
                    // underlying syntax node.
                    let element = match field {
                        Field::Token { .. } => {
                            quote! { element }
                        }
                        Field::Node { .. } => {
                            quote! { element.into_syntax() }
                        }
                    };
                    let element = quote! { #element.into() };
                    // Optional fields take an `Option` argument and map it
                    // into the slot; required fields always fill it.
                    let (arg_type, element) = if field.is_optional() {
                        (
                            quote! { Option<#type_name> },
                            quote! { element.map(|element| #element) },
                        )
                    } else {
                        (quote! { #type_name }, quote! { Some(#element) })
                    };
                    quote! {
                        pub fn #method_name(self, element: #arg_type) -> Self {
                            Self::unwrap_cast(self.syntax.splice_slots(#index..=#index, once(#element)))
                        }
                    }
                })
                .collect();
            quote! {
                impl #name {
                    #(#methods)*
                }
            }
        })
        .collect();
    let syntax_token = language_kind.syntax_token();
    let ast = quote! {
        use std::iter::once;
        use rome_rowan::AstNode;
        use crate::{generated::nodes::*, #syntax_token as SyntaxToken};
        #(#node_boilerplate_impls)*
    };
    // Undo `quote`'s spacing inside `T![...]` invocations, for parity with
    // the other generators (this particular stream rarely contains `T!`,
    // so the replacements are usually no-ops).
    let ast = ast
        .to_string()
        .replace("T ! [ ", "T![")
        .replace(" ] )", "])");
    let pretty = xtask::reformat(ast)?;
    Ok(pretty)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/css_kinds_src.rs | xtask/codegen/src/css_kinds_src.rs | use crate::kinds_src::KindsSrc;
/// Token and node kind definitions for the CSS grammar, consumed by the
/// syntax-kind codegen (`cargo codegen grammar`).
pub const CSS_KINDS_SRC: KindsSrc = KindsSrc {
    // Punctuation tokens: (source text, SCREAMING_SNAKE_CASE kind name).
    punct: &[
        (";", "SEMICOLON"),
        (",", "COMMA"),
        ("(", "L_PAREN"),
        (")", "R_PAREN"),
        ("{", "L_CURLY"),
        ("}", "R_CURLY"),
        ("[", "L_BRACK"),
        ("]", "R_BRACK"),
        ("<", "L_ANGLE"),
        (">", "R_ANGLE"),
        ("~", "TILDE"),
        ("#", "HASH"),
        ("&", "AMP"),
        ("|", "PIPE"),
        ("+", "PLUS"),
        ("*", "STAR"),
        ("/", "SLASH"),
        ("^", "CARET"),
        ("%", "PERCENT"),
        (".", "DOT"),
        (":", "COLON"),
        ("=", "EQ"),
        ("!", "BANG"),
        ("!=", "NEQ"),
        ("-", "MINUS"),
        ("<=", "LTEQ"),
        (">=", "GTEQ"),
        ("+=", "PLUSEQ"),
        ("|=", "PIPEEQ"),
        ("&=", "AMPEQ"),
        ("^=", "CARETEQ"),
        ("/=", "SLASHEQ"),
        ("*=", "STAREQ"),
        ("%=", "PERCENTEQ"),
        ("@", "AT"),
        ("$=", "DOLLAR_EQ"),
        ("~=", "TILDE_EQ"),
        ("-->", "CDC"),
        ("<!--", "CDO"),
    ],
    // Keywords: CSS named colors first, then at-rule / media-query keywords.
    keywords: &[
        "aliceblue",
        "antiquewhite",
        "aqua",
        "aquamarine",
        "azure",
        "beige",
        "bisque",
        "black",
        "blanchedalmond",
        "blue",
        "blueviolet",
        "brown",
        "burlywood",
        "cadetblue",
        "chartreuse",
        "chocolate",
        "coral",
        "cornflowerblue",
        "cornsilk",
        "crimson",
        "cyan",
        "darkblue",
        "darkcyan",
        "darkgoldenrod",
        "darkgray",
        "darkgreen",
        "darkkhaki",
        "darkmagenta",
        "darkolivegreen",
        "darkorange",
        "darkorchid",
        "darkred",
        "darksalmon",
        "darkseagreen",
        "darkslateblue",
        "darkslategray",
        "darkturquoise",
        "darkviolet",
        "deeppink",
        "deepskyblue",
        "dimgray",
        "dodgerblue",
        "firebrick",
        "floralwhite",
        "forestgreen",
        "fuchsia",
        "gainsboro",
        "ghostwhite",
        "gold",
        "goldenrod",
        "gray",
        "green",
        "greenyellow",
        "honeydew",
        "hotpink",
        "indianred",
        "indigo",
        "ivory",
        "khaki",
        "lavender",
        "lavenderblush",
        "lawngreen",
        "lemonchiffon",
        "lightblue",
        "lightcoral",
        "lightcyan",
        "lightgoldenrodyellow",
        "lightgreen",
        "lightgrey",
        "lightpink",
        "lightsalmon",
        "lightseagreen",
        "lightskyblue",
        "lightslategray",
        "lightsteelblue",
        "lightyellow",
        "lime",
        "limegreen",
        "linen",
        "magenta",
        "maroon",
        "mediumaquamarine",
        "mediumblue",
        "mediumorchid",
        "mediumpurple",
        "mediumseagreen",
        "mediumslateblue",
        "mediumspringgreen",
        "mediumturquoise",
        "mediumvioletred",
        "midnightblue",
        "mintcream",
        "mistyrose",
        "moccasin",
        "navajowhite",
        "navy",
        "navyblue",
        "oldlace",
        "olive",
        "olivedrab",
        "orange",
        "orangered",
        "orchid",
        "palegoldenrod",
        "palegreen",
        "paleturquoise",
        "palevioletred",
        "papayawhip",
        "peachpuff",
        "peru",
        "pink",
        "plum",
        "powderblue",
        "purple",
        "red",
        "rosybrown",
        "royalblue",
        "saddlebrown",
        "salmon",
        "sandybrown",
        "seagreen",
        "seashell",
        "sienna",
        "silver",
        "skyblue",
        "slateblue",
        "slategray",
        "snow",
        "springgreen",
        "steelblue",
        "tan",
        "teal",
        "thistle",
        "tomato",
        "turquoise",
        "violet",
        "wheat",
        "white",
        "whitesmoke",
        "yellow",
        "yellowgreen",
        // Non-color keywords (at-rules, media queries, values).
        "media",
        "keyframes",
        "not",
        "and",
        "only",
        "or",
        "i",
        "important",
        "from",
        "to",
        "var",
    ],
    // Literal token kinds produced by the lexer.
    literals: &[
        "CSS_STRING_LITERAL",
        "CSS_NUMBER_LITERAL",
        "CSS_CUSTOM_PROPERTY",
        "CSS_SPACE_LITERAL",
    ],
    // Trivia and miscellaneous token kinds.
    tokens: &[
        "ERROR_TOKEN",
        "IDENT",
        "NEWLINE",
        "WHITESPACE",
        "COMMENT",
        "MULTILINE_COMMENT",
    ],
    // Syntax node kinds (non-terminal grammar productions).
    nodes: &[
        "CSS_ROOT",
        "CSS_RULE_LIST",
        "CSS_ID_SELECTOR_PATTERN",
        "CSS_RULE",
        "CSS_SELECTOR_LIST",
        "CSS_SELECTOR",
        "CSS_ANY_FUNCTION",
        "CSS_AT_KEYFRAMES",
        "CSS_AT_KEYFRAMES_BODY",
        "CSS_AT_MEDIA",
        "CSS_AT_MEDIA_QUERY",
        "CSS_AT_MEDIA_QUERY_CONSEQUENT",
        "CSS_AT_MEDIA_QUERY_FEATURE",
        "CSS_AT_MEDIA_QUERY_FEATURE_BOOLEAN",
        "CSS_AT_MEDIA_QUERY_FEATURE_COMPARE",
        "CSS_AT_MEDIA_QUERY_FEATURE_PLAIN",
        "CSS_AT_MEDIA_QUERY_FEATURE_RANGE",
        "CSS_AT_MEDIA_QUERY_RANGE",
        "CSS_ATTRIBUTE",
        "CSS_ATTRIBUTE_MATCHER",
        "CSS_ATTRIBUTE_META",
        "CSS_ATTRIBUTE_MODIFIER",
        "CSS_ATTRIBUTE_NAME",
        "CSS_ATTRIBUTE_SELECTOR_PATTERN",
        "CSS_BLOCK",
        "CSS_CLASS_SELECTOR_PATTERN",
        "CSS_COMBINATOR_SELECTOR_PATTERN",
        "CSS_DECLARATION",
        "CSS_DIMENSION",
        "CSS_IDENTIFIER",
        "CSS_KEYFRAMES_BLOCK",
        "CSS_KEYFRAMES_SELECTOR",
        "CSS_NUMBER",
        "CSS_PARAMETER",
        "CSS_PERCENTAGE",
        "CSS_PSEUDO_CLASS_SELECTOR_PATTERN",
        "CSS_PSEUDO_CLASS_SELECTOR_PATTERN_PARAMETERS",
        "CSS_RATIO",
        "CSS_SIMPLE_FUNCTION",
        "CSS_STRING",
        "CSS_TYPE_SELECTOR_PATTERN",
        "CSS_UNIVERSAL_SELECTOR_PATTERN",
        "CSS_VAR_FUNCTION",
        "CSS_VAR_FUNCTION_VALUE",
        "CSS_ANY_SELECTOR_PATTERN_LIST",
        "CSS_AT_KEYFRAMES_ITEM_LIST",
        "CSS_AT_MEDIA_QUERY_LIST",
        "CSS_ATTRIBUTE_LIST",
        "CSS_DECLARATION_LIST",
        "CSS_KEYFRAMES_SELECTOR_LIST",
        "CSS_PARAMETER_LIST",
        "CSS_DECLARATION_IMPORTANT",
        // Bogus nodes
        "CSS_BOGUS",
    ],
};
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/unicode.rs | xtask/codegen/src/unicode.rs | use std::path::PathBuf;
use anyhow::{Context, Ok, Result};
use quote::quote;
/// File-system locations used by the Unicode table codegen.
mod paths {
    // On-disk cache of the Unicode data file, downloaded on demand.
    pub const DERIVED_CORE_PROPERTIES: &str = "target/DerivedCoreProperties.txt";
    // Destination of the generated lookup tables.
    pub const TABLES: &str = "crates/rome_js_unicode_table/src/tables.rs";
}
/// Generates the `ID_Start` / `ID_Continue` Unicode lookup tables plus a
/// shared binary-search helper, and writes them to
/// `crates/rome_js_unicode_table/src/tables.rs`.
///
/// # Errors
/// Fails if the property data cannot be obtained or parsed, or if the
/// generated file cannot be formatted or written.
pub fn generate_tables() -> Result<()> {
    let properties = Properties::cached_or_fetch()?;
    let properties = ["ID_Continue", "ID_Start"]
        .iter()
        .map(|property| {
            // Each property becomes a `<name>_table` constant of inclusive
            // `(start, end)` char ranges and a matching lookup function.
            let ranges = properties
                .extract(property)?
                .into_iter()
                .map(|(start, end)| quote! { (#start, #end) })
                .collect::<Vec<_>>();
            let fn_ident = quote::format_ident!("{}", property);
            let table_ident = quote::format_ident!("{}_table", property);
            Ok(quote! {
                pub const #table_ident: &[(char, char)] = &[
                    #(#ranges),*
                ];
                pub fn #fn_ident(c: char) -> bool { super::bsearch_range_table(c, #table_ident) }
            })
        })
        .collect::<Result<Vec<_>, _>>()?;
    let tokens = quote! {
        //! Autogenerated file, do not edit by hand.
        //! Run `cargo codegen unicode` and recommit this file when Unicode support has changed.
        #![allow(missing_docs, non_upper_case_globals, non_snake_case)]
        fn bsearch_range_table(c: char, r: &[(char,char)]) -> bool {
            use core::cmp::Ordering::{Equal, Less, Greater};
            r.binary_search_by(|&(lo,hi)| {
                // Because ASCII ranges are at the start of the tables, a search for an
                // ASCII char will involve more `Greater` results (i.e. the `(lo,hi)`
                // table entry is greater than `c`) than `Less` results. And given that
                // ASCII chars are so common, it makes sense to favor them. Therefore,
                // the `Greater` case is tested for before the `Less` case.
                if lo > c { Greater }
                else if hi < c { Less }
                else { Equal }
            }).is_ok()
        }
        pub mod derived_property {
            #(#properties)*
        }
    };
    let pretty = xtask::reformat(tokens)?;
    std::fs::write(xtask::project_root().join(paths::TABLES), pretty)?;
    Ok(())
}
/// Raw contents of the Unicode `DerivedCoreProperties.txt` data file.
struct Properties {
    // The unparsed file text; parsed on demand by `extract`.
    raw: String,
}
impl Properties {
    /// Retrieve properties from cache, or from the unicode website if cache is missing.
    /// # Errors
    /// Return an error if reading cache or fetching fresh data from the unicode website fails.
    pub fn cached_or_fetch() -> Result<Self> {
        Self::from_cache().or_else(|_| {
            let fetched = Self::fetch()?;
            fetched.save_cache()?;
            Ok(fetched)
        })
    }
    /// Absolute path of the on-disk cache file.
    fn path() -> PathBuf {
        xtask::project_root().join(paths::DERIVED_CORE_PROPERTIES)
    }
    /// Retrieve properties from the unicode website.
    /// # Errors
    /// Return an error if the HTTP request fails.
    // NOTE(review): fetched over plain `http`, so the downloaded data is not
    // integrity-protected in transit — consider switching to `https`.
    fn fetch() -> Result<Self> {
        let raw = ureq::get("http://www.unicode.org/Public/UNIDATA/DerivedCoreProperties.txt")
            .call()?
            .into_string()?;
        println!("Loaded properties from `unicode.org`");
        Ok(Self { raw })
    }
    /// Retrieve properties from cache, see [`Self::save_cache`].
    /// # Errors
    /// Return an error if the cache file couldn't be read.
    fn from_cache() -> Result<Self> {
        let path = Self::path();
        let raw = std::fs::read_to_string(&path)?;
        println!("Loaded properties from cache ({})", path.display());
        Ok(Self { raw })
    }
    /// Save the properties on disk for later usage, see [`Self::from_cache`].
    /// # Errors
    /// Return an error if saving to disk fails.
    fn save_cache(&self) -> Result<()> {
        let path = Self::path();
        std::fs::write(&path, &self.raw)?;
        println!("Saved properties to cache ({})", path.display());
        Ok(())
    }
    /// Extract code points matching the specified `property`.
    /// The returned `Vec` is sorted and sequential code points are collapsed.
    /// # Errors
    /// Return an error if parsing fails.
    pub fn extract(&self, property: &str) -> Result<Vec<(char, char)>> {
        // Parses a hexadecimal code point (e.g. "1D4A5") into a `char`.
        fn parse_code_point(input: &str) -> Result<char> {
            char::from_u32(
                u32::from_str_radix(input, 16)
                    .with_context(|| format!("unable to parse `{}` as a code point", &input))?,
            )
            .ok_or_else(|| anyhow::anyhow!("invalid char `{}`", &input))
        }
        self.raw
            .lines()
            .filter_map(|line| {
                // Discard comments.
                let line = line.split('#').next()?;
                // https://www.unicode.org/reports/tr44/#Data_Fields
                let mut fields = line.split(';');
                // First field is a code point or range.
                let code_point_or_range = fields.next()?.trim();
                // Check if the remaining fields contains the property we are looking for.
                if fields.any(|field| field.trim() == property) {
                    Some(code_point_or_range)
                } else {
                    None
                }
            })
            .try_fold(
                Vec::<(char, char)>::new(),
                |mut buffer, code_point_or_range| {
                    // A range is written "XXXX..YYYY"; a single code point is
                    // represented as a degenerate one-element range.
                    let range @ (start, end) = match code_point_or_range.split_once("..") {
                        Some((start, end)) => (parse_code_point(start)?, parse_code_point(end)?),
                        None => {
                            let code_point = parse_code_point(code_point_or_range)?;
                            (code_point, code_point)
                        }
                    };
                    if let Some((_, previous_end)) = buffer.last_mut() {
                        assert!(*previous_end < start, "need sorted table for binary search");
                        // If the ranges are continuous, collapse them.
                        if (*previous_end as u32) + 1 == (start as u32) {
                            *previous_end = end;
                            return Ok(buffer);
                        }
                    }
                    buffer.push(range);
                    Ok(buffer)
                },
            )
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/main.rs | xtask/codegen/src/main.rs | #[cfg(feature = "schema")]
mod generate_bindings;
#[cfg(feature = "configuration")]
mod generate_configuration;
mod generate_new_lintrule;
#[cfg(feature = "schema")]
mod generate_schema;
#[cfg(feature = "website")]
mod generate_website;
mod promote_rule;
use pico_args::Arguments;
use xtask::{project_root, pushd, Mode, Result};
#[cfg(feature = "aria")]
use crate::generate_aria::generate_aria;
#[cfg(feature = "schema")]
use crate::generate_bindings::generate_workspace_bindings;
#[cfg(feature = "configuration")]
use crate::generate_configuration::generate_rules_configuration;
#[cfg(feature = "schema")]
use crate::generate_schema::generate_configuration_schema;
#[cfg(feature = "website")]
use crate::generate_website::generate_files;
use crate::promote_rule::promote_rule;
use generate_new_lintrule::*;
use xtask_codegen::{
generate_analyzer, generate_ast, generate_formatters, generate_parser_tests, generate_tables,
};
/// Entry point of the `cargo codegen` xtask: dispatches on the first
/// subcommand argument and runs the matching generator(s). Unknown or
/// missing subcommands print the usage text and exit successfully.
fn main() -> Result<()> {
    // Run every generator relative to the repository root.
    let _d = pushd(project_root());
    let mut args = Arguments::from_env();
    let command = args.subcommand()?.unwrap_or_default();
    match command.as_str() {
        "grammar" => {
            generate_grammar(args);
            Ok(())
        }
        "formatter" => {
            generate_formatters();
            Ok(())
        }
        "test" => {
            generate_parser_tests(Mode::Overwrite)?;
            Ok(())
        }
        "unicode" => {
            generate_tables()?;
            Ok(())
        }
        "analyzer" => {
            generate_analyzer()?;
            Ok(())
        }
        "newlintrule" => {
            // Both flags are mandatory for this subcommand.
            let path: String = args.value_from_str("--path").unwrap();
            let rule_name: String = args.value_from_str("--name").unwrap();
            generate_new_lintrule(&path, &rule_name);
            Ok(())
        }
        "promoterule" => {
            let rule: String = args.value_from_str("--rule").unwrap();
            let group: String = args.value_from_str("--group").unwrap();
            promote_rule(&rule, &group);
            Ok(())
        }
        #[cfg(feature = "configuration")]
        "configuration" => {
            generate_rules_configuration(Mode::Overwrite)?;
            Ok(())
        }
        #[cfg(feature = "schema")]
        "schema" => {
            generate_configuration_schema(Mode::Overwrite)?;
            Ok(())
        }
        #[cfg(feature = "schema")]
        "bindings" => {
            generate_workspace_bindings(Mode::Overwrite)?;
            Ok(())
        }
        #[cfg(feature = "website")]
        "website" => {
            generate_files()?;
            Ok(())
        }
        "all" => {
            // Runs every generator; feature-gated ones only when compiled in.
            generate_tables()?;
            generate_grammar(args);
            generate_parser_tests(Mode::Overwrite)?;
            generate_formatters();
            generate_analyzer()?;
            #[cfg(feature = "website")]
            generate_files()?;
            #[cfg(feature = "configuration")]
            generate_rules_configuration(Mode::Overwrite)?;
            #[cfg(feature = "schema")]
            generate_configuration_schema(Mode::Overwrite)?;
            #[cfg(feature = "schema")]
            generate_workspace_bindings(Mode::Overwrite)?;
            #[cfg(feature = "aria")]
            generate_aria(Mode::Overwrite)?;
            Ok(())
        }
        _ => {
            // NOTE(review): the help text below lists an `aria` subcommand,
            // but there is no matching `"aria"` arm in this match — confirm
            // whether the arm is missing or the help entry is stale.
            eprintln!(
                "\
cargo codegen
Run codegen command.
USAGE:
cargo codegen <SUBCOMMAND> [option]
SUBCOMMANDS:
aria Generate aria bindings for lint rules
analyzer Generate factory functions for the analyzer and the configuration of the analyzers
configuration Generate the part of the configuration that depends on some metadata
schema Generate the JSON schema for the Rome configuration file format
bindings Generate TypeScript definitions for the JavaScript bindings to the Workspace API
grammar Transforms ungram files into AST
formatter Generates formatters for each language
test Extracts parser inline comments into test files
unicode Generates unicode table inside lexer
newlintrule Generates a template for an empty lint rule
all Run all generators
"
            );
            Ok(())
        }
    }
}
/// Runs the AST generator for the languages named in the remaining
/// positional arguments; non-UTF-8 arguments are silently skipped.
fn generate_grammar(args: Arguments) {
    let mut languages: Vec<String> = Vec::new();
    for raw in args.finish() {
        if let Some(name) = raw.to_str() {
            languages.push(name.to_string());
        }
    }
    // The result is intentionally discarded, matching the other "fire and
    // forget" generators driven from `main`.
    let _ = generate_ast(Mode::Overwrite, languages);
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/formatter.rs | xtask/codegen/src/formatter.rs | use std::{
collections::{BTreeMap, BTreeSet, HashSet, VecDeque},
env,
fs::{create_dir_all, read_dir, remove_file, File},
io::Write,
path::{Path, PathBuf},
};
use git2::{Repository, Status, StatusOptions};
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
use xtask::project_root;
use crate::ast::load_ast;
use crate::{LanguageKind, ALL_LANGUAGE_KIND};
/// Handle on the project's git repository used to guard codegen against
/// clobbering files with local modifications.
struct GitRepo {
    repo: Repository,
    // `--allow-staged` was passed on the command line.
    allow_staged: bool,
    // Absolute paths with staged (index) modifications.
    staged: HashSet<PathBuf>,
    // Absolute paths with unstaged (worktree) modifications.
    dirty: HashSet<PathBuf>,
}
impl GitRepo {
    /// Discovers the repository containing the project root and records which
    /// files are staged or dirty, honoring the `--allow-staged` and
    /// `--allow-dirty` command-line flags (which suppress the corresponding
    /// bookkeeping entirely).
    ///
    /// # Panics
    /// Panics if the repository cannot be opened or its status read.
    fn open() -> Self {
        let root = project_root();
        let repo = Repository::discover(&root).expect("failed to open git repo");
        let mut allow_staged = false;
        let mut allow_dirty = false;
        for arg in env::args() {
            match arg.as_str() {
                "--allow-staged" => {
                    allow_staged = true;
                }
                "--allow-dirty" => {
                    allow_dirty = true;
                }
                _ => {}
            }
        }
        let mut repo_opts = StatusOptions::new();
        repo_opts.include_ignored(false);
        let statuses = repo
            .statuses(Some(&mut repo_opts))
            .expect("failed to read repository status");
        let mut staged = HashSet::new();
        let mut dirty = HashSet::new();
        for status in statuses.iter() {
            if let Some(path) = status.path() {
                match status.status() {
                    Status::CURRENT => (),
                    // Any change recorded in the index counts as "staged".
                    Status::INDEX_NEW
                    | Status::INDEX_MODIFIED
                    | Status::INDEX_DELETED
                    | Status::INDEX_RENAMED
                    | Status::INDEX_TYPECHANGE => {
                        if !allow_staged {
                            staged.insert(root.join(path));
                        }
                    }
                    // Everything else (worktree changes) counts as "dirty".
                    _ => {
                        if !allow_dirty {
                            dirty.insert(root.join(path));
                        }
                    }
                };
            }
        }
        // `statuses` borrows from `repo`; release it before moving `repo`.
        drop(statuses);
        Self {
            repo,
            allow_staged,
            staged,
            dirty,
        }
    }
    /// Aborts (panics) if codegen is about to overwrite `path` while it has
    /// local staged or unstaged changes.
    // Fix: the panic messages previously misspelled "uncommitted".
    fn check_path(&self, path: &Path) {
        if self.dirty.contains(path) {
            panic!("Codegen would overwrite '{}' but it has uncommitted changes. Commit the file to git, or pass --allow-dirty to the command to proceed anyway", path.display());
        }
        if self.staged.contains(path) {
            panic!("Codegen would overwrite '{}' but it has uncommitted changes. Commit the file to git, or pass --allow-staged to the command to proceed anyway", path.display());
        }
    }
    /// Stages the given absolute paths in the git index.
    ///
    /// # Panics
    /// Panics if a path is outside the project root or the index update fails.
    fn stage_paths(&self, paths: &[PathBuf]) {
        // Do not overwrite a version of the file
        // that's potentially already staged
        if self.allow_staged {
            return;
        }
        let root = project_root();
        self.repo
            .index()
            .expect("could not open index for git repository")
            .update_all(
                // `update_all` expects paths relative to the repository root.
                paths.iter().map(|path| {
                    path.strip_prefix(&root).unwrap_or_else(|err| {
                        panic!(
                            "path '{}' is not inside of project '{}': {}",
                            path.display(),
                            root.display(),
                            err,
                        )
                    })
                }),
                None,
            )
            .expect("failed to stage updated files");
    }
}
/// Tracks the module files the codegen produces so that matching `mod.rs`
/// files can be written and stale files deleted afterwards.
struct ModuleIndex {
    // The formatter crate's `src` directory.
    root: PathBuf,
    // Directory -> set of child module names to declare in its `mod.rs`.
    modules: BTreeMap<PathBuf, BTreeSet<String>>,
    // Files found on disk that no generator claimed; deleted at the end.
    unused_files: HashSet<PathBuf>,
}
impl ModuleIndex {
    /// Builds an index rooted at `root`, pre-populating `unused_files` with
    /// every file currently present under the per-dialect directories; files
    /// are removed from that set as generators claim them via [`Self::insert`].
    fn new(root: PathBuf) -> Self {
        let mut unused_files = HashSet::new();
        // Breadth-first walk of each dialect directory (js/, ts/, jsx/, json/).
        let mut queue = VecDeque::from_iter(
            NodeDialect::all()
                .iter()
                .map(|dialect| root.join(dialect.as_str())),
        );
        while let Some(dir) = queue.pop_front() {
            if !dir.exists() {
                continue;
            }
            let iter = read_dir(&dir)
                .unwrap_or_else(|err| panic!("failed to read '{}': {}", dir.display(), err));
            for entry in iter {
                let entry = entry.expect("failed to read DirEntry");
                let path = entry.path();
                let file_type = entry.file_type().unwrap_or_else(|err| {
                    panic!("failed to read file type of '{}': {}", path.display(), err)
                });
                if file_type.is_dir() {
                    queue.push_back(path);
                    continue;
                }
                if file_type.is_file() {
                    unused_files.insert(path);
                }
            }
        }
        Self {
            root,
            modules: BTreeMap::default(),
            unused_files,
        }
    }
    /// Add a new module to the index
    fn insert(&mut self, repo: &GitRepo, path: &Path) {
        self.unused_files.remove(path);
        // Walk up from the module file towards the root
        let mut parent = path.parent();
        let mut file_stem = path.file_stem();
        while let (Some(path), Some(stem)) = (parent, file_stem) {
            // Each ancestor directory will get a regenerated mod.rs; refuse to
            // proceed if that file has local changes.
            repo.check_path(&path.join("mod.rs"));
            // Insert each module into its parent
            let stem = stem.to_str().unwrap().to_owned();
            self.modules.entry(path.into()).or_default().insert(stem);
            parent = path.parent();
            file_stem = path.file_stem();
            // Stop at the root directory
            if parent == Some(&self.root) {
                break;
            }
        }
    }
    /// Create all the mod.rs files needed to import
    /// all the modules in the index up to the root
    fn print(mut self, stage: &mut Vec<PathBuf>) {
        for (path, imports) in self.modules {
            let mut content = String::new();
            let stem = path.file_stem().unwrap().to_str().unwrap();
            for import in imports {
                // Clippy complains about child modules having the same
                // names as their parent, eg. js/name/name.rs
                if import == stem {
                    content.push_str("#[allow(clippy::module_inception)]\n");
                }
                content.push_str("pub(crate) mod ");
                content.push_str(&import);
                content.push_str(";\n");
            }
            let content = xtask::reformat_with_command(content, "cargo codegen formatter").unwrap();
            let path = path.join("mod.rs");
            let mut file = File::create(&path).unwrap();
            file.write_all(content.as_bytes()).unwrap();
            drop(file);
            // The freshly written mod.rs is itself a used file.
            self.unused_files.remove(&path);
            stage.push(path);
        }
        // Anything left unclaimed is stale output from a previous run.
        for file in self.unused_files {
            remove_file(&file)
                .unwrap_or_else(|err| panic!("failed to delete '{}': {}", file.display(), err));
            stage.push(file);
        }
    }
}
/// Coarse classification of an AST type, used to pick which formatter
/// boilerplate to generate for it.
enum NodeKind {
    Node,
    // `separated` is true when the list has a separator token between elements.
    List { separated: bool },
    Bogus,
    // A union node over the listed variant type names.
    Union { variants: Vec<String> },
}
/// Generates formatter boilerplate for every supported language,
/// reusing a single git repository handle across all of them.
pub fn generate_formatters() {
    let repo = GitRepo::open();
    ALL_LANGUAGE_KIND
        .into_iter()
        .for_each(|language| generate_formatter(&repo, language));
}
/// Generates formatter boilerplate for one language: a default `Format*`
/// rule file per AST type (skipped when a handwritten one already exists,
/// except unions which are always regenerated), the `mod.rs` tree, and the
/// `generated.rs` trait impls; everything produced is staged in git.
fn generate_formatter(repo: &GitRepo, language_kind: LanguageKind) {
    let ast = load_ast(language_kind);
    // Store references to all the files created by the codegen
    // script to build the module import files
    let formatter_crate_path = project_root()
        .join("crates")
        .join(language_kind.formatter_crate_name());
    // Languages without a formatter crate (yet) are silently skipped.
    if !formatter_crate_path.exists() {
        return;
    }
    let mut modules = ModuleIndex::new(formatter_crate_path.join("src"));
    let mut format_impls =
        BoilerplateImpls::new(formatter_crate_path.join("src/generated.rs"), language_kind);
    // Build an unified iterator over all the AstNode types
    let names = ast
        .nodes
        .into_iter()
        .map(|node| (NodeKind::Node, node.name))
        .chain(ast.lists.into_iter().map(|(name, node)| {
            (
                NodeKind::List {
                    separated: node.separator.is_some(),
                },
                name,
            )
        }))
        .chain(ast.bogus.into_iter().map(|name| (NodeKind::Bogus, name)))
        .chain(ast.unions.into_iter().map(|node| {
            (
                NodeKind::Union {
                    variants: node.variants,
                },
                node.name,
            )
        }));
    let mut stage = Vec::new();
    // Create a default implementation for these nodes only if
    // the file doesn't already exist
    for (kind, name) in names {
        let module = name_to_module(&kind, &name, language_kind);
        let path = module.as_path();
        modules.insert(repo, &path);
        let node_id = Ident::new(&name, Span::call_site());
        let format_id = Ident::new(&format!("Format{name}"), Span::call_site());
        // Fully qualified path of the rule, e.g. crate::js::expressions::foo::FormatFoo.
        let qualified_format_id = {
            let dialect = Ident::new(module.dialect.as_str(), Span::call_site());
            let concept = Ident::new(module.concept.as_str(), Span::call_site());
            let module = Ident::new(&module.name, Span::call_site());
            quote! { crate::#dialect::#concept::#module::#format_id }
        };
        format_impls.push(&kind, &node_id, &qualified_format_id);
        // Union nodes except for AnyFunction and AnyClass have a generated
        // implementation, the codegen will always overwrite any existing file
        let allow_overwrite = matches!(kind, NodeKind::Union { .. });
        if !allow_overwrite && path.exists() {
            continue;
        }
        let dir = path.parent().unwrap();
        create_dir_all(dir).unwrap();
        repo.check_path(&path);
        let syntax_crate_ident = language_kind.syntax_crate_ident();
        let formatter_ident = language_kind.formatter_ident();
        let formatter_context_ident = language_kind.format_context_ident();
        // Generate a default implementation of Format/FormatNode using format_list on
        // non-separated lists, format on the wrapped node for unions and
        // format_verbatim for all the other nodes
        let tokens = match kind {
            NodeKind::List { separated: false } => quote! {
                use crate::prelude::*;
                use #syntax_crate_ident::#node_id;
                #[derive(Debug, Clone, Default)]
                pub(crate) struct #format_id;
                impl FormatRule<#node_id> for #format_id {
                    type Context = #formatter_context_ident;
                    fn fmt(&self, node: &#node_id, f: &mut #formatter_ident) -> FormatResult<()> {
                        f.join().entries(node.iter().formatted()).finish()
                    }
                }
            },
            NodeKind::List { .. } => quote! {
                use crate::prelude::*;
                use #syntax_crate_ident::#node_id;
                #[derive(Debug, Clone, Default)]
                pub(crate) struct #format_id;
                impl FormatRule<#node_id> for #format_id {
                    type Context = #formatter_context_ident;
                    fn fmt(&self, node: &#node_id, f: &mut #formatter_ident) -> FormatResult<()> {
                        format_verbatim_node(node.syntax()).fmt(f)
                    }
                }
            },
            NodeKind::Node => {
                quote! {
                    use crate::prelude::*;
                    use rome_rowan::AstNode;
                    use #syntax_crate_ident::#node_id;
                    #[derive(Debug, Clone, Default)]
                    pub(crate) struct #format_id;
                    impl FormatNodeRule<#node_id> for #format_id {
                        fn fmt_fields(&self, node: &#node_id, f: &mut #formatter_ident) -> FormatResult<()> {
                            format_verbatim_node(node.syntax()).fmt(f)
                        }
                    }
                }
            }
            NodeKind::Bogus => {
                quote! {
                    use crate::FormatBogusNodeRule;
                    use #syntax_crate_ident::#node_id;
                    #[derive(Debug, Clone, Default)]
                    pub(crate) struct #format_id;
                    impl FormatBogusNodeRule<#node_id> for #format_id {
                    }
                }
            }
            NodeKind::Union { variants } => {
                // For each variant of the union call to_format_element on the wrapped node
                let match_arms: Vec<_> = variants
                    .into_iter()
                    .map(|variant| {
                        let variant = Ident::new(&variant, Span::call_site());
                        quote! { #node_id::#variant(node) => node.format().fmt(f), }
                    })
                    .collect();
                quote! {
                    use crate::prelude::*;
                    use #syntax_crate_ident::#node_id;
                    #[derive(Debug, Clone, Default)]
                    pub(crate) struct #format_id;
                    impl FormatRule<#node_id> for #format_id {
                        type Context = #formatter_context_ident;
                        fn fmt(&self, node: &#node_id, f: &mut #formatter_ident) -> FormatResult<()> {
                            match node {
                                #( #match_arms )*
                            }
                        }
                    }
                }
            }
        };
        // Regenerated (union) files keep the "generated" preamble; one-shot
        // scaffolding for handwritten files is emitted without it.
        let tokens = if allow_overwrite {
            xtask::reformat_with_command(tokens, "cargo codegen formatter").unwrap()
        } else {
            xtask::reformat_without_preamble(tokens).unwrap()
        };
        let mut file = File::create(&path).unwrap();
        file.write_all(tokens.as_bytes()).unwrap();
        drop(file);
        stage.push(path);
    }
    modules.print(&mut stage);
    format_impls.print(&mut stage);
    repo.stage_paths(&stage);
}
/// Accumulates the `FormatRule`/`AsFormat`/`IntoFormat` impls for every AST
/// type and writes them all to the crate's `generated.rs` at the end.
struct BoilerplateImpls {
    language: LanguageKind,
    // Destination file, typically `src/generated.rs`.
    path: PathBuf,
    // One token stream of impls per AST type.
    impls: Vec<TokenStream>,
}
impl BoilerplateImpls {
    /// Creates an empty accumulator targeting `file_name`.
    fn new(file_name: PathBuf, language: LanguageKind) -> Self {
        Self {
            path: file_name,
            impls: vec![],
            language,
        }
    }
    /// Appends the boilerplate impls for one AST type identified by `node_id`,
    /// whose rule struct lives at the fully qualified path `format_id`.
    fn push(&mut self, kind: &NodeKind, node_id: &Ident, format_id: &TokenStream) {
        let syntax_crate_ident = self.language.syntax_crate_ident();
        let formatter_ident = self.language.formatter_ident();
        let formatter_context_ident = self.language.format_context_ident();
        // Lists and unions already implement FormatRule directly in their own
        // module; nodes and bogus nodes get a forwarding FormatRule here.
        let format_rule_impl = match kind {
            NodeKind::List { .. } | NodeKind::Union { .. } => quote!(),
            kind => {
                let rule = if matches!(kind, NodeKind::Bogus) {
                    Ident::new("FormatBogusNodeRule", Span::call_site())
                } else {
                    Ident::new("FormatNodeRule", Span::call_site())
                };
                quote! {
                    impl FormatRule<#syntax_crate_ident::#node_id> for #format_id {
                        type Context = #formatter_context_ident;
                        #[inline(always)]
                        fn fmt(&self, node: &#syntax_crate_ident::#node_id, f: &mut #formatter_ident) -> FormatResult<()> {
                            #rule::<#syntax_crate_ident::#node_id>::fmt(self, node, f)
                        }
                    }
                }
            }
        };
        self.impls.push(quote! {
            #format_rule_impl
            impl AsFormat<#formatter_context_ident> for #syntax_crate_ident::#node_id {
                type Format<'a> = FormatRefWithRule<'a, #syntax_crate_ident::#node_id, #format_id>;
                fn format(&self) -> Self::Format<'_> {
                    FormatRefWithRule::new(self, #format_id::default())
                }
            }
            impl IntoFormat<#formatter_context_ident> for #syntax_crate_ident::#node_id {
                type Format = FormatOwnedWithRule<#syntax_crate_ident::#node_id, #format_id>;
                fn into_format(self) -> Self::Format {
                    FormatOwnedWithRule::new(self, #format_id::default())
                }
            }
        });
    }
    /// Writes all accumulated impls to the destination file and records the
    /// path for git staging.
    fn print(self, stage: &mut Vec<PathBuf>) {
        let impls = self.impls;
        let formatter_ident = self.language.formatter_ident();
        let formatter_context_ident = self.language.format_context_ident();
        let tokens = quote! {
            use rome_formatter::{FormatRefWithRule, FormatOwnedWithRule, FormatRule, FormatResult};
            use crate::{AsFormat, IntoFormat, FormatNodeRule, FormatBogusNodeRule, #formatter_ident, #formatter_context_ident};
            #( #impls )*
        };
        let content = xtask::reformat_with_command(tokens, "cargo codegen formatter").unwrap();
        let mut file = File::create(&self.path).unwrap();
        file.write_all(content.as_bytes()).unwrap();
        stage.push(self.path);
    }
}
/// Language dialect a node belongs to, derived from its name prefix;
/// determines the top-level source directory (`js/`, `ts/`, ...).
enum NodeDialect {
    Js,
    Ts,
    Jsx,
    Json,
}
impl NodeDialect {
fn all() -> &'static [NodeDialect] {
&[
NodeDialect::Js,
NodeDialect::Ts,
NodeDialect::Jsx,
NodeDialect::Json,
]
}
fn is_jsx(&self) -> bool {
matches!(self, NodeDialect::Jsx)
}
fn as_str(&self) -> &'static str {
match self {
NodeDialect::Js => "js",
NodeDialect::Ts => "ts",
NodeDialect::Jsx => "jsx",
NodeDialect::Json => "json",
}
}
fn from_str(name: &str) -> NodeDialect {
match name {
"Jsx" => NodeDialect::Jsx,
"Js" => NodeDialect::Js,
"Ts" => NodeDialect::Ts,
"Json" => NodeDialect::Json,
_ => {
eprintln!("missing prefix {}", name);
NodeDialect::Js
}
}
}
}
/// Semantic category of a node, derived from its name; determines the
/// second-level source directory inside a dialect folder.
enum NodeConcept {
    Bogus,
    List,
    Union,
    /// - auxiliary (everything else)
    Auxiliary,
    Expression,
    Statement,
    Declaration,
    Object,
    Class,
    Assignment,
    Binding,
    Type,
    /// - module (import /export)
    Module,
    Tag,
    Attribute,
    // JSON
    Value,
}
impl NodeConcept {
    /// Directory name used for this concept inside a dialect folder.
    fn as_str(&self) -> &'static str {
        // Structural concepts first, then the semantic JS/TS groups,
        // then JSX and JSON specific ones.
        match self {
            NodeConcept::Bogus => "bogus",
            NodeConcept::List => "lists",
            NodeConcept::Union => "any",
            NodeConcept::Auxiliary => "auxiliary",
            NodeConcept::Expression => "expressions",
            NodeConcept::Statement => "statements",
            NodeConcept::Declaration => "declarations",
            NodeConcept::Object => "objects",
            NodeConcept::Class => "classes",
            NodeConcept::Assignment => "assignments",
            NodeConcept::Binding => "bindings",
            NodeConcept::Type => "types",
            NodeConcept::Module => "module",
            NodeConcept::Tag => "tag",
            NodeConcept::Attribute => "attribute",
            NodeConcept::Value => "value",
        }
    }
}
/// Where a node's formatter module lives: language crate, dialect folder,
/// concept folder, and snake_case module (file) name.
struct NodeModuleInformation {
    language: LanguageKind,
    dialect: NodeDialect,
    concept: NodeConcept,
    // snake_case module name, already de-conflicted with Rust keywords.
    name: String,
}
impl NodeModuleInformation {
    /// Absolute path of this module's `.rs` file inside the formatter crate:
    /// `<root>/crates/<crate>/src/<dialect>/<concept>/<name>.rs`.
    fn as_path(&self) -> PathBuf {
        let mut path = project_root();
        path.push("crates");
        path.push(self.language.formatter_crate_name());
        path.push("src");
        path.push(self.dialect.as_str());
        path.push(self.concept.as_str());
        path.push(format!("{}.rs", self.name));
        path
    }
}
/// Convert an AstNode name to a path / Rust module name
///
/// Splits the CamelCase name into a dialect prefix (`Js`, `Ts`, `Jsx`,
/// `Json`, possibly after a leading `Any`) and the remainder, classifies the
/// remainder into a [`NodeConcept`] by suffix/prefix heuristics, and converts
/// it to a snake_case module name.
fn name_to_module(kind: &NodeKind, in_name: &str, language: LanguageKind) -> NodeModuleInformation {
    // Locate the uppercase letters: the name must start with one, and the
    // second one marks the end of the dialect prefix.
    let mut upper_case_indices = in_name.match_indices(|c: char| c.is_uppercase());
    assert!(matches!(upper_case_indices.next(), Some((0, _))));
    let (second_upper_start, _) = upper_case_indices.next().expect("Node name malformed");
    let (mut dialect_prefix, mut name) = in_name.split_at(second_upper_start);
    // AnyJsX
    if dialect_prefix == "Any" {
        let (third_upper_start, _) = upper_case_indices.next().expect("Node name malformed");
        (dialect_prefix, name) = name.split_at(third_upper_start - dialect_prefix.len());
    }
    let dialect = NodeDialect::from_str(dialect_prefix);
    // Classify nodes by concept
    let concept = if matches!(kind, NodeKind::Bogus) {
        NodeConcept::Bogus
    } else if matches!(kind, NodeKind::List { .. }) {
        NodeConcept::List
    } else if matches!(kind, NodeKind::Union { .. }) {
        NodeConcept::Union
    } else {
        match language {
            // Suffix/prefix heuristics; first matching arm wins, so order matters.
            LanguageKind::Js => match name {
                _ if name.ends_with("Statement") => NodeConcept::Statement,
                _ if name.ends_with("Declaration") => NodeConcept::Declaration,
                _ if name.ends_with("Expression")
                    || name.ends_with("Argument")
                    || name.ends_with("Arguments") =>
                {
                    NodeConcept::Expression
                }
                _ if name.ends_with("Binding")
                    || name.starts_with("BindingPattern")
                    || name.starts_with("ArrayBindingPattern")
                    || name.starts_with("ObjectBindingPattern")
                    || name.ends_with("Parameter")
                    || name.ends_with("Parameters") =>
                {
                    NodeConcept::Binding
                }
                _ if name.ends_with("Assignment")
                    || name.starts_with("ArrayAssignmentPattern")
                    || name.starts_with("ObjectAssignmentPattern") =>
                {
                    NodeConcept::Assignment
                }
                "AssignmentWithDefault" => NodeConcept::Assignment,
                _ if name.ends_with("ImportSpecifier")
                    || name.ends_with("ImportSpecifiers")
                    || name.starts_with("Export")
                    || name.starts_with("Import") =>
                {
                    NodeConcept::Module
                }
                "Export" | "Import" | "ModuleSource" | "LiteralExportName" => NodeConcept::Module,
                _ if name.ends_with("ClassMember") => NodeConcept::Class,
                "ExtendsClause" => NodeConcept::Class,
                _ if name.ends_with("ObjectMember") | name.ends_with("MemberName") => {
                    NodeConcept::Object
                }
                // TypeScript
                "Assertion" | "ConstAssertion" | "NonNull" | "TypeArgs" | "ExprWithTypeArgs" => {
                    NodeConcept::Expression
                }
                "ExternalModuleRef" | "ModuleRef" => NodeConcept::Module,
                _ if name.ends_with("Type") => NodeConcept::Type,
                _ if dialect.is_jsx()
                    && (name.ends_with("Element")
                        || name.ends_with("Tag")
                        || name.ends_with("Fragment")) =>
                {
                    NodeConcept::Tag
                }
                _ if dialect.is_jsx() && name.contains("Attribute") => NodeConcept::Attribute,
                // Default to auxiliary
                _ => NodeConcept::Auxiliary,
            },
            LanguageKind::Json => match name {
                _ if name.ends_with("Value") => NodeConcept::Value,
                _ => NodeConcept::Auxiliary,
            },
            LanguageKind::Css => NodeConcept::Auxiliary,
        }
    };
    // Convert the names from CamelCase to snake_case
    let mut stem = String::new();
    for (index, char) in name.chars().enumerate() {
        if char.is_lowercase() {
            stem.push(char);
        } else {
            if index > 0 {
                stem.push('_');
            }
            for char in char.to_lowercase() {
                stem.push(char);
            }
        }
    }
    // "type" and "enum" are Rust keywords, add the "ts_"
    // prefix to these modules to avoid parsing errors
    let stem = match stem.as_str() {
        "type" => String::from("ts_type"),
        "enum" => String::from("ts_enum"),
        _ => stem,
    };
    NodeModuleInformation {
        name: stem,
        language,
        dialect,
        concept,
    }
}
impl LanguageKind {
    /// Identifier of the language's formatter type (e.g. `JsFormatter`).
    fn formatter_ident(&self) -> Ident {
        Ident::new(
            match self {
                LanguageKind::Js => "JsFormatter",
                LanguageKind::Css => "CssFormatter",
                LanguageKind::Json => "JsonFormatter",
            },
            Span::call_site(),
        )
    }
    /// Identifier of the language's format context type (e.g. `JsFormatContext`).
    fn format_context_ident(&self) -> Ident {
        Ident::new(
            match self {
                LanguageKind::Js => "JsFormatContext",
                LanguageKind::Css => "CssFormatContext",
                LanguageKind::Json => "JsonFormatContext",
            },
            Span::call_site(),
        )
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/generate_macros.rs | xtask/codegen/src/generate_macros.rs | use super::kinds_src::AstSrc;
use crate::{to_upper_snake_case, LanguageKind, Result};
use quote::{format_ident, quote};
pub fn generate_macros(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {
let syntax_kind = language_kind.syntax_kind();
let syntax_node = language_kind.syntax_node();
let match_arms: Vec<_> = ast
.nodes
.iter()
.map(|node| {
let name = format_ident!("{}", node.name);
let node_kind = format_ident!("{}", to_upper_snake_case(&node.name));
(name, node_kind)
})
.chain(ast.bogus.iter().map(|node_name| {
let name = format_ident!("{}", node_name);
let node_kind = format_ident!("{}", to_upper_snake_case(node_name));
(name, node_kind)
}))
.chain(ast.lists().map(|(node_name, _)| {
let name = format_ident!("{}", node_name);
let node_kind = format_ident!("{}", to_upper_snake_case(node_name));
(name, node_kind)
}))
.map(|(name, node_kind)| {
quote! {
$crate::#syntax_kind::#node_kind => {
// SAFETY: The call to new_unchecked is guarded by matching on node.kind()
let $pattern = unsafe { $crate::#name::new_unchecked(node) };
$body
}
}
})
.collect();
let ast = quote! {
/// Reconstruct an AstNode from a SyntaxNode
///
/// This macros performs a match over the [kind](rome_rowan::SyntaxNode::kind)
/// of the provided [rome_rowan::SyntaxNode] and constructs the appropriate
/// AstNode type for it, then execute the provided expression over it.
///
/// # Examples
///
/// ```ignore
/// map_syntax_node!(syntax_node, node => node.format())
/// ```
#[macro_export]
macro_rules! map_syntax_node {
($node:expr, $pattern:pat => $body:expr) => {
match $node {
node => match $crate::#syntax_node::kind(&node) {
#( #match_arms, )*
_ => unreachable!()
}
}
};
}
pub(crate) use map_syntax_node;
};
xtask::reformat(ast)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/codegen/src/termcolorful.rs | xtask/codegen/src/termcolorful.rs | #[derive(Copy, Clone, Debug)]
#[allow(dead_code)]
pub(crate) enum Color {
Red,
Green,
Black,
Yellow,
Blue,
Purple,
Cyan,
White,
}
impl std::fmt::Display for Color {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Color::Black => write!(f, "30"),
Color::Red => write!(f, "31"),
Color::Green => write!(f, "32"),
Color::Yellow => write!(f, "33"),
Color::Blue => write!(f, "34"),
Color::Purple => write!(f, "35"),
Color::Cyan => write!(f, "36"),
Color::White => write!(f, "37"),
}
}
}
pub(crate) fn println_string_with_fg_color(content: String, color: Color) {
println!("\x1b[0;{}m{}\x1b[0m", color, content);
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/lib.rs | xtask/bench/src/lib.rs | mod features;
mod language;
mod test_case;
use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::str::FromStr;
use std::time::Duration;
use criterion::{BatchSize, BenchmarkId};
use rome_rowan::NodeCache;
pub use crate::features::analyzer::benchmark_analyze_lib;
use crate::features::analyzer::AnalyzerMeasurement;
pub use crate::features::formatter::benchmark_format_lib;
use crate::features::formatter::{run_format, FormatterMeasurement};
pub use crate::features::parser::benchmark_parse_lib;
use crate::features::parser::ParseMeasurement;
use crate::language::Parse;
use crate::test_case::TestCase;
/// What feature to benchmark
#[derive(Eq, PartialEq)]
pub enum FeatureToBenchmark {
/// benchmark of the parser
Parser,
/// benchmark of the formatter
Formatter,
/// benchmark of the analyzer
Analyzer,
}
impl FromStr for FeatureToBenchmark {
type Err = pico_args::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"parser" => Ok(Self::Parser),
"formatter" => Ok(Self::Formatter),
"analyzer" => Ok(Self::Analyzer),
_ => Err(pico_args::Error::OptionWithoutAValue("feature")),
}
}
}
impl Display for FeatureToBenchmark {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
FeatureToBenchmark::Parser => write!(f, "parser"),
FeatureToBenchmark::Formatter => write!(f, "formatter"),
FeatureToBenchmark::Analyzer => write!(f, "analyzer"),
}
}
}
/// If groups the summary by their category and creates a small interface
/// where each bench result can create their summary
pub enum BenchmarkSummary {
Parser(ParseMeasurement),
Formatter(FormatterMeasurement),
Analyzer(AnalyzerMeasurement),
}
impl BenchmarkSummary {
pub fn summary(&self) -> String {
match self {
BenchmarkSummary::Parser(result) => result.summary(),
BenchmarkSummary::Formatter(result) => result.summary(),
BenchmarkSummary::Analyzer(result) => result.summary(),
}
}
}
impl Display for BenchmarkSummary {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
BenchmarkSummary::Parser(result) => std::fmt::Display::fmt(&result, f),
BenchmarkSummary::Formatter(result) => std::fmt::Display::fmt(&result, f),
BenchmarkSummary::Analyzer(result) => std::fmt::Display::fmt(&result, f),
}
}
}
fn err_to_string<E: std::fmt::Debug>(e: E) -> String {
format!("{:?}", e)
}
pub struct RunArgs {
pub filter: String,
pub criterion: bool,
pub baseline: Option<String>,
pub feature: FeatureToBenchmark,
pub suites: String,
}
pub fn run(args: RunArgs) {
let regex = regex::Regex::new(args.filter.as_str()).unwrap();
let mut all_suites = HashMap::new();
if args.feature == FeatureToBenchmark::Analyzer {
all_suites.insert("js", include_str!("analyzer-libs-js.txt"));
all_suites.insert("ts", include_str!("analyzer-libs-ts.txt"));
} else {
all_suites.insert("js", include_str!("libs-js.txt"));
all_suites.insert("ts", include_str!("libs-ts.txt"));
all_suites.insert("json", include_str!("libs-json.txt"));
}
let mut libs = vec![];
let suites_to_run = args.suites.split(',');
for suite in suites_to_run {
match suite {
"*" => {
libs.extend(all_suites.values().flat_map(|suite| suite.lines()));
}
key => match all_suites.get(key) {
Some(suite) => libs.extend(suite.lines()),
None => {
eprintln!("Unknown suite: {key}");
}
},
}
}
let mut summary = vec![];
for lib in libs {
if !regex.is_match(lib) {
continue;
}
let test_case = TestCase::try_from(lib);
match test_case {
Ok(test_case) => {
let parse = Parse::try_from_case(&test_case).expect("Supported language");
let code = test_case.code();
// Do all steps with criterion now
if args.criterion {
let mut criterion = criterion::Criterion::default()
.without_plots()
.measurement_time(Duration::new(10, 0));
if let Some(ref baseline) = args.baseline {
criterion = criterion.save_baseline(baseline.to_string());
}
let mut group = criterion.benchmark_group(args.feature.to_string());
group.throughput(criterion::Throughput::Bytes(code.len() as u64));
match args.feature {
FeatureToBenchmark::Parser => {
group.bench_function(
BenchmarkId::new(test_case.filename(), "uncached"),
|b| {
b.iter(|| {
criterion::black_box(parse.parse());
})
},
);
group.bench_function(
BenchmarkId::new(test_case.filename(), "cached"),
|b| {
b.iter_batched(
|| {
let mut cache = NodeCache::default();
parse.parse_with_cache(&mut cache);
cache
},
|mut cache| {
criterion::black_box(
parse.parse_with_cache(&mut cache),
);
},
BatchSize::SmallInput,
)
},
);
}
FeatureToBenchmark::Formatter => {
let parsed = parse.parse();
match parsed.format_node() {
None => {
continue;
}
Some(format_node) => {
group.bench_function(test_case.filename(), |b| {
b.iter(|| {
criterion::black_box(run_format(&format_node));
})
});
}
}
}
FeatureToBenchmark::Analyzer => {
let parsed = parse.parse();
match parsed.analyze() {
None => {
continue;
}
Some(analyze) => {
group.bench_function(test_case.filename(), |b| {
b.iter(|| {
analyze.analyze();
criterion::black_box(());
})
});
}
}
}
}
group.finish();
}
let result = match args.feature {
FeatureToBenchmark::Parser => benchmark_parse_lib(&test_case, &parse),
FeatureToBenchmark::Formatter => {
let parsed = parse.parse();
let format_node = parsed
.format_node()
.expect("Expect formatting to be supported");
benchmark_format_lib(test_case.filename(), &format_node)
}
FeatureToBenchmark::Analyzer => {
let parsed = parse.parse();
let analyze = parsed.analyze().expect("Expect analyze to be supported");
benchmark_analyze_lib(&test_case, &analyze)
}
};
summary.push(result.summary());
println!("Benchmark: {}", lib);
println!("{}", result);
}
Err(e) => println!("{:?}", e),
}
}
println!("Summary");
println!("-------");
for l in summary {
println!("{}", l);
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/language.rs | xtask/bench/src/language.rs | use crate::test_case::TestCase;
use criterion::black_box;
use rome_analyze::{AnalysisFilter, AnalyzerOptions, ControlFlow, Never, RuleCategories};
use rome_formatter::{FormatResult, Formatted, PrintResult, Printed};
use rome_js_analyze::analyze;
use rome_js_formatter::context::{JsFormatContext, JsFormatOptions};
use rome_js_parser::JsParserOptions;
use rome_js_syntax::{AnyJsRoot, JsFileSource, JsSyntaxNode};
use rome_json_formatter::context::{JsonFormatContext, JsonFormatOptions};
use rome_json_parser::JsonParserOptions;
use rome_json_syntax::JsonSyntaxNode;
use rome_parser::prelude::ParseDiagnostic;
use rome_rowan::NodeCache;
pub enum Parse<'a> {
JavaScript(JsFileSource, &'a str),
Json(&'a str),
}
impl<'a> Parse<'a> {
pub fn try_from_case(case: &TestCase) -> Option<Parse> {
match JsFileSource::try_from(case.path()) {
Ok(source_type) => Some(Parse::JavaScript(source_type, case.code())),
Err(_) => match case.extension() {
"json" => Some(Parse::Json(case.code())),
_ => None,
},
}
}
pub fn parse(&self) -> Parsed {
match self {
Parse::JavaScript(source_type, code) => Parsed::JavaScript(
rome_js_parser::parse(code, *source_type, JsParserOptions::default()),
*source_type,
),
Parse::Json(code) => Parsed::Json(rome_json_parser::parse_json(
code,
JsonParserOptions::default(),
)),
}
}
pub fn parse_with_cache(&self, cache: &mut NodeCache) -> Parsed {
match self {
Parse::JavaScript(source_type, code) => Parsed::JavaScript(
rome_js_parser::parse_js_with_cache(
code,
*source_type,
JsParserOptions::default(),
cache,
),
*source_type,
),
Parse::Json(code) => Parsed::Json(rome_json_parser::parse_json_with_cache(
code,
cache,
JsonParserOptions::default(),
)),
}
}
}
pub enum Parsed {
JavaScript(rome_js_parser::Parse<AnyJsRoot>, JsFileSource),
Json(rome_json_parser::JsonParse),
}
impl Parsed {
pub fn format_node(&self) -> Option<FormatNode> {
match self {
Parsed::JavaScript(parse, source_type) => {
Some(FormatNode::JavaScript(parse.syntax(), *source_type))
}
Parsed::Json(parse) => Some(FormatNode::Json(parse.syntax())),
}
}
pub fn analyze(&self) -> Option<Analyze> {
match self {
Parsed::JavaScript(parse, _) => Some(Analyze::JavaScript(parse.tree())),
Parsed::Json(_) => None,
}
}
pub fn into_diagnostics(self) -> Vec<ParseDiagnostic> {
match self {
Parsed::JavaScript(parse, _) => parse.into_diagnostics(),
Parsed::Json(parse) => parse.into_diagnostics(),
}
}
}
pub enum FormatNode {
JavaScript(JsSyntaxNode, JsFileSource),
Json(JsonSyntaxNode),
}
impl FormatNode {
pub fn format_node(&self) -> FormatResult<FormattedNode> {
match self {
Self::JavaScript(root, source_type) => {
rome_js_formatter::format_node(JsFormatOptions::new(*source_type), root)
.map(FormattedNode::JavaScript)
}
FormatNode::Json(root) => {
rome_json_formatter::format_node(JsonFormatOptions::default(), root)
.map(FormattedNode::Json)
}
}
}
}
pub enum FormattedNode {
JavaScript(Formatted<JsFormatContext>),
Json(Formatted<JsonFormatContext>),
}
impl FormattedNode {
pub fn print(&self) -> PrintResult<Printed> {
match self {
FormattedNode::JavaScript(formatted) => formatted.print(),
FormattedNode::Json(formatted) => formatted.print(),
}
}
}
pub enum Analyze {
JavaScript(AnyJsRoot),
}
impl Analyze {
pub fn analyze(&self) {
match self {
Analyze::JavaScript(root) => {
let filter = AnalysisFilter {
categories: RuleCategories::SYNTAX | RuleCategories::LINT,
..AnalysisFilter::default()
};
let options = AnalyzerOptions::default();
analyze(root, filter, &options, JsFileSource::default(), |event| {
black_box(event.diagnostic());
black_box(event.actions());
ControlFlow::<Never>::Continue(())
});
}
}
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/test_case.rs | xtask/bench/src/test_case.rs | use crate::err_to_string;
use ansi_rgb::{red, Foreground};
use std::env;
use std::path::{Path, PathBuf};
use std::str::FromStr;
pub struct TestCase {
code: String,
id: String,
path: PathBuf,
}
impl TestCase {
pub fn try_from(test_case: &str) -> Result<TestCase, String> {
let url = url::Url::from_str(test_case).map_err(err_to_string)?;
let segments = url
.path_segments()
.ok_or_else(|| "lib url has no segments".to_string())?;
let filename = segments
.last()
.ok_or_else(|| "lib url has no segments".to_string())?;
let path = Path::new(
&env::var("CARGO_MANIFEST_DIR")
.unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
)
.ancestors()
.nth(2)
.unwrap()
.join("target")
.join(filename);
let content = std::fs::read_to_string(&path)
.map_err(err_to_string)
.or_else(|_| {
println!(
"[{}] - Downloading [{}] to [{}]",
filename,
test_case,
path.display()
);
match ureq::get(test_case).call() {
Ok(response) => {
let mut reader = response.into_reader();
let mut writer = std::fs::File::create(&path).map_err(err_to_string)?;
if let Err(err) = std::io::copy(&mut reader, &mut writer) {
drop(writer);
std::fs::remove_file(&path).ok();
return Err(err_to_string(err));
}
std::fs::read_to_string(&path).map_err(err_to_string)
}
Err(e) => Err(err_to_string(e)),
}
});
content.map(|code| {
println!("[{}] - using [{}]", filename.fg(red()), path.display());
TestCase {
id: filename.to_string(),
code,
path,
}
})
}
pub fn filename(&self) -> &str {
&self.id
}
pub fn path(&self) -> &Path {
self.path.as_path()
}
pub fn code(&self) -> &str {
&self.code
}
pub fn extension(&self) -> &str {
self.path
.extension()
.expect("Expected test case to have extension")
.to_str()
.expect("Expected extension to be valid UTF8")
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/main.rs | xtask/bench/src/main.rs | use pico_args::Arguments;
use xtask::{project_root, pushd, Result};
use xtask_bench::{run, FeatureToBenchmark, RunArgs};
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOCATOR: dhat::Alloc = dhat::Alloc;
#[cfg(all(target_os = "windows", not(feature = "dhat-heap")))]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[cfg(all(not(target_os = "windows"), not(feature = "dhat-heap")))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
fn main() -> Result<(), pico_args::Error> {
#[cfg(feature = "dhat-heap")]
let _profiler = dhat::Profiler::new_heap();
let _d = pushd(project_root());
let mut args = Arguments::from_env();
if args.contains("--help") {
eprintln!(
"\
cargo bench
Benchmark parser and formatter.
USAGE:
cargo bench [option]
OPTIONS
--save-baseline Allows different runs to be compared.
--feature Possible values: parser, formatter
--criterion=[true/false] Run a series of statistical test to assess with the this run is faster or slower than previous runs.
--suites=<IDS> Runs the specified benchmarks. Use comma as separator.
Valid values are:
*: will run all benchmarks;
js: will benchmark all javascript libraries;
ts: will benchmark all typescript libraries;
Default is \"*\".
--filter=<file> Filters out tests that don't match the query.
--help Prints this help.
"
);
return Ok(());
}
// on pr branch, run
// git checkout main
// cargo benchmark --save-baseline main
// git checkout -
// cargo benchmark --save-baseline pr
// critcmp main pr # (cargo install critcmp)
let filter: String = args
.opt_value_from_str("--filter")
.unwrap()
.unwrap_or_else(|| ".*".to_string());
let criterion: bool = args
.opt_value_from_str("--criterion")
.unwrap()
.unwrap_or(true);
let suites = args
.opt_value_from_str("--suites")
.unwrap()
.unwrap_or_else(|| "*".to_string());
let baseline: Option<String> = args.opt_value_from_str("--save-baseline").unwrap();
// "feature" is a mandatory option and will throw an error if it's missing or incorrect
let feature: FeatureToBenchmark = args.value_from_str("--feature")?;
run(RunArgs {
filter,
criterion,
baseline,
feature,
suites,
});
Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/features/analyzer.rs | xtask/bench/src/features/analyzer.rs | use crate::language::Analyze;
use crate::test_case::TestCase;
use crate::BenchmarkSummary;
use std::fmt::{Display, Formatter};
use std::time::Duration;
#[derive(Debug, Clone)]
pub struct AnalyzerMeasurement {
id: String,
analysis: Duration,
}
pub fn benchmark_analyze_lib(case: &TestCase, analyze: &Analyze) -> BenchmarkSummary {
let analyzer_timer = timing::start();
analyze.analyze();
let analyzer_duration = analyzer_timer.stop();
BenchmarkSummary::Analyzer(AnalyzerMeasurement {
id: case.filename().to_string(),
analysis: analyzer_duration,
})
}
impl AnalyzerMeasurement {
fn total(&self) -> Duration {
self.analysis
}
pub(crate) fn summary(&self) -> String {
format!("{}, Analysis: {:?}", self.id, self.total())
}
}
impl Display for AnalyzerMeasurement {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let _ = writeln!(f, "\tAnalysis: {:>10?}", self.analysis);
let _ = writeln!(f, "\t ----------");
let _ = writeln!(f, "\tTotal: {:>10?}", self.total());
Ok(())
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/features/parser.rs | xtask/bench/src/features/parser.rs | #[cfg(feature = "dhat-heap")]
use crate::features::print_stats;
use crate::language::Parse;
use crate::test_case::TestCase;
use crate::BenchmarkSummary;
use itertools::Itertools;
use rome_diagnostics::console::fmt::Termcolor;
use rome_diagnostics::console::markup;
use rome_diagnostics::termcolor::Buffer;
use rome_diagnostics::DiagnosticExt;
use rome_diagnostics::PrintDiagnostic;
use rome_parser::diagnostic::ParseDiagnostic;
use std::fmt::{Display, Formatter};
use std::time::Duration;
#[derive(Debug, Clone)]
pub struct ParseMeasurement {
id: String,
code: String,
duration: Duration,
diagnostics: Vec<ParseDiagnostic>,
}
pub fn benchmark_parse_lib(case: &TestCase, parse: &Parse) -> BenchmarkSummary {
#[cfg(feature = "dhat-heap")]
println!("Start");
#[cfg(feature = "dhat-heap")]
let stats = print_stats(dhat::HeapStats::get(), None);
let parser_timer = timing::start();
let parsed = parse.parse();
let parse_duration = parser_timer.stop();
#[cfg(feature = "dhat-heap")]
println!("Parsed");
#[cfg(feature = "dhat-heap")]
print_stats(dhat::HeapStats::get(), Some(stats));
BenchmarkSummary::Parser(ParseMeasurement {
id: case.filename().to_string(),
code: case.code().to_string(),
duration: parse_duration,
diagnostics: parsed.into_diagnostics(),
})
}
impl ParseMeasurement {
pub(crate) fn summary(&self) -> String {
format!("{}, Total Time: {:?}", self.id, self.duration,)
}
}
impl Display for ParseMeasurement {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let _ = writeln!(f, "\tDuration: {:>10?}", self.duration);
let _ = writeln!(f, "\tDiagnostics");
let diagnostics = &self
.diagnostics
.iter()
.map(|diagnostic| rome_diagnostics::Error::from(diagnostic.clone()))
.group_by(|x| x.severity());
for (severity, items) in diagnostics {
let _ = writeln!(f, "\t\t{:?}: {}", severity, items.count());
}
let mut buffer = Buffer::no_color();
for diagnostic in self.diagnostics.iter().filter(|diag| diag.is_error()) {
let error = diagnostic
.clone()
.with_file_path(self.id.to_string())
.with_file_source_code(self.code.clone());
rome_diagnostics::console::fmt::Formatter::new(&mut Termcolor(&mut buffer))
.write_markup(markup! {
{PrintDiagnostic::verbose(&error)}
})
.unwrap();
}
Ok(())
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/features/mod.rs | xtask/bench/src/features/mod.rs | pub mod analyzer;
pub mod formatter;
pub mod parser;
#[cfg(feature = "dhat-heap")]
fn print_stats(current: dhat::HeapStats, before: Option<dhat::HeapStats>) -> dhat::HeapStats {
use humansize::{format_size_i, DECIMAL};
println!("\tMemory");
println!("\t\tCurrent Blocks: {}", current.curr_blocks);
println!(
"\t\tCurrent Bytes: {}",
format_size_i(current.curr_bytes, DECIMAL)
);
println!("\t\tMax Blocks: {}", current.max_blocks);
println!(
"\t\tMax Bytes: {}",
format_size_i(current.max_bytes, DECIMAL)
);
if let Some(before) = before {
let new_blocks = current.total_blocks - before.total_blocks;
let new_bytes = current.total_bytes - before.total_bytes;
println!("\t\tNew Blocks: {new_blocks}",);
println!("\t\tNew Bytes: {}", format_size_i(new_bytes, DECIMAL));
}
println!("\t\tTotal Blocks: {}", current.total_blocks);
println!(
"\t\tTotal Bytes: {}",
format_size_i(current.total_bytes, DECIMAL)
);
current
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/bench/src/features/formatter.rs | xtask/bench/src/features/formatter.rs | #[cfg(feature = "dhat-heap")]
use crate::features::print_stats;
use crate::language::FormatNode;
use crate::BenchmarkSummary;
use rome_formatter::Printed;
use std::fmt::{Display, Formatter};
use std::time::Duration;
#[derive(Debug, Clone)]
pub struct FormatterMeasurement {
id: String,
formatting: Duration,
}
pub fn benchmark_format_lib(id: &str, format_node: &FormatNode) -> BenchmarkSummary {
let formatter_timer = timing::start();
criterion::black_box(run_format(format_node));
let formatter_duration = formatter_timer.stop();
BenchmarkSummary::Formatter(FormatterMeasurement {
id: id.to_string(),
formatting: formatter_duration,
})
}
pub fn run_format(format_node: &FormatNode) -> Printed {
#[cfg(feature = "dhat-heap")]
let stats = {
println!("Start");
print_stats(dhat::HeapStats::get(), None)
};
let formatted = format_node.format_node().unwrap();
#[cfg(feature = "dhat-heap")]
let stats = {
println!("Formatted");
print_stats(dhat::HeapStats::get(), Some(stats))
};
let printed = formatted.print();
drop(formatted);
#[cfg(feature = "dhat-heap")]
{
println!("Printed");
print_stats(dhat::HeapStats::get(), Some(stats));
}
printed.expect("Document to be valid")
}
impl FormatterMeasurement {
fn total(&self) -> Duration {
self.formatting
}
pub(crate) fn summary(&self) -> String {
format!("{}, Formatting: {:?}", self.id, self.total(),)
}
}
impl Display for FormatterMeasurement {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let _ = writeln!(f, "\tFormatting: {:>10?}", self.formatting);
let _ = writeln!(f, "\t ----------");
let _ = writeln!(f, "\tTotal: {:>10?}", self.total());
Ok(())
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/contributors/src/main.rs | xtask/contributors/src/main.rs | use pico_args::Arguments;
use serde::{Deserialize, Serialize};
use std::fmt::Write;
use xtask::glue::fs2;
use xtask::*;
/// A token is needed to run this script. To create a token, go to <https://github.com/settings/tokens>
/// and give it read access to the repository.
///
/// Only users that have read rights can run this script
fn main() -> Result<()> {
let root = project_root().join("website/src/components");
let mut args = Arguments::from_env();
let token: String = args.value_from_str("--token").unwrap();
let contributors = get_contributors(&token);
let mut content = String::new();
let command = "Use the command `cargo contributors`".to_string();
write!(
content,
"{{/** {} */}}",
prepend_generated_preamble(command)
)?;
content.push('\n');
content.push_str("<h3>Code contributors</h3>");
content.push('\n');
content.push_str("<ul class=\"credits-people-list contributors\">");
content.push('\n');
for contributor in contributors {
let mut contributor_html = String::new();
let escaped_login = html_escape::encode_text(&contributor.login);
let escaped_avatar = html_escape::encode_text(&contributor.avatar_url);
contributor_html.push_str("<li><a href=\"https://github.com/rome/tools/commits?author=");
html_escape::encode_double_quoted_attribute_to_string(
format!("{}", escaped_login),
&mut contributor_html,
);
contributor_html.push_str("\">");
contributor_html.push_str("<img src=\"");
html_escape::encode_double_quoted_attribute_to_string(
format!("{}", escaped_avatar),
&mut contributor_html,
);
content.push_str(&contributor_html);
write!(content, "\" alt=\"{}\" />", contributor.login)?;
write!(content, "<span>{}</span>", escaped_login)?;
content.push_str("</a></li>");
content.push('\n');
}
content.push_str("</ul>");
fs2::write(root.join("Contributors.astro"), content)?;
Ok(())
}
#[derive(Debug, Deserialize, Serialize)]
struct Contributor {
avatar_url: String,
login: String,
}
fn get_contributors(token: &str) -> Vec<Contributor> {
let mut contributors = Vec::new();
contributors_request(
"https://api.github.com/repos/rome/tools/contributors",
token,
&mut contributors,
);
contributors
}
fn contributors_request(url: &str, token: &str, contributors: &mut Vec<Contributor>) {
let request = ureq::get(url)
.set("User-Agent", "@rome")
.set("Authorization", &format!("token {token}"));
match request.call() {
Ok(response) => {
if let Some(link) = response.header("link") {
if link.contains("rel=\"next\"") {
let start_index = link
.find("rel=\"prev\", ")
.map(|index| index + "rel=\"prev\", ".len())
.unwrap_or(0);
// SAFETY: checked before
let end_index = link.find("; rel=\"next\"").unwrap();
let url = &link[start_index..end_index];
let url = url.replace(['<', '>'], "");
contributors_request(&url, token, contributors);
}
}
let result: Result<Vec<Contributor>, std::io::Error> = response.into_json();
if let Ok(new_contributors) = result {
contributors.extend(new_contributors);
}
}
Err(err) => {
eprintln!("{:?}", err);
}
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/lintdoc/src/main.rs | xtask/lintdoc/src/main.rs | use pulldown_cmark::{html::write_html, CodeBlockKind, Event, LinkType, Parser, Tag};
use rome_analyze::{
AnalysisFilter, AnalyzerOptions, ControlFlow, GroupCategory, Queryable, RegistryVisitor, Rule,
RuleCategory, RuleFilter, RuleGroup, RuleMetadata,
};
use rome_console::fmt::Termcolor;
use rome_console::{
fmt::{Formatter, HTML},
markup, Console, Markup, MarkupBuf,
};
use rome_diagnostics::termcolor::NoColor;
use rome_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic};
use rome_js_parser::JsParserOptions;
use rome_js_syntax::{JsFileSource, JsLanguage, Language, LanguageVariant, ModuleKind};
use rome_json_parser::JsonParserOptions;
use rome_json_syntax::JsonLanguage;
use rome_service::settings::WorkspaceSettings;
use std::{
collections::BTreeMap,
fmt::Write as _,
io::{self, Write as _},
path::Path,
slice,
str::{self, FromStr},
};
use xtask::{glue::fs2, *};
fn main() -> Result<()> {
let root = project_root().join("website/src/pages/lint/rules");
let reference_groups = project_root().join("website/src/components/generated/Groups.astro");
let reference_number_of_rules =
project_root().join("website/src/components/generated/NumberOfRules.astro");
// Clear the rules directory ignoring "not found" errors
if let Err(err) = fs2::remove_dir_all(&root) {
let is_not_found = err
.source()
.and_then(|err| err.downcast_ref::<io::Error>())
.map_or(false, |err| matches!(err.kind(), io::ErrorKind::NotFound));
if !is_not_found {
return Err(err);
}
}
fs2::create_dir_all(&root)?;
// Content of the index page
let mut index = Vec::new();
let mut reference_buffer = Vec::new();
writeln!(index, "---")?;
writeln!(index, "title: Lint Rules")?;
writeln!(index, "parent: linter/index")?;
writeln!(index, "emoji: 📏")?;
writeln!(index, "description: List of available lint rules.")?;
writeln!(index, "category: reference")?;
writeln!(index, "mainClass: rules")?;
writeln!(index, "---")?;
writeln!(index)?;
writeln!(index, "# Rules")?;
writeln!(index)?;
// Accumulate errors for all lint rules to print all outstanding issues on
// failure instead of just the first one
let mut errors = Vec::new();
#[derive(Default)]
struct LintRulesVisitor {
groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>,
number_or_rules: u16,
}
impl RegistryVisitor<JsLanguage> for LintRulesVisitor {
fn record_category<C: GroupCategory<Language = JsLanguage>>(&mut self) {
if matches!(C::CATEGORY, RuleCategory::Lint) {
C::record_groups(self);
}
}
fn record_rule<R>(&mut self)
where
R: Rule + 'static,
R::Query: Queryable<Language = JsLanguage>,
<R::Query as Queryable>::Output: Clone,
{
self.number_or_rules += 1;
self.groups
.entry(<R::Group as RuleGroup>::NAME)
.or_insert_with(BTreeMap::new)
.insert(R::METADATA.name, R::METADATA);
}
}
impl RegistryVisitor<JsonLanguage> for LintRulesVisitor {
fn record_category<C: GroupCategory<Language = JsonLanguage>>(&mut self) {
if matches!(C::CATEGORY, RuleCategory::Lint) {
C::record_groups(self);
}
}
fn record_rule<R>(&mut self)
where
R: Rule + 'static,
R::Query: Queryable<Language = JsonLanguage>,
<R::Query as Queryable>::Output: Clone,
{
self.number_or_rules += 1;
self.groups
.entry(<R::Group as RuleGroup>::NAME)
.or_insert_with(BTreeMap::new)
.insert(R::METADATA.name, R::METADATA);
}
}
let mut visitor = LintRulesVisitor::default();
rome_js_analyze::visit_registry(&mut visitor);
rome_json_analyze::visit_registry(&mut visitor);
let LintRulesVisitor {
mut groups,
number_or_rules,
} = visitor;
let nursery_rules = groups
.remove("nursery")
.expect("Expected nursery group to exist");
writeln!(
reference_buffer,
"<!-- this file is auto generated, use `cargo lintdoc` to update it -->"
)?;
write!(reference_buffer, "<ul>")?;
for (group, rules) in groups {
generate_group(group, rules, &root, &mut index, &mut errors)?;
generate_reference(group, &mut reference_buffer)?;
}
generate_group("nursery", nursery_rules, &root, &mut index, &mut errors)?;
generate_reference("nursery", &mut reference_buffer)?;
write!(reference_buffer, "</ul>")?;
if !errors.is_empty() {
bail!(
"failed to generate documentation pages for the following rules:\n{}",
errors
.into_iter()
.map(|(rule, err)| format!("- {rule}: {err:?}\n"))
.collect::<String>()
);
}
let number_of_rules_buffer = format!(
"<!-- this file is auto generated, use `cargo lintdoc` to update it -->\n \
<p>Rome's linter has a total of <strong><a href='/lint/rules'>{} rules</a></strong><p>",
number_or_rules
);
fs2::write(root.join("index.mdx"), index)?;
fs2::write(reference_groups, reference_buffer)?;
fs2::write(reference_number_of_rules, number_of_rules_buffer)?;
Ok(())
}
/// Renders the index section for one rule group and generates a standalone
/// documentation page per rule via `generate_rule`.
///
/// Rules whose page generation fails are collected into `errors` instead of
/// aborting the whole run.
fn generate_group(
    group: &'static str,
    rules: BTreeMap<&'static str, RuleMetadata>,
    root: &Path,
    mut index: &mut dyn io::Write,
    errors: &mut Vec<(&'static str, Error)>,
) -> io::Result<()> {
    let is_nursery = group == "nursery";
    let (group_name, description) = extract_group_metadata(group);

    writeln!(index, "\n## {group_name}")?;
    writeln!(index)?;
    write_markup_to_string(index, description)?;
    writeln!(index)?;
    writeln!(index, "<div class=\"category-rules\">")?;

    for (rule, meta) in rules {
        // Nursery rules are never advertised as recommended.
        let is_recommended = meta.recommended && !is_nursery;
        let generated = generate_rule(root, group, rule, meta.docs, meta.version, is_recommended);
        match generated {
            Err(err) => errors.push((rule, err)),
            Ok(summary) => {
                writeln!(index, "<section class=\"rule\">")?;
                writeln!(index, "<h3 data-toc-exclude id=\"{rule}\">")?;
                writeln!(index, "\t<a href=\"/lint/rules/{rule}\">{rule}</a>")?;
                if is_recommended {
                    writeln!(index, "\t<span class=\"recommended\">recommended</span>")?;
                }
                writeln!(index, "</h3>")?;
                write_html(&mut index, summary.into_iter())?;
                writeln!(index, "\n</section>")?;
            }
        }
    }

    writeln!(index, "\n</div>")?;
    Ok(())
}
/// Generates the documentation page for a single lint rule.
///
/// The page is written to `<root>/<rule>.md`; the returned events are the
/// rule's summary paragraph, used by the caller to build the index page.
fn generate_rule(
    root: &Path,
    group: &'static str,
    rule: &'static str,
    docs: &'static str,
    version: &'static str,
    recommended: bool,
) -> Result<Vec<Event<'static>>> {
    let mut page = Vec::new();

    // Front-matter header consumed by the documentation site.
    writeln!(page, "---")?;
    writeln!(page, "title: Lint Rule {rule}")?;
    writeln!(page, "parent: lint/rules/index")?;
    writeln!(page, "---")?;
    writeln!(page)?;

    writeln!(page, "# {rule} (since v{version})")?;
    writeln!(page)?;
    if recommended {
        writeln!(page, "> This rule is recommended by Rome.")?;
        writeln!(page)?;
    }

    // Render the rule's own documentation; this also runs the embedded code
    // snippets through the analyzer as a side effect.
    let summary = parse_documentation(group, rule, docs, &mut page)?;

    writeln!(page, "## Related links")?;
    writeln!(page)?;
    writeln!(page, "- [Disable a rule](/linter/#disable-a-lint-rule)")?;
    writeln!(page, "- [Rule options](/linter/#rule-options)")?;

    fs2::write(root.join(format!("{rule}.md")), page)?;
    Ok(summary)
}
/// Parse the documentation fragment for a lint rule (in markdown) and generates
/// the content for the corresponding documentation page
///
/// Returns the parser events making up the rule's first paragraph, which the
/// caller uses as the rule's one-line summary on the index page.
fn parse_documentation(
    group: &'static str,
    rule: &'static str,
    docs: &'static str,
    content: &mut Vec<u8>,
) -> Result<Vec<Event<'static>>> {
    let parser = Parser::new(docs);
    // Parser events for the first paragraph of documentation in the resulting
    // content, used as a short summary of what the rule does in the rules page
    let mut summary = Vec::new();
    let mut is_summary = false;
    // Tracks the content of the current code block if it's using a
    // language supported for analysis
    let mut language = None;
    // `Some(n)` while inside an ordered list, holding the next item ordinal
    let mut list_order = None;
    for event in parser {
        if is_summary {
            if matches!(event, Event::End(Tag::Paragraph)) {
                is_summary = false;
            } else {
                summary.push(event.clone());
            }
        }
        match event {
            // CodeBlock-specific handling
            Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(meta))) => {
                // Track the content of code blocks to pass them through the analyzer
                let test = CodeBlockTest::from_str(meta.as_ref())?;
                // Erase the lintdoc-specific attributes in the output by
                // re-generating the language ID from the source type
                write!(content, "```")?;
                if !meta.is_empty() {
                    if let BlockType::Js(source_type) = test.block_type {
                        match source_type.language() {
                            Language::JavaScript => write!(content, "js")?,
                            Language::TypeScript { .. } => write!(content, "ts")?,
                        }
                        match source_type.variant() {
                            LanguageVariant::Standard => {}
                            LanguageVariant::Jsx => write!(content, "x")?,
                        }
                    }
                }
                writeln!(content)?;
                language = Some((test, String::new()));
            }
            Event::End(Tag::CodeBlock(_)) => {
                writeln!(content, "```")?;
                writeln!(content)?;
                if let Some((test, block)) = language.take() {
                    if test.expect_diagnostic {
                        // Expected diagnostics are rendered as a preformatted
                        // block right below the code snippet
                        write!(
                            content,
                            "<pre class=\"language-text\"><code class=\"language-text\">"
                        )?;
                    }
                    assert_lint(group, rule, &test, &block, content)
                        .context("snapshot test failed")?;
                    if test.expect_diagnostic {
                        writeln!(content, "</code></pre>")?;
                        writeln!(content)?;
                    }
                }
            }
            Event::Text(text) => {
                // While inside a code block, also accumulate the text so it
                // can be passed to the analyzer when the block ends
                if let Some((_, block)) = &mut language {
                    write!(block, "{text}")?;
                }
                write!(content, "{text}")?;
            }
            // Other markdown events are emitted as-is
            Event::Start(Tag::Heading(level, ..)) => {
                write!(content, "{} ", "#".repeat(level as usize))?;
            }
            Event::End(Tag::Heading(..)) => {
                writeln!(content)?;
                writeln!(content)?;
            }
            Event::Start(Tag::Paragraph) => {
                // The first paragraph of the document becomes the summary
                if summary.is_empty() && !is_summary {
                    is_summary = true;
                }
            }
            Event::End(Tag::Paragraph) => {
                writeln!(content)?;
                writeln!(content)?;
            }
            Event::Code(text) => {
                write!(content, "`{text}`")?;
            }
            Event::Start(Tag::Link(kind, _, _)) => match kind {
                LinkType::Inline => {
                    write!(content, "[")?;
                }
                LinkType::Shortcut => {
                    write!(content, "[")?;
                }
                _ => {
                    panic!("unimplemented link type")
                }
            },
            Event::End(Tag::Link(_, url, title)) => {
                write!(content, "]({url}")?;
                if !title.is_empty() {
                    write!(content, " \"{title}\"")?;
                }
                write!(content, ")")?;
            }
            Event::SoftBreak => {
                writeln!(content)?;
            }
            Event::Start(Tag::List(num)) => {
                if let Some(num) = num {
                    list_order = Some(num);
                }
            }
            Event::End(Tag::List(_)) => {
                list_order = None;
                writeln!(content)?;
            }
            Event::Start(Tag::Item) => {
                if let Some(num) = list_order {
                    write!(content, "{num}. ")?;
                } else {
                    write!(content, "- ")?;
                }
            }
            Event::End(Tag::Item) => {
                // Advance the ordinal for ordered lists
                list_order = list_order.map(|item| item + 1);
                writeln!(content)?;
            }
            Event::Start(Tag::Strong) => {
                write!(content, "**")?;
            }
            Event::End(Tag::Strong) => {
                write!(content, "**")?;
            }
            Event::Start(Tag::Emphasis) => {
                write!(content, "_")?;
            }
            Event::End(Tag::Emphasis) => {
                write!(content, "_")?;
            }
            Event::Start(Tag::Strikethrough) => {
                write!(content, "~")?;
            }
            Event::End(Tag::Strikethrough) => {
                write!(content, "~")?;
            }
            Event::Start(Tag::BlockQuote) => {
                write!(content, ">")?;
            }
            Event::End(Tag::BlockQuote) => {
                writeln!(content)?;
            }
            _ => {
                // TODO: Implement remaining events as required
                bail!("unimplemented event {event:?}")
            }
        }
    }
    Ok(summary)
}
/// Language of a documentation code block, determining which parser and
/// analyzer `assert_lint` runs on the snippet.
enum BlockType {
    /// A JavaScript/TypeScript snippet with its resolved source type
    Js(JsFileSource),
    /// A JSON snippet
    Json,
}
/// Attributes parsed from a fenced code block's info string
/// (e.g. ```` ```jsx,expect_diagnostic ````).
struct CodeBlockTest {
    // Language of the snippet; defaults to JS (see `FromStr` below)
    block_type: BlockType,
    // When true, the analysis must emit exactly one diagnostic
    expect_diagnostic: bool,
    // When true, the snippet is not analyzed at all
    ignore: bool,
}
impl FromStr for CodeBlockTest {
    type Err = xtask::Error;

    /// Parses the info string of a fenced code block into a `CodeBlockTest`.
    ///
    /// This is based on the parsing logic for code block languages in `rustdoc`:
    /// https://github.com/rust-lang/rust/blob/6ac8adad1f7d733b5b97d1df4e7f96e73a46db42/src/librustdoc/html/markdown.rs#L873
    fn from_str(input: &str) -> Result<Self> {
        // Start from the defaults and let each token override them.
        let mut test = CodeBlockTest {
            block_type: BlockType::Js(JsFileSource::default()),
            expect_diagnostic: false,
            ignore: false,
        };

        let tokens = input
            .split(|c: char| matches!(c, ',' | ' ' | '\t'))
            .map(str::trim)
            .filter(|token| !token.is_empty());

        for token in tokens {
            match token {
                // Language tags, using the same list of extensions as
                // `compute_source_type_from_path_or_extension`
                "cjs" => {
                    test.block_type = BlockType::Js(
                        JsFileSource::js_module().with_module_kind(ModuleKind::Script),
                    );
                }
                "js" | "mjs" | "jsx" => {
                    test.block_type = BlockType::Js(JsFileSource::jsx());
                }
                "ts" | "mts" | "cts" => {
                    test.block_type = BlockType::Js(JsFileSource::ts());
                }
                "tsx" => {
                    test.block_type = BlockType::Js(JsFileSource::tsx());
                }
                "json" => {
                    test.block_type = BlockType::Json;
                }
                // Lintdoc-specific attributes
                "expect_diagnostic" => {
                    test.expect_diagnostic = true;
                }
                "ignore" => {
                    test.ignore = true;
                }
                _ => {
                    bail!("unknown code block attribute {token:?}")
                }
            }
        }
        Ok(test)
    }
}
/// Parse and analyze the provided code block, and asserts that it emits
/// exactly zero or one diagnostic depending on the value of `expect_diagnostic`.
/// That diagnostic is then emitted as text into the `content` buffer
fn assert_lint(
group: &'static str,
rule: &'static str,
test: &CodeBlockTest,
code: &str,
content: &mut Vec<u8>,
) -> Result<()> {
let file = format!("{group}/{rule}.js");
let mut write = HTML(content);
let mut diagnostic_count = 0;
let mut all_diagnostics = vec![];
let mut write_diagnostic = |code: &str, diag: rome_diagnostics::Error| {
let category = diag.category().map_or("", |code| code.name());
Formatter::new(&mut write).write_markup(markup! {
{PrintDiagnostic::verbose(&diag)}
})?;
all_diagnostics.push(diag);
// Fail the test if the analysis returns more diagnostics than expected
if test.expect_diagnostic {
// Print all diagnostics to help the user
if all_diagnostics.len() > 1 {
let mut console = rome_console::EnvConsole::default();
for diag in all_diagnostics.iter() {
console.println(
rome_console::LogLevel::Error,
markup! {
{PrintDiagnostic::verbose(diag)}
},
);
}
}
ensure!(
diagnostic_count == 0,
"analysis returned multiple diagnostics, code snippet: \n\n{}",
code
);
} else {
// Print all diagnostics to help the user
let mut console = rome_console::EnvConsole::default();
for diag in all_diagnostics.iter() {
console.println(
rome_console::LogLevel::Error,
markup! {
{PrintDiagnostic::verbose(diag)}
},
);
}
bail!(format!(
"analysis returned an unexpected diagnostic, code `snippet:\n\n{:?}\n\n{}",
category, code
));
}
diagnostic_count += 1;
Ok(())
};
if test.ignore {
return Ok(());
}
match test.block_type {
BlockType::Js(source_type) => {
let parse = rome_js_parser::parse(code, source_type, JsParserOptions::default());
if parse.has_errors() {
for diag in parse.into_diagnostics() {
let error = diag
.with_file_path(file.clone())
.with_file_source_code(code);
write_diagnostic(code, error)?;
}
} else {
let root = parse.tree();
let settings = WorkspaceSettings::default();
let rule_filter = RuleFilter::Rule(group, rule);
let filter = AnalysisFilter {
enabled_rules: Some(slice::from_ref(&rule_filter)),
..AnalysisFilter::default()
};
let options = AnalyzerOptions::default();
let (_, diagnostics) = rome_js_analyze::analyze(
&root,
filter,
&options,
source_type,
|signal| {
if let Some(mut diag) = signal.diagnostic() {
let category = diag.category().expect("linter diagnostic has no code");
let severity = settings.get_severity_from_rule_code(category).expect(
"If you see this error, it means you need to run cargo codegen-configuration",
);
for action in signal.actions() {
if !action.is_suppression() {
diag = diag.add_code_suggestion(action.into());
}
}
let error = diag
.with_severity(severity)
.with_file_path(file.clone())
.with_file_source_code(code);
let res = write_diagnostic(code, error);
// Abort the analysis on error
if let Err(err) = res {
return ControlFlow::Break(err);
}
}
ControlFlow::Continue(())
},
);
// Result is Some(_) if analysis aborted with an error
for diagnostic in diagnostics {
write_diagnostic(code, diagnostic)?;
}
}
if test.expect_diagnostic {
// Fail the test if the analysis didn't emit any diagnostic
ensure!(
diagnostic_count == 1,
"analysis returned no diagnostics.\n code snippet:\n {}",
code
);
}
}
BlockType::Json => {
let parse = rome_json_parser::parse_json(code, JsonParserOptions::default());
if parse.has_errors() {
for diag in parse.into_diagnostics() {
let error = diag
.with_file_path(file.clone())
.with_file_source_code(code);
write_diagnostic(code, error)?;
}
} else {
let root = parse.tree();
let settings = WorkspaceSettings::default();
let rule_filter = RuleFilter::Rule(group, rule);
let filter = AnalysisFilter {
enabled_rules: Some(slice::from_ref(&rule_filter)),
..AnalysisFilter::default()
};
let options = AnalyzerOptions::default();
let (_, diagnostics) = rome_json_analyze::analyze(
&root.value().unwrap(),
filter,
&options,
|signal| {
if let Some(mut diag) = signal.diagnostic() {
let category = diag.category().expect("linter diagnostic has no code");
let severity = settings.get_severity_from_rule_code(category).expect(
"If you see this error, it means you need to run cargo codegen-configuration",
);
for action in signal.actions() {
if !action.is_suppression() {
diag = diag.add_code_suggestion(action.into());
}
}
let error = diag
.with_severity(severity)
.with_file_path(file.clone())
.with_file_source_code(code);
let res = write_diagnostic(code, error);
// Abort the analysis on error
if let Err(err) = res {
return ControlFlow::Break(err);
}
}
ControlFlow::Continue(())
},
);
// Result is Some(_) if analysis aborted with an error
for diagnostic in diagnostics {
write_diagnostic(code, diagnostic)?;
}
}
}
}
Ok(())
}
/// Writes a one-line `<li>` entry describing a rule group into the
/// reference buffer.
fn generate_reference(group: &'static str, buffer: &mut dyn io::Write) -> io::Result<()> {
    let (group_name, description) = extract_group_metadata(group);
    // Collapse the multi-line markup description into a single line of text.
    let description = markup_to_string(&description.to_owned()).replace('\n', " ");
    writeln!(
        buffer,
        "<li><code>{}</code>: {}</li>",
        group_name.to_lowercase(),
        description
    )
}
/// Maps a rule group ID to its display name and its markup description.
///
/// # Panics
///
/// Panics when `group` is not one of the known group IDs.
fn extract_group_metadata(group: &str) -> (&str, Markup) {
    match group {
        "a11y" => (
            "Accessibility",
            markup! {
                "Rules focused on preventing accessibility problems."
            },
        ),
        "complexity" => (
            "Complexity",
            markup! {
                "Rules that focus on inspecting complex code that could be simplified."
            },
        ),
        "correctness" => (
            "Correctness",
            markup! {
                "Rules that detect code that is guaranteed to be incorrect or useless."
            },
        ),
        "nursery" => (
            "Nursery",
            markup! {
                "New rules that are still under development.
Nursery rules require explicit opt-in via configuration on stable versions because they may still have bugs or performance problems.
They are enabled by default on nightly builds, but as they are unstable their diagnostic severity may be set to either error or
warning, depending on whether we intend for the rule to be recommended or not when it eventually gets stabilized.
Nursery rules get promoted to other groups once they become stable or may be removed.
Rules that belong to this group "<Emphasis>"are not subject to semantic version"</Emphasis>"."
            },
        ),
        "performance" => (
            "Performance",
            markup! {
                "Rules catching ways your code could be written to run faster, or generally be more efficient."
            },
        ),
        "security" => (
            "Security",
            markup! {
                "Rules that detect potential security flaws."
            },
        ),
        "style" => (
            "Style",
            markup! {
                "Rules enforcing a consistent and idiomatic way of writing your code."
            },
        ),
        "suspicious" => (
            "Suspicious",
            markup! {
                "Rules that detect code that is likely to be incorrect or useless."
            },
        ),
        _ => panic!("Unknown group ID {group:?}"),
    }
}
/// Renders `markup` as HTML into `buffer`.
pub fn write_markup_to_string(buffer: &mut dyn io::Write, markup: Markup) -> io::Result<()> {
    let mut html = HTML(buffer);
    Formatter::new(&mut html).write_markup(markup)
}
/// Renders a `MarkupBuf` to a plain string, stripping all color/styling.
fn markup_to_string(markup: &MarkupBuf) -> String {
    let mut buffer = Vec::new();
    let mut write = Termcolor(NoColor::new(&mut buffer));
    let mut fmt = Formatter::new(&mut write);
    // Fixed the previously ungrammatical expect messages.
    fmt.write_markup(markup! { {markup} })
        .expect("writing markup to an in-memory buffer should not fail");
    String::from_utf8(buffer).expect("markup output should be valid UTF-8")
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/lib.rs | xtask/coverage/src/lib.rs | pub mod compare;
pub mod js;
pub mod jsx;
mod reporters;
pub mod results;
mod runner;
pub mod symbols;
pub mod ts;
mod util;
pub use crate::reporters::SummaryDetailLevel;
use crate::js::test262::Test262TestSuite;
use crate::reporters::{
DefaultReporter, JsonReporter, MulticastTestReporter, OutputTarget, SummaryReporter,
TestReporter,
};
use crate::runner::{run_test_suite, TestRunContext, TestSuite};
use jsx::jsx_babel::BabelJsxTestSuite;
use rome_parser::diagnostic::ParseDiagnostic;
use serde::{Deserialize, Serialize};
use std::any::Any;
use symbols::msts::SymbolsMicrosoftTestSuite;
use ts::ts_babel::BabelTypescriptTestSuite;
use ts::ts_microsoft::MicrosoftTypescriptTestSuite;
use util::decode_maybe_utf16_string;
/// Outcome of a single test case as stored in the serialized coverage report.
/// Field names are shortened via serde renames to keep the report compact.
#[derive(Debug, Serialize, Deserialize)]
pub struct TestResult {
    // Serialized as "o"
    #[serde(rename = "o")]
    pub outcome: Outcome,
    // Serialized as "h": identifier (path) of the test case
    #[serde(rename = "h")]
    pub test_case: String,
}
/// How a single test case concluded.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Copy, Clone)]
pub enum Outcome {
    Passed,
    Failed,
    /// The test runner caught a panic while executing the test
    Panicked,
}
/// Full report of a test-suite run: aggregate summary plus per-test details.
#[derive(Debug, Serialize, Deserialize)]
pub struct TestResults {
    // Serialized as "s"
    #[serde(rename = "s")]
    pub summary: Summary,
    // Serialized as "p": one entry per executed test
    #[serde(rename = "p")]
    pub details: Vec<TestResult>,
}
/// Aggregate counters for one test-suite run.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Summary {
    #[serde(rename = "a")]
    pub tests_ran: u32,
    #[serde(rename = "pa")]
    pub passed: u32,
    #[serde(rename = "f")]
    pub failed: u32,
    #[serde(rename = "pc")]
    pub panics: u32,
    // Percentage of passing tests, computed in `TestResults::store_results`
    #[serde(rename = "c")]
    pub coverage: f64,
}
impl Default for TestResults {
    /// An empty report: all counters zeroed and no per-test details.
    fn default() -> Self {
        let summary = Summary {
            tests_ran: 0,
            passed: 0,
            failed: 0,
            panics: 0,
            coverage: 0.0,
        };
        Self {
            summary,
            details: Vec::new(),
        }
    }
}
impl TestResults {
    /// Creates an empty result set with a zeroed summary.
    pub fn new() -> Self {
        Self::default()
    }

    /// Stores the per-test results and recomputes the aggregate summary.
    pub fn store_results(&mut self, results: Vec<TestResult>) {
        self.details = results;
        let tests_ran = self.details.len() as u32;
        let passed = self.passed_tests() as u32;
        // Guard against division by zero: an empty run reports 0% coverage
        // instead of a NaN that would corrupt the serialized report.
        let coverage = if tests_ran == 0 {
            0.0
        } else {
            (passed as f64 / tests_ran as f64) * 100.0
        };
        self.summary = Summary {
            tests_ran,
            passed,
            failed: self.errored_tests() as u32,
            panics: self.panicked_tests() as u32,
            coverage,
        };
    }

    /// Number of tests whose execution panicked.
    pub fn panicked_tests(&self) -> usize {
        self.details
            .iter()
            .filter(|res| matches!(res.outcome, Outcome::Panicked))
            .count()
    }

    /// Number of tests that completed with a failing outcome.
    pub fn errored_tests(&self) -> usize {
        self.details
            .iter()
            .filter(|res| matches!(res.outcome, Outcome::Failed))
            .count()
    }

    /// Number of tests that passed.
    pub fn passed_tests(&self) -> usize {
        self.details
            .iter()
            // `matches!` for consistency with the other counters above
            .filter(|res| matches!(res.outcome, Outcome::Passed))
            .count()
    }
}
/// Runs the selected test suites with the given filter, wiring up the
/// progress/summary reporters, and exits with status 1 when no test ran.
pub fn run(
    suites: Option<&str>,
    filter: Option<&str>,
    json: bool,
    detail_level: SummaryDetailLevel,
) {
    let mut reporters = MulticastTestReporter::new(Box::<DefaultReporter>::default());

    // When JSON output is requested it owns stdout, so the human-readable
    // summary is redirected to stderr.
    let output_target = if json {
        reporters.add(Box::<JsonReporter>::default());
        OutputTarget::stderr()
    } else {
        OutputTarget::stdout()
    };
    reporters.add(Box::new(SummaryReporter::new(detail_level, output_target)));

    // At least two workers: one collector plus one producer (see the runner).
    let worker_count = std::thread::available_parallelism()
        .map_or(2, usize::from)
        .max(2);
    let pool = yastl::Pool::new(worker_count);
    let mut context = TestRunContext {
        filter: filter.map(String::from),
        reporter: &mut reporters,
        pool: &pool,
    };

    let mut ran_any_tests = false;
    for suite in get_test_suites(suites) {
        let result = run_test_suite(suite.as_ref(), &mut context);
        ran_any_tests |= result.summary.tests_ran > 0;
    }
    reporters.run_completed();

    // A run that matched no tests at all is treated as a failure.
    if !ran_any_tests {
        std::process::exit(1);
    }
}
// Suite selector IDs accepted by `get_test_suites`; the aggregate IDs below
// expand into the concrete suites listed there.
const ALL_SUITES: &str = "*";
const ALL_JS_SUITES: &str = "js";
const ALL_TS_SUITES: &str = "ts";
const ALL_JSX_SUITES: &str = "jsx";
const ALL_SYMBOLS_SUITES: &str = "symbols";
fn get_test_suites(suites: Option<&str>) -> Vec<Box<dyn TestSuite>> {
let suites = suites.unwrap_or("*").to_lowercase();
let mut ids: Vec<_> = suites.split(',').collect();
let mut suites: Vec<Box<dyn TestSuite>> = vec![];
while let Some(id) = ids.pop() {
match id {
ALL_JS_SUITES | "javascript" => ids.extend(["js/262"]),
ALL_TS_SUITES | "typescript" => ids.extend(["ts/microsoft", "ts/babel"]),
ALL_JSX_SUITES => ids.extend(["jsx/babel"]),
ALL_SYMBOLS_SUITES => ids.extend(["symbols/microsoft"]),
ALL_SUITES => ids.extend(["js", "ts", "jsx", "symbols"]),
"js/262" => suites.push(Box::new(Test262TestSuite)),
"ts/microsoft" => suites.push(Box::new(MicrosoftTypescriptTestSuite)),
"ts/babel" => suites.push(Box::new(BabelTypescriptTestSuite)),
"jsx/babel" => suites.push(Box::new(BabelJsxTestSuite)),
"symbols/microsoft" => suites.push(Box::new(SymbolsMicrosoftTestSuite)),
_ => {}
}
}
suites
}
/// Reads the file at `path` and returns its contents as a string when it can
/// be decoded (including optional UTF-16 encodings); `None` when decoding fails.
///
/// # Panics
///
/// Panics if the file cannot be read at all; the message now names the file.
fn check_file_encoding(path: &std::path::Path) -> Option<String> {
    let buffer = std::fs::read(path)
        .unwrap_or_else(|err| panic!("failed to read {}: {err}", path.display()));
    decode_maybe_utf16_string(&buffer)
        .ok()
        .map(|decoded| decoded.to_string())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/runner.rs | xtask/coverage/src/runner.rs | use super::*;
use crate::reporters::TestReporter;
use rome_diagnostics::console::fmt::{Formatter, Termcolor};
use rome_diagnostics::console::markup;
use rome_diagnostics::termcolor::Buffer;
use rome_diagnostics::Error;
use rome_diagnostics::PrintDiagnostic;
use rome_js_parser::{parse, JsParserOptions, Parse};
use rome_js_syntax::{AnyJsRoot, JsFileSource, JsSyntaxNode};
use rome_rowan::SyntaxKind;
use std::fmt::Debug;
use std::panic::RefUnwindSafe;
use std::path::Path;
use walkdir::WalkDir;
use yastl::Pool;
/// Outcome of running a single test case, paired with its identifier.
pub(crate) struct TestRunResult {
    pub(crate) outcome: TestRunOutcome,
    // Display name (path) of the test case
    pub(crate) test_case: String,
}
/// Detailed result of executing one test case.
pub(crate) enum TestRunOutcome {
    /// The test behaved as expected
    Passed(TestCaseFiles),
    /// Parsing unexpectedly succeeded
    IncorrectlyPassed(TestCaseFiles),
    /// Parsing produced unexpected diagnostics
    IncorrectlyErrored {
        files: TestCaseFiles,
        errors: Vec<ParseDiagnostic>,
    },
    /// The test panicked; the payload comes from `catch_unwind`
    Panicked(Box<dyn Any + Send + 'static>),
}
impl TestRunOutcome {
    /// Whether this outcome counts as a failure (anything but `Passed`).
    pub fn is_failed(&self) -> bool {
        !matches!(self, TestRunOutcome::Passed(_))
    }

    /// The files of this test case, when the test ran to completion.
    pub fn files(&self) -> Option<&TestCaseFiles> {
        if let TestRunOutcome::Passed(files)
        | TestRunOutcome::IncorrectlyPassed(files)
        | TestRunOutcome::IncorrectlyErrored { files, .. } = self
        {
            Some(files)
        } else {
            None
        }
    }

    /// The panic payload rendered as a string, when the test panicked with a
    /// `String` or `&'static str` message.
    pub fn panic_message(&self) -> Option<&str> {
        if let TestRunOutcome::Panicked(panic) = self {
            panic
                .downcast_ref::<String>()
                .map(String::as_str)
                .or_else(|| panic.downcast_ref::<&'static str>().copied())
        } else {
            None
        }
    }
}
impl From<TestRunOutcome> for Outcome {
fn from(run_outcome: TestRunOutcome) -> Self {
match run_outcome {
TestRunOutcome::Passed(_) => Outcome::Passed,
TestRunOutcome::IncorrectlyPassed(_) | TestRunOutcome::IncorrectlyErrored { .. } => {
Outcome::Failed
}
TestRunOutcome::Panicked(_) => Outcome::Panicked,
}
}
}
/// A test case may parse multiple files. Represents a single file of a test case
#[derive(Debug, Clone)]
pub(crate) struct TestCaseFile {
    /// The (test case relative) name of the file
    name: String,
    /// The code of the file
    code: String,
    /// The source type used to parse the file
    source_type: JsFileSource,
    /// Parser options used when parsing this file
    options: JsParserOptions,
}
impl TestCaseFile {
    /// Parses the stored code with the stored source type and options.
    pub(crate) fn parse(&self) -> Parse<AnyJsRoot> {
        parse(&self.code, self.source_type, self.options.clone())
    }

    /// The (test case relative) name of the file.
    pub(crate) fn name(&self) -> &str {
        &self.name
    }

    /// The source code of the file.
    pub(crate) fn code(&self) -> &str {
        &self.code
    }
}
/// Builds the diagnostic reported when a parse produced no errors but the
/// resulting tree still contains a bogus node.
///
/// # Panics
///
/// Panics if `node` is not a bogus node.
pub(crate) fn create_bogus_node_in_tree_diagnostic(node: JsSyntaxNode) -> ParseDiagnostic {
    assert!(node.kind().is_bogus());
    ParseDiagnostic::new(
        "There are no parse errors but the parsed tree contains bogus nodes.",
        node.text_trimmed_range()
    )
    .hint( "This bogus node is present in the parsed tree but the parser didn't emit a diagnostic for it. Change the parser to either emit a diagnostic, fix the grammar, or the parsing.")
}
/// The collection of files making up a single test case.
#[derive(Clone, Debug)]
pub(crate) struct TestCaseFiles {
    files: Vec<TestCaseFile>,
}
impl TestCaseFiles {
    /// A test case consisting of exactly one file.
    pub(crate) fn single(
        name: String,
        code: String,
        source_type: JsFileSource,
        options: JsParserOptions,
    ) -> Self {
        let mut files = Self::new();
        files.add(name, code, source_type, options);
        files
    }

    /// An empty set of files.
    pub(crate) fn new() -> Self {
        Self { files: Vec::new() }
    }

    /// Appends another file to this test case.
    pub(crate) fn add(
        &mut self,
        name: String,
        code: String,
        source_type: JsFileSource,
        options: JsParserOptions,
    ) {
        self.files.push(TestCaseFile {
            name,
            code,
            source_type,
            options,
        })
    }

    pub(crate) fn is_empty(&self) -> bool {
        self.files.is_empty()
    }

    /// Renders every diagnostic into `buffer`; formatting failures are
    /// reported on stderr instead of aborting.
    pub(crate) fn emit_errors(&self, errors: &[Error], buffer: &mut Buffer) {
        for error in errors {
            let result = Formatter::new(&mut Termcolor(&mut *buffer)).write_markup(markup! {
                {PrintDiagnostic::verbose(error)}
            });
            if let Err(err) = result {
                eprintln!("Failed to print diagnostic: {}", err);
            }
        }
    }
}
/// Iterate over the files of a test case by reference.
impl<'a> IntoIterator for &'a TestCaseFiles {
    type Item = &'a TestCaseFile;
    type IntoIter = std::slice::Iter<'a, TestCaseFile>;
    fn into_iter(self) -> Self::IntoIter {
        self.files.iter()
    }
}
/// A single runnable test. Implementations must be `RefUnwindSafe` because
/// the runner executes them under `std::panic::catch_unwind`.
pub(crate) trait TestCase: RefUnwindSafe + Send + Sync {
    /// Returns the (display) name of the test case. Used to uniquely identify the test case
    fn name(&self) -> &str;
    /// Runs the test case
    fn run(&self) -> TestRunOutcome;
}
/// A collection of test cases discovered by walking a directory tree
/// (see `load_tests`).
pub(crate) trait TestSuite: Send + Sync {
    /// Display name of the suite.
    fn name(&self) -> &str;
    /// Root directory that is walked to discover test files.
    fn base_path(&self) -> &str;
    /// Whether `path` is a test file belonging to this suite.
    fn is_test(&self, path: &Path) -> bool;
    /// Loads the test at `path`; `None` when the file should be skipped.
    fn load_test(&self, path: &Path) -> Option<Box<dyn TestCase>>;
}
/// A test suite's display name together with its loaded test cases.
pub(crate) struct TestSuiteInstance {
    name: String,
    tests: Vec<Box<dyn TestCase>>,
}
impl TestSuiteInstance {
    /// Bundles the loaded tests with the suite's display name.
    pub fn new(suite: &dyn TestSuite, tests: Vec<Box<dyn TestCase>>) -> Self {
        Self {
            tests,
            name: suite.name().to_string(),
        }
    }

    /// Display name of the suite.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Number of loaded tests.
    pub fn len(&self) -> usize {
        self.tests.len()
    }

    /// Whether no tests were loaded (companion to `len`, per
    /// clippy's `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.tests.is_empty()
    }

    /// Iterates over the loaded tests.
    pub fn iter(&self) -> impl Iterator<Item = &Box<dyn TestCase>> {
        self.tests.iter()
    }
}
/// Shared state for a test run: optional path filter, progress reporter and
/// the thread pool used for loading and running tests.
pub(crate) struct TestRunContext<'a> {
    // Substring matched against normalized (forward-slash) test paths
    pub(crate) filter: Option<String>,
    pub(crate) reporter: &'a mut dyn TestReporter,
    pub(crate) pool: &'a Pool,
}
/// Loads and executes every test of `test_suite` on the context's thread
/// pool, reporting progress through the context's reporter, and returns the
/// aggregated results.
pub(crate) fn run_test_suite(
    test_suite: &dyn TestSuite,
    context: &mut TestRunContext,
) -> TestResults {
    context.reporter.test_suite_started(test_suite);
    let instance = load_tests(test_suite, context);
    context.reporter.test_suite_run_started(&instance);
    // Install a panic hook that logs a condensed stack trace through
    // `tracing` instead of writing the default message to stderr.
    std::panic::set_hook(Box::new(|info| {
        use std::io::Write;
        let backtrace = backtrace::Backtrace::default();
        let mut stacktrace = vec![];
        // Skip frames inside the backtrace lib
        for frame in backtrace.frames().iter().skip(6) {
            if let Some(s) = frame.symbols().get(0) {
                if let Some(file) = s.filename() {
                    // We don't care about std or cargo registry libs
                    let file_path = file.as_os_str().to_str().unwrap();
                    if file_path.starts_with("/rustc") || file_path.contains(".cargo") {
                        continue;
                    }
                    let _ = write!(stacktrace, "{}", file.display());
                } else if let Some(name) = s.name().and_then(|x| x.as_str()) {
                    let _ = write!(stacktrace, "{}", name);
                } else {
                    let _ = write!(stacktrace, "<unknown>");
                }
                // Append whatever position information the symbol carries
                match (s.lineno(), s.colno()) {
                    (Some(line), Some(col)) => {
                        let _ = write!(stacktrace, " @ line {} col {}", line, col);
                    }
                    (Some(line), None) => {
                        let _ = write!(stacktrace, " @ line {}", line);
                    }
                    (None, Some(col)) => {
                        let _ = write!(stacktrace, " @ col {}", col);
                    }
                    _ => {}
                }
                let _ = writeln!(stacktrace);
            }
        }
        let stacktrace = String::from_utf8(stacktrace).unwrap();
        let mut msg = vec![];
        let _ = write!(msg, "{}", info);
        let msg = String::from_utf8(msg).unwrap();
        tracing::error!(
            panic = msg.as_str(),
            stacktrace = stacktrace.as_str(),
            "Test panicked"
        );
    }));
    let mut test_results = TestResults::new();
    // One collector job drains the channel while one job per test produces
    // results; all jobs run on the shared scoped pool.
    let (tx, rx) = std::sync::mpsc::channel();
    context.pool.scoped(|scope| {
        scope.execute(|| {
            let mut results: Vec<TestResult> = Vec::with_capacity(instance.len());
            for result in rx {
                context.reporter.test_completed(&result);
                results.push(TestResult {
                    test_case: result.test_case,
                    outcome: result.outcome.into(),
                });
            }
            test_results.store_results(results);
        });
        for test in instance.iter() {
            let tx = tx.clone();
            scope.execute(move || {
                let test_ref = test.as_ref();
                // Panics in a test are caught and converted into an outcome
                let outcome = match std::panic::catch_unwind(|| test_ref.run()) {
                    Ok(result) => result,
                    Err(panic) => {
                        let error = panic
                            .downcast_ref::<String>()
                            .map(|x| x.to_string())
                            .or_else(|| panic.downcast_ref::<&str>().map(|x| x.to_string()))
                            .unwrap_or_default();
                        tracing::error!(
                            panic = error.as_str(),
                            name = test.name(),
                            "Test panicked"
                        );
                        TestRunOutcome::Panicked(panic)
                    }
                };
                tx.send(TestRunResult {
                    test_case: test.name().to_owned(),
                    outcome,
                })
                .unwrap();
            });
        }
        // Drop the original sender so the collector's loop terminates once
        // every producer job has finished
        drop(tx);
    });
    context
        .reporter
        .test_suite_completed(&instance, &test_results);
    // Remove the custom panic hook installed above
    let _ = std::panic::take_hook();
    test_results
}
/// Discovers the suite's test files under its base path (applying the
/// context's path filter) and loads them in parallel on the context's pool.
fn load_tests(suite: &dyn TestSuite, context: &mut TestRunContext) -> TestSuiteInstance {
    let paths = WalkDir::new(suite.base_path())
        .into_iter()
        .filter_map(Result::ok)
        .filter(|file| {
            let path = file.path();
            if !path.is_file() {
                return false;
            }
            if !suite.is_test(path) {
                return false;
            }
            // Normalize separators so the filter behaves the same on Windows
            if let Some(filter) = &context.filter {
                let normalized_path = path.to_string_lossy().replace('\\', "/");
                let normalized_query = filter.replace('\\', "/");
                normalized_path.contains(&normalized_query)
            } else {
                true
            }
        })
        .map(|file| file.path().to_owned())
        .collect::<Vec<_>>();
    context.reporter.tests_discovered(suite, paths.len());
    // One collector job receives loaded tests while one job per path loads
    // it; `load_test` returning `None` means the file is skipped.
    let (tx, rx) = std::sync::mpsc::channel();
    let mut tests: Vec<Box<dyn TestCase>> = Vec::with_capacity(paths.len());
    context.pool.scoped(|scope| {
        scope.execute(|| {
            for test in rx {
                context.reporter.test_loaded();
                if let Some(test) = test {
                    tests.push(test);
                }
            }
        });
        for path in paths {
            let tx = tx.clone();
            scope.execute(move || {
                tx.send(suite.load_test(&path)).unwrap();
            });
        }
        // Close the channel so the collector loop can terminate
        drop(tx);
    });
    TestSuiteInstance::new(suite, tests)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/results.rs | xtask/coverage/src/results.rs | use ascii_table::{Align, AsciiTable};
use std::collections::{HashMap, HashSet};
use crate::{Outcome, TestResult, TestResults};
/// Prints a comparison between the `main`-branch results and the PR results
/// for one test suite, either as GitHub-flavored markdown (`markdown == true`)
/// or as an ASCII table for local terminal use.
pub fn emit_compare(
    base_results: &TestResults,
    new_results: &TestResults,
    test_suite: &str,
    markdown: bool,
) {
    // All diffs are "new - base": positive means the PR increased the counter.
    let base_total = base_results.summary.tests_ran as isize;
    let new_total = new_results.summary.tests_ran as isize;
    let total_diff = new_total - base_total;
    let base_passed = base_results.summary.passed as isize;
    let new_passed = new_results.summary.passed as isize;
    let passed_diff = new_passed - base_passed;
    let base_failed = base_results.summary.failed as isize;
    let new_failed = new_results.summary.failed as isize;
    let failed_diff = new_failed - base_failed;
    let base_panics = base_results.summary.panics as isize;
    let new_panics = new_results.summary.panics as isize;
    let panics_diff = new_panics - base_panics;
    let base_coverage = base_results.summary.coverage;
    let new_coverage = new_results.summary.coverage;
    let coverage_diff = new_coverage - base_coverage;
    let report_diff = compare_diffs(base_results, new_results);
    if markdown {
        /// Generates a proper diff format, with some bold text if things change.
        fn diff_format(diff: isize, i_am_passed_results: bool, show_increase: bool) -> String {
            let good = "✅ ";
            let bad = "❌ ";
            let up = "⏫ ";
            let down = "⏬ ";
            // An increase in passed tests is good; an increase in
            // failures/panics is bad (and vice versa for decreases).
            let emoji = if show_increase {
                match diff.cmp(&0) {
                    std::cmp::Ordering::Less => {
                        if i_am_passed_results {
                            format!("{}{}", bad, down)
                        } else {
                            format!("{}{}", good, down)
                        }
                    }
                    std::cmp::Ordering::Equal => String::new(),
                    std::cmp::Ordering::Greater => {
                        if i_am_passed_results {
                            format!("{}{}", good, up)
                        } else {
                            format!("{}{}", bad, up)
                        }
                    }
                }
            } else {
                String::new()
            };
            format!(
                "{}{}{}{}{}",
                emoji,
                if diff != 0 { "**" } else { "" },
                if diff > 0 { "+" } else { "" },
                diff,
                if diff != 0 { "**" } else { "" }
            )
        }
        println!("\n### {}\n", test_suite);
        println!("| Test result | `main` count | This PR count | Difference |");
        println!("| :---------: | :----------: | :-----------: | :--------: |");
        println!(
            "| Total | {} | {} | {} |",
            base_total,
            new_total,
            diff_format(total_diff, false, false)
        );
        println!(
            "| Passed | {} | {} | {} |",
            base_passed,
            new_passed,
            diff_format(passed_diff, true, true)
        );
        println!(
            "| Failed | {} | {} | {} |",
            base_failed,
            new_failed,
            diff_format(failed_diff, false, true)
        );
        println!(
            "| Panics | {} | {} | {} |",
            base_panics,
            new_panics,
            diff_format(panics_diff, false, true)
        );
        println!(
            "| Coverage | {:.2}% | {:.2}% | {} |",
            base_coverage,
            new_coverage,
            format_args!(
                "{}{}{:.2}%{}",
                if coverage_diff.abs() > f64::EPSILON {
                    "**"
                } else {
                    ""
                },
                if coverage_diff > 0_f64 { "+" } else { "" },
                coverage_diff,
                if coverage_diff.abs() > f64::EPSILON {
                    "**"
                } else {
                    ""
                },
            ),
        );
        /// Renders a collapsible list of test-case names under `title`,
        /// omitted entirely when the list is empty.
        fn summary(title: &str, tests: &[&TestResult]) {
            if !tests.is_empty() {
                println!();
                println!(
                    "<details><summary><b>{} ({}):</b></summary>",
                    title,
                    tests.len()
                );
                println!("\n```");
                let mut test_cases = tests.iter().map(|test| &test.test_case).collect::<Vec<_>>();
                test_cases.sort_unstable();
                for test_case in test_cases {
                    println!("{}", test_case);
                }
                println!("```");
                println!("</details>");
            }
        }
        summary(":fire: Regression", &report_diff.regression);
        summary(":tada: Fixed", &report_diff.fixed);
        summary(":boom: Failed to Panic", &report_diff.failed_to_panic);
        summary(
            ":interrobang: Panic To Failed",
            &report_diff.panic_to_failed,
        );
        summary(":heavy_plus_sign: Added Tests", &report_diff.added_tests);
        summary(
            ":heavy_minus_sign: Removed Tests",
            &report_diff.removed_tests,
        );
    } else {
        let mut table = AsciiTable::default();
        println!("{} conformance changes:", test_suite);
        table
            .column(0)
            .set_header("Tests result")
            .set_align(Align::Left);
        table
            .column(1)
            .set_header("main branch")
            .set_align(Align::Right);
        table.column(2).set_header("PR").set_align(Align::Right);
        table
            .column(3)
            .set_header("Difference")
            .set_align(Align::Right);
        // Reuse the "new - base" diffs computed at the top of the function.
        // Previously this branch re-shadowed them as "base - new", flipping
        // the sign of the Difference column relative to the markdown report.
        let passed_row: Vec<&dyn std::fmt::Display> =
            vec![&"Passed", &base_passed, &new_passed, &passed_diff];
        let failed_row: Vec<&dyn std::fmt::Display> =
            vec![&"Failed", &base_failed, &new_failed, &failed_diff];
        let panics_row: Vec<&dyn std::fmt::Display> =
            vec![&"Panics", &base_panics, &new_panics, &panics_diff];
        table.print(vec![passed_row, failed_row, panics_row]);
    }
}
/// Categorized differences between a base test run and a new test run.
/// Each bucket borrows `TestResult`s from the compared `TestResults`.
struct ReportDiff<'a> {
    /// Passed in the base run, fails or panics in the new run.
    pub regression: Vec<&'a TestResult>,
    /// Failed or panicked in the base run, passes in the new run.
    pub fixed: Vec<&'a TestResult>,
    /// Failed in the base run, panics in the new run.
    pub failed_to_panic: Vec<&'a TestResult>,
    /// Panicked in the base run, fails in the new run.
    pub panic_to_failed: Vec<&'a TestResult>,
    /// Present only in the new run.
    pub added_tests: Vec<&'a TestResult>,
    /// Present only in the base run.
    pub removed_tests: Vec<&'a TestResult>,
}
impl<'a> ReportDiff<'a> {
    /// Creates an empty diff with no recorded changes in any category.
    pub fn new() -> Self {
        Self {
            regression: Vec::new(),
            fixed: Vec::new(),
            failed_to_panic: Vec::new(),
            panic_to_failed: Vec::new(),
            added_tests: Vec::new(),
            removed_tests: Vec::new(),
        }
    }
}
/// Compares two test runs and buckets every test case whose status changed
/// (or that was added/removed) into a [`ReportDiff`].
fn compare_diffs<'a>(
    base_results: &'a TestResults,
    new_results: &'a TestResults,
) -> ReportDiff<'a> {
    let mut diff = ReportDiff::new();

    // Index both runs by test case name and collect the union of all names.
    let mut union: HashSet<&str> = HashSet::new();
    let mut base_index: HashMap<&str, &TestResult> = HashMap::new();
    for result in base_results.details.iter() {
        union.insert(&result.test_case);
        base_index.insert(&result.test_case, result);
    }
    let mut new_index: HashMap<&str, &TestResult> = HashMap::new();
    for result in new_results.details.iter() {
        union.insert(&result.test_case);
        new_index.insert(&result.test_case, result);
    }

    for test_case in union {
        match (base_index.get(test_case), new_index.get(test_case)) {
            (None, Some(new)) => diff.added_tests.push(new),
            (Some(base), None) => diff.removed_tests.push(base),
            (Some(base), Some(new)) => match (&base.outcome, &new.outcome) {
                (Outcome::Passed, Outcome::Failed | Outcome::Panicked) => {
                    diff.regression.push(new)
                }
                (Outcome::Failed | Outcome::Panicked, Outcome::Passed) => diff.fixed.push(new),
                (Outcome::Failed, Outcome::Panicked) => diff.failed_to_panic.push(new),
                (Outcome::Panicked, Outcome::Failed) => diff.panic_to_failed.push(new),
                // Same outcome on both sides: the status hasn't changed and is
                // not worth tracking.
                _ => {}
            },
            // Every name in the union came from at least one of the two maps.
            (None, None) => unreachable!(),
        }
    }
    diff
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/util.rs | xtask/coverage/src/util.rs | use std::borrow::Cow;
use std::char::{decode_utf16, DecodeUtf16Error};
/// Decodes a text buffer that may be encoded as UTF-8 or UTF-16 (either
/// endianness), using the BOM, when present, to pick the encoding.
///
/// Without a BOM the content is first tried as UTF-8 and, failing that,
/// decoded as native-endian UTF-16. A truncated trailing UTF-16 code unit is
/// padded with a zero byte instead of panicking.
///
/// # Errors
/// Returns a [`DecodeUtf16Error`] when the UTF-16 stream contains an unpaired
/// surrogate.
pub(crate) fn decode_maybe_utf16_string(mut content: &[u8]) -> Result<Cow<str>, DecodeUtf16Error> {
    enum FileEncoding {
        Unknown,
        Utf8,
        Utf16Le,
        Utf16Be,
    }

    let mut encoding = FileEncoding::Unknown;

    // Read the BOM if present and skip it. `starts_with` also detects a
    // UTF-16 BOM in a 2-byte (empty) file, which the previous fixed-width
    // `get(0..3)` slice pattern missed.
    if content.starts_with(&[0xef, 0xbb, 0xbf]) {
        content = &content[3..];
        encoding = FileEncoding::Utf8;
    } else if content.starts_with(&[0xfe, 0xff]) {
        content = &content[2..];
        encoding = FileEncoding::Utf16Be;
    } else if content.starts_with(&[0xff, 0xfe]) {
        content = &content[2..];
        encoding = FileEncoding::Utf16Le;
    }

    if matches!(encoding, FileEncoding::Unknown | FileEncoding::Utf8) {
        // Attempt to parse as UTF-8
        let result = std::str::from_utf8(content);
        if let FileEncoding::Utf8 = encoding {
            // If the file is known to be UTF-8 unwrap the result. A file with
            // a UTF-8 BOM but invalid UTF-8 content is a broken fixture and
            // still panics here, as before.
            return Ok(Cow::Borrowed(result.unwrap()));
        } else if let Ok(result) = result {
            // Otherwise, only return if the parsing was successful
            return Ok(Cow::Borrowed(result));
        }
    }

    // If a UTF-16 BOM was found or an error was encountered, attempt to parse
    // as UTF-16. `bytes.get(1)` pads an odd trailing byte with zero; the
    // previous `bytes[1]` indexing panicked on buffers of odd length.
    let content_str = decode_utf16(content.chunks(2).map(|bytes| {
        let pair = [bytes[0], bytes.get(1).copied().unwrap_or(0)];
        match encoding {
            FileEncoding::Utf16Be => u16::from_be_bytes(pair),
            FileEncoding::Utf16Le => u16::from_le_bytes(pair),
            // If the encoding is unknown attempt to decode in native endianness
            FileEncoding::Unknown => u16::from_ne_bytes(pair),
            FileEncoding::Utf8 => unreachable!(),
        }
    }))
    .collect::<Result<String, _>>()?;

    Ok(Cow::Owned(content_str))
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/main.rs | xtask/coverage/src/main.rs | use pico_args::Arguments;
use xtask::{project_root, pushd, Result};
use xtask_coverage::{compare::coverage_compare, run, SummaryDetailLevel};
/// Entry point of `cargo coverage`: parses CLI arguments, handles the
/// `compare` subcommand and `--help`, then runs the selected test suites.
fn main() -> Result<()> {
    // Route tracing output to stderr so it never pollutes `--json` output on stdout.
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .with_writer(std::io::stderr)
        .init();
    // Run from the repository root; the guard restores the directory on drop.
    let _d = pushd(project_root());
    let mut args = Arguments::from_env();
    let sub_command = args.subcommand()?;
    if sub_command.as_deref() == Some("compare") {
        // on pr branch, run
        // git checkout main
        // cargo coverage js --json > base_results.json
        // git checkout -
        // cargo coverage js --json > new_results.json
        // cargo coverage compare ./base_results.json ./new_results.json --markdown
        let markdown = args.contains("--markdown");
        // The two optional positional arguments are the base and new result
        // files; defaults are resolved inside `coverage_compare`.
        let free = args.finish();
        let base_result_path = free.get(0).and_then(|arg| arg.to_str());
        let new_result_path = free.get(1).and_then(|arg| arg.to_str());
        coverage_compare(base_result_path, new_result_path, markdown);
        return Ok(());
    }
    if args.contains("--help") {
        eprintln!(
            "\
cargo coverage
Run coverage command.
USAGE:
    cargo coverage <SUBCOMMAND> [option]
SUBCOMMANDS:
    compare       Compares output between two --json outputs
OPTIONS
    --markdown          Emits supported output into markdown format. Supported by `compare` subcommand.
    --json              Prints the test results in JSON. This mode will send all other test output and user messages to stderr.
    --detailed=[debug]  Prints a detailed summary at the end for all failing tests. Includes in depth details if set to `debug`.
    --suites=<IDS>      Runs the specified tests suites. Use comma as separator.
    Valid values are:
    *: will run all suites
    js: will run all javascript suites; Same as \"js/262\";
    ts: will run all typescript suites; Same as \"ts/microsoft,ts/babel\";
    jsx: will run all jsx suites; Same as \"jsx/babel\";
    js/262: will run https://github.com/tc39/test262/tree/main/test;
    ts/microsoft: will run https://github.com/microsoft/Typescript/tree/main/tests/cases
    ts/babel: will run https://github.com/babel/babel/tree/main/packages/babel-parser/test/fixtures/typescript
    jsx/babel: will run https://github.com/babel/babel/tree/main/packages/babel-parser/test/fixtures/jsx/basic
    Default is \"*\".
    --filter=<file>     Filters out tests that don't match the query.
    --help              Prints this help.
	"
        );
        return Ok(());
    }
    // NOTE(review): pico_args' `contains`/`opt_value_from_str` consume the
    // matched arguments, so the order of these checks matters — confirm when
    // adding new flags.
    let json = args.contains("--json");
    let suites: Option<String> = args.opt_value_from_str("--suites").unwrap();
    let filter: Option<String> = args.opt_value_from_str("--filter").unwrap();
    // `--detailed` accepts an optional value (`--detailed=debug`); a bare
    // `--detailed` flag falls back to the `Failing` level, absence of the
    // flag to `Coverage`.
    let detail_level: Option<SummaryDetailLevel> =
        args.opt_value_from_str("--detailed").unwrap_or_else(|_| {
            if args.contains("--detailed") {
                Some(SummaryDetailLevel::Failing)
            } else {
                Some(SummaryDetailLevel::Coverage)
            }
        });
    args.finish();
    run(
        suites.as_deref(),
        filter.as_deref(),
        json,
        detail_level.unwrap_or(SummaryDetailLevel::Coverage),
    );
    Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/reporters.rs | xtask/coverage/src/reporters.rs | use crate::runner::{TestCaseFiles, TestRunOutcome, TestRunResult, TestSuite, TestSuiteInstance};
use crate::{Summary, TestResults};
use ascii_table::{Align, AsciiTable};
use atty::Stream;
use colored::Colorize;
use indicatif::ProgressBar;
use rome_diagnostics::termcolor::Buffer;
use rome_diagnostics::{DiagnosticExt, Error};
use serde_json::Value;
use std::collections::HashMap;
use std::io::Write;
use std::str::FromStr;
use std::time::Instant;
/// Receives lifecycle events while test suites are discovered, loaded and run.
/// All methods have empty default implementations so implementors only
/// override the events they care about.
pub(crate) trait TestReporter: Send + Sync {
    /// Called at the beginning of processing a test suite
    fn test_suite_started(&mut self, _suite: &dyn TestSuite) {}
    /// Called after all potential tests have been discovered for a test suite
    fn tests_discovered(&mut self, _suite: &dyn TestSuite, _count: usize) {}
    /// Called after loading a single test of a test suite
    fn test_loaded(&mut self) {}
    /// Called after all tests for a test suite have been loaded, right before the runner executes the tests
    fn test_suite_run_started(&mut self, _suite: &TestSuiteInstance) {}
    /// A test run completed
    fn test_completed(&mut self, _result: &TestRunResult) {}
    /// A test suite completed
    fn test_suite_completed(&mut self, _suite: &TestSuiteInstance, _result: &TestResults) {}
    /// Called when all test suites have completed
    fn run_completed(&mut self) {}
}
/// Prints a progress bar showing which tests are executed
pub(crate) struct DefaultReporter {
    // Progress bar for the current phase (loading or running tests).
    pb: ProgressBar,
    // Start time of the current phase, used to report elapsed seconds.
    start: Instant,
}
impl Default for DefaultReporter {
    fn default() -> Self {
        // Start with a hidden bar; it is replaced by a visible one as soon as
        // the number of discovered test files is known.
        Self {
            start: Instant::now(),
            pb: ProgressBar::hidden(),
        }
    }
}
impl TestReporter for DefaultReporter {
    fn test_suite_started(&mut self, _suite: &dyn TestSuite) {
        // Reset the clock so "Loaded ... in Ns" measures this suite only.
        self.start = Instant::now();
    }
    fn tests_discovered(&mut self, _suite: &dyn TestSuite, count: usize) {
        // Replace any previous bar with one sized to the discovered files.
        self.pb.finish_and_clear();
        let pb = ProgressBar::new(count as u64);
        pb.set_message(format!("{} test files", "Loading".bold().cyan()));
        pb.set_style(
            indicatif::ProgressStyle::with_template("{msg} [{bar:40}]")
                .unwrap()
                .progress_chars("=> "),
        );
        self.pb = pb;
    }
    fn test_loaded(&mut self) {
        self.pb.inc(1);
    }
    fn test_suite_run_started(&mut self, suite: &TestSuiteInstance) {
        // Swap the loading bar for a run bar and restart the phase timer.
        self.pb.finish_and_clear();
        self.pb.println(format!(
            "{} {} test files in {:.2}s",
            "Loaded".bold().bright_green(),
            suite.len(),
            self.start.elapsed().as_secs_f32()
        ));
        self.pb = ProgressBar::new(suite.len() as u64)
            .with_message(format!("{} tests", "Running".bold().cyan()));
        self.start = Instant::now();
    }
    fn test_completed(&mut self, result: &TestRunResult) {
        self.pb.inc(1);
        // Anything other than a clean pass is labeled FAIL.
        let label = match &result.outcome {
            TestRunOutcome::Passed(_) => "PASS".bold().green(),
            TestRunOutcome::IncorrectlyPassed(_)
            | TestRunOutcome::IncorrectlyErrored { .. }
            | TestRunOutcome::Panicked(_) => "FAIL".bold().red(),
        };
        self.pb
            .println(format!("{} {}", label, result.test_case.blue()));
    }
    fn test_suite_completed(&mut self, suite: &TestSuiteInstance, _results: &TestResults) {
        self.pb.finish_and_clear();
        self.pb.println(format!(
            "{}: {} {} tests in {:.2}s",
            suite.name(),
            "Ran".bold().bright_green(),
            suite.len(),
            self.start.elapsed().as_secs_f32()
        ));
    }
}
pub enum SummaryDetailLevel {
/// Only prints the coverage table
Coverage,
/// Prints the coverage table as well as all failing tests with their diagnostics
Failing,
/// Prints the RAST of the parsed syntax and the diagnostics for all tests (including tests that pass with expected diagnostics).
Debug,
}
impl FromStr for SummaryDetailLevel {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(match s {
"coverage" => SummaryDetailLevel::Coverage,
"failing" => SummaryDetailLevel::Failing,
"debug" => SummaryDetailLevel::Debug,
_ => return Err(String::from(
"Unknown summary detail level. Valid values are: 'coverage', 'failing, and 'rast'.",
)),
})
}
}
impl SummaryDetailLevel {
fn is_coverage_only(&self) -> bool {
matches!(self, SummaryDetailLevel::Coverage)
}
fn is_debug(&self) -> bool {
matches!(self, SummaryDetailLevel::Debug)
}
}
/// Reporter that prints a summary for each phase (tests loaded, test suite completed) to the console output
pub(crate) struct SummaryReporter {
    /// Buffer to store the detailed output of tests
    buffer: Buffer,
    /// The results of the ran test suites
    results: HashMap<String, Summary>,
    /// Where the buffered details and the final table are written on completion.
    output_target: OutputTarget,
    /// How much per-test detail is written into the buffer.
    detail_level: SummaryDetailLevel,
}
/// The stream the summary reporter writes its final output to.
pub(crate) enum OutputTarget {
    Stdout(std::io::Stdout),
    Stderr(std::io::Stderr),
}

impl OutputTarget {
    /// Creates a target writing to standard output.
    pub fn stdout() -> Self {
        Self::Stdout(std::io::stdout())
    }

    /// Creates a target writing to standard error.
    pub fn stderr() -> Self {
        Self::Stderr(std::io::stderr())
    }
}

impl Write for OutputTarget {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        match self {
            Self::Stdout(stdout) => stdout.write(buf),
            Self::Stderr(stderr) => stderr.write(buf),
        }
    }

    fn flush(&mut self) -> std::io::Result<()> {
        match self {
            Self::Stdout(stdout) => stdout.flush(),
            Self::Stderr(stderr) => stderr.flush(),
        }
    }
}
impl SummaryReporter {
    /// Creates a reporter that buffers per-test details and writes everything
    /// to `output_target` when the run completes.
    pub fn new(detail_level: SummaryDetailLevel, output_target: OutputTarget) -> Self {
        // Use ANSI colors only when stdout is a terminal.
        let buffer = if atty::is(Stream::Stdout) {
            Buffer::ansi()
        } else {
            // piping to a file
            Buffer::no_color()
        };
        Self {
            results: HashMap::default(),
            buffer,
            output_target,
            detail_level,
        }
    }
    /// Appends a line to the internal buffer; the buffer is flushed to the
    /// output target on run completion.
    fn writeln(&mut self, msg: &str) {
        writeln!(self.buffer, "{}", msg).unwrap();
    }
    /// Renders one row per suite into an ASCII table. The leading "Test suite"
    /// column is only added when more than one suite was run.
    fn summary_table(results: HashMap<String, Summary>) -> String {
        let mut table = AsciiTable::default();
        let has_multiple_test_suites = results.len() > 1;
        // Offset of the first data column: 1 if the suite-name column exists.
        let offset = if has_multiple_test_suites {
            table
                .column(0)
                .set_header("Test suite")
                .set_align(Align::Left);
            1
        } else {
            0
        };
        table
            .column(offset)
            .set_header("Tests ran".to_string())
            .set_align(Align::Right);
        table
            .column(offset + 1)
            .set_header("Passed")
            .set_align(Align::Right);
        table
            .column(offset + 2)
            .set_header("Failed")
            .set_align(Align::Right);
        table
            .column(offset + 3)
            .set_header("Panics")
            .set_align(Align::Right);
        table
            .column(offset + 4)
            .set_header("Coverage")
            .set_align(Align::Right);
        // Sort by suite name so the table order is deterministic.
        let mut results: Vec<_> = results.into_iter().collect();
        results.sort_by(|(l, _), (r, _)| l.cmp(r));
        let rows = results.into_iter().map(|(suite, summary)| {
            let panicked = summary.panics;
            let errored = summary.failed;
            let passed = summary.passed;
            // NaN coverage (presumably when no tests ran — confirm where
            // `coverage` is computed) is rendered as the infinity symbol.
            let coverage = if summary.coverage.is_nan() {
                "\u{221E}".to_string()
            } else {
                format!("{:.2}", summary.coverage)
            };
            let total = panicked + errored + passed;
            let mut values = if has_multiple_test_suites {
                vec![suite]
            } else {
                Vec::default()
            };
            values.extend([
                total.to_string(),
                passed.to_string(),
                errored.to_string(),
                panicked.to_string(),
                coverage,
            ]);
            values
        });
        table.format(rows)
    }
    /// Renders `errors` against the test's files into the buffer, followed by
    /// a blank separator line.
    fn write_errors(&mut self, errors: &[Error], files: &TestCaseFiles) {
        files.emit_errors(errors, &mut self.buffer);
        self.writeln("");
    }
}
impl TestReporter for SummaryReporter {
    fn test_completed(&mut self, result: &TestRunResult) {
        // In coverage-only mode no per-test details are buffered.
        if self.detail_level.is_coverage_only() {
            return;
        }
        match &result.outcome {
            TestRunOutcome::Passed(files) => {
                // Passing tests are reported only in debug mode, together
                // with any diagnostics their files still produced.
                if self.detail_level.is_debug() {
                    self.writeln(&format!("{} {}", "[PASS]".bold().green(), result.test_case));
                    let mut all_errors = Vec::new();
                    for file in files {
                        if let Some(errors) = file.parse().ok().err() {
                            all_errors.extend(errors.into_iter().map(|error| {
                                error
                                    .with_file_path(file.name())
                                    .with_file_source_code(file.code())
                            }));
                        }
                    }
                    if !all_errors.is_empty() {
                        self.write_errors(&all_errors, files);
                    }
                }
            }
            TestRunOutcome::Panicked(_) => {
                let panic = result.outcome.panic_message();
                self.writeln(&format!(
                    "{} {}: {}",
                    "[PANIC]".bold().red(),
                    result.test_case,
                    panic.unwrap_or("Unknown panic reason")
                ));
            }
            TestRunOutcome::IncorrectlyPassed(_) => {
                self.writeln(&format!(
                    "{} {}: Incorrectly passed",
                    "[FAIL]".bold().red(),
                    result.test_case
                ));
            }
            TestRunOutcome::IncorrectlyErrored { errors, files } => {
                self.writeln(&format!(
                    "{} {}: Incorrectly errored:",
                    "[FAIL]".bold().red(),
                    result.test_case
                ));
                let errors: Vec<_> = errors
                    .iter()
                    .map(|diagnostic| Error::from(diagnostic.clone()))
                    .collect();
                self.write_errors(&errors, files);
            }
        }
        // Debug mode additionally dumps the parsed syntax tree of every file.
        if self.detail_level.is_debug() {
            if let Some(files) = result.outcome.files() {
                for file in files {
                    let program = file.parse();
                    self.writeln(&format!(
                        "RAST Output for {}:\n{:#?}\n",
                        &file.name().bold(),
                        program.syntax()
                    ));
                }
            }
        }
    }
    fn test_suite_completed(&mut self, suite: &TestSuiteInstance, results: &TestResults) {
        // Only the summary is kept; per-test details were buffered above.
        self.results
            .insert(suite.name().to_string(), results.summary.clone());
    }
    fn run_completed(&mut self) {
        // Flush the buffered per-test details first, then the summary table.
        let results = std::mem::take(&mut self.results);
        let table = Self::summary_table(results);
        self.output_target
            .write_all(self.buffer.as_slice())
            .unwrap();
        writeln!(self.output_target, "{}", table).unwrap();
    }
}
/// Reporter that collects the serialized results of every suite and prints
/// them as a single JSON object (suite name -> results) on stdout at the end
/// of the run.
#[derive(Default)]
pub(crate) struct JsonReporter {
    // Suite name -> JSON value of that suite's `TestResults`.
    results: HashMap<String, Value>,
}
impl TestReporter for JsonReporter {
    fn test_suite_completed(&mut self, suite: &TestSuiteInstance, result: &TestResults) {
        // Serialize the suite's results now; everything is printed at once
        // when the run completes.
        let value = serde_json::to_value(result).unwrap();
        self.results.insert(suite.name().to_string(), value);
    }

    fn run_completed(&mut self) {
        let all_results = std::mem::take(&mut self.results);
        println!("{}", serde_json::to_string(&all_results).unwrap());
    }
}
/// Test reporter that forwards the event to multiple reporters.
/// Allows composing different reporters for a single test run
/// (e.g. a progress bar plus a JSON dump).
pub(crate) struct MulticastTestReporter(Vec<Box<dyn TestReporter>>);
impl MulticastTestReporter {
    /// Creates a multicaster with a single initial reporter.
    pub fn new(reporter: Box<dyn TestReporter>) -> Self {
        Self(vec![reporter])
    }
    /// Registers an additional reporter that receives all future events.
    pub fn add(&mut self, reporter: Box<dyn TestReporter>) {
        self.0.push(reporter);
    }
}
/// Forwards every event to each registered reporter, in registration order.
impl TestReporter for MulticastTestReporter {
    fn test_suite_started(&mut self, test_suite: &dyn TestSuite) {
        for listener in self.0.iter_mut() {
            listener.test_suite_started(test_suite);
        }
    }

    fn tests_discovered(&mut self, test_suite: &dyn TestSuite, count: usize) {
        for listener in self.0.iter_mut() {
            listener.tests_discovered(test_suite, count);
        }
    }

    fn test_loaded(&mut self) {
        for listener in self.0.iter_mut() {
            listener.test_loaded();
        }
    }

    fn test_suite_run_started(&mut self, suite: &TestSuiteInstance) {
        for listener in self.0.iter_mut() {
            listener.test_suite_run_started(suite);
        }
    }

    fn test_completed(&mut self, result: &TestRunResult) {
        for listener in self.0.iter_mut() {
            listener.test_completed(result);
        }
    }

    fn test_suite_completed(&mut self, suite: &TestSuiteInstance, result: &TestResults) {
        for listener in self.0.iter_mut() {
            listener.test_suite_completed(suite, result);
        }
    }

    fn run_completed(&mut self) {
        for listener in self.0.iter_mut() {
            listener.run_completed();
        }
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/compare.rs | xtask/coverage/src/compare.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use xtask::project_root;
use crate::results::emit_compare;
use crate::util::decode_maybe_utf16_string;
use crate::TestResults;
// This is the filename of the results coming from the `main` branch; it is
// resolved relative to the project root when no explicit path is given.
const BASE_RESULT_FILE: &str = "base_results.json";
// This is the filename of the results coming from the current PR; it is
// resolved relative to the project root when no explicit path is given.
const NEW_RESULT_FILE: &str = "new_results.json";
/// Compares two JSON result files (base vs. new) and emits a per-suite
/// comparison, either as markdown or as an ASCII table.
///
/// Panics when either file does not exist.
pub fn coverage_compare(
    base_result_path: Option<&str>,
    new_result_path: Option<&str>,
    markdown: bool,
) {
    // Resolve explicit paths, or fall back to the default files in the
    // project root.
    let base_path = base_result_path
        .map(PathBuf::from)
        .unwrap_or_else(|| project_root().join(BASE_RESULT_FILE));
    let new_path = new_result_path
        .map(PathBuf::from)
        .unwrap_or_else(|| project_root().join(NEW_RESULT_FILE));

    if !base_path.exists() {
        panic!(
            "The path to the base results doesn't exist: {:?}",
            base_path
        );
    }
    if !new_path.exists() {
        panic!(
            "The path to the new results doesn't exist: {:?}",
            new_path
        );
    }

    let mut base_results: Vec<_> = read_test_results(base_path.as_path(), "base")
        .into_iter()
        .collect();
    // Sort suite names to get a stable result in CI comments
    base_results.sort_unstable_by(|(suite_a, _), (suite_b, _)| suite_a.cmp(suite_b));

    let mut new_results = read_test_results(new_path.as_path(), "new");
    for (suite, base) in base_results {
        // A suite missing from the new results is compared against empty results.
        let new = new_results.remove(&suite).unwrap_or_else(TestResults::new);
        emit_compare(&base, &new, suite.as_str(), markdown);
    }
    // Suites that only exist in the new results are compared against an empty base.
    for (suite, new) in new_results.drain() {
        emit_compare(&TestResults::new(), &new, suite.as_str(), markdown);
    }
}
/// Reads and deserializes a JSON results file, keyed by suite name.
/// `name` ("base" or "new") is only used in panic messages.
///
/// Panics when the file cannot be read, decoded or parsed.
fn read_test_results(path: &Path, name: &'static str) -> HashMap<String, TestResults> {
    let mut file = File::open(path)
        .unwrap_or_else(|err| panic!("Can't read the file of the {} results: {:?}", name, err));
    let mut bytes = Vec::new();
    file.read_to_end(&mut bytes)
        .unwrap_or_else(|err| panic!("Can't read the file of the {} results: {:?}", name, err));
    // The file may be UTF-8 or UTF-16 encoded.
    let text = decode_maybe_utf16_string(&bytes)
        .unwrap_or_else(|err| panic!("Can't read the file of the {} results: {:?}", name, err));
    serde_json::from_str(&text).unwrap_or_else(|err| {
        panic!(
            "Can't parse the JSON file of the {} results: {:?}",
            name, err
        )
    })
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/jsx/jsx_babel.rs | xtask/coverage/src/jsx/jsx_babel.rs | use crate::runner::create_bogus_node_in_tree_diagnostic;
use crate::{
check_file_encoding,
runner::{TestCase, TestCaseFiles, TestRunOutcome, TestSuite},
};
use rome_js_parser::{parse, JsParserOptions};
use rome_js_syntax::{JsFileSource, ModuleKind};
use rome_rowan::SyntaxKind;
use std::path::Path;
const OK_PATH: &str = "xtask/coverage/babel/packages/babel-parser/test/fixtures/jsx/basic";
/// A single Babel JSX fixture: the test name (the directory containing the
/// input file) and its source code.
struct BabelJsxTestCase {
    name: String,
    code: String,
}

impl BabelJsxTestCase {
    /// Builds a test case from a fixture path. The name is the second-to-last
    /// path component (the fixture directory), or empty when unavailable.
    fn new(path: &Path, code: String) -> Self {
        let name = path
            .components()
            .rev()
            .nth(1)
            .and_then(|component| component.as_os_str().to_str())
            .map(str::to_string)
            .unwrap_or_default();
        Self { name, code }
    }
}
impl TestCase for BabelJsxTestCase {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    /// Every basic Babel JSX fixture must parse successfully: any diagnostic
    /// or bogus node in the resulting tree counts as a failure.
    fn run(&self) -> TestRunOutcome {
        let source_type = JsFileSource::jsx().with_module_kind(ModuleKind::Script);
        let options = JsParserOptions::default().with_parse_class_parameter_decorators();
        let files = TestCaseFiles::single(
            self.name().to_string(),
            self.code.clone(),
            source_type,
            options.clone(),
        );
        let result = parse(&self.code, source_type, options);
        if !result.diagnostics().is_empty() {
            return TestRunOutcome::IncorrectlyErrored {
                files,
                errors: result.diagnostics().to_vec(),
            };
        }
        // No diagnostics: still fail if the parser recovered with bogus nodes.
        let bogus = result
            .syntax()
            .descendants()
            .find(|node| node.kind().is_bogus());
        match bogus {
            Some(node) => TestRunOutcome::IncorrectlyErrored {
                files,
                errors: vec![create_bogus_node_in_tree_diagnostic(node)],
            },
            None => TestRunOutcome::Passed(files),
        }
    }
}
/// Test suite that parses Babel's basic JSX fixtures.
#[derive(Default)]
pub(crate) struct BabelJsxTestSuite;
impl TestSuite for BabelJsxTestSuite {
    fn name(&self) -> &str {
        "jsx/babel"
    }

    fn base_path(&self) -> &str {
        OK_PATH
    }

    /// Only `.js` files are considered test fixtures.
    fn is_test(&self, path: &std::path::Path) -> bool {
        matches!(path.extension(), Some(ext) if ext == "js")
    }

    /// Skips files whose encoding cannot be handled.
    fn load_test(&self, path: &std::path::Path) -> Option<Box<dyn crate::runner::TestCase>> {
        let code = check_file_encoding(path)?;
        Some(Box::new(BabelJsxTestCase::new(path, code)))
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/jsx/mod.rs | xtask/coverage/src/jsx/mod.rs | pub mod jsx_babel;
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/js/mod.rs | xtask/coverage/src/js/mod.rs | pub mod test262;
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/js/test262.rs | xtask/coverage/src/js/test262.rs | use crate::runner::{
create_bogus_node_in_tree_diagnostic, TestCase, TestCaseFiles, TestRunOutcome, TestSuite,
};
use regex::Regex;
use rome_js_parser::{parse, JsParserOptions};
use rome_js_syntax::JsFileSource;
use rome_rowan::syntax::SyntaxKind;
use rome_rowan::AstNode;
use serde::Deserialize;
use std::io;
use std::path::Path;
const BASE_PATH: &str = "xtask/coverage/test262/test";
/// Representation of the YAML metadata in Test262 tests.
// taken from the boa project
#[derive(Debug, Clone, Deserialize)]
pub struct MetaData {
    pub description: Box<str>,
    pub esid: Option<Box<str>>,
    pub es5id: Option<Box<str>>,
    pub es6id: Option<Box<str>>,
    #[serde(default)]
    pub info: Box<str>,
    #[serde(default)]
    pub features: Box<[Box<str>]>,
    #[serde(default)]
    pub includes: Box<[Box<str>]>,
    #[serde(default)]
    pub flags: Box<[TestFlag]>,
    /// When present, the test is expected to fail in the given phase.
    #[serde(default)]
    pub negative: Option<Negative>,
    #[serde(default)]
    pub locale: Box<[Box<str>]>,
}
/// Negative test information structure.
#[derive(Debug, Clone, Deserialize)]
pub struct Negative {
    /// The phase in which the failure is expected to occur.
    pub phase: Phase,
    /// The name of the expected error type.
    #[serde(rename = "type")]
    pub error_type: Box<str>,
}
/// Individual test flag.
/// Deserialized from the `flags` list of a test's YAML metadata; the flags
/// control in which mode(s) the test is executed (see `Test262TestCase::run`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TestFlag {
    OnlyStrict,
    NoStrict,
    Module,
    Raw,
    Async,
    Generated,
    #[serde(rename = "CanBlockIsFalse")]
    CanBlockIsFalse,
    #[serde(rename = "CanBlockIsTrue")]
    CanBlockIsTrue,
    #[serde(rename = "non-deterministic")]
    NonDeterministic,
}
/// The phase in which a negative test expects its failure; only `parse`
/// failures can be detected by this (parser-only) test runner.
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum Phase {
    Parse,
    Early,
    Resolution,
    Runtime,
}
/// A single test262 test: its name (path relative to the suite root), its
/// source code, and the parsed YAML metadata.
#[derive(Debug)]
struct Test262TestCase {
    name: String,
    code: String,
    meta: MetaData,
}
impl Test262TestCase {
    /// Creates a test case; the name is the path relative to [`BASE_PATH`].
    fn new(path: &Path, code: String, meta: MetaData) -> Self {
        let name = path.strip_prefix(BASE_PATH).unwrap().display().to_string();
        Self { name, code, meta }
    }

    /// Parses the test source and compares the result with the expectation:
    /// tests with a `negative` clause in the `parse` phase must fail to
    /// parse; all others must parse without diagnostics or bogus nodes.
    fn execute_test(&self, append_use_strict: bool, source_type: JsFileSource) -> TestRunOutcome {
        let code = if append_use_strict {
            format!("\"use strict\";\n{}", self.code)
        } else {
            self.code.clone()
        };

        let expect_parse_error = matches!(
            &self.meta.negative,
            Some(negative) if negative.phase == Phase::Parse
        );

        let options = JsParserOptions::default().with_parse_class_parameter_decorators();
        let files = TestCaseFiles::single(
            self.name.clone(),
            self.code.clone(),
            source_type,
            options.clone(),
        );

        match (parse(&code, source_type, options).ok(), expect_parse_error) {
            (Ok(root), false) => {
                let bogus = root
                    .syntax()
                    .descendants()
                    .find(|descendant| descendant.kind().is_bogus());
                match bogus {
                    Some(node) => TestRunOutcome::IncorrectlyErrored {
                        errors: vec![create_bogus_node_in_tree_diagnostic(node)],
                        files,
                    },
                    None => TestRunOutcome::Passed(files),
                }
            }
            (Err(_), true) => TestRunOutcome::Passed(files),
            (Ok(_), true) => TestRunOutcome::IncorrectlyPassed(files),
            (Err(errors), false) => TestRunOutcome::IncorrectlyErrored { errors, files },
        }
    }
}
impl TestCase for Test262TestCase {
    fn name(&self) -> &str {
        self.name.as_str()
    }

    /// Picks the execution mode(s) from the test's flags: `onlyStrict` runs
    /// once in strict mode, `module` as a module, `noStrict`/`raw` once in
    /// sloppy mode; otherwise both the sloppy and the strict run must succeed.
    fn run(&self) -> TestRunOutcome {
        let flags = &self.meta.flags;
        if flags.contains(&TestFlag::OnlyStrict) {
            self.execute_test(true, JsFileSource::js_script())
        } else if flags.contains(&TestFlag::Module) {
            self.execute_test(false, JsFileSource::js_module())
        } else if flags.contains(&TestFlag::NoStrict) || flags.contains(&TestFlag::Raw) {
            self.execute_test(false, JsFileSource::js_script())
        } else {
            let sloppy = self.execute_test(false, JsFileSource::js_script());
            let strict = self.execute_test(true, JsFileSource::js_script());
            merge_outcomes(sloppy, strict)
        }
    }
}
pub(crate) struct Test262TestSuite;
impl TestSuite for Test262TestSuite {
    fn name(&self) -> &str {
        "js/262"
    }

    fn base_path(&self) -> &str {
        BASE_PATH
    }

    fn is_test(&self, path: &Path) -> bool {
        path.extension().map_or(false, |ext| ext == "js")
    }

    /// Loads a test unless its metadata marks it as negative for a phase
    /// other than `parse` (such failures cannot be detected by a parser).
    fn load_test(&self, path: &Path) -> Option<Box<dyn TestCase>> {
        let code = std::fs::read_to_string(path).ok()?;
        let meta = read_metadata(&code).ok()?;
        match &meta.negative {
            Some(negative) if negative.phase != Phase::Parse => None,
            _ => Some(Box::new(Test262TestCase::new(path, code, meta))),
        }
    }
}
/// Extracts and deserializes the YAML metadata block (`/*--- ... ---*/`) from
/// a test262 source file.
///
/// # Errors
/// Returns an `InvalidData` error when no metadata block is found or the YAML
/// fails to deserialize.
fn read_metadata(code: &str) -> io::Result<MetaData> {
    use once_cell::sync::Lazy;

    /// Regular expression to retrieve the metadata of a test.
    static META_REGEX: Lazy<Regex> = Lazy::new(|| {
        Regex::new(r#"/\*\-{3}((?:.|\n)*)\-{3}\*/"#)
            .expect("could not compile metadata regular expression")
    });

    fn missing_metadata() -> io::Error {
        io::Error::new(io::ErrorKind::InvalidData, "no metadata found")
    }

    let yaml = META_REGEX
        .captures(code)
        .ok_or_else(missing_metadata)?
        .get(1)
        .ok_or_else(missing_metadata)?
        .as_str();
    serde_yaml::from_str(yaml).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
/// Combines the outcomes of two runs of the same test: the first outcome wins
/// if it failed, otherwise the second is returned.
fn merge_outcomes(l: TestRunOutcome, r: TestRunOutcome) -> TestRunOutcome {
    if l.is_failed() {
        return l;
    }
    r
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
/// Tries to consume the literal `s` from the start of `input`.
/// Returns `(rest, matched)` on success, `None` when `input` does not start
/// with `s`.
pub fn parse_str<'a>(input: &'a str, s: &'a str) -> Option<(&'a str, &'a str)> {
    if !input.starts_with(s) {
        return None;
    }
    let (matched, rest) = input.split_at(s.len());
    Some((rest, matched))
}
/// Consumes characters from the start of `input` until `f` returns `true`
/// (or the input is exhausted). Returns `(rest, consumed)`, or `None` when
/// the first character already matches (nothing would be consumed).
pub fn parse_until_chr(input: &'_ str, f: impl Fn(char) -> bool) -> Option<(&'_ str, &'_ str)> {
    // Byte index of the first character matching `f`; slicing at a char
    // boundary is guaranteed because `find` returns char boundaries.
    let end = input.find(|chr: char| f(chr)).unwrap_or(input.len());
    if end == 0 {
        None
    } else {
        Some((&input[end..], &input[..end]))
    }
}
/// Consumes any leading whitespace. Always succeeds, returning
/// `(rest, whitespace)` where `whitespace` may be empty.
pub fn parse_whitespace0(input: &'_ str) -> (&'_ str, &'_ str) {
    match parse_until_chr(input, |chr| !chr.is_whitespace()) {
        Some(parsed) => parsed,
        None => (input, ""),
    }
}
pub fn parse_separated_list<T>(
input: &str,
item: impl Fn(&str) -> Option<(&str, T)>,
separator: impl Fn(&str) -> &str,
trivia: impl Fn(&str) -> &str,
) -> (&str, Vec<T>) {
let mut list = vec![];
let mut input = input;
loop {
let s = trivia(input);
let s = if let Some((s, item)) = item(s) {
list.push(item);
s
} else {
break;
};
let s = trivia(s);
let s = separator(s);
input = s;
}
(input, list)
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/symbols/mod.rs | xtask/coverage/src/symbols/mod.rs | pub mod msts;
mod utils;
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/symbols/msts.rs | xtask/coverage/src/symbols/msts.rs | use rome_js_semantic::SemanticEvent;
use rome_js_syntax::JsFileSource;
use super::utils::{parse_separated_list, parse_str, parse_until_chr, parse_whitespace0};
use crate::check_file_encoding;
use crate::runner::{TestCase, TestCaseFiles, TestRunOutcome, TestSuite};
use rome_js_parser::JsParserOptions;
use std::fmt::Write;
use std::path::{Path, PathBuf};
use std::str::FromStr;
// Directory containing the TypeScript compiler's `.symbols` baseline files.
const CASES_PATH: &str = "xtask/coverage/Typescript/tests/baselines/reference";
// Root of the vendored TypeScript checkout; source paths referenced by the
// baselines are resolved relative to this directory.
const BASE_PATH: &str = "xtask/coverage/Typescript";
/// A symbol-resolution test case backed by a TypeScript `.symbols` baseline file.
#[derive(Debug)]
struct SymbolsMicrosoftTestCase {
    path: PathBuf,
    name: String,
}

impl SymbolsMicrosoftTestCase {
    /// Creates a test case from the `.symbols` file path; the test name is
    /// the file name component of that path.
    fn new(path: &Path) -> Self {
        let name = path.file_name().unwrap().to_string_lossy().to_string();
        Self {
            path: path.to_path_buf(),
            name,
        }
    }
}
impl TestCase for SymbolsMicrosoftTestCase {
    fn name(&self) -> &str {
        &self.name
    }
    /// Parses the test's source as TSX, extracts semantic events, and
    /// compares them positionally (count and names) against the symbols
    /// listed in the `.symbols` baseline file.
    fn run(&self) -> TestRunOutcome {
        let options = JsParserOptions::default().with_parse_class_parameter_decorators();
        let symbols = check_file_encoding(&self.path).unwrap();
        // `load_symbols_file` presumably parses the baseline into the source
        // file path plus the expected symbol list — defined elsewhere in this
        // module.
        let expected = load_symbols_file(&symbols);
        let mut full_path = PathBuf::from_str(BASE_PATH).unwrap();
        full_path.push(expected.code_file);
        // Some .symbols files point to .ts files that do not exist.
        // In this case, the best we can do is recover the original source from
        // the .symbol file itself.
        let code = if !full_path.exists() {
            tracing::warn!("Not Found: {full_path:?}");
            // Lines starting with '>', '=== ' or '///<reference ' are baseline
            // annotations, not source code; everything else is kept verbatim.
            symbols.lines().fold(String::new(), |mut s, line| {
                if !line.starts_with('>')
                    && !line.starts_with("=== ")
                    && !line.starts_with("///<reference ")
                {
                    s.push_str(line);
                    s.push('\n');
                }
                s
            })
        } else {
            match std::fs::read_to_string(&full_path) {
                Ok(code) => code,
                Err(_) => {
                    // The source file exists but could not be read: report a
                    // failure with an empty file and no diagnostics.
                    return TestRunOutcome::IncorrectlyErrored {
                        files: TestCaseFiles::single(
                            self.name.clone(),
                            "".to_string(),
                            JsFileSource::tsx(),
                            options,
                        ),
                        errors: vec![],
                    }
                }
            }
        };
        let t = TestCaseFiles::single(
            self.name.clone(),
            code.clone(),
            JsFileSource::tsx(),
            options.clone(),
        );
        let r = rome_js_parser::parse(&code, JsFileSource::tsx(), options);
        let mut actual: Vec<_> = rome_js_semantic::semantic_events(r.syntax())
            .into_iter()
            .filter(|x| {
                // We filter any event pointing to string literals.
                // We do the same below because TS classifies some string literals as symbols and we also
                // filter them below.
                match x {
                    SemanticEvent::DeclarationFound { .. }
                    | SemanticEvent::Read { .. }
                    | SemanticEvent::HoistedRead { .. }
                    | SemanticEvent::Write { .. }
                    | SemanticEvent::HoistedWrite { .. } => {
                        let name = x.str(&code);
                        !name.contains('\"') && !name.contains('\'')
                    }
                    _ => false,
                }
            })
            .collect();
        // Compare in source order.
        actual.sort_by_key(|x| x.range().start());
        // Print to debug! detailed information
        // on symbols that are different from the
        // expected
        let mut expecteds = expected.symbols.iter();
        let mut actuals = actual.iter();
        loop {
            let expected = expecteds.next();
            let actual = actuals.next();
            if expected.is_none() && actual.is_none() {
                break;
            }
            let mut debug_text = String::new();
            debug_text.push_str("expected: ");
            if let Some(symbol) = expected {
                write!(debug_text, "[{}]", &symbol.name).unwrap();
            }
            debug_text.push_str(" - actual: ");
            if let Some(actual) = actual {
                let name = actual.str(&code).trim();
                write!(debug_text, "[{}]", name).unwrap();
            }
            match (expected, actual) {
                (Some(expected), Some(actual)) if expected.name != actual.str(&code).trim() => {
                    debug_text.push_str(" <<<<<<<<<<<<<<<<<<<< Diff here");
                }
                _ => {}
            }
            tracing::debug!("{}", debug_text);
        }
        // The test passes only when both symbol lists have the same length and
        // matching names at every position; mismatches are reported with an
        // empty error list (the debug log above carries the details).
        if expected.symbols.len() != actual.len() {
            TestRunOutcome::IncorrectlyErrored {
                files: t,
                errors: vec![],
            }
        } else {
            for (expected, actual) in expected.symbols.iter().zip(actual) {
                let are_names_eq = expected.name == actual.str(&code).trim();
                if !are_names_eq {
                    return TestRunOutcome::IncorrectlyErrored {
                        files: t,
                        errors: vec![],
                    };
                }
            }
            TestRunOutcome::Passed(t)
        }
    }
}
/// Test suite that checks Rome's semantic model against the TypeScript
/// compiler's `.symbols` baselines.
#[derive(Default)]
pub(crate) struct SymbolsMicrosoftTestSuite;

impl TestSuite for SymbolsMicrosoftTestSuite {
    fn name(&self) -> &str {
        "symbols/microsoft"
    }

    fn base_path(&self) -> &str {
        CASES_PATH
    }

    /// A test is a `.symbols` baseline with no sibling `.errors.txt` file
    /// (cases the TypeScript compiler itself rejects are skipped).
    fn is_test(&self, path: &Path) -> bool {
        match path.extension() {
            Some(ext) if ext == "symbols" => {
                // only accepts if there is no *.errors.txt file
                let fullpath = path.with_extension("errors.txt");
                std::fs::metadata(fullpath).is_err()
            }
            _ => false,
        }
    }

    fn load_test(&self, path: &Path) -> Option<Box<dyn TestCase>> {
        Some(Box::new(SymbolsMicrosoftTestCase::new(path)))
    }
}
/// A `Decl(file, row, col)` entry from a `.symbols` baseline: where the
/// symbol was declared.
#[derive(Debug)]
#[allow(dead_code)]
struct Decl {
    file: String,
    // Row/column parsed from the baseline; `None` when the value is not a
    // valid number (see `parse_decl`).
    row_start: Option<usize>,
    col_start: Option<usize>,
}

/// One symbol line from a `.symbols` baseline (see `parse_symbol`).
#[allow(dead_code)]
#[derive(Debug)]
struct Symbol {
    name: String,
    path: String,
    decls: Vec<Decl>,
}

/// Parsed contents of a whole `.symbols` baseline file.
#[derive(Debug)]
struct SymbolsFile {
    // Path of the original TypeScript source, taken from the first line.
    code_file: PathBuf,
    symbols: Vec<Symbol>,
}
/// This function parses lines like:
/// >Cell : Symbol(Cell, Decl(2dArrays.ts, 0, 0))
/// | | | | \--+---> line and column of the first char of the leading trivia where the declaration
/// | | | \--> File where the declaration of this symbol is
/// | | \--> States that this Symbol is a declaration
/// | \--> Complete Path of the Symbol
/// \--> text of the symbol
/// To understand how the Typescript codebase generate this line
/// see xtask\coverage\Typescript\src\harness\typeWriter.ts
///
/// Returns `None` both on malformed lines and on symbols this suite does not
/// compare (member paths, computed/string names, `undefined`).
fn parse_symbol(input: &str) -> Option<Symbol> {
    let (input, _) = parse_str(input, ">")?;
    let (input, name) = parse_until_chr(input, |x| x.is_whitespace() || x == ':')?;
    // Skip symbol names Rome does not emit matching events for.
    if name.contains('.')
        || name.contains('[')
        || name.contains('\"')
        || name.contains('\'')
        || name == "undefined"
    {
        return None;
    }
    // Expect `: Symbol(<path>` next.
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, ":")?;
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, "Symbol")?;
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, "(")?;
    let (input, path) = parse_until_chr(input, |x| x.is_whitespace() || x == ',' || x == ')')?;
    let (input, _) = parse_whitespace0(input);
    // An optional comma-separated list of `Decl(...)` entries follows the path.
    let decls = if !input.starts_with(')') {
        let (input, _) = parse_str(input, ",")?;
        let (input, _) = parse_whitespace0(input);
        let (_, decls) = parse_separated_list(
            input,
            parse_decl,
            |s| parse_str(s, ",").map(|x| x.0).unwrap_or(s),
            |s| parse_whitespace0(s).0,
        );
        decls
    } else {
        vec![]
    };
    Some(Symbol {
        name: name.to_string(),
        path: path.to_string(),
        decls,
    })
}
/// Parses one `Decl(<file>, <row>, <col>)` entry, returning the remaining
/// input and the parsed declaration. Row/column that fail to parse as
/// numbers become `None` rather than failing the whole entry.
fn parse_decl(input: &str) -> Option<(&str, Decl)> {
    let (input, _) = parse_str(input, "Decl")?;
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, "(")?;
    let (input, _) = parse_whitespace0(input);
    let (input, file) = parse_until_chr(input, |x| x.is_whitespace() || x == ',')?;
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, ",")?;
    let (input, _) = parse_whitespace0(input);
    let (input, row_start) = parse_until_chr(input, |x| x.is_whitespace() || x == ',')?;
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, ",")?;
    let (input, _) = parse_whitespace0(input);
    let (input, col_start) = parse_until_chr(input, |x| x.is_whitespace() || x == ')')?;
    let (input, _) = parse_whitespace0(input);
    let (input, _) = parse_str(input, ")")?;
    Some((
        input,
        Decl {
            file: file.to_string(),
            row_start: row_start.parse().ok(),
            col_start: col_start.parse().ok(),
        },
    ))
}
/// This method will load .symbols file
/// from the Typescript test suite.
/// Each file is composed of:
/// first line pointing to the original ts file;
/// For each line of the source file: the actual ts line;
/// and if the line contains any symbols, one line per symbol as described by the method [parse_symbol];
fn load_symbols_file(txt: &str) -> SymbolsFile {
let mut lines = txt.lines();
// first line example
// === tests/cases/compiler/2dArrays.ts ===
let code_file = lines.next().unwrap().replace("===", "").trim().to_string();
let mut symbols = vec![];
for line in lines {
if let Some(symbol) = parse_symbol(line) {
symbols.push(symbol);
}
}
SymbolsFile {
code_file: PathBuf::from(code_file),
symbols,
}
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/ts/ts_microsoft.rs | xtask/coverage/src/ts/ts_microsoft.rs | use crate::check_file_encoding;
use crate::runner::{
create_bogus_node_in_tree_diagnostic, TestCase, TestCaseFiles, TestRunOutcome, TestSuite,
};
use regex::Regex;
use rome_js_parser::JsParserOptions;
use rome_js_syntax::{JsFileSource, ModuleKind};
use rome_rowan::{AstNode, SyntaxKind};
use std::convert::TryFrom;
use std::fmt::Write;
use std::path::Path;
const CASES_PATH: &str = "xtask/coverage/Typescript/tests/cases";
const REFERENCE_PATH: &str = "xtask/coverage/Typescript/tests/baselines/reference";

/// One (possibly multi-file) TypeScript compiler test case.
#[derive(Debug)]
struct MicrosoftTypeScriptTestCase {
    code: String,
    name: String,
}

impl MicrosoftTypeScriptTestCase {
    /// Names the case by its path relative to the cases directory.
    fn new(path: &Path, code: String) -> Self {
        let relative = path.strip_prefix(CASES_PATH).unwrap();
        Self {
            name: relative.display().to_string(),
            code,
        }
    }
}
impl TestCase for MicrosoftTypeScriptTestCase {
    fn name(&self) -> &str {
        &self.name
    }

    /// Parses every embedded file of the case and compares the result with
    /// the TypeScript baseline: a case must error iff an `.errors.txt`
    /// baseline exists, and an error-free parse must contain no bogus nodes.
    fn run(&self) -> TestRunOutcome {
        let TestCaseMetadata { files, run_options } = extract_metadata(&self.code, &self.name);
        let mut all_errors = Vec::new();
        let mut bogus_errors = Vec::new();
        for file in &files {
            match file.parse().ok() {
                Ok(root) => {
                    // Even a "successful" parse fails the case if the tree
                    // contains a bogus node.
                    if let Some(bogus) = root
                        .syntax()
                        .descendants()
                        .find(|descendant| descendant.kind().is_bogus())
                    {
                        bogus_errors.push(create_bogus_node_in_tree_diagnostic(bogus));
                    }
                }
                Err(errors) => all_errors.extend(errors),
            }
        }
        let expected_errors = should_error(&self.name, &run_options);
        if all_errors.is_empty() && expected_errors {
            TestRunOutcome::IncorrectlyPassed(files)
        } else if !all_errors.is_empty() && !expected_errors {
            TestRunOutcome::IncorrectlyErrored {
                errors: all_errors,
                files,
            }
        } else if !bogus_errors.is_empty() {
            TestRunOutcome::IncorrectlyErrored {
                errors: bogus_errors,
                files,
            }
        } else {
            TestRunOutcome::Passed(files)
        }
    }
}
/// Test suite running Rome's parser over the TypeScript compiler's own
/// conformance/compiler test cases.
#[derive(Default)]
pub(crate) struct MicrosoftTypescriptTestSuite;

impl TestSuite for MicrosoftTypescriptTestSuite {
    fn name(&self) -> &str {
        "ts/microsoft"
    }

    fn base_path(&self) -> &str {
        CASES_PATH
    }

    fn is_test(&self, path: &Path) -> bool {
        match path.extension() {
            None => false,
            Some(ext) => ext == "ts",
        }
    }

    /// Returns `None` for files `check_file_encoding` rejects (e.g. non-UTF8).
    fn load_test(&self, path: &Path) -> Option<Box<dyn TestCase>> {
        let code = check_file_encoding(path)?;
        Some(Box::new(MicrosoftTypeScriptTestCase::new(path, code)))
    }
}
/// Result of [`extract_metadata`]: the individual files of a test case plus
/// the `module=`/`target=` option combinations the case is run with.
struct TestCaseMetadata {
    files: TestCaseFiles,
    run_options: Vec<String>,
}
/// TypeScript supports multiple files in a single test case.
/// These files start with `// @<option-name>: <option-value>` and are followed by the file's content.
/// This function extracts the individual files with their content and drops unsupported files.
fn extract_metadata(code: &str, path: &str) -> TestCaseMetadata {
    // Returns a match for a test option. Test options have the form `// @name: value`
    let options_regex =
        Regex::new(r"(?m)^/{2}\s*@(?P<name>\w+)\s*:\s*(?P<value>[^\r\n]*)").unwrap();
    let mut files = TestCaseFiles::new();
    let line_ending = infer_line_ending(code);
    let mut current_file_content = String::new();
    let mut current_file_name: Option<String> = None;
    let mut run_options: Vec<String> = vec![];
    for line in code.lines() {
        if let Some(option) = options_regex.captures(line) {
            let option_name = option.name("name").unwrap().as_str().to_lowercase();
            let option_value = option.name("value").unwrap().as_str().trim();
            if option_name == "alwaysstrict" {
                // `alwaysStrict` is emulated by injecting a "use strict" prologue.
                write!(current_file_content, "\"use strict\";{}", line_ending).unwrap();
            } else if matches!(option_name.as_str(), "module" | "target") && files.is_empty() {
                // `module`/`target` options seen before any file apply to the
                // whole case; they select which `name(option).errors.txt`
                // baselines `should_error` consults.
                run_options.extend(
                    option_value
                        .split(',')
                        .map(|module_value| format!("{option_name}={}", module_value.trim())),
                );
            }
            if option_name != "filename" {
                continue; // omit options from the file content
            }
            // A `filename` option starts a new embedded file: flush the one
            // accumulated so far.
            if let Some(current_name) = current_file_name.take() {
                add_file_if_supported(
                    &mut files,
                    current_name,
                    std::mem::take(&mut current_file_content),
                );
            }
            current_file_name = Some(option_value.to_string());
        } else {
            // regular content line
            if current_file_content.is_empty() && line.trim().is_empty() {
                // skip leading whitespace
                continue;
            }
            write!(current_file_content, "{}{}", line, line_ending).unwrap();
        }
    }
    // Flush the trailing file (or treat the whole input as one file).
    if let Some(current_name) = current_file_name.take() {
        add_file_if_supported(&mut files, current_name, current_file_content)
    } else if files.is_empty() {
        let path = Path::new(path);
        // Single file case without any options
        add_file_if_supported(
            &mut files,
            path.file_name().unwrap().to_str().unwrap().to_string(),
            current_file_content,
        );
    }
    TestCaseMetadata { files, run_options }
}
/// Adds `name`/`content` to `files` when the file extension maps to a
/// JS/TS dialect; other files (JSON, CSS, ...) are silently dropped.
fn add_file_if_supported(files: &mut TestCaseFiles, name: String, content: String) {
    let path = Path::new(&name);
    // Skip files that aren't JS/TS files (JSON, CSS...)
    if let Ok(mut source_type) = JsFileSource::try_from(path) {
        let is_module_regex = Regex::new("(import|export)\\s").unwrap();
        // A very basic heuristic to determine if a module is a `Script` or a `Module`.
        // The TypeScript parser automatically detects whatever a file is a module or a script
        // by the presence of any module syntax. Rome's parser doesn't support this today
        // because it would require moving any "strict mode" or "module" specific checks
        // into a second compiler pass. The reason this is needed is that the module syntax
        // may appear at the very end of the file after the parser has already processed
        // some syntax that is invalid in strict mode (for example, an "arguments" variable).
        if !is_module_regex.is_match(&content) {
            source_type = source_type.with_module_kind(ModuleKind::Script);
        }
        files.add(
            name,
            content,
            source_type,
            JsParserOptions::default().with_parse_class_parameter_decorators(),
        )
    }
}
/// Detect the line ending used in the file.
///
/// Classifies by the first carriage return: `"\r\n"` when it is immediately
/// followed by a line feed, `"\r"` otherwise; files without any `'\r'` are
/// treated as `"\n"`.
fn infer_line_ending(code: &str) -> &'static str {
    match code.find('\r') {
        Some(index) if code[index..].starts_with("\r\n") => "\r\n",
        Some(_) => "\r",
        None => "\n",
    }
}
fn should_error(name: &str, run_options: &[String]) -> bool {
let error_reference_file = Path::new(REFERENCE_PATH).join(
Path::new(name)
.with_extension("errors.txt")
.file_name()
.unwrap(),
);
if error_reference_file.exists() {
return true;
}
run_options.iter().any(|option| {
let errors_file_name = Path::new(name)
.file_stem()
.and_then(|name| name.to_str())
.map(|name| format!("{name}({option}).errors.txt"))
.unwrap();
let path = Path::new(REFERENCE_PATH).join(errors_file_name);
path.exists()
})
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/ts/ts_babel.rs | xtask/coverage/src/ts/ts_babel.rs | use crate::runner::create_bogus_node_in_tree_diagnostic;
use crate::{
check_file_encoding,
runner::{TestCase, TestCaseFiles, TestRunOutcome, TestSuite},
};
use rome_js_parser::JsParserOptions;
use rome_js_syntax::{JsFileSource, LanguageVariant};
use rome_rowan::SyntaxKind;
use std::path::Path;
const CASES_PATH: &str = "xtask/coverage/babel/packages/babel-parser/test/fixtures/typescript";
/// A Babel TypeScript fixture: one input file plus expectations derived
/// from its sibling `output.json`/`options.json` files.
struct BabelTypescriptTestCase {
    name: String,
    // Whether the Babel baseline records errors for this fixture.
    expected_to_fail: bool,
    code: String,
    variant: LanguageVariant,
}

impl BabelTypescriptTestCase {
    /// Names the case by its fixture directory, relative to `CASES_PATH`.
    fn new(path: &Path, code: String, expected_to_fail: bool, variant: LanguageVariant) -> Self {
        let name = path
            .parent()
            .unwrap()
            .strip_prefix(CASES_PATH)
            .unwrap()
            .display()
            .to_string();
        Self {
            name,
            code,
            expected_to_fail,
            variant,
        }
    }
}
impl TestCase for BabelTypescriptTestCase {
    fn name(&self) -> &str {
        &self.name
    }

    /// Parses the fixture and mirrors Babel's verdict: fixtures Babel
    /// rejects must produce diagnostics; fixtures it accepts must parse
    /// without diagnostics and without bogus nodes in the tree.
    fn run(&self) -> TestRunOutcome {
        let source_type = JsFileSource::ts().with_variant(self.variant);
        let options = JsParserOptions::default().with_parse_class_parameter_decorators();
        let files = TestCaseFiles::single(
            self.name().to_string(),
            self.code.clone(),
            source_type,
            options.clone(),
        );
        let result = rome_js_parser::parse(&self.code, source_type, options);
        if self.expected_to_fail && result.diagnostics().is_empty() {
            TestRunOutcome::IncorrectlyPassed(files)
        } else if self.expected_to_fail {
            // Expected to fail and produced diagnostics: that's a pass.
            TestRunOutcome::Passed(files)
        } else if !result.diagnostics().is_empty() {
            TestRunOutcome::IncorrectlyErrored {
                files,
                errors: result.diagnostics().to_vec(),
            }
        } else if let Some(bogus) = result
            .syntax()
            .descendants()
            .find(|descendant| descendant.kind().is_bogus())
        {
            TestRunOutcome::IncorrectlyErrored {
                files,
                errors: vec![create_bogus_node_in_tree_diagnostic(bogus)],
            }
        } else {
            TestRunOutcome::Passed(files)
        }
    }
}
/// Test suite running Rome's parser over Babel's TypeScript fixtures.
#[derive(Default)]
pub(crate) struct BabelTypescriptTestSuite;

impl TestSuite for BabelTypescriptTestSuite {
    fn name(&self) -> &str {
        "ts/babel"
    }

    fn base_path(&self) -> &str {
        CASES_PATH
    }

    fn is_test(&self, path: &std::path::Path) -> bool {
        path.extension().map(|x| x == "ts").unwrap_or(false)
    }

    /// Loads a fixture, deriving expectations from its sibling files:
    /// `"errors":` in `output.json` or `"throws":` in `options.json` marks
    /// it as expected-to-fail; `jsx` in `options.json` selects the JSX
    /// language variant.
    fn load_test(&self, path: &std::path::Path) -> Option<Box<dyn crate::runner::TestCase>> {
        let code = check_file_encoding(path)?;
        let output_json_path = path.with_file_name("output.json");
        let options_path = path.with_file_name("options.json");
        let mut should_fail = false;
        let mut variant = LanguageVariant::Standard;
        if output_json_path.exists() {
            if let Some(content) = check_file_encoding(&output_json_path) {
                // NOTE(review): substring check, not JSON parsing — assumes the
                // key "errors" only appears when Babel recorded errors.
                should_fail = content.contains("\"errors\":");
            }
        }
        if options_path.exists() {
            if let Some(content) = check_file_encoding(&options_path) {
                should_fail = should_fail || content.contains("\"throws\":");
                if content.contains("jsx") {
                    variant = LanguageVariant::Jsx;
                }
            }
        };
        Some(Box::new(BabelTypescriptTestCase::new(
            path,
            code,
            should_fail,
            variant,
        )))
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/coverage/src/ts/mod.rs | xtask/coverage/src/ts/mod.rs | pub mod ts_babel;
pub mod ts_microsoft;
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/src/lib.rs | xtask/src/lib.rs | //! Codegen tools mostly used to generate ast and syntax definitions. Adapted from rust analyzer's codegen
pub mod glue;
use std::{
env,
fmt::Display,
path::{Path, PathBuf},
};
pub use crate::glue::{pushd, pushenv};
pub use anyhow::{anyhow, bail, ensure, Context as _, Error, Result};
/// Whether codegen writes its output or only checks it is up to date.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Mode {
    // Write generated/formatted output to disk.
    Overwrite,
    // Fail when on-disk output differs (used by CI-style checks).
    Verify,
}
/// Returns the repository root, resolved as two directories above this
/// crate's manifest.
///
/// Prefers the runtime `CARGO_MANIFEST_DIR` (set by cargo) and falls back
/// to the compile-time value when the variable is absent.
pub fn project_root() -> PathBuf {
    Path::new(
        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
    )
    .ancestors()
    .nth(2)
    .unwrap()
    .to_path_buf()
}
/// Runs `cargo fmt` over the workspace with the stable toolchain,
/// checking instead of rewriting when `mode` is [`Mode::Verify`].
pub fn run_rustfmt(mode: Mode) -> Result<()> {
    // Guards keep the cwd/env override alive for the duration of the command.
    let _dir = pushd(project_root());
    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
    ensure_rustfmt()?;
    match mode {
        Mode::Overwrite => run!("cargo fmt"),
        Mode::Verify => run!("cargo fmt -- --check"),
    }?;
    Ok(())
}
/// Formats generated Rust `text` and prefixes the standard [`PREAMBLE`].
pub fn reformat(text: impl Display) -> Result<String> {
    reformat_without_preamble(text).map(prepend_generated_preamble)
}

/// Like [`reformat`], but the preamble names the `command` to re-run to
/// regenerate the file.
pub fn reformat_with_command(text: impl Display, command: impl Display) -> Result<String> {
    reformat_without_preamble(text).map(|formatted| {
        format!("//! This is a generated file. Don't modify it by hand! Run '{}' to re-generate the file.\n\n{}", command, formatted)
    })
}
/// Marker line emitted at the top of every generated file.
pub const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/codegen`";

/// Prefixes `content` with the standard generated-file preamble comment.
pub fn prepend_generated_preamble(content: impl Display) -> String {
    format!("//! {PREAMBLE}\n\n{content}")
}
/// Pipes `text` through stable `rustfmt` (stdin → stdout) and returns the
/// formatted source with a trailing newline, without adding any preamble.
pub fn reformat_without_preamble(text: impl Display) -> Result<String> {
    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
    ensure_rustfmt()?;
    // The `< ...` form of run! feeds the bytes to the child's stdin.
    let output = run!(
        "rustfmt --config fn_single_line=true";
        <text.to_string().as_bytes()
    )?;
    Ok(format!("{}\n", output))
}
/// Verifies that `rustfmt` resolves to the stable toolchain, bailing with
/// an installation hint otherwise.
pub fn ensure_rustfmt() -> Result<()> {
    let out = run!("rustfmt --version")?;
    if !out.contains("stable") {
        bail!(
            "Failed to run rustfmt from toolchain 'stable'. \
            Please run `rustup component add rustfmt --toolchain stable` to install it.",
        )
    }
    Ok(())
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
rome/tools | https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/xtask/src/glue.rs | xtask/src/glue.rs | //! A shell but bad, some cross platform glue code
use std::{
cell::RefCell,
env,
ffi::OsString,
io::Write,
path::{Path, PathBuf},
process::{Command, Stdio},
};
use anyhow::{bail, Context, Result};
/// Thin `std::fs` wrappers that attach the offending path(s) to every error.
pub mod fs2 {
    use std::{fs, path::Path};

    use anyhow::{Context, Result};

    pub fn read_dir<P: AsRef<Path>>(path: P) -> Result<fs::ReadDir> {
        let path = path.as_ref();
        fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display()))
    }

    pub fn read_to_string<P: AsRef<Path>>(path: P) -> Result<String> {
        let path = path.as_ref();
        fs::read_to_string(path).with_context(|| format!("Failed to read {}", path.display()))
    }

    pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
        let path = path.as_ref();
        fs::write(path, contents).with_context(|| format!("Failed to write {}", path.display()))
    }

    pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
        let from = from.as_ref();
        let to = to.as_ref();
        fs::copy(from, to)
            .with_context(|| format!("Failed to copy {} to {}", from.display(), to.display()))
    }

    pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {
        let path = path.as_ref();
        fs::remove_file(path).with_context(|| format!("Failed to remove file {}", path.display()))
    }

    pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
        let path = path.as_ref();
        fs::remove_dir_all(path).with_context(|| format!("Failed to remove dir {}", path.display()))
    }

    pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
        let path = path.as_ref();
        fs::create_dir_all(path).with_context(|| format!("Failed to create dir {}", path.display()))
    }
}
/// Runs a command built with `format!` syntax via [`glue::run_process`].
///
/// Forms:
/// - `run!("cmd {}", arg)` — echoes the command and its stdout;
/// - `run!("cmd"; echo = false)` — controls echoing explicitly;
/// - `run!("cmd"; <bytes)` — pipes `bytes` to the child's stdin (no echo).
#[macro_export]
macro_rules! run {
    ($($expr:expr),*) => {
        run!($($expr),*; echo = true)
    };
    ($($expr:expr),* ; echo = $echo:expr) => {
        $crate::glue::run_process(format!($($expr),*), $echo, None)
    };
    ($($expr:expr),* ; <$stdin:expr) => {
        $crate::glue::run_process(format!($($expr),*), false, Some($stdin))
    };
}
pub use crate::run;
/// RAII guard for a temporary working-directory change; see [`pushd`].
pub struct Pushd {
    _p: (),
}

/// Pushes `path` onto the thread-local cwd stack; the previous directory
/// is restored when the returned guard drops.
pub fn pushd(path: impl Into<PathBuf>) -> Pushd {
    Env::with(|env| env.pushd(path.into()));
    Pushd { _p: () }
}

impl Drop for Pushd {
    fn drop(&mut self) {
        Env::with(|env| env.popd())
    }
}
/// RAII guard for a temporary environment-variable override; see [`pushenv`].
pub struct Pushenv {
    _p: (),
}

/// Sets `var` to `value`; the previous value (or unset state) is restored
/// when the returned guard drops.
pub fn pushenv(var: &str, value: &str) -> Pushenv {
    Env::with(|env| env.pushenv(var.into(), value.into()));
    Pushenv { _p: () }
}

impl Drop for Pushenv {
    fn drop(&mut self) {
        Env::with(|env| env.popenv())
    }
}
/// Removes `path` whether it is a file or a directory tree; a missing
/// path is a no-op success.
pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {
    let path = path.as_ref();
    match (path.exists(), path.is_file()) {
        (false, _) => Ok(()),
        (true, true) => fs2::remove_file(path),
        (true, false) => fs2::remove_dir_all(path),
    }
}
/// Runs `cmd` and returns its trimmed stdout; used by the [`run!`] macro.
#[doc(hidden)]
pub fn run_process(cmd: String, echo: bool, stdin: Option<&[u8]>) -> Result<String> {
    run_process_inner(&cmd, echo, stdin).with_context(|| format!("process `{}` failed", cmd))
}

/// Today's UTC date in ISO format, obtained from the system `date` binary.
pub fn date_iso() -> Result<String> {
    run!("date --iso --utc")
}
/// Splits `cmd` with [`shelx`], runs it in the thread-local cwd, and returns
/// trimmed stdout. stderr is inherited from the parent; a non-zero exit
/// status becomes an error.
fn run_process_inner(cmd: &str, echo: bool, stdin: Option<&[u8]>) -> Result<String> {
    let mut args = shelx(cmd);
    // The first token is the binary; the rest are its arguments.
    let binary = args.remove(0);
    let current_dir = Env::with(|it| it.cwd().to_path_buf());
    if echo {
        println!("> {}", cmd)
    }
    let mut command = Command::new(binary);
    command
        .args(args)
        .current_dir(current_dir)
        .stderr(Stdio::inherit());
    let output = match stdin {
        None => command.stdin(Stdio::null()).output(),
        Some(stdin) => {
            // Pipe the payload into the child's stdin, then collect output.
            command.stdin(Stdio::piped()).stdout(Stdio::piped());
            let mut process = command.spawn()?;
            process.stdin.take().unwrap().write_all(stdin)?;
            process.wait_with_output()
        }
    }?;
    let stdout = String::from_utf8(output.stdout)?;
    if echo {
        print!("{}", stdout)
    }
    if !output.status.success() {
        bail!("{}", output.status)
    }
    Ok(stdout.trim().to_string())
}
/// Minimal shell-style lexer: splits `cmd` on ASCII whitespace, except that
/// text between single quotes is kept verbatim as one argument.
/// (No escaping inside quotes is supported.)
fn shelx(cmd: &str) -> Vec<String> {
    let mut res = Vec::new();
    for (idx, piece) in cmd.split('\'').enumerate() {
        // Splitting on `'` alternates unquoted/quoted pieces; odd indices
        // sit between a pair of quotes.
        if idx % 2 == 1 {
            res.push(piece.to_string());
        } else {
            res.extend(piece.split_ascii_whitespace().map(str::to_string));
        }
    }
    res
}
/// Thread-local stack of working directories and environment overrides
/// backing [`pushd`] and [`pushenv`].
struct Env {
    pushd_stack: Vec<PathBuf>,
    // (variable, previous value or None-if-unset) pairs, restored in LIFO order.
    pushenv_stack: Vec<(OsString, Option<OsString>)>,
}

impl Env {
    // Runs `f` against the thread-local instance, created on first use and
    // seeded with the process's current directory.
    fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
        thread_local! {
            static ENV: RefCell<Env> = RefCell::new(Env {
                pushd_stack: vec![env::current_dir().unwrap()],
                pushenv_stack: vec![],
            });
        }
        ENV.with(|it| f(&mut it.borrow_mut()))
    }

    fn pushd(&mut self, dir: PathBuf) {
        // Relative paths are resolved against the current top of the stack.
        let dir = self.cwd().join(dir);
        self.pushd_stack.push(dir);
        env::set_current_dir(self.cwd()).unwrap();
    }

    fn popd(&mut self) {
        self.pushd_stack.pop().unwrap();
        env::set_current_dir(self.cwd()).unwrap();
    }

    fn pushenv(&mut self, var: OsString, value: OsString) {
        // Remember the previous value so popenv can restore or unset it.
        self.pushenv_stack.push((var.clone(), env::var_os(&var)));
        env::set_var(var, value)
    }

    fn popenv(&mut self) {
        let (var, value) = self.pushenv_stack.pop().unwrap();
        match value {
            None => env::remove_var(var),
            Some(value) => env::set_var(var, value),
        }
    }

    fn cwd(&self) -> &Path {
        self.pushd_stack.last().unwrap()
    }
}
| rust | MIT | 392d188a49d70e495f13b1bb08cd7d9c43690f9b | 2026-01-04T15:38:12.578592Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/build.rs | build.rs | use std::env;
use std::fs;
use std::path::Path;
use std::time::{SystemTime, UNIX_EPOCH};
/// Build script: exposes the build time (seconds since the Unix epoch) to
/// the crate as the `BUILD_TIMESTAMP` compile-time env var.
fn main() {
    // Embed build timestamp as seconds since Unix epoch
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();
    // Write timestamp to a file in OUT_DIR so cargo detects the change
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("build_timestamp.txt");
    fs::write(&dest_path, timestamp.to_string()).unwrap();
    println!("cargo:rustc-env=BUILD_TIMESTAMP={}", timestamp);
    // Always rerun build script (no rerun-if-changed means always run)
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/setup.rs | src/setup.rs | use std::collections::BTreeMap;
use anyhow::Result;
use crate::{
cli::{SetupOpts, TaskRunOpts},
tasks::{self, load_project_config},
};
/// Prints shell `alias` lines for the project's configured aliases
/// (suitable for `eval`), or falls back to running a task named "setup"
/// when the project defines no aliases.
pub fn run(opts: SetupOpts) -> Result<()> {
    let (config_path, cfg) = load_project_config(opts.config)?;
    if cfg.aliases.is_empty() {
        // No aliases: delegate to a task literally named "setup", if present.
        if tasks::find_task(&cfg, "setup").is_some() {
            return tasks::run(TaskRunOpts {
                config: config_path,
                delegate_to_hub: false,
                hub_host: std::net::IpAddr::from([127, 0, 0, 1]),
                hub_port: 9050,
                name: "setup".to_string(),
                args: Vec::new(),
            });
        }
        // Otherwise print a hint showing how to declare aliases.
        println!(
            "# No aliases defined in {}. Add an alias table like:",
            config_path.display()
        );
        println!("# [[alias]]");
        println!("# fr = \"f run\"");
        return Ok(());
    }
    println!("# flow aliases from {}", config_path.display());
    println!(
        "# Apply them in your shell with: eval \"$(f setup --config {})\"",
        config_path.display()
    );
    for line in format_alias_lines(&cfg.aliases) {
        println!("{line}");
    }
    Ok(())
}
/// Renders `aliases` as shell `alias name='target'` lines, sorted by name
/// so the output is deterministic regardless of `HashMap` iteration order.
fn format_alias_lines(aliases: &std::collections::HashMap<String, String>) -> Vec<String> {
    // BTreeMap iteration is ordered by key.
    let ordered: BTreeMap<&String, &String> = aliases.iter().collect();
    ordered
        .into_iter()
        .map(|(name, target)| format!("alias {name}='{}'", escape_single_quotes(target)))
        .collect()
}

/// Escapes embedded single quotes (`'` → `'\''`) so `value` can be placed
/// inside a single-quoted shell string.
fn escape_single_quotes(value: &str) -> String {
    value.replace('\'', "'\\''")
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    #[test]
    fn formats_alias_lines_in_order() {
        // HashMap iteration order is arbitrary; output must still be sorted.
        let mut aliases = HashMap::new();
        aliases.insert("fr".to_string(), "f run".to_string());
        aliases.insert("ft".to_string(), "f tasks".to_string());
        let lines = format_alias_lines(&aliases);
        assert_eq!(
            lines,
            vec![
                "alias fr='f run'".to_string(),
                "alias ft='f tasks'".to_string()
            ]
        );
    }

    #[test]
    fn escapes_single_quotes_in_commands() {
        let cmd = "echo 'hello'";
        assert_eq!(escape_single_quotes(cmd), "echo '\\''hello'\\''");
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/db.rs | src/db.rs | use std::path::PathBuf;
use anyhow::{Context, Result};
use rusqlite::Connection;
/// Path to the shared SQLite database: `$HOME/.config/flow/flow.db`,
/// falling back to the current directory when `$HOME` is unset.
pub fn db_path() -> PathBuf {
    let base = std::env::var_os("HOME").map_or_else(|| PathBuf::from("."), PathBuf::from);
    base.join(".config/flow/flow.db")
}
/// Open the SQLite database, creating parent directories if needed.
///
/// Errors carry context naming the directory or database that failed.
pub fn open_db() -> Result<Connection> {
    let path = db_path();
    // Ensure ~/.config/flow exists before SQLite tries to create the file.
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .with_context(|| format!("failed to create db dir {}", parent.display()))?;
    }
    Connection::open(path).context("failed to open flow.db")
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/config.rs | src/config.rs | use std::{
collections::HashMap,
fs,
path::{Path, PathBuf},
};
use anyhow::{Context, Result};
use serde::{Deserialize, Deserializer, Serialize};
use shellexpand::tilde;
use crate::fixup;
/// Top-level configuration for flowd, currently focused on managed servers.
#[derive(Debug, Clone, Deserialize)]
pub struct Config {
    /// Optional config schema version.
    #[serde(default)]
    pub version: Option<u32>,
    /// Optional human-friendly project name (applies to local project configs).
    #[serde(
        default,
        rename = "name",
        alias = "project_name",
        alias = "project-name"
    )]
    pub project_name: Option<String>,
    /// Flow-specific settings (primary_task, etc.)
    #[serde(default)]
    pub flow: FlowSettings,
    /// Global feature toggles; see [`OptionsConfig`].
    #[serde(default)]
    pub options: OptionsConfig,
    /// Locally managed server processes (`[[server]]` / `[[server-local]]`).
    #[serde(default, alias = "server", alias = "server-local")]
    pub servers: Vec<ServerConfig>,
    /// Remote server entries (`[[server-remote]]`).
    #[serde(default, rename = "server-remote")]
    pub remote_servers: Vec<RemoteServerConfig>,
    /// Runnable project tasks.
    #[serde(default)]
    pub tasks: Vec<TaskConfig>,
    /// Named dependency specifications (`[dependencies]` / `[deps]`).
    #[serde(default, alias = "deps")]
    pub dependencies: HashMap<String, DependencySpec>,
    /// Shell aliases emitted by `f setup` (custom deserializer handles
    /// the `[[alias]]` table form).
    #[serde(default, alias = "alias", deserialize_with = "deserialize_aliases")]
    pub aliases: HashMap<String, String>,
    /// External command definition files (`[[commands]]`).
    #[serde(default, rename = "commands")]
    pub command_files: Vec<CommandFileConfig>,
    #[serde(default)]
    pub storage: Option<StorageConfig>,
    #[serde(default)]
    pub flox: Option<FloxConfig>,
    /// Watchers / always-running jobs (`[[watcher]]` / `[[always-run]]`).
    #[serde(default, alias = "watcher", alias = "always-run")]
    pub watchers: Vec<WatcherConfig>,
    #[serde(default)]
    pub stream: Option<StreamConfig>,
    #[serde(default, rename = "server-hub")]
    pub server_hub: Option<ServerHubConfig>,
    /// Background daemons that flow can manage (start/stop/status).
    #[serde(default, alias = "daemon")]
    pub daemons: Vec<DaemonConfig>,
    /// Host deployment config for Linux servers.
    #[serde(default)]
    pub host: Option<crate::deploy::HostConfig>,
    /// Cloudflare Workers deployment config.
    #[serde(default)]
    pub cloudflare: Option<crate::deploy::CloudflareConfig>,
    /// Railway deployment config.
    #[serde(default)]
    pub railway: Option<crate::deploy::RailwayConfig>,
    /// Commit workflow config (fixers, review instructions).
    #[serde(default)]
    pub commit: Option<CommitConfig>,
}
/// Configuration for commit workflow.
#[derive(Debug, Clone, Deserialize, Default)]
pub struct CommitConfig {
    /// Pre-commit fixers to run before staging.
    /// Built-in: "mdx-comments", "trailing-whitespace", "end-of-file"
    /// Custom: "cmd:prettier --write"
    #[serde(default)]
    pub fixers: Vec<String>,
    /// Custom instructions passed to AI code review.
    #[serde(default)]
    pub review_instructions: Option<String>,
    /// File path to load review instructions from.
    // NOTE(review): precedence between `review_instructions` and this file
    // is decided by the consumer, not here — confirm against the reviewer code.
    #[serde(default)]
    pub review_instructions_file: Option<String>,
}
impl Default for Config {
    /// An empty configuration: no name, servers, tasks, dependencies,
    /// aliases, watchers, daemons, or deployment targets.
    fn default() -> Self {
        Self {
            version: None,
            project_name: None,
            flow: FlowSettings::default(),
            options: OptionsConfig::default(),
            servers: Vec::new(),
            remote_servers: Vec::new(),
            tasks: Vec::new(),
            dependencies: HashMap::new(),
            aliases: HashMap::new(),
            command_files: Vec::new(),
            storage: None,
            flox: None,
            watchers: Vec::new(),
            stream: None,
            server_hub: None,
            daemons: Vec::new(),
            host: None,
            cloudflare: None,
            railway: None,
            commit: None,
        }
    }
}
/// Flow-specific settings for autonomous agent workflows.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct FlowSettings {
    /// The primary task to run after code changes (e.g., "release", "deploy").
    #[serde(default, alias = "primary-task")]
    pub primary_task: Option<String>,
    /// Task to run when invoking `f release`.
    #[serde(default, rename = "release_task", alias = "release-task")]
    pub release_task: Option<String>,
}
/// Global feature toggles.
///
/// All fields except `trace_terminal_io` are tri-state: `None` means
/// "unset", which lets a later [`OptionsConfig::merge`] layer keep an
/// earlier value.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct OptionsConfig {
    // Boolean flag; combined with logical OR when configs are merged.
    #[serde(default, rename = "trace_terminal_io")]
    pub trace_terminal_io: bool,
    #[serde(
        default,
        rename = "commit_with_check_async",
        alias = "commit-with-check-async"
    )]
    pub commit_with_check_async: Option<bool>,
    #[serde(
        default,
        rename = "commit_with_check_use_repo_root",
        alias = "commit-with-check-use-repo-root"
    )]
    pub commit_with_check_use_repo_root: Option<bool>,
    #[serde(
        default,
        rename = "commit_with_check_timeout_secs",
        alias = "commit-with-check-timeout-secs"
    )]
    pub commit_with_check_timeout_secs: Option<u64>,
    /// Remote Claude review URL for commitWithCheck.
    #[serde(
        default,
        rename = "commit_with_check_review_url",
        alias = "commit-with-check-review-url"
    )]
    pub commit_with_check_review_url: Option<String>,
    /// Optional auth token for remote review.
    #[serde(
        default,
        rename = "commit_with_check_review_token",
        alias = "commit-with-check-review-token"
    )]
    pub commit_with_check_review_token: Option<String>,
    /// Enable mirroring commits to gitedit.dev for commitWithCheck.
    #[serde(
        default,
        rename = "commit_with_check_gitedit_mirror",
        alias = "commit-with-check-gitedit-mirror"
    )]
    pub commit_with_check_gitedit_mirror: Option<bool>,
    /// Enable mirroring commits to gitedit.dev (opt-in per project).
    #[serde(default, rename = "gitedit_mirror", alias = "gitedit-mirror")]
    pub gitedit_mirror: Option<bool>,
    /// Custom gitedit API URL (defaults to https://gitedit.dev).
    #[serde(default, rename = "gitedit_url", alias = "gitedit-url")]
    pub gitedit_url: Option<String>,
    /// Override repo full name for gitedit sync (e.g., "giteditdev/gitedit").
    #[serde(
        default,
        rename = "gitedit_repo_full_name",
        alias = "gitedit-repo-full-name"
    )]
    pub gitedit_repo_full_name: Option<String>,
}
impl OptionsConfig {
    /// Overlay `other` onto `self`: every option set in `other` replaces the
    /// corresponding value in `self`; unset options leave `self` untouched.
    /// `trace_terminal_io` is a plain bool, so it can only be switched on.
    fn merge(&mut self, other: OptionsConfig) {
        self.trace_terminal_io |= other.trace_terminal_io;
        self.commit_with_check_async = other
            .commit_with_check_async
            .or(self.commit_with_check_async.take());
        self.commit_with_check_use_repo_root = other
            .commit_with_check_use_repo_root
            .or(self.commit_with_check_use_repo_root.take());
        self.commit_with_check_timeout_secs = other
            .commit_with_check_timeout_secs
            .or(self.commit_with_check_timeout_secs.take());
        self.commit_with_check_review_url = other
            .commit_with_check_review_url
            .or(self.commit_with_check_review_url.take());
        self.commit_with_check_review_token = other
            .commit_with_check_review_token
            .or(self.commit_with_check_review_token.take());
        self.commit_with_check_gitedit_mirror = other
            .commit_with_check_gitedit_mirror
            .or(self.commit_with_check_gitedit_mirror.take());
        self.gitedit_mirror = other.gitedit_mirror.or(self.gitedit_mirror.take());
        self.gitedit_url = other.gitedit_url.or(self.gitedit_url.take());
        self.gitedit_repo_full_name = other
            .gitedit_repo_full_name
            .or(self.gitedit_repo_full_name.take());
    }
}
/// Configuration for a single managed HTTP server process.
///
/// Deserialized via a custom `Deserialize` impl (below) that shell-splits the
/// command, expands paths, and derives a fallback name.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ServerConfig {
    /// Human-friendly name used in the TUI and HTTP API.
    pub name: String,
    /// Program to execute, e.g. "node", "cargo".
    pub command: String,
    /// Arguments passed to the command.
    pub args: Vec<String>,
    /// Optional port the server listens on (for display only).
    pub port: Option<u16>,
    /// Optional working directory for the process.
    pub working_dir: Option<PathBuf>,
    /// Additional environment variables.
    pub env: HashMap<String, String>,
    /// Whether this server should be started automatically with the daemon.
    pub autostart: bool,
}
impl<'de> Deserialize<'de> for ServerConfig {
    /// Custom deserialization with three conveniences over a plain derive:
    /// 1. `command = "node server.js"` with no `args` is shell-split into
    ///    command + args.
    /// 2. `name` falls back to the working dir's basename, then the command,
    ///    then the literal "server".
    /// 3. `command` and `working_dir` get `~`/env-var expansion.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Raw mirror of the TOML shape; post-processed below.
        #[derive(Deserialize)]
        struct RawServerConfig {
            #[serde(default)]
            name: Option<String>,
            command: String,
            #[serde(default)]
            args: Vec<String>,
            #[serde(default)]
            port: Option<u16>,
            // `path` is accepted as an alias for convenience.
            #[serde(default, alias = "path")]
            working_dir: Option<String>,
            #[serde(default)]
            env: HashMap<String, String>,
            #[serde(default = "default_autostart")]
            autostart: bool,
        }
        let raw = RawServerConfig::deserialize(deserializer)?;
        let mut command = raw.command;
        let mut args = raw.args;
        // Only split when args were not given explicitly; a failed split
        // (unbalanced quotes) silently keeps the command as-is.
        if args.is_empty() {
            if let Ok(parts) = shell_words::split(&command) {
                if let Some((head, tail)) = parts.split_first() {
                    command = head.clone();
                    args = tail.to_vec();
                }
            }
        }
        // Name fallback chain: explicit -> working_dir basename -> command -> "server".
        let name = raw
            .name
            .or_else(|| {
                raw.working_dir.as_ref().and_then(|dir| {
                    Path::new(dir)
                        .file_name()
                        .map(|n| n.to_string_lossy().to_string())
                        .filter(|s| !s.is_empty())
                })
            })
            .unwrap_or_else(|| {
                if command.is_empty() {
                    "server".to_string()
                } else {
                    command.clone()
                }
            });
        // Expand `~` and `$VARS` in the command path itself.
        let command = expand_path(&command).to_string_lossy().into_owned();
        Ok(ServerConfig {
            name,
            command,
            args,
            port: raw.port,
            working_dir: raw.working_dir.map(|dir| expand_path(&dir)),
            env: raw.env,
            autostart: raw.autostart,
        })
    }
}
/// Serde default for `ServerConfig::autostart`: servers start with the daemon
/// unless explicitly disabled in config.
fn default_autostart() -> bool {
    true
}
/// Local project automation task description.
#[derive(Debug, Clone, Deserialize)]
pub struct TaskConfig {
    /// Unique identifier for the task (used when selecting it interactively).
    pub name: String,
    /// Shell command that should be executed for this task.
    pub command: String,
    /// Whether this task should be handed off to the hub daemon instead of running locally.
    #[serde(default, rename = "delegate-to-hub", alias = "delegate_to_hub")]
    pub delegate_to_hub: bool,
    /// Whether this task should run automatically when entering the project root.
    #[serde(default)]
    pub activate_on_cd_to_root: bool,
    /// Optional task-specific dependencies that must be made available before the command runs.
    #[serde(default)]
    pub dependencies: Vec<String>,
    /// Optional human-friendly description.
    #[serde(default, alias = "desc")]
    pub description: Option<String>,
    /// Optional short aliases that `f run` should recognize (e.g. "dcr" for "deploy-cli-release").
    // Accepts either a single string or a list; see `deserialize_shortcuts`.
    #[serde(
        default,
        alias = "shortcut",
        alias = "short",
        deserialize_with = "deserialize_shortcuts"
    )]
    pub shortcuts: Vec<String>,
    /// Whether this task requires interactive input (stdin passthrough, TTY).
    #[serde(default)]
    pub interactive: bool,
    /// Require confirmation when matched via LM Studio (for destructive tasks).
    #[serde(default, alias = "confirm-on-match")]
    pub confirm_on_match: bool,
    /// Command to run when the task is cancelled (Ctrl+C).
    #[serde(default, alias = "on-cancel")]
    pub on_cancel: Option<String>,
}
/// Definition of a dependency that can be referenced by automation tasks.
// Untagged: serde tries each variant's shape in declaration order.
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
pub enum DependencySpec {
    /// Single command/binary that should be available on PATH.
    Single(String),
    /// Multiple commands that should be checked together.
    Multiple(Vec<String>),
    /// Flox package descriptor that should be added to the local env manifest.
    Flox(FloxInstallSpec),
}
/// Deserialize a task shortcut field given either as a single string or as a
/// list of strings, normalizing a missing field to an empty list.
fn deserialize_shortcuts<'de, D>(deserializer: D) -> std::result::Result<Vec<String>, D::Error>
where
    D: Deserializer<'de>,
{
    // Untagged helper: serde tries each variant shape in order.
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum ShortcutField {
        Single(String),
        Multiple(Vec<String>),
    }
    Ok(match Option::<ShortcutField>::deserialize(deserializer)? {
        None => Vec::new(),
        Some(ShortcutField::Single(one)) => vec![one],
        Some(ShortcutField::Multiple(many)) => many,
    })
}
/// Storage configuration describing remote environments providers.
#[derive(Debug, Clone, Deserialize)]
pub struct StorageConfig {
    /// Provider identifier understood by the hosted hub.
    pub provider: String,
    /// Environment variable that stores the API key/token.
    // Defaults to FLOW_SECRETS_TOKEN (see `default_storage_env_var`).
    #[serde(default = "default_storage_env_var")]
    pub env_var: String,
    /// Base URL for the storage hub (defaults to hosted flow hub).
    #[serde(default = "default_hub_url")]
    pub hub_url: String,
    /// Environments that can be synced locally.
    #[serde(default)]
    pub envs: Vec<StorageEnvConfig>,
}
/// Serde default for `StorageConfig::hub_url`: the hosted flow hub.
fn default_hub_url() -> String {
    String::from("https://flow.1focus.ai")
}

/// Serde default for `StorageConfig::env_var`: the token env var name.
fn default_storage_env_var() -> String {
    String::from("FLOW_SECRETS_TOKEN")
}
/// Definition of an environment with named variables.
#[derive(Debug, Clone, Deserialize)]
pub struct StorageEnvConfig {
    // Environment name used when selecting it for sync.
    pub name: String,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub variables: Vec<StorageVariable>,
}
/// A single named variable inside a storage environment.
#[derive(Debug, Clone, Deserialize)]
pub struct StorageVariable {
    // Variable key (e.g. an env var name).
    pub key: String,
    // Optional default value used when the remote provides none.
    #[serde(default)]
    pub default: Option<String>,
}
/// Flox manifest-style configuration (install set, etc.).
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct FloxConfig {
    // Map of install-id -> package spec; `deps` accepted as an alias.
    #[serde(default, rename = "install", alias = "deps")]
    pub install: HashMap<String, FloxInstallSpec>,
}
/// A single flox package descriptor, mirroring flox's manifest fields.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct FloxInstallSpec {
    // Package attribute path, e.g. "nodejs_20".
    #[serde(rename = "pkg-path")]
    pub pkg_path: String,
    #[serde(default, rename = "pkg-group")]
    pub pkg_group: Option<String>,
    #[serde(default)]
    pub version: Option<String>,
    // Target systems; None means all systems.
    #[serde(default)]
    pub systems: Option<Vec<String>>,
    #[serde(default)]
    pub priority: Option<i64>,
}
/// Reference to an additional config file to include (see `load_with_includes`).
#[derive(Debug, Clone, Deserialize)]
pub struct CommandFileConfig {
    // Path to the included file; relative paths resolve against the includer.
    pub path: String,
    #[serde(default)]
    pub description: Option<String>,
}
#[allow(dead_code)]
/// A managed server that runs on a remote hub rather than locally.
#[derive(Debug, Clone, Deserialize)]
pub struct RemoteServerConfig {
    // All plain ServerConfig fields are accepted inline at this level.
    #[serde(flatten)]
    pub server: ServerConfig,
    /// Optional hub name that coordinates this remote process.
    #[serde(default)]
    pub hub: Option<String>,
    /// Paths to sync to the remote hub before launching.
    #[serde(default)]
    pub sync_paths: Vec<PathBuf>,
}
#[allow(dead_code)]
/// Connection details for a remote server hub.
#[derive(Debug, Clone, Deserialize)]
pub struct ServerHubConfig {
    pub name: String,
    pub host: String,
    #[serde(default = "default_server_hub_port")]
    pub port: u16,
    // Optional tailscale node name (usage not visible here — confirm at call sites).
    #[serde(default)]
    pub tailscale: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
}
/// Serde default for `ServerHubConfig::port`.
fn default_server_hub_port() -> u16 {
    9050
}
/// File watcher configuration for local automation.
#[derive(Debug, Clone, Deserialize)]
pub struct WatcherConfig {
    // How changes are acted on: a shell command or poltergeist (default: shell).
    #[serde(default)]
    pub driver: WatcherDriver,
    pub name: String,
    // Directory or file to watch.
    pub path: String,
    // Optional filename filter; TOML key is `match` (reserved word in Rust).
    #[serde(default, rename = "match")]
    pub filter: Option<String>,
    // Shell command to run on change (used by the shell driver).
    #[serde(default)]
    pub command: Option<String>,
    #[serde(default = "default_debounce_ms")]
    pub debounce_ms: u64,
    // Fire the command once at startup, before any change event.
    #[serde(default)]
    pub run_on_start: bool,
    #[serde(default)]
    pub env: HashMap<String, String>,
    // Extra settings for the poltergeist driver.
    #[serde(default)]
    pub poltergeist: Option<PoltergeistConfig>,
}
/// Backend that executes watcher actions; serialized as snake_case strings.
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum WatcherDriver {
    Shell,
    Poltergeist,
}
impl Default for WatcherDriver {
    // Shell is the default driver when none is configured.
    fn default() -> Self {
        WatcherDriver::Shell
    }
}
/// Settings for the poltergeist watcher driver.
#[derive(Debug, Clone, Deserialize)]
pub struct PoltergeistConfig {
    // Executable name/path; defaults to "poltergeist" on PATH.
    #[serde(default = "default_poltergeist_binary")]
    pub binary: String,
    // Subcommand mode to run (haunt/panel/status).
    #[serde(default)]
    pub mode: PoltergeistMode,
    // Extra CLI arguments appended to the invocation.
    #[serde(default)]
    pub args: Vec<String>,
}
impl Default for PoltergeistConfig {
    // Mirrors the per-field serde defaults: "poltergeist", haunt mode, no args.
    fn default() -> Self {
        Self {
            binary: default_poltergeist_binary(),
            mode: PoltergeistMode::Haunt,
            args: Vec::new(),
        }
    }
}
/// Poltergeist invocation mode; maps 1:1 to CLI subcommands (see `as_subcommand`).
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum PoltergeistMode {
    Haunt,
    Panel,
    Status,
}
impl Default for PoltergeistMode {
    // Haunt (the long-running watch mode) is the default.
    fn default() -> Self {
        PoltergeistMode::Haunt
    }
}
/// Serde default for `WatcherConfig::debounce_ms` (milliseconds).
fn default_debounce_ms() -> u64 {
    200
}

/// Serde default for `PoltergeistConfig::binary`.
fn default_poltergeist_binary() -> String {
    String::from("poltergeist")
}
impl PoltergeistMode {
pub fn as_subcommand(&self) -> &'static str {
match self {
PoltergeistMode::Haunt => "haunt",
PoltergeistMode::Panel => "panel",
PoltergeistMode::Status => "status",
}
}
}
/// Streaming configuration handled by the hub (stub for future OBS integration).
#[derive(Debug, Clone, Deserialize)]
pub struct StreamConfig {
    // Streaming provider identifier (semantics defined by the hub).
    pub provider: String,
    // Optional hotkey to toggle streaming — usage not visible here; confirm.
    #[serde(default)]
    pub hotkey: Option<String>,
    // Optional URL hit to toggle the stream — usage not visible here; confirm.
    #[serde(default)]
    pub toggle_url: Option<String>,
}
/// Configuration for a background daemon that flow can manage.
///
/// Example in flow.toml:
/// ```toml
/// [[daemon]]
/// name = "lin"
/// binary = "lin"
/// command = "daemon"
/// args = ["--host", "127.0.0.1", "--port", "9050"]
/// health_url = "http://127.0.0.1:9050/health"
///
/// [[daemon]]
/// name = "base"
/// binary = "base"
/// command = "jazz"
/// args = ["--port", "7201"]
/// health_url = "http://127.0.0.1:7201/health"
/// working_dir = "~/org/1f/base"
/// ```
#[derive(Debug, Clone, Deserialize)]
pub struct DaemonConfig {
    /// Unique name for this daemon (used in `f daemon start <name>`).
    pub name: String,
    /// Binary to execute (can be a name on PATH or absolute path).
    pub binary: String,
    /// Subcommand to run the daemon (e.g., "daemon", "jazz", "serve").
    #[serde(default)]
    pub command: Option<String>,
    /// Additional arguments passed after the command.
    #[serde(default)]
    pub args: Vec<String>,
    /// Health check URL to determine if daemon is running.
    #[serde(default, alias = "health")]
    pub health_url: Option<String>,
    /// Port the daemon listens on (extracted from health_url if not specified).
    // When health_url is absent, `effective_health_url` builds one from host+port.
    #[serde(default)]
    pub port: Option<u16>,
    /// Host the daemon binds to.
    // Defaults to 127.0.0.1 via `effective_host` when unset.
    #[serde(default)]
    pub host: Option<String>,
    /// Working directory for the daemon process.
    #[serde(default, alias = "path")]
    pub working_dir: Option<String>,
    /// Environment variables to set for the daemon.
    #[serde(default)]
    pub env: HashMap<String, String>,
    /// Whether to start this daemon automatically when flow starts.
    #[serde(default)]
    pub autostart: bool,
    /// Description of what this daemon does.
    #[serde(default)]
    pub description: Option<String>,
}
impl DaemonConfig {
    /// Health-check URL: the explicit `health_url` when configured, otherwise
    /// built from host/port as `http://<host>:<port>/health`. Returns `None`
    /// when neither a URL nor a port is available.
    pub fn effective_health_url(&self) -> Option<String> {
        if let Some(url) = self.health_url.as_ref() {
            return Some(url.clone());
        }
        self.port
            .map(|port| format!("http://{}:{}/health", self.effective_host(), port))
    }

    /// Bind host, defaulting to loopback when unset.
    pub fn effective_host(&self) -> &str {
        self.host.as_deref().unwrap_or("127.0.0.1")
    }
}
impl DependencySpec {
/// Add one or more command names to the provided buffer.
pub fn extend_commands(&self, buffer: &mut Vec<String>) {
match self {
DependencySpec::Single(cmd) => buffer.push(cmd.clone()),
DependencySpec::Multiple(cmds) => buffer.extend(cmds.iter().cloned()),
DependencySpec::Flox(_) => {}
}
}
}
/// Deserialize aliases given either as one inline table or as an array of
/// tables, flattening everything into a single map (later entries win on
/// duplicate keys). A missing field yields an empty map.
fn deserialize_aliases<'de, D>(deserializer: D) -> Result<HashMap<String, String>, D::Error>
where
    D: Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum AliasInput {
        Map(HashMap<String, String>),
        List(Vec<HashMap<String, String>>),
    }
    let aliases = match Option::<AliasInput>::deserialize(deserializer)? {
        None => HashMap::new(),
        Some(AliasInput::Map(map)) => map,
        Some(AliasInput::List(tables)) => tables.into_iter().flatten().collect(),
    };
    Ok(aliases)
}
/// Default config path: ~/.config/flow/flow.toml (falls back to legacy config.toml)
///
/// Returns the primary path even when neither file exists, so callers can
/// create it.
pub fn default_config_path() -> PathBuf {
    let dir = global_config_dir();
    let primary = dir.join("flow.toml");
    if !primary.exists() {
        let legacy = dir.join("config.toml");
        if legacy.exists() {
            tracing::warn!("using legacy config path ~/.config/flow/config.toml; rename to flow.toml");
            return legacy;
        }
    }
    primary
}
/// Global config directory: ~/.config/flow
///
/// Falls back to the current directory when `$HOME` is unset.
pub fn global_config_dir() -> PathBuf {
    let home = std::env::var_os("HOME").map_or_else(|| PathBuf::from("."), PathBuf::from);
    home.join(".config/flow")
}
/// Load global secrets from ~/.config/flow/secrets.toml
///
/// Best-effort: a missing or unparseable file is silently ignored. The loaded
/// secrets are applied via `merge_secrets` (which exports them as env vars).
pub fn load_global_secrets() {
    let path = global_config_dir().join("secrets.toml");
    if !path.exists() {
        return;
    }
    if let Ok(secrets) = load_secrets(&path) {
        // merge_secrets only touches the process environment; the Config
        // argument is unused, so a throwaway default suffices.
        let mut scratch = Config::default();
        merge_secrets(&mut scratch, secrets);
        tracing::debug!(path = %path.display(), "loaded global secrets");
    }
}
/// Expand `~` and `$ENV_VARS` in a raw path string.
///
/// Tilde expansion always applies; env-var expansion is best-effort — if any
/// referenced variable is missing, the tilde-expanded string is used as-is.
pub fn expand_path(raw: &str) -> PathBuf {
    let with_home = tilde(raw).into_owned();
    match shellexpand::env(&with_home) {
        Ok(expanded) => PathBuf::from(expanded.into_owned()),
        Err(_) => PathBuf::from(with_home),
    }
}
/// Load a flow config from `path`, following `command_files` includes and
/// overlaying a sibling `secrets.toml` (if present) into the environment.
///
/// Secrets failures are swallowed deliberately so a bad secrets file never
/// blocks config loading.
pub fn load<P: AsRef<Path>>(path: P) -> Result<Config> {
    let path = path.as_ref();
    // `visited` guards against include cycles (see load_with_includes).
    let mut visited = Vec::new();
    let mut cfg = load_with_includes(path, &mut visited)?;
    // Load secrets from secrets.toml in the same directory (never shown on stream)
    if let Some(parent) = path.parent() {
        let secrets_path = parent.join("secrets.toml");
        if secrets_path.exists() {
            if let Ok(secrets) = load_secrets(&secrets_path) {
                merge_secrets(&mut cfg, secrets);
                tracing::debug!(path = %secrets_path.display(), "loaded secrets file");
            }
        }
    }
    Ok(cfg)
}
/// Secrets that can be loaded from a separate file to avoid exposing on stream.
#[derive(Debug, Clone, Default, Deserialize)]
struct Secrets {
    // Arbitrary env vars to export (see merge_secrets).
    #[serde(default)]
    env: HashMap<String, String>,
    #[serde(default)]
    cloudflare: Option<CloudflareSecrets>,
    #[serde(default)]
    openai: Option<ApiKeySecret>,
    #[serde(default)]
    anthropic: Option<ApiKeySecret>,
    #[serde(default)]
    cerebras: Option<ApiKeySecret>,
}
/// Cloudflare-specific secret fields, exported as CLOUDFLARE_* env vars.
#[derive(Debug, Clone, Default, Deserialize)]
struct CloudflareSecrets {
    account_id: Option<String>,
    stream_token: Option<String>,
    stream_key: Option<String>,
}
/// Generic provider API key; accepts `api_key` or `key` in TOML.
#[derive(Debug, Clone, Default, Deserialize)]
struct ApiKeySecret {
    #[serde(alias = "api_key", alias = "key")]
    api_key: Option<String>,
}
/// Read and parse a `secrets.toml` file, attaching the path to any error.
fn load_secrets(path: &Path) -> Result<Secrets> {
    let text = fs::read_to_string(path)
        .with_context(|| format!("failed to read secrets at {}", path.display()))?;
    toml::from_str::<Secrets>(&text)
        .with_context(|| format!("failed to parse secrets at {}", path.display()))
}
/// Export loaded secrets into the process environment so child processes
/// (servers, tasks, daemons) inherit them. The `cfg` parameter is currently
/// untouched — secrets flow only through env vars.
fn merge_secrets(cfg: &mut Config, secrets: Secrets) {
    // Inject secrets as environment variables for child processes
    // SAFETY: We're setting env vars at startup before any threads are spawned
    // NOTE(review): the single-threaded-at-startup invariant is asserted here
    // but not enforced by this function — confirm all call sites run pre-spawn.
    unsafe {
        for (key, value) in secrets.env {
            std::env::set_var(&key, &value);
        }
        // Cloudflare fields map to fixed CLOUDFLARE_* variable names.
        if let Some(cf) = secrets.cloudflare {
            if let Some(v) = cf.account_id {
                std::env::set_var("CLOUDFLARE_ACCOUNT_ID", &v);
            }
            if let Some(v) = cf.stream_token {
                std::env::set_var("CLOUDFLARE_STREAM_TOKEN", &v);
            }
            if let Some(v) = cf.stream_key {
                std::env::set_var("CLOUDFLARE_STREAM_KEY", &v);
            }
        }
        // Provider API keys map to the conventional <PROVIDER>_API_KEY names.
        if let Some(openai) = secrets.openai {
            if let Some(v) = openai.api_key {
                std::env::set_var("OPENAI_API_KEY", &v);
            }
        }
        if let Some(anthropic) = secrets.anthropic {
            if let Some(v) = anthropic.api_key {
                std::env::set_var("ANTHROPIC_API_KEY", &v);
            }
        }
        if let Some(cerebras) = secrets.cerebras {
            if let Some(v) = cerebras.api_key {
                std::env::set_var("CEREBRAS_API_KEY", &v);
            }
        }
    }
    // Storage config can also reference these env vars
    let _ = cfg; // cfg itself doesn't need modification, env vars are set
}
/// Recursively load a config file plus everything it includes via
/// `command_files`, merging included configs into the base.
///
/// Cycle-safe: `visited` holds the canonical paths currently on the include
/// stack; revisiting one is an error. On a parse failure, an automatic TOML
/// fix-up is attempted and, if it produced changes, written back to disk
/// before reparsing.
fn load_with_includes(path: &Path, visited: &mut Vec<PathBuf>) -> Result<Config> {
    // Canonicalize so the same file reached via different paths is detected.
    let canonical = path
        .canonicalize()
        .with_context(|| format!("failed to resolve path {}", path.display()))?;
    if visited.contains(&canonical) {
        anyhow::bail!(
            "cycle detected while loading config includes: {}",
            path.display()
        );
    }
    visited.push(canonical.clone());
    let contents = fs::read_to_string(&canonical)
        .with_context(|| format!("failed to read flow config at {}", path.display()))?;
    let mut cfg: Config = match toml::from_str(&contents) {
        Ok(cfg) => cfg,
        Err(err) => {
            // Parse failed: try the auto-fixer; if it has nothing to offer,
            // surface the original parse error.
            let fix = fixup::fix_toml_content(&contents);
            if fix.fixes_applied.is_empty() {
                return Err(err)
                    .with_context(|| format!("failed to parse flow config at {}", path.display()));
            }
            let fixed = fixup::apply_fixes_to_content(&contents, &fix.fixes_applied);
            // Persist the fixed content; if writing fails, report both errors.
            if let Err(write_err) = fs::write(&canonical, &fixed) {
                return Err(err)
                    .with_context(|| format!("failed to parse flow config at {}", path.display()))
                    .with_context(|| format!("auto-fix write failed: {}", write_err));
            }
            toml::from_str(&fixed).with_context(|| {
                format!(
                    "failed to parse flow config at {} (after auto-fix)",
                    path.display()
                )
            })?
        }
    };
    // Depth-first include resolution; clone because merge_config mutates cfg.
    for include in cfg.command_files.clone() {
        let include_path = resolve_include_path(&canonical, &include.path);
        if let Some(description) = include.description.as_deref() {
            tracing::debug!(
                path = %include_path.display(),
                description,
                "loading additional command file"
            );
        }
        let included = load_with_includes(&include_path, visited)
            .with_context(|| format!("failed to load commands file {}", include_path.display()))?;
        merge_config(&mut cfg, included);
    }
    // Pop after children so sibling includes may legitimately share files.
    visited.pop();
    Ok(cfg)
}
/// Resolve an include path: absolute paths pass through; relative paths are
/// resolved against the directory containing `base`.
fn resolve_include_path(base: &Path, include: &str) -> PathBuf {
    let candidate = PathBuf::from(include);
    if candidate.is_absolute() {
        return candidate;
    }
    match base.parent() {
        Some(parent) => parent.join(candidate),
        // base has no parent (e.g. a bare filename): use the path as given.
        None => candidate,
    }
}
/// Merge an included config into `base`.
///
/// Precedence rules: scalar/optional fields keep the base's value when set
/// (first writer wins); list fields (servers, tasks, watchers, daemons, ...)
/// are appended; map fields (aliases, dependencies, flox installs) keep the
/// base's entry on key conflicts.
fn merge_config(base: &mut Config, other: Config) {
    if base.project_name.is_none() {
        base.project_name = other.project_name;
    }
    if base.flow.primary_task.is_none() {
        base.flow.primary_task = other.flow.primary_task;
    }
    if base.flow.release_task.is_none() {
        base.flow.release_task = other.flow.release_task;
    }
    // NOTE: OptionsConfig::merge lets `other` win — the opposite of the
    // first-writer-wins rule used for the fields above.
    base.options.merge(other.options);
    base.servers.extend(other.servers);
    base.remote_servers.extend(other.remote_servers);
    base.tasks.extend(other.tasks);
    base.watchers.extend(other.watchers);
    base.daemons.extend(other.daemons);
    // take().or(...) keeps the base's value when it already has one.
    base.stream = base.stream.take().or(other.stream);
    base.storage = base.storage.take().or(other.storage);
    base.server_hub = base.server_hub.take().or(other.server_hub);
    for (key, value) in other.aliases {
        base.aliases.entry(key).or_insert(value);
    }
    for (key, value) in other.dependencies {
        base.dependencies.entry(key).or_insert(value);
    }
    // Flox installs merge per-key; base entries win on conflict.
    match (&mut base.flox, other.flox) {
        (Some(base_flox), Some(other_flox)) => {
            for (key, value) in other_flox.install {
                base_flox.install.entry(key).or_insert(value);
            }
        }
        (None, Some(other_flox)) => base.flox = Some(other_flox),
        _ => {}
    }
}
/// Load config from the given path, logging a warning and returning an empty
/// config if anything goes wrong. This keeps the daemon usable even if the
/// config file is missing or invalid.
pub fn load_or_default<P: AsRef<Path>>(path: P) -> Config {
    load(path).unwrap_or_else(|err| {
        tracing::warn!(
            ?err,
            "failed to load flow config; starting with no managed servers"
        );
        Config::default()
    })
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::PathBuf;
fn fixture_path(relative: &str) -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(relative)
}
#[test]
fn load_parses_global_fixture() {
let cfg = load(fixture_path("test-data/global-config/flow.toml"))
.expect("global config fixture should parse");
assert_eq!(cfg.version, Some(1));
assert!(cfg.options.trace_terminal_io, "options table should parse");
assert_eq!(cfg.servers.len(), 1);
assert_eq!(cfg.remote_servers.len(), 1);
assert_eq!(cfg.watchers.len(), 1);
assert_eq!(
cfg.tasks.len(),
1,
"global config should inherit tasks from included command files"
);
let watcher = &cfg.watchers[0];
assert_eq!(watcher.driver, WatcherDriver::Shell);
assert_eq!(watcher.name, "karabiner");
assert_eq!(watcher.path, "~/config/i/karabiner");
assert_eq!(watcher.filter.as_deref(), Some("karabiner.edn"));
assert_eq!(watcher.command.as_deref(), Some("~/bin/goku"));
assert_eq!(watcher.debounce_ms, 150);
assert!(watcher.run_on_start);
assert!(watcher.poltergeist.is_none());
let server = &cfg.servers[0];
assert_eq!(server.name, "1f");
assert_eq!(server.command, "blade");
assert_eq!(server.args, ["--port", "4000"]);
let working_dir = server
.working_dir
.as_ref()
.expect("server working dir should parse");
assert!(
working_dir.ends_with("src/org/1f/1f"),
"unexpected working dir: {}",
working_dir.display()
);
assert!(server.env.is_empty());
assert!(
server.autostart,
"autostart should default to true when omitted"
);
let sync_task = &cfg.tasks[0];
assert_eq!(sync_task.name, "sync-config");
assert_eq!(
sync_task.command,
"rsync -av ~/.config/flow remote:~/flow-config"
);
assert!(
cfg.aliases.contains_key("fsh"),
"included aliases should merge into base config"
);
let remote = &cfg.remote_servers[0];
assert_eq!(remote.server.name, "homelab-blade");
assert_eq!(remote.hub.as_deref(), Some("homelab"));
assert_eq!(remote.sync_paths, [PathBuf::from("~/config/i/karabiner")]);
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/notify.rs | src/notify.rs | //! Notify command - sends proposals and alerts to Lin app.
use crate::cli::NotifyCommand;
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use uuid::Uuid;
/// Proposal format matching Lin's ProposalService.swift
#[derive(Debug, Serialize, Deserialize)]
struct Proposal {
    // Random UUID string, generated per proposal.
    id: String,
    // Unix timestamp (seconds) of creation.
    timestamp: i64,
    title: String,
    // Shell command Lin should offer to run.
    action: String,
    context: Option<String>,
    // `rename` restates the field's own name — presumably kept for parity
    // with Lin's JSON schema; harmless either way.
    #[serde(rename = "expires_at")]
    expires_at: i64,
}
/// Alert format for Lin's NotificationBannerManager.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct Alert {
    // Random UUID string, generated per alert.
    id: String,
    // Unix timestamp (seconds) of creation.
    timestamp: i64,
    text: String,
    kind: String, // "info", "warning", "error", "success"
    // Unix timestamp (seconds) after which Lin should drop the banner.
    #[serde(rename = "expires_at")]
    expires_at: i64,
}
/// Get the path to Lin's proposals.json file
/// (`~/Library/Application Support/Lin/proposals.json`).
fn get_proposals_path() -> Result<PathBuf> {
    dirs::home_dir()
        .context("Could not find home directory")
        .map(|home| {
            home.join("Library")
                .join("Application Support")
                .join("Lin")
                .join("proposals.json")
        })
}
/// Run the notify command - write a proposal to Lin's proposals.json.
///
/// Appends one proposal to the existing list (an unreadable or malformed file
/// is treated as empty) and rewrites the file pretty-printed.
pub fn run(cmd: NotifyCommand) -> Result<()> {
    let proposals_path = get_proposals_path()?;
    // Ensure the directory exists
    if let Some(parent) = proposals_path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Read existing proposals; malformed JSON deliberately resets to empty
    // rather than failing.
    let mut proposals: Vec<Proposal> = if proposals_path.exists() {
        let content = fs::read_to_string(&proposals_path)?;
        serde_json::from_str(&content).unwrap_or_default()
    } else {
        Vec::new()
    };
    // Current Unix timestamp (seconds).
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .context("Time went backwards")?
        .as_secs() as i64;
    // Derive a title from the action when none was given; drop a leading
    // "f " so `f deploy` is shown as "Run: deploy".
    let title = cmd.title.unwrap_or_else(|| {
        let action = cmd.action.strip_prefix("f ").unwrap_or(&cmd.action);
        format!("Run: {}", action)
    });
    // Create new proposal
    let proposal = Proposal {
        id: Uuid::new_v4().to_string(),
        timestamp: now,
        title,
        action: cmd.action.clone(),
        context: cmd.context,
        expires_at: now + cmd.expires as i64,
    };
    proposals.push(proposal);
    // Write back the full list, pretty-printed for easy inspection.
    let content = serde_json::to_string_pretty(&proposals)?;
    fs::write(&proposals_path, content)?;
    println!("Proposal sent to Lin: {}", cmd.action);
    Ok(())
}
// ============================================================================
// Alerts API (for commit rejections, errors, etc.)
// ============================================================================
/// Get the path to Lin's alerts.json file
/// (`~/Library/Application Support/Lin/alerts.json`).
fn get_alerts_path() -> Result<PathBuf> {
    dirs::home_dir()
        .context("Could not find home directory")
        .map(|home| {
            home.join("Library")
                .join("Application Support")
                .join("Lin")
                .join("alerts.json")
        })
}
/// Alert kind for Lin's NotificationBannerManager.
#[derive(Debug, Clone, Copy)]
pub enum AlertKind {
    Info,
    Warning,
    Error,
    Success,
}

impl AlertKind {
    /// Wire-format string Lin expects in the alert's `kind` field.
    fn as_str(&self) -> &'static str {
        match self {
            Self::Info => "info",
            Self::Warning => "warning",
            Self::Error => "error",
            Self::Success => "success",
        }
    }
}
/// Send an alert to Lin's notification banner.
/// Alerts are shown as floating banners - errors/warnings stay for 10+ seconds.
///
/// Appends to alerts.json (a malformed file resets to empty), keeps only the
/// 20 most recent alerts, and rewrites the file pretty-printed.
pub fn send_alert(text: &str, kind: AlertKind) -> Result<()> {
    let alerts_path = get_alerts_path()?;
    // Ensure the directory exists
    if let Some(parent) = alerts_path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Read existing alerts; malformed JSON deliberately resets to empty.
    let mut alerts: Vec<Alert> = if alerts_path.exists() {
        let content = fs::read_to_string(&alerts_path)?;
        serde_json::from_str(&content).unwrap_or_default()
    } else {
        Vec::new()
    };
    // Current Unix timestamp (seconds).
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .context("Time went backwards")?
        .as_secs() as i64;
    // Display duration in seconds, by severity.
    let duration = match kind {
        AlertKind::Error | AlertKind::Warning => 30, // 30 seconds for errors/warnings
        AlertKind::Success => 5,
        AlertKind::Info => 10,
    };
    alerts.push(Alert {
        id: Uuid::new_v4().to_string(),
        timestamp: now,
        text: text.to_string(),
        kind: kind.as_str().to_string(),
        expires_at: now + duration,
    });
    // Keep only the 20 most recent alerts; drain truncates from the front
    // in place instead of rebuilding the Vec via into_iter/skip/collect.
    if alerts.len() > 20 {
        let excess = alerts.len() - 20;
        alerts.drain(..excess);
    }
    // Write back
    let content = serde_json::to_string_pretty(&alerts)?;
    fs::write(&alerts_path, content)?;
    Ok(())
}
/// Send an error alert to Lin.
// Convenience wrapper over `send_alert` with a fixed severity.
pub fn send_error(text: &str) -> Result<()> {
    send_alert(text, AlertKind::Error)
}
/// Send a warning alert to Lin.
// Convenience wrapper over `send_alert` with a fixed severity.
pub fn send_warning(text: &str) -> Result<()> {
    send_alert(text, AlertKind::Warning)
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/agents.rs | src/agents.rs | //! Gen agents integration.
//!
//! Invokes gen AI agents from the flow CLI.
//! Gen is opencode with GEN_MODE=1, providing flow integration.
use std::collections::HashSet;
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use anyhow::{Context, Result, bail};
use ignore::WalkBuilder;
use shell_words;
use crate::cli::{AgentsAction, AgentsCommand};
use crate::discover;
/// Default gen repository location.
// NOTE(review): machine-specific absolute path; overridable via $GEN_REPO
// (see find_gen).
const GEN_REPO: &str = "/Users/nikiv/org/gen/gen";
/// Global agents that can be invoked from anywhere.
const GLOBAL_AGENTS: &[(&str, &str)] = &[
    ("repos-health", "Ensure all ~/repos have proper upstream configuration"),
    ("repos-sync", "Sync all ~/repos with upstream, resolve conflicts"),
    ("os-health", "Identify high-CPU or hanging processes and clean safely"),
];
/// Built-in subagents as (name, description) pairs.
// NOTE(review): not referenced in the visible portion of this file —
// presumably consumed by build_agent_entries; confirm before removing.
const BUILTIN_SUBAGENTS: &[(&str, &str)] = &[
    ("flow", "Flow-aware agent with full context about flow.toml, tasks, and CLI"),
    ("explore", "Fast codebase exploration: find files, search code, answer questions"),
    ("codify", "Convert scripts/sessions to reusable TypeScript for Bun"),
    ("general", "Multi-step autonomous tasks, parallel execution"),
];
/// Run the agents subcommand.
///
/// With an explicit action, dispatch to it. With no action: bare `f agents`
/// opens the fuzzy picker; `f agents <name> [words...]` runs a global agent
/// with the remaining words as its prompt.
pub fn run(cmd: AgentsCommand) -> Result<()> {
    match cmd.action {
        Some(AgentsAction::List) => list_agents(),
        Some(AgentsAction::Run { agent, prompt }) => run_agent(&agent, prompt),
        Some(AgentsAction::Global { agent, prompt }) => run_global_agent(&agent, prompt),
        None => match cmd.agent.split_first() {
            None => run_fuzzy_agents(),
            Some((agent, rest)) => {
                let prompt = if rest.is_empty() {
                    None
                } else {
                    Some(rest.to_vec())
                };
                run_global_agent(agent, prompt)
            }
        },
    }
}
/// Find gen - either the installed binary or the repo.
///
/// Search order: `~/.local/bin/gen`, then `gen` on PATH, then the default
/// checkout at GEN_REPO, then a checkout named by `$GEN_REPO`. A checkout
/// only counts if `packages/opencode/src/index.ts` exists inside it.
fn find_gen() -> Option<GenLocation> {
    // Preferred: binary installed via `f install` in the gen repo.
    if let Some(home) = dirs::home_dir() {
        let installed = home.join(".local/bin/gen");
        if installed.exists() {
            return Some(GenLocation::Binary(installed));
        }
    }
    // Next: anything named `gen` on PATH.
    if let Ok(on_path) = which::which("gen") {
        return Some(GenLocation::Binary(on_path));
    }
    // Fall back to a source checkout: default location, then $GEN_REPO.
    let mut candidates = vec![PathBuf::from(GEN_REPO)];
    if let Ok(env_repo) = std::env::var("GEN_REPO") {
        candidates.push(PathBuf::from(env_repo));
    }
    candidates
        .into_iter()
        .find(|repo| repo.join("packages/opencode/src/index.ts").exists())
        .map(GenLocation::Repo)
}
/// Where gen was found: an installed executable or a source checkout root.
enum GenLocation {
    Binary(PathBuf),
    Repo(PathBuf),
}
/// List available agents.
// NOTE(review): the "Flow agents"/"Subagents" sections below are hard-coded
// and duplicate BUILTIN_SUBAGENTS — keep them in sync if that const changes.
fn list_agents() -> Result<()> {
    println!("Global agents:\n");
    for (name, desc) in GLOBAL_AGENTS {
        println!("  {:<14} - {}", name, desc);
    }
    println!();
    println!("Flow agents:\n");
    println!("  flow           - Flow-aware agent with full context about flow.toml, tasks, and CLI");
    println!("                   Knows schema, best practices, and can create/modify tasks");
    println!();
    println!("Subagents (via Task tool):\n");
    println!("  explore        - Fast codebase exploration: find files, search code, answer questions");
    println!("  codify         - Convert scripts/sessions to reusable TypeScript for Bun");
    println!("  general        - Multi-step autonomous tasks, parallel execution");
    println!();
    println!("Primary agents (standalone modes):\n");
    println!("  build          - Default coding/building agent (full permissions)");
    println!("  plan           - Planning mode with read-only restrictions");
    println!();
    println!("Usage:");
    println!("  f agents global repos-health   # Check all ~/repos have upstream");
    println!("  f agents global repos-sync     # Sync all ~/repos with upstream");
    println!("  f agents global os-health      # Identify high-CPU or hanging processes");
    println!("  f agents run flow \"add a deploy task\"");
    println!("  f agents run explore \"find all API endpoints\"");
    println!("  f agents os-health             # Shortcut for global agents");
    println!("  f agents os-health clean       # Cleanup non-system offenders");
    println!();
    // Report which gen installation would be used, or how to install one.
    match find_gen() {
        Some(GenLocation::Binary(p)) => println!("Using: {}", p.display()),
        Some(GenLocation::Repo(p)) => println!("Using repo: {}", p.display()),
        None => {
            println!("⚠ gen not found. Install with:");
            println!("  cd {} && f install", GEN_REPO);
            println!("  # or set GEN_REPO environment variable");
        }
    }
    Ok(())
}
#[derive(Debug, Clone, Copy)]
enum AgentKind {
Global,
Subagent,
}
struct AgentEntry {
name: String,
display: String,
kind: AgentKind,
}
/// Outcome of one fzf selection round (see `run_agent_fzf`).
struct FzfAgentResult<'a> {
    /// The entry the user picked, borrowed from the entries slice.
    entry: &'a AgentEntry,
    /// True when the user pressed Tab (`--expect tab`), meaning
    /// "prompt me for extra arguments before running".
    with_args: bool,
}
/// Interactive agent picker: show all agents in fzf and run the selection.
///
/// Enter runs the agent directly; Tab first prompts for a free-form prompt.
/// Falls back to the plain listing when fzf is not on PATH.
fn run_fuzzy_agents() -> Result<()> {
    let entries = build_agent_entries()?;
    if entries.is_empty() {
        println!("No agents available.");
        return Ok(());
    }
    if which::which("fzf").is_err() {
        println!("fzf not found on PATH – install it to use fuzzy selection.");
        list_agents()?;
        return Ok(());
    }
    if let Some(result) = run_agent_fzf(&entries)? {
        // Tab = user wants to supply extra arguments up front.
        let mut prompt_args = if result.with_args {
            prompt_for_agent_prompt(&result.entry.name)?
        } else {
            Vec::new()
        };
        match result.entry.kind {
            AgentKind::Global => {
                // Global agents have sensible defaults, so no prompt is fine.
                if prompt_args.is_empty() {
                    run_global_agent(&result.entry.name, None)?;
                } else {
                    run_global_agent(&result.entry.name, Some(prompt_args))?;
                }
            }
            AgentKind::Subagent => {
                // Subagents require a prompt: ask (again) if we still have none.
                if prompt_args.is_empty() {
                    prompt_args = prompt_for_agent_prompt(&result.entry.name)?;
                }
                if prompt_args.is_empty() {
                    bail!("No prompt provided.");
                }
                run_agent(&result.entry.name, prompt_args)?;
            }
        }
    }
    Ok(())
}
/// Present `entries` in fzf and return the user's selection, if any.
///
/// Runs fzf with `--expect tab`, so its stdout contains the pressed key on
/// the first line ("tab", or empty for Enter) and the chosen display line
/// on the second. Returns `Ok(None)` when fzf exits non-zero (aborted) or
/// nothing was selected.
fn run_agent_fzf<'a>(entries: &'a [AgentEntry]) -> Result<Option<FzfAgentResult<'a>>> {
    let mut child = Command::new("fzf")
        .arg("--prompt")
        .arg("agents> ")
        .arg("--expect")
        .arg("tab")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .context("failed to spawn fzf")?;
    {
        // Feed one display line per entry; scope closes stdin so fzf sees EOF.
        let stdin = child.stdin.as_mut().context("failed to open fzf stdin")?;
        for entry in entries {
            writeln!(stdin, "{}", entry.display)?;
        }
    }
    let output = child.wait_with_output()?;
    if !output.status.success() {
        return Ok(None);
    }
    let raw = String::from_utf8(output.stdout).context("fzf output was not valid UTF-8")?;
    let mut lines = raw.lines();
    // First line: the expected key that terminated fzf (empty for Enter).
    let key = lines.next().unwrap_or("");
    let with_args = key == "tab";
    // Second line: the selected display string.
    let selection = lines.next().unwrap_or("").trim();
    if selection.is_empty() {
        return Ok(None);
    }
    // Map the display line back to its entry (display is unique; see `seen`).
    let entry = entries.iter().find(|entry| entry.display == selection);
    Ok(entry.map(|e| FzfAgentResult {
        entry: e,
        with_args,
    }))
}
/// Read one line from stdin as the agent prompt and split it shell-style.
///
/// Returns an empty vec when stdin is closed or the read fails (caller
/// treats that as "no prompt given"). Errors only when the line cannot be
/// parsed as shell words (e.g. unbalanced quotes).
fn prompt_for_agent_prompt(agent_name: &str) -> Result<Vec<String>> {
    use std::io::{self, BufRead};
    println!("(tip: use quotes for prompts with spaces, e.g. 'find all API endpoints')");
    // Mimic the CLI invocation the user would have typed.
    print!("f agents {} ", agent_name);
    io::stdout().flush()?;
    let stdin = io::stdin();
    let line = stdin.lock().lines().next();
    let input = match line {
        Some(Ok(s)) => s,
        // EOF or read error: treat as "no prompt".
        _ => return Ok(Vec::new()),
    };
    let args = shell_words::split(&input).context("failed to parse prompt")?;
    Ok(args)
}
/// Build the full list of agents shown in the fuzzy picker.
///
/// Sources, in order: compiled-in `GLOBAL_AGENTS`, compiled-in
/// `BUILTIN_SUBAGENTS`, markdown agent files under the project's
/// `.opencode/agent[s]` directories, and the user's global opencode config
/// directory. Duplicate display lines are suppressed via `seen`.
fn build_agent_entries() -> Result<Vec<AgentEntry>> {
    let mut entries = Vec::new();
    let mut seen = HashSet::new();
    for (name, desc) in GLOBAL_AGENTS {
        let display = format!("[global] {} - {}", name, desc);
        if seen.insert(display.clone()) {
            entries.push(AgentEntry {
                name: (*name).to_string(),
                display,
                kind: AgentKind::Global,
            });
        }
    }
    for (name, desc) in BUILTIN_SUBAGENTS {
        let display = format!("[builtin] {} - {}", name, desc);
        if seen.insert(display.clone()) {
            entries.push(AgentEntry {
                name: (*name).to_string(),
                display,
                kind: AgentKind::Subagent,
            });
        }
    }
    // Project-local agents: both `agent/` and `agents/` spellings are scanned.
    if let Some(project_root) = find_project_root() {
        let opencode_dir = project_root.join(".opencode");
        entries.extend(collect_agent_entries(
            &opencode_dir.join("agent"),
            "project",
            &mut seen,
        )?);
        entries.extend(collect_agent_entries(
            &opencode_dir.join("agents"),
            "project",
            &mut seen,
        )?);
    }
    // User-global agents from the platform config dir (e.g. ~/.config/opencode).
    if let Some(global_dir) = dirs::config_dir().map(|d| d.join("opencode")) {
        entries.extend(collect_agent_entries(
            &global_dir.join("agent"),
            "global-config",
            &mut seen,
        )?);
        entries.extend(collect_agent_entries(
            &global_dir.join("agents"),
            "global-config",
            &mut seen,
        )?);
    }
    Ok(entries)
}
/// Walk upward from the current directory looking for the project root.
///
/// A directory counts as the root when it contains either `.opencode` or
/// `flow.toml`. Returns `None` when the filesystem root is reached without
/// a match, or when the current directory cannot be determined.
fn find_project_root() -> Option<PathBuf> {
    let mut candidate = std::env::current_dir().ok()?;
    while !(candidate.join(".opencode").exists() || candidate.join("flow.toml").exists()) {
        // `pop` returns false once we hit the filesystem root.
        if !candidate.pop() {
            return None;
        }
    }
    Some(candidate)
}
/// Scan `root` recursively for markdown agent definitions.
///
/// Every `*.md` file becomes a `Subagent` entry named after its path
/// relative to `root` (slashes as separators), labeled `[<label>]` in the
/// display line. Files whose frontmatter declares `mode: primary` are
/// skipped, as are entries whose display line was already `seen`.
/// Walk errors on individual entries are silently skipped (best effort).
fn collect_agent_entries(
    root: &Path,
    label: &str,
    seen: &mut HashSet<String>,
) -> Result<Vec<AgentEntry>> {
    let mut entries = Vec::new();
    if !root.exists() {
        return Ok(entries);
    }
    // Walk everything under root: hidden files included, ignore files disabled
    // so agent definitions are never filtered out by gitignore rules.
    let walker = WalkBuilder::new(root)
        .hidden(false)
        .git_ignore(false)
        .git_exclude(false)
        .ignore(false)
        .build();
    for entry in walker {
        let entry = match entry {
            Ok(e) => e,
            Err(_) => continue,
        };
        let path = entry.path();
        if !path.is_file() || path.extension().and_then(|s| s.to_str()) != Some("md") {
            continue;
        }
        let name = match agent_name_from_path(root, path) {
            Some(n) => n,
            None => continue,
        };
        let (desc, mode) = parse_agent_frontmatter(path)?;
        // Primary agents are standalone modes, not pickable subagents.
        if matches!(mode.as_deref(), Some("primary")) {
            continue;
        }
        let summary = desc.unwrap_or_else(|| "No description".to_string());
        let display = format!("[{label}] {} - {}", name, summary);
        if seen.insert(display.clone()) {
            entries.push(AgentEntry {
                name,
                display,
                kind: AgentKind::Subagent,
            });
        }
    }
    Ok(entries)
}
/// Derive an agent name from a markdown file path relative to `root`.
///
/// Strips the `root` prefix and the file extension, then normalizes OS
/// path separators to `/` (so `root/a/b.md` becomes `a/b`). Returns
/// `None` when `path` does not live under `root`.
fn agent_name_from_path(root: &Path, path: &Path) -> Option<String> {
    let rel = path.strip_prefix(root).ok()?.with_extension("");
    let name = rel
        .to_string_lossy()
        .replace(std::path::MAIN_SEPARATOR, "/");
    Some(name)
}
/// Extract `description:` and `mode:` from a markdown file's YAML frontmatter.
///
/// Returns `(None, None)` when the file does not start with a `---`
/// frontmatter opener (unreadable files read as empty and hit the same
/// path). Scanning stops at the closing `---`; values are cleaned of
/// quotes/whitespace via `trim_yaml_scalar`. Always `Ok` in practice —
/// the `Result` keeps the signature uniform with callers.
///
/// NOTE(review): keys are matched by prefix after trimming each line, so
/// an indented *nested* `description:` inside the frontmatter would also
/// match — confirm agent frontmatter is always flat key/value pairs.
fn parse_agent_frontmatter(path: &Path) -> Result<(Option<String>, Option<String>)> {
    let contents = fs::read_to_string(path).unwrap_or_default();
    let mut lines = contents.lines();
    if lines.next().map(|l| l.trim()) != Some("---") {
        return Ok((None, None));
    }
    let mut desc: Option<String> = None;
    let mut mode: Option<String> = None;
    for line in lines {
        let line = line.trim();
        if line == "---" {
            break;
        }
        if let Some(value) = line.strip_prefix("description:") {
            desc = Some(trim_yaml_scalar(value));
        } else if let Some(value) = line.strip_prefix("mode:") {
            mode = Some(trim_yaml_scalar(value));
        }
    }
    Ok((desc, mode))
}
/// Clean up a YAML scalar value: strip surrounding whitespace, then any
/// leading/trailing double or single quotes (e.g. ` "hello" ` -> `hello`).
fn trim_yaml_scalar(value: &str) -> String {
    value
        .trim()
        .trim_matches('"')
        .trim_matches('\'')
        .to_string()
}
/// Run a global agent (repos-health, repos-sync, etc.)
///
/// Validates `agent` against the `GLOBAL_AGENTS` catalog, locates a `gen`
/// installation, builds the prompt (custom args override the default, and
/// `os-health clean`/`cleanup` selects the cleanup variant), then invokes
/// gen. Errors on unknown agents, missing gen, or a non-zero agent exit.
fn run_global_agent(agent: &str, prompt: Option<Vec<String>>) -> Result<()> {
    // Validate it's a known global agent
    let agent_desc = GLOBAL_AGENTS
        .iter()
        .find(|(name, _)| *name == agent)
        .map(|(_, desc)| *desc);
    if agent_desc.is_none() {
        let available: Vec<_> = GLOBAL_AGENTS.iter().map(|(n, _)| *n).collect();
        bail!(
            "Unknown global agent: '{}'\nAvailable: {}",
            agent,
            available.join(", ")
        );
    }
    let gen_loc = find_gen().ok_or_else(|| {
        anyhow::anyhow!(
            "gen not found. Install with:\n cd {} && f install\n # or set GEN_REPO env var",
            GEN_REPO
        )
    })?;
    // Build prompt - use custom if provided, otherwise default for the agent
    let full_prompt = match prompt {
        Some(p) if !p.is_empty() => {
            // Special case: `f agents os-health clean` gets the cleanup prompt.
            if agent == "os-health" && is_os_health_clean_request(&p) {
                build_os_health_clean_prompt()
            } else {
                format!(
                    "Use the Task tool with subagent_type='{}' to: {}",
                    agent,
                    p.join(" ")
                )
            }
        }
        _ => build_global_agent_prompt(agent),
    };
    println!("Invoking {} agent...\n", agent);
    let status = invoke_gen(&gen_loc, &full_prompt)?;
    if !status.success() {
        bail!("Agent exited with status: {}", status);
    }
    Ok(())
}
/// Build default prompt for global agents.
///
/// Known agents (`repos-health`, `repos-sync`, `os-health`) get a tailored
/// Task-tool instruction; any other name falls back to a generic prompt
/// that still routes through `subagent_type='<agent>'`.
fn build_global_agent_prompt(agent: &str) -> String {
    match agent {
        // These prompts contain no interpolation, so `format!` was needless
        // (clippy::useless_format); return the literals directly.
        "repos-health" => "Use the Task tool with subagent_type='repos-health' to: \
             Check all repositories in ~/repos and ensure they have proper upstream configuration. \
             For repos missing upstream, run 'f upstream setup --url <origin-url>'. \
             Report health status for each repo."
            .to_string(),
        "repos-sync" => "Use the Task tool with subagent_type='repos-sync' to: \
             Sync all repositories in ~/repos with their upstream remotes. \
             For each repo, run 'f upstream sync' and resolve any merge conflicts \
             by preserving features from both sides. Report progress for each repo."
            .to_string(),
        "os-health" => "Use the Task tool with subagent_type='os-health' to: \
             Identify high-CPU or hanging processes on macOS and report offenders. \
             Then ask: \"Proceed with cleanup? (y/n)\". \
             If the user answers \"y\", terminate non-system offenders with TERM, \
             recheck, and only use KILL if needed. \
             Do not terminate system processes; report them and ask before any action."
            .to_string(),
        // Unknown agent: generic fallback that still names the subagent type.
        _ => format!("Use the Task tool with subagent_type='{}' to complete the task.", agent),
    }
}
/// True when the user's prompt is exactly one word meaning "clean up"
/// (`clean` or `cleanup`, case-insensitive).
fn is_os_health_clean_request(prompt: &[String]) -> bool {
    match prompt {
        [only] => {
            let word = only.to_lowercase();
            word == "clean" || word == "cleanup"
        }
        _ => false,
    }
}
/// Prompt used for `f agents os-health clean`: identify offenders, confirm,
/// then clean up non-system processes (TERM first, KILL only if needed).
fn build_os_health_clean_prompt() -> String {
    String::from(
        "Use the Task tool with subagent_type='os-health' to: \
         Identify high-CPU or hanging processes on macOS and report offenders. \
         Ask \"Proceed with cleanup? (y/n)\" and if the user answers \"y\", \
         terminate non-system offenders with TERM, recheck, and only use KILL if needed. \
         Do not terminate system processes; report them and ask before any action.",
    )
}
/// Run an agent with a prompt.
///
/// Locates a `gen` installation, requires a non-empty prompt, then builds
/// the full prompt: the `flow` agent gets the schema/CLI context via
/// `build_flow_prompt`, every other agent is routed through the Task tool.
/// Errors when gen is missing, the prompt is empty, or gen exits non-zero.
fn run_agent(agent: &str, prompt: Vec<String>) -> Result<()> {
    let gen_loc = find_gen().ok_or_else(|| {
        anyhow::anyhow!(
            "gen not found. Install with:\n cd {} && f install\n # or set GEN_REPO env var",
            GEN_REPO
        )
    })?;
    let prompt_str = prompt.join(" ");
    if prompt_str.is_empty() {
        bail!(
            "No prompt provided.\nUsage: f agents run {} \"your prompt here\"",
            agent
        );
    }
    // Build the full prompt based on agent type
    let full_prompt = if agent == "flow" {
        build_flow_prompt(&prompt_str)?
    } else {
        // Regular subagent - use Task tool
        format!(
            "Use the Task tool with subagent_type='{}' to: {}",
            agent, prompt_str
        )
    };
    println!("Invoking {} agent...\n", agent);
    let status = invoke_gen(&gen_loc, &full_prompt)?;
    if !status.success() {
        bail!("Agent exited with status: {}", status);
    }
    Ok(())
}
/// Invoke gen with a prompt.
///
/// Binary installs run `gen run <prompt>` directly; repo checkouts run the
/// TypeScript entry point through `bun` with `GEN_MODE=1`. Stdio is
/// inherited in both cases so the agent session stays fully interactive.
/// Returns the child's exit status; the caller decides how to handle it.
fn invoke_gen(location: &GenLocation, prompt: &str) -> Result<std::process::ExitStatus> {
    match location {
        GenLocation::Binary(path) => Command::new(path)
            .args(["run", prompt])
            .stdin(Stdio::inherit())
            .stdout(Stdio::inherit())
            .stderr(Stdio::inherit())
            .status()
            .context("failed to run gen"),
        // Source checkout: run via bun from the opencode package directory.
        GenLocation::Repo(repo) => Command::new("bun")
            .args([
                "run",
                "--cwd",
                &repo.join("packages/opencode").to_string_lossy(),
                "--conditions=browser",
                "src/index.ts",
                "run",
                prompt,
            ])
            .env("GEN_MODE", "1")
            .stdin(Stdio::inherit())
            .stdout(Stdio::inherit())
            .stderr(Stdio::inherit())
            .status()
            .context("failed to run gen from repo"),
    }
}
/// Build a flow-aware prompt with full context.
///
/// Concatenates the flow.toml schema reference, the current project's
/// discovered tasks (when discovery succeeds and finds any), and the CLI
/// command reference, then appends the user's request. Discovery failures
/// are silently ignored — the prompt is still useful without task context.
fn build_flow_prompt(user_prompt: &str) -> Result<String> {
    let mut context = String::new();
    // Add flow.toml schema reference
    context.push_str(FLOW_SCHEMA_CONTEXT);
    // Add current project tasks if available
    let cwd = std::env::current_dir().unwrap_or_default();
    if let Ok(discovery) = discover::discover_tasks(&cwd) {
        if !discovery.tasks.is_empty() {
            context.push_str("\n\n## Current Project Tasks\n\n");
            for task in &discovery.tasks {
                let desc = task.task.description.as_deref().unwrap_or("");
                // Tasks from nested configs carry their relative directory.
                if task.relative_dir.is_empty() {
                    context.push_str(&format!("- `{}`: {} ({})\n", task.task.name, desc, task.task.command));
                } else {
                    context.push_str(&format!("- `{}` ({}): {} ({})\n", task.task.name, task.relative_dir, desc, task.task.command));
                }
            }
        }
    }
    // Add CLI commands reference
    context.push_str(FLOW_CLI_CONTEXT);
    Ok(format!(
        "{}\n\n---\n\nUser request: {}\n\nComplete this task. Read flow.toml first if you need to modify it.",
        context, user_prompt
    ))
}
/// Flow.toml schema and best practices context.
///
/// Markdown reference prepended to every `flow` agent prompt by
/// `build_flow_prompt` so the model knows the task-file schema without
/// having to read documentation first.
const FLOW_SCHEMA_CONTEXT: &str = r#"# Flow Task Runner Context
You are a flow-aware agent. Flow is a task runner with these key concepts:
## flow.toml Schema
```toml
version = 1
name = "project-name" # optional project identifier
[flow]
primary_task = "dev" # default task for `f` with no args
[[tasks]]
name = "task-name" # required: unique identifier
command = "echo hello" # required: shell command to run
description = "What it does" # optional: shown in task list
shortcuts = ["t", "tn"] # optional: short aliases
dependencies = ["other-task", "cargo"] # optional: run before, or ensure binary exists
interactive = false # optional: needs TTY (auto-detected for sudo, vim, etc.)
delegate_to_hub = false # optional: run via background hub daemon
on_cancel = "cleanup cmd" # optional: run when Ctrl+C pressed
# Dependencies section - define reusable deps
[deps]
cargo = "cargo" # simple binary check
node = ["node", "npm"] # multiple binaries
ripgrep = { pkg-path = "ripgrep" } # flox managed package
# Flox integration for reproducible dependencies
[flox.install]
cargo.pkg-path = "cargo"
nodejs.pkg-path = "nodejs"
```
## Best Practices
1. **Task naming**: Use kebab-case (e.g., `deploy-prod`, `test-unit`)
2. **Shortcuts**: Add 1-3 char shortcuts for frequent tasks
3. **Descriptions**: Always add descriptions - they appear in `f tasks` and fuzzy search
4. **Dependencies**: List task deps (run first) or binary deps (check PATH)
5. **on_cancel**: Add cleanup for long-running tasks that spawn processes
6. **interactive**: Auto-detected for sudo/vim/ssh, set manually for custom TUIs
"#;
/// Flow CLI commands context.
///
/// Appended after the schema (and any discovered tasks) in
/// `build_flow_prompt`; documents the `f` command surface and how task
/// arguments are passed.
const FLOW_CLI_CONTEXT: &str = r#"
## Flow CLI Commands
- `f` - Fuzzy search tasks (fzf picker)
- `f <task>` - Run task directly
- `f tasks` - List all tasks
- `f run <task> [args]` - Run task with args
- `f init` - Create flow.toml scaffold
- `f start` - Bootstrap .ai/ folder structure
- `f commit` - AI-assisted git commit
- `f agents run <type> "prompt"` - Run AI agent
- `f agents global <agent>` - Run global agent
- `f ps` - Show running tasks
- `f kill` - Stop running tasks
- `f logs <task>` - View task logs
## Task Arguments
Tasks receive args as positional params:
```toml
[[tasks]]
name = "greet"
command = "echo Hello $1"
```
Run: `f greet World` → prints "Hello World"
"#;
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/publish.rs | src/publish.rs | //! Publish projects to GitHub.
use std::io::{self, Write};
use std::process::Command;
use anyhow::{Context, Result, bail};
use crate::cli::PublishOpts;
/// Run the publish command.
///
/// Publishes the current directory to GitHub via the `gh` CLI:
/// 1. verify `gh` is installed and authenticated,
/// 2. determine repo name (flag > folder name, with interactive prompt
///    unless `--yes`) and visibility (`--public`/`--private`, default
///    private, prompt otherwise),
/// 3. if the repo already exists: reconcile visibility, add an `origin`
///    remote and push when one is missing, then return,
/// 4. otherwise confirm, `git init`/initial-commit when needed, and create
///    the repo with `gh repo create --source=. --push`.
pub fn run(opts: PublishOpts) -> Result<()> {
    // Check if gh CLI is available
    if Command::new("gh").arg("--version").output().is_err() {
        bail!("GitHub CLI (gh) is not installed. Install from: https://cli.github.com");
    }
    // Check if authenticated
    let auth_status = Command::new("gh")
        .args(["auth", "status"])
        .output()
        .context("failed to check gh auth status")?;
    if !auth_status.status.success() {
        println!("Not authenticated with GitHub.");
        println!("Run: gh auth login");
        bail!("GitHub authentication required");
    }
    // Get current directory name as default repo name
    let cwd = std::env::current_dir()?;
    let folder_name = cwd
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("repo")
        .to_string();
    // Check if already a git repo
    let is_git_repo = cwd.join(".git").exists();
    // Get GitHub username
    let gh_user = Command::new("gh")
        .args(["api", "user", "-q", ".login"])
        .output()
        .context("failed to get GitHub username")?;
    let username = String::from_utf8_lossy(&gh_user.stdout).trim().to_string();
    if username.is_empty() {
        bail!("Could not determine GitHub username");
    }
    // Determine repo name: explicit flag > folder name (no prompt with -y).
    let repo_name = if let Some(name) = opts.name {
        name
    } else if opts.yes {
        folder_name.clone()
    } else {
        print!("Repository name [{}]: ", folder_name);
        io::stdout().flush()?;
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        let input = input.trim();
        if input.is_empty() {
            folder_name.clone()
        } else {
            input.to_string()
        }
    };
    // Determine visibility
    let is_public = if opts.public {
        true
    } else if opts.private {
        false
    } else if opts.yes {
        false // Default to private if -y is passed
    } else {
        print!("Visibility (public/private) [private]: ");
        io::stdout().flush()?;
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        let input = input.trim().to_lowercase();
        // Anything other than public/pub/p means private.
        input == "public" || input == "pub" || input == "p"
    };
    let visibility = if is_public { "public" } else { "private" };
    let full_name = format!("{}/{}", username, repo_name);
    // Check if repo already exists
    let repo_check = Command::new("gh")
        .args(["repo", "view", &full_name, "--json", "visibility", "-q", ".visibility"])
        .output();
    if let Ok(output) = repo_check {
        if output.status.success() {
            let current_visibility = String::from_utf8_lossy(&output.stdout).trim().to_lowercase();
            println!("Repository {} already exists ({}).", full_name, current_visibility);
            // Check if visibility needs to change
            let target_visibility = if is_public { "public" } else { "private" };
            if current_visibility != target_visibility {
                println!("Updating visibility to {}...", target_visibility);
                let visibility_flag = format!("--visibility={}", target_visibility);
                let update_result = Command::new("gh")
                    .args(["repo", "edit", &full_name, &visibility_flag])
                    .status()
                    .context("failed to update repository visibility")?;
                if update_result.success() {
                    println!("✓ Updated to {}", target_visibility);
                } else {
                    // Non-fatal: the repo still exists, only visibility differs.
                    println!("Warning: Could not update visibility");
                }
            }
            // Check if origin remote exists
            let origin_check = Command::new("git")
                .args(["remote", "get-url", "origin"])
                .output();
            if origin_check.map(|o| o.status.success()).unwrap_or(false) {
                println!("\n✓ https://github.com/{}", full_name);
                return Ok(());
            }
            // Add origin and push
            println!("Adding origin remote...");
            let remote_url = format!("git@github.com:{}.git", full_name);
            Command::new("git")
                .args(["remote", "add", "origin", &remote_url])
                .status()
                .context("failed to add origin remote")?;
            println!("Pushing to {}...", full_name);
            push_to_origin()?;
            println!("\n✓ Published to https://github.com/{}", full_name);
            return Ok(());
        }
    }
    // Show confirmation
    if !opts.yes {
        println!();
        println!("Create repository:");
        println!(" Name: {}", full_name);
        println!(" Visibility: {}", visibility);
        if let Some(ref desc) = opts.description {
            println!(" Description: {}", desc);
        }
        println!();
        print!("Proceed? [Y/n]: ");
        io::stdout().flush()?;
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        let input = input.trim().to_lowercase();
        if input == "n" || input == "no" {
            println!("Aborted.");
            return Ok(());
        }
    }
    // Initialize git if needed
    if !is_git_repo {
        println!("Initializing git repository...");
        Command::new("git")
            .args(["init"])
            .status()
            .context("failed to initialize git")?;
        // Create initial commit if no commits exist
        let has_commits = Command::new("git")
            .args(["rev-parse", "HEAD"])
            .output()
            .map(|o| o.status.success())
            .unwrap_or(false);
        if !has_commits {
            // Stage all files
            Command::new("git")
                .args(["add", "."])
                .status()
                .context("failed to stage files")?;
            Command::new("git")
                .args(["commit", "-m", "Initial commit"])
                .status()
                .context("failed to create initial commit")?;
        }
    }
    // Create the repository
    println!("Creating repository on GitHub...");
    let mut args = vec![
        "repo".to_string(),
        "create".to_string(),
        repo_name.clone(),
        format!("--{}", visibility),
        "--source=.".to_string(),
        "--push".to_string(),
    ];
    if let Some(desc) = opts.description {
        args.push("--description".to_string());
        args.push(desc);
    }
    let create_result = Command::new("gh")
        .args(&args)
        .status()
        .context("failed to create repository")?;
    if !create_result.success() {
        bail!("Failed to create repository");
    }
    println!();
    println!("✓ Published to https://github.com/{}", full_name);
    Ok(())
}
/// Push the current branch to `origin`, setting it as the upstream.
///
/// Falls back to `main` when the branch name cannot be determined
/// (empty output or a detached `HEAD`).
fn push_to_origin() -> Result<()> {
    let head = Command::new("git")
        .args(["rev-parse", "--abbrev-ref", "HEAD"])
        .output()
        .context("failed to get current branch")?;
    let mut branch = String::from_utf8_lossy(&head.stdout).trim().to_string();
    // Detached HEAD prints "HEAD"; treat it like an unknown branch.
    if branch.is_empty() || branch == "HEAD" {
        branch = "main".to_string();
    }
    Command::new("git")
        .args(["push", "-u", "origin", &branch])
        .status()
        .context("failed to push to origin")?;
    Ok(())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/tasks.rs | src/tasks.rs | use std::{
collections::hash_map::DefaultHasher,
fs::{self, File, OpenOptions},
hash::{Hash, Hasher},
io::{IsTerminal, Read, Write},
net::IpAddr,
path::{Path, PathBuf},
process::{Command, ExitStatus, Stdio},
sync::{
Arc, Mutex,
atomic::{AtomicBool, Ordering},
},
thread,
time::{Duration, Instant},
};
use portable_pty::{CommandBuilder, NativePtySystem, PtySize, PtySystem};
use anyhow::{Context, Result, bail};
use reqwest::blocking::Client;
use serde_json::json;
use shell_words;
use which::which;
use crate::{
cli::{
GlobalAction, GlobalCommand, HubAction, HubCommand, HubOpts, TaskActivateOpts, TaskRunOpts,
TasksOpts,
},
config::{self, Config, FloxInstallSpec, TaskConfig},
discover,
flox::{self, FloxEnv},
history::{self, InvocationRecord},
hub, projects, task_match,
running::{self, RunningProcess},
};
/// Global state for cancel cleanup handler.
///
/// Set to `true` the first time `setup_cancel_handler` installs the
/// process-wide Ctrl+C handler, so it is only registered once.
static CANCEL_HANDLER_SET: AtomicBool = AtomicBool::new(false);
/// Cleanup state shared with the signal handler.
struct CleanupState {
    /// The current task's `on_cancel` shell command, if any.
    command: Option<String>,
    /// Working directory in which the cleanup command is executed.
    workdir: PathBuf,
}
/// Lazily-initialized shared cleanup state: the Ctrl+C handler reads it
/// (`run_cleanup`), task setup/teardown writes it.
static CLEANUP_STATE: std::sync::OnceLock<Mutex<CleanupState>> = std::sync::OnceLock::new();
/// Run the cleanup command if one is set.
///
/// Called from the Ctrl+C handler: executes the registered `on_cancel`
/// command via `/bin/sh -c` in the task's working directory with inherited
/// stdio. Failures (and a poisoned lock) are deliberately ignored — this
/// is best-effort cleanup on shutdown.
fn run_cleanup() {
    let state = CLEANUP_STATE.get_or_init(|| {
        Mutex::new(CleanupState {
            command: None,
            workdir: PathBuf::from("."),
        })
    });
    if let Ok(guard) = state.lock() {
        if let Some(ref cmd) = guard.command {
            eprintln!("\nRunning cleanup: {}", cmd);
            let _ = Command::new("/bin/sh")
                .arg("-c")
                .arg(cmd)
                .current_dir(&guard.workdir)
                .stdin(Stdio::inherit())
                .stdout(Stdio::inherit())
                .stderr(Stdio::inherit())
                .status();
        }
    }
}
/// Set up the cleanup handler for Ctrl+C.
///
/// Stores `on_cancel` and `workdir` in the shared cleanup state (replacing
/// whatever a previous task registered), then installs the process-wide
/// ctrlc handler exactly once, guarded by `CANCEL_HANDLER_SET`. The
/// handler runs the cleanup and exits with 130 (128 + SIGINT).
fn setup_cancel_handler(on_cancel: Option<&str>, workdir: &Path) {
    let state = CLEANUP_STATE.get_or_init(|| {
        Mutex::new(CleanupState {
            command: None,
            workdir: PathBuf::from("."),
        })
    });
    // Update the cleanup state
    if let Ok(mut guard) = state.lock() {
        guard.command = on_cancel.map(|s| s.to_string());
        guard.workdir = workdir.to_path_buf();
    }
    // Only set up the handler once
    if !CANCEL_HANDLER_SET.swap(true, Ordering::SeqCst) {
        let _ = ctrlc::set_handler(move || {
            run_cleanup();
            std::process::exit(130); // 128 + SIGINT (2)
        });
    }
}
/// Clear the cleanup handler (called after task completes normally).
///
/// Drops any registered `on_cancel` command so a later Ctrl+C cannot
/// re-run a stale cleanup from a finished task. The installed signal
/// handler itself stays in place; only its payload is cleared.
fn clear_cancel_handler() {
    let empty_state = || {
        Mutex::new(CleanupState {
            command: None,
            workdir: PathBuf::from("."),
        })
    };
    match CLEANUP_STATE.get_or_init(empty_state).lock() {
        Ok(mut guard) => guard.command = None,
        // Poisoned lock: nothing we can safely clear; ignore.
        Err(_) => {}
    }
}
/// Context for registering a running task process
#[derive(Debug, Clone)]
pub struct TaskContext {
    /// Task name as declared in the config.
    pub task_name: String,
    /// Shell command being executed.
    pub command: String,
    /// Path to the flow.toml that defined the task.
    pub config_path: PathBuf,
    /// Directory containing that config; used with `project_name` to build
    /// the log-file slug (see `task_log_path`).
    pub project_root: PathBuf,
    /// Whether the task ran inside a flox-managed environment
    /// — TODO confirm against the code that constructs this context.
    pub used_flox: bool,
    /// Project name from the config, when set; preferred over the root
    /// path hash when naming log files.
    pub project_name: Option<String>,
    /// Log file path, when output capture is enabled
    /// — NOTE(review): presumably `None` for interactive runs; confirm.
    pub log_path: Option<PathBuf>,
    /// Whether the task runs with TTY passthrough (see
    /// `needs_interactive_mode` for the auto-detection heuristic).
    pub interactive: bool,
}
/// Heuristic: does this shell command need a real TTY (interactive mode)?
///
/// Returns true when any non-comment line starts with a privileged or
/// session-opening command (sudo/su/ssh, `docker`/`kubectl` with `-it`),
/// when `sudo` appears after a pipe, `&&` or `;`, or when the first word
/// of the command is a well-known interactive program (editors, pagers,
/// REPLs, TUIs) — path prefixes stripped.
fn needs_interactive_mode(command: &str) -> bool {
    const INTERACTIVE_PREFIXES: [&str; 10] = [
        "sudo ",
        "sudo\t",
        "su ",
        "ssh ",
        "docker run -it",
        "docker run -ti",
        "docker exec -it",
        "docker exec -ti",
        "kubectl exec -it",
        "kubectl exec -ti",
    ];
    // Line-by-line scan handles multi-line scripts; comments are ignored.
    let line_needs_tty = command
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty() && !line.starts_with('#'))
        .any(|line| {
            line == "sudo" || INTERACTIVE_PREFIXES.iter().any(|p| line.starts_with(p))
        });
    if line_needs_tty {
        return true;
    }
    // sudo mid-pipeline or after a chain operator also needs a TTY.
    if ["| sudo", "&& sudo", "; sudo"]
        .iter()
        .any(|needle| command.contains(needle))
    {
        return true;
    }
    // Finally, check whether the very first word names a known interactive
    // program; `/usr/bin/vim` counts the same as `vim`.
    const INTERACTIVE_COMMANDS: [&str; 19] = [
        "vim", "nvim", "nano", "emacs",
        "htop", "top", "btop",
        "less", "more",
        "psql", "mysql", "sqlite3",
        "node", "python", "python3", "irb", "ghci",
        "lazygit", "lazydocker",
    ];
    let first_word = command
        .lines()
        .next()
        .unwrap_or("")
        .trim()
        .split_whitespace()
        .next()
        .unwrap_or("");
    let base_cmd = first_word.rsplit('/').next().unwrap_or(first_word);
    INTERACTIVE_COMMANDS.contains(&base_cmd)
}
/// List all tasks discovered under the config's directory tree.
///
/// The discovery root is the parent of an absolute `opts.config`, or the
/// current directory otherwise. Prints a placeholder message when no
/// tasks are found.
pub fn list(opts: TasksOpts) -> Result<()> {
    // Determine root directory for discovery
    let root = if opts.config.is_absolute() {
        opts.config
            .parent()
            .map(|p| p.to_path_buf())
            .unwrap_or_else(|| PathBuf::from("."))
    } else {
        std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."))
    };
    let discovery = discover::discover_tasks(&root)?;
    if discovery.tasks.is_empty() {
        println!("No tasks defined in {} or subdirectories", root.display());
        return Ok(());
    }
    println!("Tasks (root: {}):", root.display());
    for line in format_discovered_task_lines(&discovery.tasks) {
        println!("{line}");
    }
    Ok(())
}
/// Run tasks from the global flow config (~/.config/flow/flow.toml).
///
/// Dispatch order: an explicit subcommand (`List`, `Run`, `Match`) wins;
/// then the `--list` flag; then a bare task name; otherwise fall back to
/// listing. Errors when the global config file does not exist.
pub fn run_global(opts: GlobalCommand) -> Result<()> {
    let config_path = config::default_config_path();
    if !config_path.exists() {
        bail!("global flow config not found at {}", config_path.display());
    }
    if let Some(action) = opts.action {
        match action {
            GlobalAction::List => {
                return list(TasksOpts { config: config_path });
            }
            GlobalAction::Run { task, args } => {
                // Global tasks never delegate to the hub; use default hub coords.
                return run(TaskRunOpts {
                    config: config_path,
                    delegate_to_hub: false,
                    hub_host: std::net::IpAddr::from([127, 0, 0, 1]),
                    hub_port: 9050,
                    name: task,
                    args,
                });
            }
            GlobalAction::Match(opts) => {
                return task_match::run_global(task_match::MatchOpts {
                    args: opts.query,
                    model: opts.model,
                    port: Some(opts.port),
                    execute: !opts.dry_run,
                });
            }
        }
    }
    if opts.list {
        return list(TasksOpts { config: config_path });
    }
    if let Some(task) = opts.task {
        return run(TaskRunOpts {
            config: config_path,
            delegate_to_hub: false,
            hub_host: std::net::IpAddr::from([127, 0, 0, 1]),
            hub_port: 9050,
            name: task,
            args: opts.args,
        });
    }
    // No action, flag, or task: default to listing.
    list(TasksOpts { config: config_path })
}
/// Run a task, searching nested flow.toml files if not found in root.
///
/// Matching order: exact name or shortcut (case-insensitive), then a
/// unique abbreviation match (needle of 2+ chars, via
/// `generate_abbreviation`). On a hit, the task runs against the config
/// file it was discovered in; otherwise the error lists every available
/// task (annotated with its relative directory when nested).
pub fn run_with_discovery(task_name: &str, args: Vec<String>) -> Result<()> {
    let root = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let discovery = discover::discover_tasks(&root)?;
    // Find the task in discovered tasks
    let discovered = discovery.tasks.iter().find(|d| {
        d.task.name.eq_ignore_ascii_case(task_name)
            || d.task
                .shortcuts
                .iter()
                .any(|s| s.eq_ignore_ascii_case(task_name))
    });
    // Also try abbreviation matching
    let discovered = discovered.or_else(|| {
        let needle = task_name.to_ascii_lowercase();
        if needle.len() < 2 {
            return None;
        }
        let mut matches = discovery.tasks.iter().filter(|d| {
            generate_abbreviation(&d.task.name)
                .map(|abbr| abbr == needle)
                .unwrap_or(false)
        });
        let first = matches.next()?;
        // Only match if unambiguous
        if matches.next().is_some() {
            None
        } else {
            Some(first)
        }
    });
    if let Some(discovered) = discovered {
        // Run the task with its specific config path
        return run(TaskRunOpts {
            config: discovered.config_path.clone(),
            delegate_to_hub: false,
            hub_host: std::net::IpAddr::from([127, 0, 0, 1]),
            hub_port: 9050,
            name: discovered.task.name.clone(),
            args,
        });
    }
    // List available tasks in error message
    let available: Vec<_> = discovery
        .tasks
        .iter()
        .map(|d| {
            if d.relative_dir.is_empty() {
                d.task.name.clone()
            } else {
                format!("{} ({})", d.task.name, d.relative_dir)
            }
        })
        .collect();
    bail!(
        "task '{}' not found.\nAvailable tasks: {}",
        task_name,
        available.join(", ")
    );
}
/// Run a single task from a specific config file.
///
/// Flow: load the config and register/activate the project; find the task;
/// resolve its dependencies (failures are recorded to history); run
/// dependency tasks recursively first; optionally delegate to the hub
/// daemon (falling back to local execution if delegation fails); decide
/// whether a flox-managed environment is used (disabled by the
/// `FLOW_DISABLE_FLOX` env var or an on-disk marker); otherwise verify
/// binary dependencies on the host PATH; finally hand off to
/// `execute_task`. Failed invocations are written to history via
/// `record_failure` before returning the error.
pub fn run(opts: TaskRunOpts) -> Result<()> {
    // Keep an unconsumed copy of the config path for dependency-task runs.
    let config_path_for_deps = opts.config.clone();
    let (config_path, cfg) = load_project_config(opts.config)?;
    let project_name = cfg.project_name.clone();
    let workdir = config_path.parent().unwrap_or(Path::new("."));
    // Set active project when running a task
    if let Some(ref name) = project_name {
        let _ = projects::set_active_project(name);
    }
    let Some(task) = find_task(&cfg, &opts.name) else {
        bail!(
            "task '{}' not found in {}",
            opts.name,
            config_path.display()
        );
    };
    // Build user_input early so we can record failures
    let quoted_args: Vec<String> = opts
        .args
        .iter()
        .map(|arg| shell_words::quote(arg).into_owned())
        .collect();
    let user_input = if opts.args.is_empty() {
        task.name.clone()
    } else {
        format!("{} {}", task.name, quoted_args.join(" "))
    };
    let base_command = task.command.trim().to_string();
    let display_command = if opts.args.is_empty() {
        base_command.clone()
    } else {
        format!("{} {}", base_command, quoted_args.join(" "))
    };
    // Helper to record a failed invocation
    let record_failure = |error_msg: &str| {
        let mut record = InvocationRecord::new(
            workdir.display().to_string(),
            config_path.display().to_string(),
            project_name.as_deref(),
            &task.name,
            &display_command,
            &user_input,
            false,
        );
        record.success = false;
        record.status = Some(1);
        record.output = error_msg.to_string();
        if let Err(err) = history::record(record) {
            tracing::warn!(?err, "failed to write task history");
        }
    };
    // Resolve dependencies and record failure if it fails
    let resolved = match resolve_task_dependencies(task, &cfg) {
        Ok(r) => r,
        Err(err) => {
            record_failure(&err.to_string());
            return Err(err);
        }
    };
    // Run task dependencies first (tasks that must complete before this one)
    if !resolved.task_deps.is_empty() {
        for dep_task_name in &resolved.task_deps {
            println!("Running dependency task '{}'...", dep_task_name);
            // Dependencies run locally (never delegated) and without args.
            let dep_opts = TaskRunOpts {
                config: config_path_for_deps.clone(),
                delegate_to_hub: false,
                hub_host: opts.hub_host,
                hub_port: opts.hub_port,
                name: dep_task_name.clone(),
                args: vec![],
            };
            if let Err(err) = run(dep_opts) {
                record_failure(&format!(
                    "dependency task '{}' failed: {}",
                    dep_task_name, err
                ));
                bail!("dependency task '{}' failed: {}", dep_task_name, err);
            }
            println!();
        }
    }
    // Hub delegation: CLI flag or per-task config; failure falls through to
    // local execution instead of aborting.
    let should_delegate = opts.delegate_to_hub || task.delegate_to_hub;
    if should_delegate {
        match delegate_task_to_hub(
            task,
            &resolved,
            workdir,
            opts.hub_host,
            opts.hub_port,
            &display_command,
        ) {
            Ok(()) => {
                // Record a successful (delegated) invocation, then stop here.
                let mut record = InvocationRecord::new(
                    workdir.display().to_string(),
                    config_path.display().to_string(),
                    project_name.as_deref(),
                    &task.name,
                    &display_command,
                    &user_input,
                    false,
                );
                record.success = true;
                record.status = Some(0);
                record.output = format!("delegated to hub at {}:{}", opts.hub_host, opts.hub_port);
                if let Err(err) = history::record(record) {
                    tracing::warn!(?err, "failed to write task history");
                }
                return Ok(());
            }
            Err(err) => {
                println!(
                    "⚠️ Failed to delegate task '{}' to hub ({}); falling back to local execution.",
                    task.name, err
                );
            }
        }
    }
    let flox_pkgs = collect_flox_packages(&cfg, &resolved.flox);
    let mut preamble = String::new();
    let flox_disabled_env = std::env::var_os("FLOW_DISABLE_FLOX").is_some();
    let flox_disabled_marker = flox_disabled_marker(workdir).exists();
    let flox_enabled = !flox_pkgs.is_empty() && !flox_disabled_env && !flox_disabled_marker;
    if flox_enabled {
        log_and_capture(
            &mut preamble,
            &format!(
                "Skipping host PATH checks; using managed deps [{}]",
                flox_pkgs
                    .iter()
                    .map(|(name, _)| name.as_str())
                    .collect::<Vec<_>>()
                    .join(", ")
            ),
        );
    } else {
        if flox_disabled_env {
            log_and_capture(
                &mut preamble,
                "FLOW_DISABLE_FLOX is set; running on host PATH",
            );
        }
        // Without flox, every binary dependency must exist on the host PATH.
        if let Err(err) = ensure_command_dependencies_available(&resolved.commands) {
            record_failure(&err.to_string());
            return Err(err);
        }
    }
    execute_task(
        task,
        &config_path,
        workdir,
        preamble,
        project_name.as_deref(),
        &flox_pkgs,
        flox_enabled,
        &base_command,
        &opts.args,
        &user_input,
    )
}
/// Run every task marked `activate_on_cd_to_root` in the given config.
///
/// Dependencies of all activation tasks are resolved and checked up front
/// (combined), so a missing binary fails before anything runs. Each task
/// then executes in declaration order with no extra arguments; the flox
/// enable/disable decision is re-evaluated per task.
pub fn activate(opts: TaskActivateOpts) -> Result<()> {
    let (config_path, cfg) = load_project_config(opts.config)?;
    let workdir = config_path.parent().unwrap_or(Path::new("."));
    let project_name = cfg.project_name.clone();
    let tasks: Vec<&TaskConfig> = cfg
        .tasks
        .iter()
        .filter(|task| task.activate_on_cd_to_root)
        .collect();
    if tasks.is_empty() {
        return Ok(());
    }
    // Merge all tasks' dependencies so PATH checks happen once, up front.
    let mut combined = ResolvedDependencies::default();
    for task in &tasks {
        let resolved = resolve_task_dependencies(task, &cfg)?;
        combined.commands.extend(resolved.commands);
        combined.flox.extend(resolved.flox);
    }
    let flox_pkgs = collect_flox_packages(&cfg, &combined.flox);
    let mut preamble = String::new();
    if flox_pkgs.is_empty() {
        ensure_command_dependencies_available(&combined.commands)?;
    } else {
        log_and_capture(
            &mut preamble,
            &format!(
                "Skipping host PATH checks; using managed deps [{}]",
                flox_pkgs
                    .iter()
                    .map(|(name, _)| name.as_str())
                    .collect::<Vec<_>>()
                    .join(", ")
            ),
        );
    }
    for task in tasks {
        // Re-check the flox opt-outs per task (env var / marker file).
        let flox_disabled_env = std::env::var_os("FLOW_DISABLE_FLOX").is_some();
        let flox_disabled_marker = flox_disabled_marker(workdir).exists();
        let flox_enabled = !flox_pkgs.is_empty() && !flox_disabled_env && !flox_disabled_marker;
        let command = task.command.trim().to_string();
        let empty_args: Vec<String> = Vec::new();
        execute_task(
            task,
            &config_path,
            workdir,
            preamble.clone(),
            project_name.as_deref(),
            &flox_pkgs,
            flox_enabled,
            &command,
            &empty_args,
            &task.name,
        )?;
    }
    Ok(())
}
/// Load the project `flow.toml`, returning the absolute config path and config.
///
/// As a side effect, a named project is (best-effort) registered in the global
/// project registry; registration failures are only logged at debug level.
pub(crate) fn load_project_config(path: PathBuf) -> Result<(PathBuf, Config)> {
    let config_path = resolve_path(path)?;
    let cfg = config::load(&config_path).with_context(|| {
        format!(
            "failed to load flow tasks configuration at {}",
            config_path.display()
        )
    })?;
    if let Some(name) = cfg.project_name.as_deref() {
        if let Err(err) = projects::register_project(name, &config_path) {
            tracing::debug!(?err, "failed to register project name");
        }
    }
    Ok((config_path, cfg))
}
/// Make `path` absolute, anchoring relative paths at the current directory.
fn resolve_path(path: PathBuf) -> Result<PathBuf> {
    if path.is_absolute() {
        return Ok(path);
    }
    let cwd = std::env::current_dir()?;
    Ok(cwd.join(path))
}
/// Echo `msg` to stdout while also appending it to `buf`, guaranteeing the
/// captured copy is newline-terminated.
fn log_and_capture(buf: &mut String, msg: &str) {
    println!("{msg}");
    let needs_newline = !msg.ends_with('\n');
    buf.push_str(msg);
    if needs_newline {
        buf.push('\n');
    }
}
/// Directory where per-task logs live: `$HOME/.config/flow/logs`, falling
/// back to the current directory when `HOME` is unset.
fn log_dir() -> PathBuf {
    let home = match std::env::var_os("HOME") {
        Some(dir) => PathBuf::from(dir),
        None => PathBuf::from("."),
    };
    home.join(".config/flow/logs")
}
/// Reduce `raw` to a filesystem-friendly slug: ASCII alphanumerics, `-` and
/// `_` are kept, everything else becomes `-`; leading/trailing dashes are
/// stripped and the result is lowercased.
fn sanitize_component(raw: &str) -> String {
    let mapped: String = raw
        .chars()
        .map(|ch| {
            if ch.is_ascii_alphanumeric() || ch == '-' || ch == '_' {
                ch
            } else {
                '-'
            }
        })
        .collect();
    mapped.trim_matches('-').to_lowercase()
}
/// Short hex identifier derived from the std `DefaultHasher` digest of `input`.
fn short_hash(input: &str) -> String {
    let mut hasher = DefaultHasher::new();
    Hash::hash(input, &mut hasher);
    format!("{:x}", hasher.finish())
}
/// Compute the log file path for a task: `<log_dir>/<project-slug>/<task>.log`.
///
/// The project slug combines the sanitized project name (when present) with a
/// short hash of the project root so distinct checkouts never collide.
/// Always returns `Some`; the `Option` is kept for call-site flexibility.
fn task_log_path(ctx: &TaskContext) -> Option<PathBuf> {
    let base = log_dir();
    let slug = if let Some(name) = ctx.project_name.as_deref() {
        let clean = sanitize_component(name);
        if clean.is_empty() {
            // Name sanitized away entirely — fall back to a hash-only slug.
            format!(
                "proj-{}",
                short_hash(&ctx.project_root.display().to_string())
            )
        } else {
            format!(
                "{clean}-{}",
                short_hash(&ctx.project_root.display().to_string())
            )
        }
    } else {
        format!(
            "proj-{}",
            short_hash(&ctx.project_root.display().to_string())
        )
    };
    let task = {
        // Sanitized task name, with a generic fallback for all-symbol names.
        let clean = sanitize_component(&ctx.task_name);
        if clean.is_empty() {
            "task".to_string()
        } else {
            clean
        }
    };
    Some(base.join(slug).join(format!("{task}.log")))
}
/// Execute one task, preferring the flox-managed environment when enabled and
/// healthy, and falling back to the host PATH on any flox failure.
///
/// All stdout produced along the way (preamble included) is accumulated into
/// `combined_output` and persisted to history, win or lose. Returns `Err`
/// when the command is empty or the final status is non-zero.
fn execute_task(
    task: &TaskConfig,
    config_path: &Path,
    workdir: &Path,
    mut preamble: String,
    project_name: Option<&str>,
    flox_pkgs: &[(String, FloxInstallSpec)],
    flox_enabled: bool,
    command: &str,
    args: &[String],
    user_input: &str,
) -> Result<()> {
    if command.is_empty() {
        bail!("task '{}' has an empty command", task.name);
    }
    log_and_capture(
        &mut preamble,
        &format!("Running task '{}': {}", task.name, command),
    );
    // Create context for PID tracking
    let canonical_config = config_path
        .canonicalize()
        .unwrap_or_else(|_| config_path.to_path_buf());
    let canonical_workdir = workdir
        .canonicalize()
        .unwrap_or_else(|_| workdir.to_path_buf());
    // Auto-detect interactive mode if not explicitly set
    let interactive = task.interactive || needs_interactive_mode(command);
    let task_ctx = TaskContext {
        task_name: task.name.clone(),
        command: command.to_string(),
        config_path: canonical_config,
        project_root: canonical_workdir.clone(),
        used_flox: flox_enabled && !flox_pkgs.is_empty(),
        project_name: project_name.map(|s| s.to_string()),
        log_path: None,
        interactive,
    };
    // Set up cancel handler if on_cancel is defined
    setup_cancel_handler(task.on_cancel.as_deref(), workdir);
    let mut record = InvocationRecord::new(
        workdir.display().to_string(),
        config_path.display().to_string(),
        project_name,
        &task.name,
        command,
        user_input,
        !flox_pkgs.is_empty(),
    );
    let started = Instant::now();
    let mut combined_output = preamble;
    let status: ExitStatus;
    let flox_disabled = flox_disabled_marker(workdir).exists();
    // Host-PATH path: no flox packages, flox disabled, or caller opted out.
    if flox_pkgs.is_empty() || flox_disabled || !flox_enabled {
        if flox_disabled {
            log_and_capture(
                &mut combined_output,
                "flox disabled for this project (marker present); using host PATH",
            );
        }
        let (st, out) = run_host_command(workdir, command, args, Some(task_ctx.clone()))?;
        status = st;
        combined_output.push_str(&out);
    } else {
        log_and_capture(
            &mut combined_output,
            &format!(
                "Skipping host PATH checks; using managed deps [{}]",
                flox_pkgs
                    .iter()
                    .map(|(name, _)| name.as_str())
                    .collect::<Vec<_>>()
                    .join(", ")
            ),
        );
        // Every flox failure mode below degrades to running on the host PATH
        // so the task still gets a chance to succeed.
        match flox_health_check(workdir, flox_pkgs) {
            Ok(true) => {
                match run_flox_with_reset(flox_pkgs, workdir, command, args, Some(task_ctx.clone()))
                {
                    Ok(Some((st, out))) => {
                        combined_output.push_str(&out);
                        if st.success() {
                            status = st;
                        } else {
                            log_and_capture(
                                &mut combined_output,
                                &format!(
                                    "flox activate failed (status {:?}); retrying on host PATH",
                                    st.code()
                                ),
                            );
                            let (host_status, host_out) =
                                run_host_command(workdir, command, args, Some(task_ctx.clone()))?;
                            combined_output
                                .push_str("\n[flox activate failed; retried on host PATH]\n");
                            combined_output.push_str(&host_out);
                            status = host_status;
                        }
                    }
                    Ok(None) => {
                        // run_flox_with_reset gave up and disabled flox.
                        log_and_capture(
                            &mut combined_output,
                            "flox disabled after repeated errors; using host PATH",
                        );
                        combined_output.push_str("[flox disabled after errors]\n");
                        let (host_status, host_out) =
                            run_host_command(workdir, command, args, Some(task_ctx.clone()))?;
                        combined_output.push_str(&host_out);
                        status = host_status;
                    }
                    Err(err) => {
                        log_and_capture(
                            &mut combined_output,
                            &format!("flox activate failed ({err}); retrying on host PATH"),
                        );
                        let (host_status, host_out) =
                            run_host_command(workdir, command, args, Some(task_ctx.clone()))?;
                        combined_output
                            .push_str("\n[flox activate failed; retried on host PATH]\n");
                        combined_output.push_str(&host_out);
                        status = host_status;
                    }
                }
            }
            Ok(false) => {
                log_and_capture(
                    &mut combined_output,
                    "flox disabled after health check; using host PATH",
                );
                combined_output.push_str("[flox disabled after health check]\n");
                let (host_status, host_out) =
                    run_host_command(workdir, command, args, Some(task_ctx.clone()))?;
                combined_output.push_str(&host_out);
                status = host_status;
            }
            Err(err) => {
                log_and_capture(
                    &mut combined_output,
                    &format!("flox health check failed ({err}); using host PATH"),
                );
                combined_output.push_str("[flox health check failed; using host PATH]\n");
                let (host_status, host_out) =
                    run_host_command(workdir, command, args, Some(task_ctx))?;
                combined_output.push_str(&host_out);
                status = host_status;
            }
        }
    }
    // Persist the invocation regardless of outcome; history failures are non-fatal.
    record.duration_ms = started.elapsed().as_millis();
    record.status = status.code();
    record.success = status.success();
    record.output = combined_output;
    if let Err(err) = history::record(record) {
        tracing::warn!(?err, "failed to write task history");
    }
    // Clear cancel handler since task completed normally
    clear_cancel_handler();
    if status.success() {
        Ok(())
    } else {
        bail!(
            "task '{}' exited with status {}",
            task.name,
            status.code().unwrap_or(-1)
        );
    }
}
/// Render tasks as numbered display lines ("NN. name [shortcuts] – command",
/// plus an indented description line when present). Test-only helper.
#[cfg(test)]
fn format_task_lines(tasks: &[TaskConfig]) -> Vec<String> {
    let mut lines = Vec::new();
    for (idx, task) in tasks.iter().enumerate() {
        let shortcut_display = if task.shortcuts.is_empty() {
            String::new()
        } else {
            format!(" [{}]", task.shortcuts.join(", "))
        };
        lines.push(format!(
            "{:>2}. {}{} – {}",
            idx + 1,
            task.name,
            shortcut_display,
            task.command
        ));
        if let Some(desc) = &task.description {
            lines.push(format!(" {desc}"));
        }
    }
    lines
}
/// Render discovered tasks as numbered display lines; nested tasks carry a
/// "(relative/dir)" suffix so same-named tasks remain distinguishable.
fn format_discovered_task_lines(tasks: &[discover::DiscoveredTask]) -> Vec<String> {
    let mut lines = Vec::new();
    for (idx, discovered) in tasks.iter().enumerate() {
        let task = &discovered.task;
        let shortcut_display = if task.shortcuts.is_empty() {
            String::new()
        } else {
            format!(" [{}]", task.shortcuts.join(", "))
        };
        // Show relative path for nested tasks
        let path_suffix = if let Some(path_label) = discovered.path_label() {
            format!(" ({})", path_label)
        } else {
            String::new()
        };
        lines.push(format!(
            "{:>2}. {}{}{} – {}",
            idx + 1,
            task.name,
            shortcut_display,
            path_suffix,
            task.command
        ));
        if let Some(desc) = &task.description {
            lines.push(format!(" {desc}"));
        }
    }
    lines
}
/// Resolve `needle` to a task: exact name match first, then declared
/// shortcuts, then generated abbreviations — all ASCII-case-insensitive.
pub(crate) fn find_task<'a>(cfg: &'a Config, needle: &str) -> Option<&'a TaskConfig> {
    let query = needle.trim();
    if query.is_empty() {
        return None;
    }
    cfg.tasks
        .iter()
        .find(|task| task.name.eq_ignore_ascii_case(query))
        .or_else(|| {
            cfg.tasks.iter().find(|task| {
                task.shortcuts
                    .iter()
                    .any(|alias| alias.eq_ignore_ascii_case(query))
            })
        })
        .or_else(|| resolve_by_abbreviation(&cfg.tasks, query))
}
/// Match `alias` against generated task-name abbreviations; succeeds only
/// when exactly one task's abbreviation matches (ambiguity yields `None`).
fn resolve_by_abbreviation<'a>(tasks: &'a [TaskConfig], alias: &str) -> Option<&'a TaskConfig> {
    let needle = alias.trim().to_ascii_lowercase();
    if needle.len() < 2 {
        return None;
    }
    let mut candidates = tasks
        .iter()
        .filter(|task| generate_abbreviation(&task.name).as_deref() == Some(needle.as_str()));
    match (candidates.next(), candidates.next()) {
        (Some(only), None) => Some(only),
        _ => None,
    }
}
/// Build a lowercase abbreviation from the first alphanumeric of each
/// segment of `name` (segments split on non-alphanumerics). Returns `None`
/// when fewer than two characters result.
fn generate_abbreviation(name: &str) -> Option<String> {
    let mut abbr = String::new();
    let mut take_next = true;
    for ch in name.chars() {
        if !ch.is_ascii_alphanumeric() {
            take_next = true;
        } else if take_next {
            abbr.push(ch.to_ascii_lowercase());
            take_next = false;
        }
    }
    (abbr.len() >= 2).then_some(abbr)
}
/// Check whether `command` already references shell positional parameters:
/// `$@`, `$*`, `$1`..`$9`, or the braced forms `${@}`, `${*}`, `${1}`…
/// Byte scanning is safe here because every character of interest is ASCII
/// and UTF-8 continuation bytes can never equal an ASCII byte.
fn command_references_args(command: &str) -> bool {
    let bytes = command.as_bytes();
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] == b'$' {
            let mut j = i + 1;
            // Allow one optional opening brace between '$' and the parameter.
            if j < bytes.len() && bytes[j] == b'{' {
                j += 1;
            }
            if matches!(bytes.get(j), Some(b'@') | Some(b'*') | Some(b'1'..=b'9')) {
                return true;
            }
        }
        i += 1;
    }
    false
}
/// Run `command` through `/bin/sh -c` in `workdir`, returning exit status
/// and captured output.
///
/// Interactive tasks on a real TTY are delegated to `run_interactive_command`
/// (inherited stdio); otherwise output is teed so it can be recorded.
/// Extra `args` are forwarded as shell positional parameters unless the
/// command already references them.
fn run_host_command(
    workdir: &Path,
    command: &str,
    args: &[String],
    ctx: Option<TaskContext>,
) -> Result<(ExitStatus, String)> {
    // For interactive tasks, run directly with inherited stdio
    // This ensures proper TTY handling for readline, prompts, etc.
    let interactive = ctx.as_ref().map(|c| c.interactive).unwrap_or(false);
    let is_tty = std::io::stdin().is_terminal();
    if interactive && is_tty {
        return run_interactive_command(workdir, command, args, ctx);
    }
    let mut cmd = Command::new("/bin/sh");
    // If args are provided and command doesn't already reference them ($@ or $1, $2, etc.),
    // append "$@" to pass them through properly
    let full_command = if args.is_empty() || command_references_args(command) {
        command.to_string()
    } else {
        format!("{} \"$@\"", command)
    };
    cmd.arg("-c").arg(&full_command);
    if !args.is_empty() {
        cmd.arg("sh"); // $0 placeholder
        for arg in args {
            cmd.arg(arg);
        }
    }
    cmd.current_dir(workdir);
    run_command_with_tee(cmd, ctx).with_context(|| "failed to spawn command without managed env")
}
fn run_flox_with_reset(
flox_pkgs: &[(String, FloxInstallSpec)],
workdir: &Path,
command: &str,
args: &[String],
ctx: Option<TaskContext>,
) -> Result<Option<(ExitStatus, String)>> {
let mut combined_output = String::new();
let mut reset_done = false;
loop {
let env = flox::ensure_env(workdir, flox_pkgs)?;
match run_flox_command(&env, workdir, command, args, ctx.clone()) {
Ok((status, out)) => {
combined_output.push_str(&out);
if status.success() {
return Ok(Some((status, combined_output)));
}
if !reset_done {
reset_flox_env(workdir)?;
combined_output
.push_str("\n[flox activate failed; reset .flox and retrying]\n");
reset_done = true;
continue;
}
mark_flox_disabled(workdir, "flox activate repeatedly failed")?;
return Ok(None);
}
Err(err) => {
combined_output.push_str(&format!("[flox activate error: {err}]\n"));
if !reset_done {
reset_flox_env(workdir)?;
combined_output.push_str("[reset .flox and retrying]\n");
reset_done = true;
continue;
}
mark_flox_disabled(workdir, "flox activate error after reset")?;
return Ok(None);
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/log_store.rs | src/log_store.rs | use anyhow::{Context, Result};
use rusqlite::{Connection, params};
use serde::{Deserialize, Serialize};
use crate::db;
/// A log entry for ingestion and storage.
///
/// This is the wire format accepted by the ingestion endpoint; `timestamp`
/// is Unix epoch milliseconds and `log_type` is either "log" or "error".
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogEntry {
    pub project: String,
    pub content: String,
    pub timestamp: i64, // unix ms
    #[serde(rename = "type")]
    pub log_type: String, // "log" | "error"
    pub service: String, // task name or custom service
    // Optional stack trace; omitted from serialized JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stack: Option<String>,
    // Content encoding; defaults to "text" when missing on input.
    #[serde(default = "default_format")]
    pub format: String, // "json" | "text"
}
/// Serde default for `LogEntry::format`.
fn default_format() -> String {
    String::from("text")
}
/// Stored log entry with ID.
///
/// `entry` is flattened, so the JSON shape is the `LogEntry` fields plus `id`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StoredLogEntry {
    pub id: i64,
    #[serde(flatten)]
    pub entry: LogEntry,
}
/// Query parameters for filtering logs.
///
/// All filters are optional and combined with AND; results are ordered by
/// timestamp descending and paged via `limit` (default 100) and `offset`.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct LogQuery {
    pub project: Option<String>,
    pub service: Option<String>,
    #[serde(rename = "type")]
    pub log_type: Option<String>,
    pub since: Option<i64>, // timestamp ms
    pub until: Option<i64>, // timestamp ms
    #[serde(default = "default_limit")]
    pub limit: usize,
    #[serde(default)]
    pub offset: usize,
}
/// Serde default for `LogQuery::limit`.
fn default_limit() -> usize {
    const DEFAULT: usize = 100;
    DEFAULT
}
/// Initialize the logs table schema.
///
/// Idempotent: uses IF NOT EXISTS for the table and all four indexes, so it
/// is safe to call on every database open.
pub fn init_schema(conn: &Connection) -> Result<()> {
    conn.execute_batch(
        r#"
        CREATE TABLE IF NOT EXISTS logs (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            project TEXT NOT NULL,
            content TEXT NOT NULL,
            timestamp INTEGER NOT NULL,
            log_type TEXT NOT NULL,
            service TEXT NOT NULL,
            stack TEXT,
            format TEXT NOT NULL DEFAULT 'text'
        );
        CREATE INDEX IF NOT EXISTS idx_logs_project ON logs(project);
        CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON logs(timestamp);
        CREATE INDEX IF NOT EXISTS idx_logs_type ON logs(log_type);
        CREATE INDEX IF NOT EXISTS idx_logs_service ON logs(service);
        "#,
    )
    .context("failed to create logs schema")?;
    Ok(())
}
/// Insert a single log entry.
///
/// Returns the rowid of the inserted row.
pub fn insert_log(conn: &Connection, entry: &LogEntry) -> Result<i64> {
    conn.execute(
        r#"
        INSERT INTO logs (project, content, timestamp, log_type, service, stack, format)
        VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)
        "#,
        params![
            entry.project,
            entry.content,
            entry.timestamp,
            entry.log_type,
            entry.service,
            entry.stack,
            entry.format,
        ],
    )
    .context("failed to insert log")?;
    Ok(conn.last_insert_rowid())
}
/// Insert multiple log entries in a transaction.
///
/// All-or-nothing: if any insert fails the transaction is dropped and rolled
/// back. Returns the rowids of the inserted rows, in input order.
pub fn insert_logs(conn: &mut Connection, entries: &[LogEntry]) -> Result<Vec<i64>> {
    let tx = conn.transaction()?;
    let mut ids = Vec::with_capacity(entries.len());
    for entry in entries {
        tx.execute(
            r#"
            INSERT INTO logs (project, content, timestamp, log_type, service, stack, format)
            VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)
            "#,
            params![
                entry.project,
                entry.content,
                entry.timestamp,
                entry.log_type,
                entry.service,
                entry.stack,
                entry.format,
            ],
        )
        .context("failed to insert log")?;
        ids.push(tx.last_insert_rowid());
    }
    tx.commit()?;
    Ok(ids)
}
/// Query logs with filters.
///
/// Builds the WHERE clause dynamically from the populated fields of `query`;
/// values are always bound as positional parameters (never interpolated into
/// the SQL string). Results are newest-first, paged by limit/offset.
pub fn query_logs(conn: &Connection, query: &LogQuery) -> Result<Vec<StoredLogEntry>> {
    let mut sql = String::from(
        "SELECT id, project, content, timestamp, log_type, service, stack, format FROM logs WHERE 1=1",
    );
    // Boxed trait objects let heterogeneous bind values (Strings and i64s)
    // share one Vec; order must mirror the '?' placeholders appended below.
    let mut params_vec: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();
    if let Some(ref project) = query.project {
        sql.push_str(" AND project = ?");
        params_vec.push(Box::new(project.clone()));
    }
    if let Some(ref service) = query.service {
        sql.push_str(" AND service = ?");
        params_vec.push(Box::new(service.clone()));
    }
    if let Some(ref log_type) = query.log_type {
        sql.push_str(" AND log_type = ?");
        params_vec.push(Box::new(log_type.clone()));
    }
    if let Some(since) = query.since {
        sql.push_str(" AND timestamp >= ?");
        params_vec.push(Box::new(since));
    }
    if let Some(until) = query.until {
        sql.push_str(" AND timestamp <= ?");
        params_vec.push(Box::new(until));
    }
    sql.push_str(" ORDER BY timestamp DESC LIMIT ? OFFSET ?");
    params_vec.push(Box::new(query.limit as i64));
    params_vec.push(Box::new(query.offset as i64));
    let params_refs: Vec<&dyn rusqlite::ToSql> = params_vec.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(&sql)?;
    let rows = stmt.query_map(params_refs.as_slice(), |row| {
        Ok(StoredLogEntry {
            id: row.get(0)?,
            entry: LogEntry {
                project: row.get(1)?,
                content: row.get(2)?,
                timestamp: row.get(3)?,
                log_type: row.get(4)?,
                service: row.get(5)?,
                stack: row.get(6)?,
                format: row.get(7)?,
            },
        })
    })?;
    let mut entries = Vec::new();
    for row in rows {
        entries.push(row?);
    }
    Ok(entries)
}
/// Get error logs for a project (convenience wrapper over [`query_logs`]).
pub fn get_errors(conn: &Connection, project: &str, limit: usize) -> Result<Vec<StoredLogEntry>> {
    let query = LogQuery {
        project: Some(project.to_string()),
        log_type: Some("error".to_string()),
        limit,
        ..Default::default()
    };
    query_logs(conn, &query)
}
/// Open database and ensure schema exists.
///
/// Convenience wrapper over [`db::open_db`] plus [`init_schema`].
pub fn open_log_db() -> Result<Connection> {
    let conn = db::open_db()?;
    init_schema(&conn)?;
    Ok(conn)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Both tests run against an in-memory SQLite database; nothing touches disk.
    #[test]
    fn test_insert_and_query() {
        let conn = Connection::open_in_memory().unwrap();
        init_schema(&conn).unwrap();
        let entry = LogEntry {
            project: "test-project".to_string(),
            content: "Test log message".to_string(),
            timestamp: 1234567890000,
            log_type: "log".to_string(),
            service: "web".to_string(),
            stack: None,
            format: "text".to_string(),
        };
        let id = insert_log(&conn, &entry).unwrap();
        assert!(id > 0);
        let results = query_logs(
            &conn,
            &LogQuery {
                project: Some("test-project".to_string()),
                ..Default::default()
            },
        )
        .unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].entry.content, "Test log message");
    }
    #[test]
    fn test_error_query() {
        let conn = Connection::open_in_memory().unwrap();
        init_schema(&conn).unwrap();
        let log_entry = LogEntry {
            project: "test".to_string(),
            content: "Normal log".to_string(),
            timestamp: 1000,
            log_type: "log".to_string(),
            service: "api".to_string(),
            stack: None,
            format: "text".to_string(),
        };
        let error_entry = LogEntry {
            project: "test".to_string(),
            content: "Error occurred".to_string(),
            timestamp: 2000,
            log_type: "error".to_string(),
            service: "api".to_string(),
            stack: Some("at main.rs:10".to_string()),
            format: "text".to_string(),
        };
        insert_log(&conn, &log_entry).unwrap();
        insert_log(&conn, &error_entry).unwrap();
        // get_errors must return only the "error"-typed row.
        let errors = get_errors(&conn, "test", 10).unwrap();
        assert_eq!(errors.len(), 1);
        assert_eq!(errors[0].entry.log_type, "error");
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/lib.rs | src/lib.rs | pub mod agents;
pub mod ai;
pub mod cli;
pub mod commit;
pub mod commits;
pub mod config;
pub mod daemon;
pub mod db;
pub mod deps;
pub mod deploy;
pub mod deploy_setup;
pub mod discover;
pub mod docs;
pub mod doctor;
pub mod env;
pub mod env_setup;
pub mod fixup;
pub mod flox;
pub mod history;
pub mod hub;
pub mod init;
pub mod lin_runtime;
pub mod lmstudio;
pub mod log_server;
pub mod log_store;
pub mod notify;
pub mod palette;
pub mod parallel;
pub mod processes;
pub mod projects;
pub mod publish;
pub mod repos;
pub mod release;
pub mod running;
pub mod skills;
pub mod storage;
pub mod start;
pub mod tools;
pub mod todo;
pub mod upstream;
pub mod sync;
pub mod task_match;
pub mod tasks;
pub mod watchers;
/// Initialize tracing with a default filter if `RUST_LOG` is unset.
pub fn init_tracing() {
    let filter = std::env::var("RUST_LOG")
        .unwrap_or_else(|_| String::from("flowd=info,axum=warn,tower=warn"));
    tracing_subscriber::fmt()
        .with_env_filter(filter)
        .with_target(false)
        .compact()
        .init();
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/discover.rs | src/discover.rs | //! Fast discovery of nested flow.toml files in a project.
use std::path::{Path, PathBuf};
use anyhow::Result;
use ignore::WalkBuilder;
use crate::config::{self, Config, TaskConfig};
/// A task with its source location information.
///
/// Produced by [`discover_tasks`]; one instance per task per flow.toml.
#[derive(Debug, Clone)]
pub struct DiscoveredTask {
    /// The task configuration.
    pub task: TaskConfig,
    /// Absolute path to the flow.toml containing this task.
    pub config_path: PathBuf,
    /// Relative path from the discovery root to the config file's directory.
    /// Empty string for root-level tasks.
    pub relative_dir: String,
    /// Depth from the discovery root (0 = root, 1 = immediate subdirectory, etc.)
    pub depth: usize,
}
impl DiscoveredTask {
    /// Relative directory label for display, or `None` for root-level tasks.
    pub fn path_label(&self) -> Option<String> {
        (!self.relative_dir.is_empty()).then(|| self.relative_dir.clone())
    }
}
/// Result of discovering flow.toml files in a directory tree.
///
/// Returned by [`discover_tasks`].
#[derive(Debug)]
pub struct DiscoveryResult {
    /// All discovered tasks, sorted by depth (root first).
    pub tasks: Vec<DiscoveredTask>,
    /// The root config path (if exists).
    pub root_config: Option<PathBuf>,
    /// Root config object (if exists).
    pub root_cfg: Option<Config>,
}
/// Discover all flow.toml files starting from the given root directory.
/// Uses the `ignore` crate for fast, gitignore-aware traversal.
///
/// Tasks are returned sorted by depth (root-level first, then nested).
/// A malformed root config only emits a warning; malformed nested configs
/// are skipped silently.
pub fn discover_tasks(root: &Path) -> Result<DiscoveryResult> {
    // Normalize to an absolute (and, when possible, canonical) root so
    // relative labels computed below are stable.
    let root = if root.is_absolute() {
        root.to_path_buf()
    } else {
        std::env::current_dir()?.join(root)
    };
    let root = root.canonicalize().unwrap_or(root);
    let mut discovered: Vec<DiscoveredTask> = Vec::new();
    let mut root_config: Option<PathBuf> = None;
    let mut root_cfg: Option<Config> = None;
    // Check if root itself has a flow.toml
    let root_flow_toml = root.join("flow.toml");
    if root_flow_toml.exists() {
        match config::load(&root_flow_toml) {
            Ok(cfg) => {
                root_config = Some(root_flow_toml.clone());
                for task in &cfg.tasks {
                    discovered.push(DiscoveredTask {
                        task: task.clone(),
                        config_path: root_flow_toml.clone(),
                        relative_dir: String::new(),
                        depth: 0,
                    });
                }
                root_cfg = Some(cfg);
            }
            Err(e) => {
                // A broken root config is reported but does not abort discovery.
                eprintln!(
                    "Warning: failed to parse {}: {:#}",
                    root_flow_toml.display(),
                    e
                );
            }
        }
    }
    // Walk subdirectories looking for flow.toml files
    // Use the ignore crate which respects .gitignore and is very fast
    let walker = WalkBuilder::new(&root)
        .hidden(true) // skip hidden directories
        .git_ignore(true) // respect .gitignore
        .git_global(true) // respect global gitignore
        .git_exclude(true) // respect .git/info/exclude
        .max_depth(Some(10)) // reasonable depth limit
        .filter_entry(|entry| {
            // Skip common directories that won't have flow.toml we care about
            if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
                let name = entry.file_name().to_string_lossy();
                // Skip these directories entirely
                !matches!(
                    name.as_ref(),
                    "node_modules"
                        | "target"
                        | "dist"
                        | "build"
                        | ".git"
                        | ".hg"
                        | ".svn"
                        | "__pycache__"
                        | ".pytest_cache"
                        | ".mypy_cache"
                        | "venv"
                        | ".venv"
                        | "vendor"
                        | "Pods"
                        | ".cargo"
                        | ".rustup"
                )
            } else {
                true
            }
        })
        .build();
    for entry in walker.flatten() {
        let path = entry.path();
        // Skip the root (already handled above)
        if path == root {
            continue;
        }
        // We're looking for directories that contain flow.toml
        if !path.is_dir() {
            continue;
        }
        let flow_toml = path.join("flow.toml");
        if !flow_toml.exists() {
            continue;
        }
        // Parse the config
        let cfg = match config::load(&flow_toml) {
            Ok(c) => c,
            Err(_) => continue, // Skip invalid configs
        };
        // Calculate relative path from root
        let relative_dir = path
            .strip_prefix(&root)
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_default();
        // Calculate depth
        let depth = relative_dir.matches('/').count()
            + relative_dir.matches('\\').count()
            + if relative_dir.is_empty() { 0 } else { 1 };
        for task in cfg.tasks {
            discovered.push(DiscoveredTask {
                task,
                config_path: flow_toml.clone(),
                relative_dir: relative_dir.clone(),
                depth,
            });
        }
    }
    // Sort by depth (root first), then by task name for stability
    discovered.sort_by(|a, b| {
        a.depth
            .cmp(&b.depth)
            .then_with(|| a.relative_dir.cmp(&b.relative_dir))
            .then_with(|| a.task.name.cmp(&b.task.name))
    });
    Ok(DiscoveryResult {
        tasks: discovered,
        root_config,
        root_cfg,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;
    // Helper: drop a flow.toml with the given TOML body into `dir`.
    fn write_flow_toml(dir: &Path, content: &str) {
        fs::write(dir.join("flow.toml"), content).unwrap();
    }
    #[test]
    fn discovers_root_tasks() {
        let tmp = TempDir::new().unwrap();
        write_flow_toml(
            tmp.path(),
            r#"
[[tasks]]
name = "test"
command = "echo test"
"#,
        );
        let result = discover_tasks(tmp.path()).unwrap();
        assert_eq!(result.tasks.len(), 1);
        assert_eq!(result.tasks[0].task.name, "test");
        assert_eq!(result.tasks[0].depth, 0);
        assert!(result.tasks[0].relative_dir.is_empty());
    }
    #[test]
    fn discovers_nested_tasks() {
        let tmp = TempDir::new().unwrap();
        write_flow_toml(
            tmp.path(),
            r#"
[[tasks]]
name = "root-task"
command = "echo root"
"#,
        );
        let nested = tmp.path().join("packages/api");
        fs::create_dir_all(&nested).unwrap();
        write_flow_toml(
            &nested,
            r#"
[[tasks]]
name = "api-task"
command = "echo api"
"#,
        );
        let result = discover_tasks(tmp.path()).unwrap();
        assert_eq!(result.tasks.len(), 2);
        // Root task should come first
        assert_eq!(result.tasks[0].task.name, "root-task");
        assert_eq!(result.tasks[0].depth, 0);
        // Nested task second
        assert_eq!(result.tasks[1].task.name, "api-task");
        assert!(result.tasks[1].depth > 0);
        assert!(result.tasks[1].relative_dir.contains("packages"));
    }
    #[test]
    fn skips_node_modules() {
        let tmp = TempDir::new().unwrap();
        write_flow_toml(
            tmp.path(),
            r#"
[[tasks]]
name = "root"
command = "echo root"
"#,
        );
        let node_modules = tmp.path().join("node_modules/some-pkg");
        fs::create_dir_all(&node_modules).unwrap();
        write_flow_toml(
            &node_modules,
            r#"
[[tasks]]
name = "should-skip"
command = "echo skip"
"#,
        );
        // node_modules is pruned by the walker's filter_entry.
        let result = discover_tasks(tmp.path()).unwrap();
        assert_eq!(result.tasks.len(), 1);
        assert_eq!(result.tasks[0].task.name, "root");
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/palette.rs | src/palette.rs | use std::{
io::Write,
path::PathBuf,
process::{Command, Stdio},
};
use anyhow::{Context, Result, bail};
use crate::{
cli::TasksOpts,
config::{self, TaskConfig},
discover::{self, DiscoveredTask},
};
/// Show the project task palette built from `opts`.
pub fn run(opts: TasksOpts) -> Result<()> {
    present(build_entries(Some(opts))?)
}
/// Show global commands/tasks only (no project flow.toml required).
pub fn run_global() -> Result<()> {
    present(build_entries(None)?)
}
/// Selection returned by `run_fzf`: the chosen entry plus whether the user
/// pressed Tab to be prompted for extra arguments.
struct FzfResult<'a> {
    entry: &'a PaletteEntry,
    with_args: bool,
}
/// Pipe palette entries into `fzf` and return the user's selection.
///
/// With `--expect tab`, fzf's first output line is the pressed key and the
/// second is the selected entry; Tab requests an args prompt afterwards.
/// Returns `Ok(None)` when fzf is cancelled or nothing is selected.
fn run_fzf<'a>(entries: &'a [PaletteEntry]) -> Result<Option<FzfResult<'a>>> {
    let mut child = Command::new("fzf")
        .arg("--prompt")
        .arg("f> ")
        .arg("--expect")
        .arg("tab") // tab to run with args prompt
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .context("failed to spawn fzf")?;
    {
        // Scoped so stdin is dropped (closed) before waiting on the child.
        let stdin = child.stdin.as_mut().context("failed to open fzf stdin")?;
        for entry in entries {
            writeln!(stdin, "{}", entry.display)?;
        }
    }
    let output = child.wait_with_output()?;
    if !output.status.success() {
        return Ok(None);
    }
    let raw = String::from_utf8(output.stdout).context("fzf output was not valid UTF-8")?;
    let mut lines = raw.lines();
    // First line is the key pressed (if any from --expect)
    let key = lines.next().unwrap_or("");
    let with_args = key == "tab";
    // Second line is the selection
    let selection = lines.next().unwrap_or("").trim();
    if selection.is_empty() {
        return Ok(None);
    }
    // Map the selected display line back to its palette entry.
    let entry = entries.iter().find(|entry| entry.display == selection);
    Ok(entry.map(|e| FzfResult {
        entry: e,
        with_args,
    }))
}
/// Re-invoke the current executable with the entry's argv plus extra args,
/// propagating a non-zero exit status as an error.
fn run_entry(entry: &PaletteEntry, extra_args: Vec<String>) -> Result<()> {
    let exe = std::env::current_exe().context("failed to resolve current executable")?;
    let status = Command::new(exe)
        .args(&entry.exec)
        .args(&extra_args)
        .status()
        .with_context(|| format!("failed to run {}", entry.display))?;
    if status.success() {
        Ok(())
    } else {
        bail!(
            "{} exited with status {}",
            entry.display,
            status.code().unwrap_or(-1)
        );
    }
}
/// Present the palette: fuzzy-select via fzf when available, otherwise fall
/// back to printing a plain listing. Cancelled selections are a no-op.
fn present(entries: Vec<PaletteEntry>) -> Result<()> {
    if entries.is_empty() {
        println!("No commands or tasks available. Add entries to flow.toml or global config.");
        return Ok(());
    }
    if which::which("fzf").is_err() {
        println!("fzf not found on PATH – install it to use fuzzy selection.");
        println!("Available commands:");
        for entry in &entries {
            println!(" {}", entry.display);
        }
        return Ok(());
    }
    if let Some(result) = run_fzf(&entries)? {
        // Tab in fzf means "prompt for extra args" before running.
        let extra_args = if result.with_args {
            prompt_for_args(&result.entry.display)?
        } else {
            Vec::new()
        };
        run_entry(result.entry, extra_args)?;
    }
    Ok(())
}
/// Prompt the user for extra CLI args for the selected task (fzf Tab flow).
///
/// Input is split shell-style (quotes respected); EOF or a read error yields
/// an empty arg list rather than failing.
fn prompt_for_args(task_display: &str) -> Result<Vec<String>> {
    use std::io::{self, BufRead};
    // Extract task name from display (e.g., "[task] foo – description" -> "foo")
    let task_name = task_display
        .strip_prefix("[task] ")
        .and_then(|s| s.split(" – ").next())
        .and_then(|s| s.split(" (").next()) // handle "(path)" suffix
        .unwrap_or("task");
    // Show hint about quoting for args with spaces
    println!("(tip: use quotes for args with spaces, e.g. 'my prompt')");
    print!("f {} ", task_name);
    io::stdout().flush()?;
    let stdin = io::stdin();
    let line = stdin.lock().lines().next();
    let input = match line {
        Some(Ok(s)) => s,
        _ => return Ok(Vec::new()),
    };
    let args = shell_words::split(&input).context("failed to parse arguments")?;
    Ok(args)
}
/// One selectable palette row: what fzf shows and what to exec on selection.
struct PaletteEntry {
    // Text piped into fzf and matched back against the selection.
    display: String,
    // Argv (minus the executable) re-invoked on the current binary.
    exec: Vec<String>,
}
impl PaletteEntry {
    /// Entry from a literal display string and exec argv.
    fn new(display: &str, exec: Vec<String>) -> Self {
        Self {
            display: display.to_string(),
            exec,
        }
    }
    /// Entry for a task from a specific config file; `config_arg` becomes the
    /// `--config` value passed back to `f run`.
    fn from_task(task: &TaskConfig, config_arg: &str) -> Self {
        // Prefer the description for display; fall back to the raw command.
        let summary = task
            .description
            .as_deref()
            .unwrap_or_else(|| task.command.as_str());
        let display = format!("[task] {} – {}", task.name, truncate(summary, 96));
        let exec = vec![
            "run".into(),
            "--config".into(),
            config_arg.to_string(),
            task.name.clone(),
        ];
        Self { display, exec }
    }
    /// Entry for a discovered (possibly nested) task; nested tasks show their
    /// relative directory so same-named tasks stay distinguishable.
    fn from_discovered(discovered: &DiscoveredTask) -> Self {
        let summary = discovered
            .task
            .description
            .as_deref()
            .unwrap_or_else(|| discovered.task.command.as_str());
        let display = if let Some(path_label) = discovered.path_label() {
            format!(
                "[task] {} ({}) – {}",
                discovered.task.name,
                path_label,
                truncate(summary, 80)
            )
        } else {
            format!(
                "[task] {} – {}",
                discovered.task.name,
                truncate(summary, 96)
            )
        };
        let exec = vec![
            "run".into(),
            "--config".into(),
            discovered.config_path.display().to_string(),
            discovered.task.name.clone(),
        ];
        Self { display, exec }
    }
}
/// Assemble the palette entry list.
///
/// With project opts: discover every flow.toml under the project root and,
/// when any tasks exist, return ONLY those. Otherwise fall back to built-in
/// commands plus tasks from the global config file (if present).
fn build_entries(project_opts: Option<TasksOpts>) -> Result<Vec<PaletteEntry>> {
    let mut entries = Vec::new();
    let global_cfg = load_if_exists(config::default_config_path())?;
    let mut has_project = false;
    if let Some(opts) = project_opts {
        // Determine the root directory for discovery
        let root = if opts.config.is_absolute() {
            opts.config
                .parent()
                .map(|p| p.to_path_buf())
                .unwrap_or_else(|| PathBuf::from("."))
        } else {
            std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."))
        };
        // Discover all nested flow.toml files
        let discovery = discover::discover_tasks(&root)?;
        if !discovery.tasks.is_empty() {
            has_project = true;
            for discovered in &discovery.tasks {
                entries.push(PaletteEntry::from_discovered(discovered));
            }
        }
    }
    if has_project {
        // Project tasks take over the palette entirely.
        return Ok(entries);
    }
    entries.extend(builtin_entries());
    if let Some((global_path, cfg)) = global_cfg {
        let arg = global_path.display().to_string();
        for task in &cfg.tasks {
            entries.push(PaletteEntry::from_task(task, &arg));
        }
    }
    Ok(entries)
}
/// Built-in commands that are always available in the palette.
fn builtin_entries() -> Vec<PaletteEntry> {
    // Return the vec directly — the previous `let entries = ...; entries`
    // binding was redundant (clippy::let_and_return).
    vec![
        PaletteEntry::new("[cmd] hub – ensure daemon is running", vec!["hub".into()]),
        PaletteEntry::new(
            "[cmd] search – global commands/tasks",
            vec!["search".into()],
        ),
        PaletteEntry::new("[cmd] init – scaffold flow.toml", vec!["init".into()]),
    ]
}
/// Load a config at `path` when the file exists; `Ok(None)` when it does not.
fn load_if_exists(path: PathBuf) -> Result<Option<(PathBuf, config::Config)>> {
    if !path.exists() {
        return Ok(None);
    }
    let cfg = config::load(&path)?;
    Ok(Some((path, cfg)))
}
/// Truncate `input` to at most `max` characters, replacing the tail with `…`
/// when it does not fit.
///
/// Fixes two issues in the previous version: it called `out.chars().count()`
/// on every iteration (O(n²) for long inputs), and it truncated strings whose
/// length was exactly `max` even though they already fit. The ellipsis counts
/// toward the budget, so truncated output is still at most `max` chars
/// (except for the degenerate `max == 0` case, which yields "…" as before).
fn truncate(input: &str, max: usize) -> String {
    let total = input.chars().count();
    if total <= max {
        // Fits as-is — no allocation games needed.
        return input.to_string();
    }
    let mut out: String = input.chars().take(max.saturating_sub(1)).collect();
    out.push('…');
    out
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/deploy_setup.rs | src/deploy_setup.rs | use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use crossterm::{
event::{self, Event as CEvent, KeyCode, KeyEvent},
execute,
terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
};
use ignore::WalkBuilder;
use ratatui::{
Terminal,
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, List, ListItem, Paragraph, Wrap},
};
use regex::Regex;
use crate::env::parse_env_file;
/// Pre-selected answers for the Cloudflare setup wizard.
///
/// Each `Some` field seeds the corresponding wizard step with a default
/// selection; `None` falls back to auto-detection.
#[derive(Debug, Clone, Default)]
pub struct CloudflareSetupDefaults {
    /// Worker directory (parent of a wrangler config) to preselect.
    pub worker_path: Option<PathBuf>,
    /// `.env` file to preselect as the secrets source.
    pub env_file: Option<PathBuf>,
    /// Wrangler `--env` name to preselect.
    pub environment: Option<String>,
}
/// Choices confirmed by the user at the end of the setup wizard.
#[derive(Debug, Clone)]
pub struct CloudflareSetupResult {
    /// Directory of the selected worker (contains the wrangler config).
    pub worker_path: PathBuf,
    /// Selected `.env` secrets source; `None` means "skip".
    pub env_file: Option<PathBuf>,
    /// Wrangler `--env` target; `None` means the default (production).
    pub environment: Option<String>,
    /// Env keys the user ticked for pushing as secrets.
    pub selected_keys: Vec<String>,
    /// Whether secrets should be applied immediately after setup.
    pub apply_secrets: bool,
}
/// Run the interactive Cloudflare deploy-setup wizard.
///
/// Returns `Ok(None)` when no wrangler config exists under `project_root`
/// or the user cancels; otherwise the confirmed selections.
pub fn run_cloudflare_setup(
    project_root: &Path,
    defaults: CloudflareSetupDefaults,
) -> Result<Option<CloudflareSetupResult>> {
    let worker_paths = discover_wrangler_configs(project_root)?;
    if worker_paths.is_empty() {
        println!("No Cloudflare Worker config found (wrangler.toml/json).");
        println!("Run `wrangler init` first, then try: f deploy setup");
        return Ok(None);
    }
    let env_files = discover_env_files(project_root)?;
    let mut app = DeploySetupApp::new(project_root, worker_paths, env_files, defaults);
    enable_raw_mode().context("failed to enable raw mode")?;
    let mut stdout = std::io::stdout();
    execute!(stdout, EnterAlternateScreen).context("failed to enter alternate screen")?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend).context("failed to create terminal backend")?;
    let app_result = run_app(&mut terminal, &mut app);
    // Teardown runs even when run_app failed; its errors are ignored so
    // `app_result` (success or failure) is what gets reported.
    disable_raw_mode().ok();
    let _ = terminal.show_cursor();
    // Drop the terminal first — it owns the stdout handle passed to the
    // backend — then leave the alternate screen on a fresh handle.
    drop(terminal);
    let mut stdout = std::io::stdout();
    execute!(stdout, LeaveAlternateScreen).ok();
    app_result
}
/// Wizard steps, in the order the user normally visits them.
#[derive(Debug, Clone, Copy)]
enum SetupStep {
    /// Pick which worker directory to set up.
    Worker,
    /// Pick the `.env` file used as the secrets source (optional).
    EnvFile,
    /// Pick the wrangler `--env` target.
    EnvTarget,
    /// Free-form entry of a custom environment name.
    CustomEnv,
    /// Tick/untick individual secrets to push.
    Keys,
    /// Final review; Enter finalizes the result.
    Confirm,
}
/// One selectable secrets source in the env-file step.
struct EnvFileChoice {
    // Human-readable label shown in the list.
    label: String,
    // Backing file path; `None` is the "skip (do not set secrets)" entry.
    path: Option<PathBuf>,
}
/// One selectable wrangler `--env` target.
struct EnvTargetChoice {
    // Label shown in the list.
    label: String,
    // Value passed as `--env`; `None` means the default environment.
    value: Option<String>,
    // Marks the trailing "custom..." sentinel that opens free-form entry.
    is_custom: bool,
}
/// One env key row in the secrets-selection step.
struct EnvKeyItem {
    // The env var name.
    key: String,
    // Whether this key is ticked for pushing.
    selected: bool,
    // Heuristically flagged as a likely test/local value.
    suspect: bool,
    // Tag explaining why the value looks suspect (see `suspect_reason`).
    suspect_reason: Option<String>,
    // Byte length of the value (values themselves are never displayed).
    value_len: usize,
}
/// Mutable state for the deploy-setup TUI wizard.
struct DeploySetupApp {
    // Root used to render paths relative to the project.
    project_root: PathBuf,
    // Current wizard step.
    step: SetupStep,
    // Candidate worker directories (parents of wrangler configs).
    worker_paths: Vec<PathBuf>,
    // Index into `worker_paths`.
    selected_worker: usize,
    // Secrets-source choices (first entry is "skip").
    env_files: Vec<EnvFileChoice>,
    // Index into `env_files`.
    selected_env_file: usize,
    // Wrangler `--env` choices (last entry is the "custom..." sentinel).
    env_targets: Vec<EnvTargetChoice>,
    // Index into `env_targets`.
    selected_env_target: usize,
    // Buffer for the free-form custom environment name.
    custom_env: String,
    // Keys parsed from the selected env file.
    key_items: Vec<EnvKeyItem>,
    // Index into `key_items`.
    selected_key: usize,
    // Whether secrets should be applied right after setup.
    apply_secrets: bool,
    // Populated by `finalize` when the user confirms.
    result: Option<CloudflareSetupResult>,
}
impl DeploySetupApp {
    /// Build the initial wizard state, preselecting the worker, env file
    /// and environment according to `defaults` where possible.
    fn new(
        project_root: &Path,
        worker_paths: Vec<PathBuf>,
        env_files: Vec<PathBuf>,
        defaults: CloudflareSetupDefaults,
    ) -> Self {
        let selected_worker = pick_default_worker(&worker_paths, defaults.worker_path.as_ref());
        let env_file_choices = build_env_file_choices(project_root, &env_files);
        let selected_env_file = pick_default_env_file_for_worker(
            &env_file_choices,
            &worker_paths[selected_worker],
            defaults.env_file.as_ref(),
        );
        let mut app = Self {
            project_root: project_root.to_path_buf(),
            step: SetupStep::Worker,
            worker_paths,
            selected_worker,
            env_files: env_file_choices,
            selected_env_file,
            env_targets: Vec::new(),
            selected_env_target: 0,
            custom_env: String::new(),
            key_items: Vec::new(),
            selected_key: 0,
            apply_secrets: true,
            result: None,
        };
        app.refresh_env_targets(defaults.environment.as_deref());
        // If the preferred environment resolved to the "custom..." sentinel,
        // seed the free-form buffer with it.
        if matches!(
            app.env_targets.get(app.selected_env_target),
            Some(choice) if choice.is_custom
        ) {
            app.custom_env = defaults.environment.unwrap_or_default();
        }
        app
    }
    /// Directory of the currently selected worker.
    fn worker_path(&self) -> &Path {
        &self.worker_paths[self.selected_worker]
    }
    /// Rebuild the `--env` target list for the current worker: the default
    /// (production) entry first, then the envs parsed from wrangler.toml,
    /// then `preferred` (if not already present), then the "custom..."
    /// sentinel last.
    fn refresh_env_targets(&mut self, preferred: Option<&str>) {
        let envs = extract_wrangler_envs(self.worker_path());
        let mut targets = Vec::new();
        targets.push(EnvTargetChoice {
            label: "production (default)".to_string(),
            value: None,
            is_custom: false,
        });
        for env in envs {
            targets.push(EnvTargetChoice {
                label: env.clone(),
                value: Some(env),
                is_custom: false,
            });
        }
        if let Some(env) = preferred {
            // "production" is already covered by the default entry above.
            if !targets.iter().any(|choice| choice.value.as_deref() == Some(env)) && env != "production" {
                targets.push(EnvTargetChoice {
                    label: env.to_string(),
                    value: Some(env.to_string()),
                    is_custom: false,
                });
            }
        }
        targets.push(EnvTargetChoice {
            label: "custom...".to_string(),
            value: None,
            is_custom: true,
        });
        self.env_targets = targets;
        self.selected_env_target = pick_default_env_target(&self.env_targets, preferred);
    }
    /// Re-point the env-file selection at the best match for the current
    /// worker (leaves it unchanged when no candidate matches).
    fn select_env_file_for_worker(&mut self) {
        let worker_path = self.worker_path().to_path_buf();
        if let Some(idx) = pick_env_file_for_worker(&self.env_files, &worker_path) {
            self.selected_env_file = idx;
        }
    }
    /// Re-parse the selected env file into key rows; clears the list when
    /// "skip" is selected or the file cannot be parsed.
    fn refresh_keys(&mut self) {
        self.key_items.clear();
        self.selected_key = 0;
        if let Some(path) = self.env_files.get(self.selected_env_file).and_then(|c| c.path.clone())
        {
            if let Ok(items) = build_key_items(&path) {
                self.key_items = items;
            }
        }
    }
    /// Owned path of the selected env file, if one is selected.
    fn env_file_path(&self) -> Option<PathBuf> {
        self.env_files
            .get(self.selected_env_file)
            .and_then(|choice| choice.path.clone())
    }
    /// Borrowed path of the selected env file, if one is selected.
    fn env_file_path_ref(&self) -> Option<&Path> {
        self.env_files
            .get(self.selected_env_file)
            .and_then(|choice| choice.path.as_deref())
    }
    /// Value to pass as wrangler `--env`; `None` means the default.
    fn selected_env_target(&self) -> Option<String> {
        self.env_targets
            .get(self.selected_env_target)
            .and_then(|choice| choice.value.clone())
    }
    /// Capture the confirmed selections into `self.result`.
    fn finalize(&mut self) {
        let env_file = self.env_file_path();
        let mut selected_keys = Vec::new();
        // Keys are only meaningful when an env file was chosen.
        if env_file.is_some() {
            selected_keys = self
                .key_items
                .iter()
                .filter(|item| item.selected)
                .map(|item| item.key.clone())
                .collect();
        }
        self.result = Some(CloudflareSetupResult {
            worker_path: self.worker_path().to_path_buf(),
            env_file,
            environment: self.selected_env_target(),
            selected_keys,
            apply_secrets: self.apply_secrets,
        });
    }
}
/// Event loop: redraw, poll for key input, and dispatch until done.
fn run_app<B: ratatui::backend::Backend>(
    terminal: &mut Terminal<B>,
    app: &mut DeploySetupApp,
) -> Result<Option<CloudflareSetupResult>> {
    loop {
        terminal.draw(|f| draw_ui(f, app))?;
        // 200ms poll keeps the loop responsive without busy-waiting.
        if event::poll(std::time::Duration::from_millis(200))? {
            if let CEvent::Key(key) = event::read()? {
                // handle_key returns true when the wizard is finished or
                // cancelled; `app.result` is Some only on a confirmed finish.
                if handle_key(app, key)? {
                    return Ok(app.result.take());
                }
            }
        }
    }
}
/// Dispatch a single key press against the current wizard step.
///
/// Returns `Ok(true)` when the TUI loop should stop: either the user
/// cancelled (`q`, or Esc on the first step) or confirmed the final step
/// (in which case `app.result` has been populated by `finalize`).
fn handle_key(app: &mut DeploySetupApp, key: KeyEvent) -> Result<bool> {
    // Global bindings, checked before per-step handling: `q` always
    // cancels, Esc walks one step back (and cancels from the first step).
    match key.code {
        KeyCode::Char('q') => return Ok(true),
        KeyCode::Esc => return Ok(step_back(app)),
        _ => {}
    }
    match app.step {
        SetupStep::Worker => match key.code {
            KeyCode::Up => {
                select_prev(&mut app.selected_worker, app.worker_paths.len());
                // Keep the env-file preselection in sync with the worker.
                app.select_env_file_for_worker();
            }
            KeyCode::Down => {
                select_next(&mut app.selected_worker, app.worker_paths.len());
                app.select_env_file_for_worker();
            }
            KeyCode::Enter => {
                app.refresh_env_targets(None);
                app.select_env_file_for_worker();
                // Skip the env-file step when only the "skip" entry exists.
                if app.env_files.len() <= 1 {
                    app.step = SetupStep::EnvTarget;
                } else {
                    app.step = SetupStep::EnvFile;
                }
            }
            _ => {}
        },
        SetupStep::EnvFile => match key.code {
            KeyCode::Up => select_prev(&mut app.selected_env_file, app.env_files.len()),
            KeyCode::Down => select_next(&mut app.selected_env_file, app.env_files.len()),
            KeyCode::Enter => {
                app.step = SetupStep::EnvTarget;
            }
            _ => {}
        },
        SetupStep::EnvTarget => match key.code {
            KeyCode::Up => select_prev(&mut app.selected_env_target, app.env_targets.len()),
            KeyCode::Down => select_next(&mut app.selected_env_target, app.env_targets.len()),
            KeyCode::Enter => {
                if app
                    .env_targets
                    .get(app.selected_env_target)
                    .is_some_and(|choice| choice.is_custom)
                {
                    app.custom_env.clear();
                    app.step = SetupStep::CustomEnv;
                } else if app.env_file_path().is_some() {
                    app.refresh_keys();
                    if app.key_items.is_empty() {
                        app.step = SetupStep::Confirm;
                    } else {
                        app.step = SetupStep::Keys;
                    }
                } else {
                    app.step = SetupStep::Confirm;
                }
            }
            _ => {}
        },
        SetupStep::CustomEnv => match key.code {
            KeyCode::Enter => {
                if !app.custom_env.trim().is_empty() {
                    let name = app.custom_env.trim().to_string();
                    // BUGFIX: insert the new target *before* the trailing
                    // "custom..." sentinel and select it. The previous code
                    // pushed it after the sentinel but then selected
                    // `len - 2` — the sentinel itself, whose `value` is
                    // None — so the typed environment was silently ignored.
                    let insert_at = app.env_targets.len().saturating_sub(1);
                    app.env_targets.insert(
                        insert_at,
                        EnvTargetChoice {
                            label: name.clone(),
                            value: Some(name),
                            is_custom: false,
                        },
                    );
                    app.selected_env_target = insert_at;
                    if app.env_file_path().is_some() {
                        app.refresh_keys();
                        app.step = if app.key_items.is_empty() {
                            SetupStep::Confirm
                        } else {
                            SetupStep::Keys
                        };
                    } else {
                        app.step = SetupStep::Confirm;
                    }
                }
            }
            KeyCode::Backspace => {
                app.custom_env.pop();
            }
            KeyCode::Char(ch) => {
                // Only printable characters go into the name buffer.
                if !ch.is_control() {
                    app.custom_env.push(ch);
                }
            }
            _ => {}
        },
        SetupStep::Keys => match key.code {
            KeyCode::Up => select_prev(&mut app.selected_key, app.key_items.len()),
            KeyCode::Down => select_next(&mut app.selected_key, app.key_items.len()),
            KeyCode::Char(' ') => {
                if let Some(item) = app.key_items.get_mut(app.selected_key) {
                    item.selected = !item.selected;
                }
            }
            KeyCode::Enter => app.step = SetupStep::Confirm,
            _ => {}
        },
        SetupStep::Confirm => match key.code {
            KeyCode::Char(' ') => app.apply_secrets = !app.apply_secrets,
            KeyCode::Enter => {
                app.finalize();
                return Ok(true);
            }
            _ => {}
        },
    }
    Ok(false)
}
/// Render one frame of the wizard: header (step title), step-specific
/// body, and a footer with the key bindings for the current step.
fn draw_ui(f: &mut ratatui::Frame<'_>, app: &DeploySetupApp) {
    // Three rows: fixed-height header, flexible body, fixed-height footer.
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(3), Constraint::Min(1), Constraint::Length(3)].as_ref())
        .split(f.area());
    let title = match app.step {
        SetupStep::Worker => "Deploy Setup: Cloudflare Workers",
        SetupStep::EnvFile => "Select .env file (optional)",
        SetupStep::EnvTarget => "Select Cloudflare environment",
        SetupStep::CustomEnv => "Enter custom environment",
        SetupStep::Keys => "Select secrets to push",
        SetupStep::Confirm => "Confirm setup",
    };
    let header = Paragraph::new(Line::from(title))
        .block(Block::default().borders(Borders::ALL).title("flow"))
        .alignment(ratatui::layout::Alignment::Center);
    f.render_widget(header, chunks[0]);
    match app.step {
        // Single list of worker directories, rendered project-relative.
        SetupStep::Worker => {
            let items = app
                .worker_paths
                .iter()
                .map(|path| {
                    let label = relative_display(&app.project_root, path);
                    ListItem::new(Line::from(label))
                })
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(Block::default().borders(Borders::ALL).title("Worker path"))
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_worker));
            f.render_stateful_widget(list, chunks[1], &mut state);
        }
        // Split pane: env-file choices on the left, key preview on the right.
        SetupStep::EnvFile => {
            let body = Layout::default()
                .direction(Direction::Horizontal)
                .constraints([Constraint::Percentage(55), Constraint::Percentage(45)].as_ref())
                .split(chunks[1]);
            let items = app
                .env_files
                .iter()
                .map(|choice| ListItem::new(Line::from(choice.label.clone())))
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(Block::default().borders(Borders::ALL).title("Secrets source"))
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_env_file));
            f.render_stateful_widget(list, body[0], &mut state);
            let preview_lines = build_env_preview_lines(&app.project_root, app.env_file_path_ref());
            let preview = Paragraph::new(preview_lines)
                .block(Block::default().borders(Borders::ALL).title("Preview"))
                .wrap(Wrap { trim: true });
            f.render_widget(preview, body[1]);
        }
        // Single list of `--env` targets.
        SetupStep::EnvTarget => {
            let items = app
                .env_targets
                .iter()
                .map(|choice| ListItem::new(Line::from(choice.label.clone())))
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(Block::default().borders(Borders::ALL).title("Wrangler --env"))
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_env_target));
            f.render_stateful_widget(list, chunks[1], &mut state);
        }
        // Free-form text entry echoing the current buffer.
        SetupStep::CustomEnv => {
            let prompt = format!("> {}", app.custom_env);
            let input = Paragraph::new(prompt)
                .block(Block::default().borders(Borders::ALL).title("Environment name"))
                .wrap(Wrap { trim: true });
            f.render_widget(input, chunks[1]);
        }
        // Split pane: checkbox list of keys on the left, details on right.
        SetupStep::Keys => {
            let body = Layout::default()
                .direction(Direction::Horizontal)
                .constraints([Constraint::Percentage(60), Constraint::Percentage(40)].as_ref())
                .split(chunks[1]);
            let selected_count = app.key_items.iter().filter(|item| item.selected).count();
            let items = app
                .key_items
                .iter()
                .map(|item| {
                    let indicator = if item.selected { "[x]" } else { "[ ]" };
                    let flag = if item.suspect { " suspect" } else { "" };
                    let label = format!("{indicator} {}{flag}", item.key);
                    ListItem::new(Line::from(label))
                })
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(
                    Block::default()
                        .borders(Borders::ALL)
                        .title(format!(
                            "Secrets ({}/{})",
                            selected_count,
                            app.key_items.len()
                        )),
                )
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_key));
            f.render_stateful_widget(list, body[0], &mut state);
            let detail_lines = build_key_detail_lines(
                &app.project_root,
                app.env_file_path_ref(),
                app.key_items.get(app.selected_key),
            );
            let details = Paragraph::new(detail_lines)
                .block(Block::default().borders(Borders::ALL).title("Details"))
                .wrap(Wrap { trim: true });
            f.render_widget(details, body[1]);
        }
        // Read-only summary of every choice made so far.
        SetupStep::Confirm => {
            let worker = relative_display(&app.project_root, app.worker_path());
            let env_file = app
                .env_file_path()
                .map(|p| relative_display(&app.project_root, &p))
                .unwrap_or_else(|| "none".to_string());
            let env_target = app
                .selected_env_target()
                .unwrap_or_else(|| "production (default)".to_string());
            let selected_count = app.key_items.iter().filter(|item| item.selected).count();
            let apply = if app.apply_secrets { "yes" } else { "no" };
            let summary = vec![
                Line::from(vec![
                    Span::styled("Worker: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(worker),
                ]),
                Line::from(vec![
                    Span::styled("Env file: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(env_file),
                ]),
                Line::from(vec![
                    Span::styled("Environment: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(env_target),
                ]),
                Line::from(vec![
                    Span::styled("Secrets selected: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(format!("{}", selected_count)),
                ]),
                Line::from(vec![
                    Span::styled("Apply secrets now: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(apply),
                ]),
            ];
            let paragraph = Paragraph::new(summary)
                .block(Block::default().borders(Borders::ALL).title("Review"))
                .wrap(Wrap { trim: true });
            f.render_widget(paragraph, chunks[1]);
        }
    }
    // Footer: per-step key bindings.
    let help = match app.step {
        SetupStep::Worker => "Up/Down to move, Enter to select, Esc to cancel, q to cancel",
        SetupStep::EnvFile => "Up/Down to move, Enter to select, Esc to back, q to cancel",
        SetupStep::EnvTarget => "Up/Down to move, Enter to select, Esc to back, q to cancel",
        SetupStep::CustomEnv => "Type name, Enter to confirm, Esc to back, q to cancel",
        SetupStep::Keys => "Up/Down to move, Space to toggle, Enter to continue, Esc to back, q to cancel",
        SetupStep::Confirm => "Space to toggle apply, Enter to finish, Esc to back, q to cancel",
    };
    let footer = Paragraph::new(help)
        .block(Block::default().borders(Borders::ALL))
        .alignment(ratatui::layout::Alignment::Center);
    f.render_widget(footer, chunks[2]);
}
/// Build the preview pane for the env-file step.
///
/// Lists the (sorted) key names from the selected env file, flagging likely
/// test/local values with `!`. Values themselves are never shown. At most
/// 12 keys are listed; the rest are summarized as "+N more".
fn build_env_preview_lines(project_root: &Path, env_file: Option<&Path>) -> Vec<Line<'static>> {
    let Some(path) = env_file else {
        return vec![
            Line::from("No env file selected."),
            Line::from("Secrets will not be set."),
        ];
    };
    let mut lines = vec![
        Line::from(vec![
            Span::styled("File: ", Style::default().add_modifier(Modifier::BOLD)),
            Span::raw(relative_display(project_root, path)),
        ]),
        Line::from("Values are hidden."),
    ];
    let Ok(content) = fs::read_to_string(path) else {
        lines.push(Line::from("Unable to read file."));
        return lines;
    };
    let mut entries: Vec<_> = parse_env_file(&content).into_iter().collect();
    if entries.is_empty() {
        lines.push(Line::from("No env vars found."));
        return lines;
    }
    entries.sort_by(|a, b| a.0.cmp(&b.0));
    let total = entries.len();
    let suspect_count = entries
        .iter()
        .filter(|(_, value)| suspect_reason(value).is_some())
        .count();
    lines.push(Line::from(format!(
        "Keys: {} (suspect: {})",
        total, suspect_count
    )));
    lines.push(Line::from("! = likely test/local value"));
    const MAX_KEYS: usize = 12;
    for (key, value) in entries.iter().take(MAX_KEYS) {
        let flag = if suspect_reason(value).is_some() { " !" } else { "" };
        lines.push(Line::from(format!(" - {}{}", key, flag)));
    }
    if total > MAX_KEYS {
        lines.push(Line::from(format!("... +{} more", total - MAX_KEYS)));
    }
    lines
}
/// Render the right-hand details pane for the currently highlighted key.
///
/// Shows the key name, selection and suspect status, the (hidden) value's
/// byte length, and a hint when the key was auto-unchecked.
fn build_key_detail_lines(
    project_root: &Path,
    env_file: Option<&Path>,
    item: Option<&EnvKeyItem>,
) -> Vec<Line<'static>> {
    let env_label = match env_file {
        Some(path) => relative_display(project_root, path),
        None => "none".to_string(),
    };
    let mut lines = vec![Line::from(format!("Env file: {}", env_label))];
    let Some(item) = item else {
        lines.push(Line::from("No key selected."));
        return lines;
    };
    let yes_no = |flag: bool| if flag { "yes" } else { "no" };
    lines.push(Line::from(format!("Key: {}", item.key)));
    lines.push(Line::from(format!("Selected: {}", yes_no(item.selected))));
    lines.push(Line::from(format!(
        "Status: {}",
        if item.suspect { "suspect" } else { "ok" }
    )));
    if let Some(reason) = &item.suspect_reason {
        lines.push(Line::from(format!("Reason: {}", reason)));
    }
    lines.push(Line::from(format!("Value length: {}", item.value_len)));
    lines.push(Line::from("Values are hidden."));
    if item.suspect {
        lines.push(Line::from("Tip: suspect values default to unchecked."));
    }
    lines
}
/// Move the selection cursor up one row, wrapping from the top to the
/// bottom. No-op on an empty list.
fn select_prev(selected: &mut usize, len: usize) {
    if len == 0 {
        return;
    }
    // Wrap-around decrement: index 0 jumps to the last index.
    *selected = selected.checked_sub(1).unwrap_or(len - 1);
}
/// Move the selection cursor down one row, wrapping from the bottom to the
/// top. No-op on an empty list.
fn select_next(selected: &mut usize, len: usize) {
    if len == 0 {
        return;
    }
    // Any index at or past the end wraps back to 0 (this also recovers a
    // stale out-of-range index after the list shrinks).
    *selected = if *selected + 1 >= len { 0 } else { *selected + 1 };
}
/// Walk one wizard step backwards.
///
/// Returns `true` (meaning: exit the TUI) only when already on the first
/// step; otherwise updates `app.step` and returns `false`. Steps that were
/// skipped on the way forward (env-file with a single choice, the keys
/// step with nothing to list) are skipped on the way back too.
fn step_back(app: &mut DeploySetupApp) -> bool {
    let previous = match app.step {
        // Already at the first step: backing out cancels the wizard.
        SetupStep::Worker => None,
        SetupStep::EnvFile => Some(SetupStep::Worker),
        SetupStep::EnvTarget => Some(if app.env_files.len() <= 1 {
            SetupStep::Worker
        } else {
            SetupStep::EnvFile
        }),
        SetupStep::CustomEnv | SetupStep::Keys => Some(SetupStep::EnvTarget),
        SetupStep::Confirm => Some(
            if app.env_file_path().is_some() && !app.key_items.is_empty() {
                SetupStep::Keys
            } else {
                SetupStep::EnvTarget
            },
        ),
    };
    match previous {
        Some(step) => {
            app.step = step;
            false
        }
        None => true,
    }
}
/// Render `path` relative to `root` for display.
///
/// Falls back to the absolute form when `path` is not under `root`; a path
/// equal to `root` renders as ".".
fn relative_display(root: &Path, path: &Path) -> String {
    match path.strip_prefix(root) {
        Ok(rel) if rel.as_os_str().is_empty() => ".".to_string(),
        Ok(rel) => rel.to_string_lossy().into_owned(),
        Err(_) => path.to_string_lossy().into_owned(),
    }
}
/// Index of `preferred` within `paths`, or 0 when absent/unspecified.
fn pick_default_worker(paths: &[PathBuf], preferred: Option<&PathBuf>) -> usize {
    preferred
        .and_then(|want| paths.iter().position(|p| p == want))
        .unwrap_or(0)
}
/// Build the env-file choice list: a leading "skip" entry followed by one
/// entry per discovered env file, labeled project-relative.
fn build_env_file_choices(project_root: &Path, env_files: &[PathBuf]) -> Vec<EnvFileChoice> {
    let skip = EnvFileChoice {
        label: "Skip (do not set secrets)".to_string(),
        path: None,
    };
    std::iter::once(skip)
        .chain(env_files.iter().map(|path| EnvFileChoice {
            label: relative_display(project_root, path),
            path: Some(path.clone()),
        }))
        .collect()
}
/// Choose the default env-file index: the `preferred` path when present in
/// `choices`, else the best conventional match for the worker directory,
/// else 0 (the "skip" entry).
fn pick_default_env_file_for_worker(
    choices: &[EnvFileChoice],
    worker_path: &Path,
    preferred: Option<&PathBuf>,
) -> usize {
    let preferred_idx = preferred
        .and_then(|want| choices.iter().position(|c| c.path.as_ref() == Some(want)));
    preferred_idx
        .or_else(|| pick_env_file_for_worker(choices, worker_path))
        .unwrap_or(0)
}
/// Find the first conventional env file inside `worker_path` that is
/// present in `choices`, in priority order (`.env` first).
fn pick_env_file_for_worker(choices: &[EnvFileChoice], worker_path: &Path) -> Option<usize> {
    const CANDIDATES: [&str; 5] = [
        ".env",
        ".env.cloudflare",
        ".env.production",
        ".env.staging",
        ".env.local",
    ];
    CANDIDATES.iter().find_map(|candidate| {
        let wanted = worker_path.join(candidate);
        choices
            .iter()
            .position(|c| c.path.as_ref() == Some(&wanted))
    })
}
/// Index of the target whose value equals `preferred`, or 0 (the default
/// production entry) when absent/unspecified.
fn pick_default_env_target(targets: &[EnvTargetChoice], preferred: Option<&str>) -> usize {
    preferred
        .and_then(|env| {
            targets
                .iter()
                .position(|choice| choice.value.as_deref() == Some(env))
        })
        .unwrap_or(0)
}
/// Parse `path` as an env file and turn each var into a selectable key row,
/// sorted by key name.
///
/// Suspect-looking values (per `suspect_reason`) start unchecked so the
/// user must opt in explicitly.
///
/// # Errors
/// Fails when the file cannot be read.
fn build_key_items(path: &Path) -> Result<Vec<EnvKeyItem>> {
    let content = fs::read_to_string(path)
        .with_context(|| format!("failed to read env file {}", path.display()))?;
    let env = parse_env_file(&content);
    let mut keys: Vec<_> = env.into_iter().collect();
    keys.sort_by(|a, b| a.0.cmp(&b.0));
    Ok(keys
        .into_iter()
        .map(|(key, value)| {
            let reason = suspect_reason(&value);
            let suspect = reason.is_some();
            EnvKeyItem {
                key,
                selected: !suspect,
                // `suspect_reason` already reports blank values as "empty",
                // so the previous extra `|| value.trim().is_empty()` check
                // was dead code and has been dropped.
                suspect,
                suspect_reason: reason.map(|reason| reason.to_string()),
                value_len: value.len(),
            }
        })
        .collect())
}
/// Heuristically classify an env value as a likely test/placeholder value.
///
/// Returns a short static tag naming why the value looks suspect, or
/// `None` when every check passes. Matching is case-insensitive on the
/// trimmed value, and rules are ordered: the first match wins, so e.g.
/// `sk_test_…` reports "stripe_test" rather than the generic "test".
fn suspect_reason(value: &str) -> Option<&'static str> {
    let trimmed = value.trim();
    if trimmed.is_empty() {
        return Some("empty");
    }
    let lowered = trimmed.to_lowercase();
    // Ordered needle table: earlier rows take priority over later ones.
    let rules: [(&[&str], &'static str); 5] = [
        (&["sk_test", "pk_test"], "stripe_test"),
        (&["localhost", "127.0.0.1"], "localhost"),
        (&["example.com", "example"], "example"),
        (&["dummy"], "dummy"),
        (&["test"], "test"),
    ];
    rules
        .iter()
        .find(|(needles, _)| needles.iter().any(|needle| lowered.contains(needle)))
        .map(|&(_, reason)| reason)
}
/// Find directories containing a wrangler config (`wrangler.toml`,
/// `wrangler.json`, or `wrangler.jsonc`) under `root`.
///
/// Walks up to 10 levels deep, honoring gitignore rules and skipping
/// hidden entries plus common build/vendor directories. Returns the
/// sorted, deduplicated list of the configs' *parent directories*.
pub(crate) fn discover_wrangler_configs(root: &Path) -> Result<Vec<PathBuf>> {
    let walker = WalkBuilder::new(root)
        .hidden(true)
        .git_ignore(true)
        .git_global(true)
        .git_exclude(true)
        .max_depth(Some(10))
        .filter_entry(|entry| {
            // Prune well-known dependency/build directories early so the
            // walk never descends into them.
            if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
                let name = entry.file_name().to_string_lossy();
                !matches!(
                    name.as_ref(),
                    "node_modules"
                        | "target"
                        | "dist"
                        | "build"
                        | ".git"
                        | ".hg"
                        | ".svn"
                        | "__pycache__"
                        | ".pytest_cache"
                        | ".mypy_cache"
                        | "venv"
                        | ".venv"
                        | "vendor"
                        | "Pods"
                        | ".cargo"
                        | ".rustup"
                )
            } else {
                true
            }
        })
        .build();
    let mut paths = Vec::new();
    // flatten() silently skips walk errors (unreadable dirs etc.).
    for entry in walker.flatten() {
        if entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
            if let Some(name) = entry.path().file_name().and_then(|s| s.to_str()) {
                if matches!(name, "wrangler.toml" | "wrangler.json" | "wrangler.jsonc") {
                    // Record the containing directory, not the config file.
                    if let Some(parent) = entry.path().parent() {
                        paths.push(parent.to_path_buf());
                    }
                }
            }
        }
    }
    paths.sort();
    paths.dedup();
    Ok(paths)
}
/// Find `.env*` files under `root` (up to 10 levels deep).
///
/// Unlike `discover_wrangler_configs`, hidden entries are *included*
/// (`.env` files are dotfiles) and gitignore rules are disabled, since env
/// files are typically gitignored but are exactly what we want to find.
/// `.envrc` is excluded (it is a direnv script, not a var file).
fn discover_env_files(root: &Path) -> Result<Vec<PathBuf>> {
    let walker = WalkBuilder::new(root)
        .hidden(false)
        .git_ignore(false)
        .git_global(false)
        .git_exclude(false)
        .max_depth(Some(10))
        .filter_entry(|entry| {
            // Prune well-known dependency/build directories early.
            if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
                let name = entry.file_name().to_string_lossy();
                !matches!(
                    name.as_ref(),
                    "node_modules"
                        | "target"
                        | "dist"
                        | "build"
                        | ".git"
                        | ".hg"
                        | ".svn"
                        | "__pycache__"
                        | ".pytest_cache"
                        | ".mypy_cache"
                        | "venv"
                        | ".venv"
                        | "vendor"
                        | "Pods"
                        | ".cargo"
                        | ".rustup"
                )
            } else {
                true
            }
        })
        .build();
    let mut env_files = Vec::new();
    for entry in walker.flatten() {
        if entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
            if let Some(name) = entry.path().file_name().and_then(|s| s.to_str()) {
                if name.starts_with(".env") && name != ".envrc" {
                    env_files.push(entry.path().to_path_buf());
                }
            }
        }
    }
    env_files.sort();
    env_files.dedup();
    Ok(env_files)
}
fn extract_wrangler_envs(worker_path: &Path) -> Vec<String> {
let toml_path = worker_path.join("wrangler.toml");
if toml_path.exists() {
if let Ok(content) = fs::read_to_string(&toml_path) {
let re = Regex::new(r"^\s*\[env\.([^\]]+)\]\s*$").unwrap();
let mut envs = Vec::new();
for line in content.lines() {
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/sync.rs | src/sync.rs | //! Auto-setup command for autonomous agent workflows.
use anyhow::{Context, Result, bail};
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use crate::config;
/// Generate agents.md content with project-specific settings.
///
/// Only `project_name` is interpolated into the template today;
/// `_primary_task` is accepted but unused (underscore silences the
/// warning) — presumably reserved for future template customization.
fn generate_agents_md(project_name: &str, _primary_task: &str) -> String {
    format!(
        r#"# Autonomous Agent Instructions
Project: {project_name}
This project is configured for autonomous AI agent workflows with human-in-the-loop approval.
## Response Format
**Every response MUST end with exactly one of these signals on the final line:**
### Success signals
```
done.
```
Use when task completed successfully with high certainty. No further action needed.
```
done: <message>
```
Use when task completed with context to share. Example: `done: Added login command with --token flag`
### Needs human input
```
needsUpdate: <message>
```
Use when you need human decision or action. Example: `needsUpdate: Should I use OAuth or API key auth?`
### Error signals
```
error: <message>
```
Use when task failed or cannot proceed. Example: `error: Build failed - missing dependency xyz`
## Rules
1. **Always end with a signal** - The last line must be one of the above
2. **One signal only** - Never combine signals
3. **Be specific** - Include actionable context in messages
4. **No quotes** - Write signals exactly as shown, no wrapping quotes
## Examples
### Successful implementation
```
Added the new CLI command with all requested flags.
done.
```
### Completed with context
```
Refactored the auth module to use the new token format.
done: Auth now supports both JWT and API key methods
```
### Need human decision
```
Found two approaches for caching:
1. Redis - better for distributed systems
2. In-memory - simpler, faster for single instance
needsUpdate: Which caching approach should I use?
```
### Error occurred
```
Attempted to run tests but encountered issues.
error: Test suite requires DATABASE_URL environment variable
```
"#
    )
}
/// Run the auto-setup command.
///
/// Verifies Lin.app is running, derives the project name and primary task
/// from `flow.toml` (falling back to the directory name / "deploy"), then
/// writes the generated agents.md into both `.claude/` and `.codex/`.
///
/// # Errors
/// Fails when Lin.app is not running, the cwd is unavailable, or any of
/// the directory/file writes fail.
pub fn run() -> Result<()> {
    // Local trait import so `flush` is available without touching the
    // file-level use block.
    use std::io::Write as _;
    println!("Setting up autonomous agent workflow...\n");
    // Check if Lin.app is running
    print!("Checking Lin.app... ");
    // FIX: flush explicitly — `print!` without a newline sits in the
    // stdout line buffer, so the progress message was invisible while
    // pgrep ran.
    std::io::stdout().flush().ok();
    if !is_lin_running() {
        println!("not running");
        println!();
        println!("Lin.app is required for autonomous agent workflows.");
        println!("Please start Lin.app from /Applications/Lin.app");
        bail!("Lin.app is not running");
    }
    println!("running ✓");
    // The process check above can succeed for a Lin started from elsewhere,
    // so also warn when the app bundle is missing from /Applications.
    let lin_app = PathBuf::from("/Applications/Lin.app");
    if !lin_app.exists() {
        println!();
        println!("Warning: Lin.app not found at /Applications/Lin.app");
        println!("The autonomous workflow requires Lin.app to be installed.");
    }
    let cwd = std::env::current_dir().context("failed to get current directory")?;
    // Load flow.toml to get the project name and primary task; fall back to
    // the directory name / "deploy" when absent or unparsable.
    let flow_toml = cwd.join("flow.toml");
    let (project_name, primary_task) = if flow_toml.exists() {
        let cfg = config::load(&flow_toml).unwrap_or_default();
        let name = cfg
            .project_name
            .or_else(|| cwd.file_name().map(|n| n.to_string_lossy().into_owned()))
            .unwrap_or_else(|| "project".to_string());
        let task = cfg
            .flow
            .primary_task
            .unwrap_or_else(|| "deploy".to_string());
        (name, task)
    } else {
        let name = cwd
            .file_name()
            .map(|n| n.to_string_lossy().into_owned())
            .unwrap_or_else(|| "project".to_string());
        (name, "deploy".to_string())
    };
    print!("Project: {} ", project_name);
    println!("(primary task: {})", primary_task);
    // Generate customized agents.md (shared by both agent directories).
    let agents_content = generate_agents_md(&project_name, &primary_task);
    // Create .claude directory if needed
    let claude_dir = cwd.join(".claude");
    fs::create_dir_all(&claude_dir).context("failed to create .claude directory")?;
    // Write agents.md
    let agents_path = claude_dir.join("agents.md");
    let existed = agents_path.exists();
    fs::write(&agents_path, &agents_content).context("failed to write agents.md")?;
    if existed {
        println!("Updated .claude/agents.md ✓");
    } else {
        println!("Created .claude/agents.md ✓");
    }
    // Also create for Codex (.codex/agents.md)
    let codex_dir = cwd.join(".codex");
    fs::create_dir_all(&codex_dir).context("failed to create .codex directory")?;
    let codex_agents_path = codex_dir.join("agents.md");
    let codex_existed = codex_agents_path.exists();
    fs::write(&codex_agents_path, &agents_content).context("failed to write .codex/agents.md")?;
    if codex_existed {
        println!("Updated .codex/agents.md ✓");
    } else {
        println!("Created .codex/agents.md ✓");
    }
    println!();
    println!("Autonomous agent workflow is ready!");
    println!();
    println!("Claude Code and Codex will now end responses with:");
    println!("  done. - Task completed successfully");
    println!("  done: <msg> - Completed with context");
    println!("  needsUpdate: <msg> - Needs human decision");
    println!("  error: <msg> - Task failed");
    println!();
    println!("Lin.app will detect these signals and show appropriate widgets.");
    Ok(())
}
/// Check if Lin.app is running.
///
/// Uses `pgrep -x Lin`, whose exit status is success iff a matching
/// process exists; failure to spawn pgrep is treated as "not running".
fn is_lin_running() -> bool {
    Command::new("pgrep")
        .args(["-x", "Lin"])
        .output()
        .map(|out| out.status.success())
        .unwrap_or(false)
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/commits.rs | src/commits.rs | //! Browse and analyze git commits with AI session metadata.
//!
//! Shows commits with attached AI sessions, reviews, and other metadata.
use std::io::Write;
use std::process::{Command, Stdio};
use anyhow::{Context, Result, bail};
use crate::cli::CommitsOpts;
/// Commit with associated metadata
///
/// One row in the commit picker. `display` is the pre-rendered line fed to
/// fzf; the user's selection is mapped back to the entry by comparing
/// against it.
#[derive(Debug, Clone)]
struct CommitEntry {
    /// Git commit hash (short)
    hash: String,
    /// Full commit hash (used for git-notes / metadata lookups)
    full_hash: String,
    /// Commit subject line (used in display)
    // Folded into `display` at construction and never read directly
    // afterwards, hence the dead_code allowance.
    #[allow(dead_code)]
    subject: String,
    /// Relative time (e.g., "2 hours ago")
    relative_time: String,
    /// Author name
    author: String,
    /// Whether this commit has AI session metadata
    has_ai_metadata: bool,
    /// Display string for fzf
    display: String,
}
/// Run the commits subcommand.
///
/// Lists recent commits, then (when `fzf` is available) lets the user pick
/// one interactively and prints its details; without fzf it falls back to
/// a plain listing on stdout.
pub fn run(opts: CommitsOpts) -> Result<()> {
    let commits = list_commits(opts.limit, opts.all)?;
    if commits.is_empty() {
        println!("No commits found.");
        return Ok(());
    }
    // No fzf on PATH: degrade to a non-interactive listing.
    if which::which("fzf").is_err() {
        println!("fzf not found – install it for fuzzy selection.");
        println!("\nCommits:");
        commits.iter().for_each(|commit| println!("{}", commit.display));
        return Ok(());
    }
    // Interactive path: show details for the picked commit, if any.
    match run_commits_fzf(&commits)? {
        Some(selected) => show_commit_details(selected),
        None => Ok(()),
    }
}
/// List recent commits with metadata.
///
/// Shells out to `git log` with a `|`-separated pretty format and parses
/// each line into a `CommitEntry`.
///
/// NOTE(review): `check_ai_metadata` spawns up to two extra git processes
/// per commit, so cost grows linearly with `limit` — consider batching if
/// this becomes slow.
/// NOTE(review): a literal `|` inside a commit subject shifts the
/// splitn(5) fields; a non-printable separator (e.g. `%x00`) would be
/// more robust — confirm before changing the format string.
fn list_commits(limit: usize, all_branches: bool) -> Result<Vec<CommitEntry>> {
    // `limit_str` needs its own binding so the borrow pushed into `args`
    // outlives the vec.
    let mut args = vec!["log", "--pretty=format:%h|%H|%s|%ar|%an", "-n"];
    let limit_str = limit.to_string();
    args.push(&limit_str);
    if all_branches {
        args.push("--all");
    }
    let output = Command::new("git")
        .args(&args)
        .output()
        .context("failed to run git log")?;
    if !output.status.success() {
        bail!("git log failed");
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let mut commits = Vec::new();
    for line in stdout.lines() {
        // Fields: short hash | full hash | subject | relative time | author.
        let parts: Vec<&str> = line.splitn(5, '|').collect();
        if parts.len() < 5 {
            continue;
        }
        let hash = parts[0].to_string();
        let full_hash = parts[1].to_string();
        let subject = parts[2].to_string();
        let relative_time = parts[3].to_string();
        let author = parts[4].to_string();
        // Check if commit has AI metadata (check git notes or commit trailers)
        let has_ai_metadata = check_ai_metadata(&full_hash);
        // Build display string: a leading marker column, then the fields.
        let ai_indicator = if has_ai_metadata { "◆ " } else { "  " };
        let display = format!(
            "{}{} | {} | {} | {}",
            ai_indicator,
            hash,
            truncate_str(&subject, 50),
            relative_time,
            author
        );
        commits.push(CommitEntry {
            hash,
            full_hash,
            subject,
            relative_time,
            author,
            has_ai_metadata,
            display,
        });
    }
    Ok(commits)
}
/// Check if a commit has AI session metadata attached.
fn check_ai_metadata(commit_hash: &str) -> bool {
// Check git notes for AI metadata
let output = Command::new("git")
.args(["notes", "show", commit_hash])
.output();
if let Ok(output) = output {
if output.status.success() {
let notes = String::from_utf8_lossy(&output.stdout);
if notes.contains("ai-session") || notes.contains("claude") || notes.contains("codex") {
return true;
}
}
}
// Check commit message for AI-related trailers
let output = Command::new("git")
.args(["log", "-1", "--format=%B", commit_hash])
.output();
if let Ok(output) = output {
if output.status.success() {
let body = String::from_utf8_lossy(&output.stdout).to_lowercase();
if body.contains("reviewed-by: codex")
|| body.contains("reviewed-by: claude")
|| body.contains("ai-session:")
{
return true;
}
}
}
false
}
/// Run fzf over the commit list and return the selected entry, if any.
///
/// The preview pane shows `git show --stat` for the highlighted commit.
/// Display lines may begin with a "◆" AI-metadata marker, so the preview
/// strips it before taking the first field as the hash — a bare `{1}`
/// would resolve to "◆" (not the hash) for marked commits.
fn run_commits_fzf(commits: &[CommitEntry]) -> Result<Option<&CommitEntry>> {
    let mut child = Command::new("fzf")
        .arg("--prompt")
        .arg("commits> ")
        .arg("--ansi")
        .arg("--preview")
        // Strip the optional leading "◆" marker, then use the first
        // whitespace-separated field (the short hash) for the preview.
        .arg(r#"git show --stat --color=always $(echo {} | sed 's/^◆ *//' | awk '{print $1}')"#)
        .arg("--preview-window")
        .arg("down:50%:wrap")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .context("failed to spawn fzf")?;
    {
        let stdin = child.stdin.as_mut().context("failed to open fzf stdin")?;
        for commit in commits {
            // One display line per commit; the hash is the first real field.
            writeln!(stdin, "{}", commit.display)?;
        }
    }
    let output = child.wait_with_output()?;
    // Non-zero status means the user aborted (Esc/Ctrl-C) or fzf failed.
    if !output.status.success() {
        return Ok(None);
    }
    let selection = String::from_utf8(output.stdout).context("fzf output was not valid UTF-8")?;
    let selection = selection.trim();
    if selection.is_empty() {
        return Ok(None);
    }
    // Map the selected display line back to its CommitEntry.
    Ok(commits.iter().find(|c| c.display == selection))
}
/// Show detailed commit information including AI metadata.
///
/// Prints, in order: a header (hash, relative time, author), the full commit
/// message, AI session metadata from git notes (only when
/// `commit.has_ai_metadata` is set), and the `git show --stat` file summary.
/// All output goes to stdout; git failures for the optional sections are
/// silently skipped.
fn show_commit_details(commit: &CommitEntry) -> Result<()> {
    println!("\n────────────────────────────────────────");
    println!("Commit: {} ({})", commit.hash, commit.relative_time);
    println!("Author: {}", commit.author);
    println!("────────────────────────────────────────\n");
    // Show commit message (%B = raw body including subject).
    let output = Command::new("git")
        .args(["log", "-1", "--format=%B", &commit.full_hash])
        .output()
        .context("failed to get commit message")?;
    if output.status.success() {
        let message = String::from_utf8_lossy(&output.stdout);
        println!("Message:\n{}", message);
    }
    // Show AI metadata if present
    if commit.has_ai_metadata {
        println!("────────────────────────────────────────");
        println!("AI Session Metadata:");
        println!("────────────────────────────────────────\n");
        // Try to get notes; a missing note is not an error here.
        let notes_output = Command::new("git")
            .args(["notes", "show", &commit.full_hash])
            .output();
        if let Ok(notes) = notes_output {
            if notes.status.success() {
                let notes_content = String::from_utf8_lossy(&notes.stdout);
                println!("{}", notes_content);
            }
        }
    }
    // Show files changed (--format= suppresses the commit header, leaving
    // only the diffstat).
    println!("────────────────────────────────────────");
    println!("Files Changed:");
    println!("────────────────────────────────────────\n");
    let files_output = Command::new("git")
        .args(["show", "--stat", "--format=", &commit.full_hash])
        .output()
        .context("failed to get files changed")?;
    if files_output.status.success() {
        let files = String::from_utf8_lossy(&files_output.stdout);
        println!("{}", files);
    }
    Ok(())
}
/// Shorten `s` to at most `max_len` bytes, appending "..." when it was cut.
///
/// The cut point is moved back to the nearest UTF-8 character boundary so
/// slicing never panics on multi-byte characters. Lengths are measured in
/// bytes, matching the display-width use at the call sites.
fn truncate_str(s: &str, max_len: usize) -> String {
    if s.len() <= max_len {
        return s.to_string();
    }
    // Reserve room for the ellipsis, then back up to a char boundary.
    let mut cut = max_len.saturating_sub(3).min(s.len());
    while cut > 0 && !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &s[..cut])
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/task_match.rs | src/task_match.rs | //! Match user query to a task using LM Studio.
use std::io::{self, Write};
use std::path::PathBuf;
use anyhow::{Context, Result, bail};
use crate::{
cli::TaskRunOpts,
config,
discover::{self, DiscoveredTask},
lmstudio, tasks,
};
use clap::CommandFactory;
use crate::cli::Cli;
/// Options for the match command.
#[derive(Debug, Clone)]
pub struct MatchOpts {
    /// The user's query as separate arguments (preserves quoting from shell).
    pub args: Vec<String>,
    /// LM Studio model to use.
    // `None` presumably lets the lmstudio module pick a default — confirm there.
    pub model: Option<String>,
    /// LM Studio API port.
    // `None` presumably falls back to the lmstudio default port — confirm there.
    pub port: Option<u16>,
    /// Whether to actually run the matched task.
    // When false, the match is only printed, never executed.
    pub execute: bool,
}
/// Result of matching a query to a task.
#[derive(Debug)]
pub struct MatchResult {
    /// Name of the matched task.
    pub task_name: String,
    /// Path to the flow config file that defines the task.
    pub config_path: PathBuf,
    /// Directory of the task relative to the project root ("global" for
    /// tasks from the global config).
    pub relative_dir: String,
}
/// Built-in commands (name, aliases) that can be run directly if no task matches.
const BUILTIN_COMMANDS: &[(&str, &[&str])] = &[("commit", &["commit", "c"])];
/// Collect every CLI subcommand name and alias, plus the help spellings.
fn cli_subcommands() -> Vec<String> {
    let cmd = Cli::command();
    let mut names: Vec<String> = cmd
        .get_subcommands()
        .flat_map(|sub| {
            std::iter::once(sub.get_name().to_string())
                .chain(sub.get_all_aliases().map(str::to_string))
        })
        .collect();
    names.extend(["help", "-h", "--help"].map(String::from));
    names
}
/// Execute a built-in command by name; with `execute == false` it is only
/// announced, never run.
fn run_builtin(name: &str, execute: bool) -> Result<()> {
    match name {
        "commit" => {
            println!("Running: commit");
            if execute {
                crate::commit::run(true)?;
            }
            Ok(())
        }
        other => bail!("Unknown built-in: {}", other),
    }
}
/// Resolve a query to a built-in command name via its aliases
/// (case-insensitive, surrounding whitespace ignored).
fn find_builtin(query: &str) -> Option<&'static str> {
    let q = query.trim().to_lowercase();
    BUILTIN_COMMANDS
        .iter()
        .find(|(_, aliases)| aliases.contains(&q.as_str()))
        .map(|(name, _)| *name)
}
/// Check whether the first argument names a real CLI subcommand (or alias),
/// in which case the query should be passed through instead of matched.
fn is_cli_subcommand(args: &[String]) -> bool {
    let Some(first) = args.first() else {
        return false;
    };
    // eq_ignore_ascii_case already folds case, so the previous extra
    // to_ascii_lowercase() allocation was redundant.
    cli_subcommands()
        .iter()
        .any(|cmd| cmd.eq_ignore_ascii_case(first))
}
/// Re-exec the current binary with `args`, bypassing the matcher.
///
/// On child failure this process exits with the child's exit code
/// (or 1 when no code is available), mirroring the child's status.
fn passthrough_to_cli(args: &[String]) -> Result<()> {
    use std::process::Command;
    let exe = std::env::current_exe().context("failed to get current executable")?;
    let status = Command::new(&exe)
        .args(args)
        .status()
        .with_context(|| format!("failed to run: {} {}", exe.display(), args.join(" ")))?;
    if status.success() {
        Ok(())
    } else {
        std::process::exit(status.code().unwrap_or(1))
    }
}
/// Match a user query against the current project's discovered tasks and
/// optionally execute the result.
pub fn run(opts: MatchOpts) -> Result<()> {
    let root = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let tasks = discover::discover_tasks(&root)?.tasks;
    run_with_tasks(opts, tasks, true)
}
/// Match a user query to a global task and optionally execute it.
pub fn run_global(opts: MatchOpts) -> Result<()> {
let config_path = config::default_config_path();
if !config_path.exists() {
bail!("global flow config not found at {}", config_path.display());
}
let cfg = config::load(&config_path).with_context(|| {
format!(
"failed to load global flow config at {}",
config_path.display()
)
})?;
let tasks = cfg
.tasks
.iter()
.map(|task| DiscoveredTask {
task: task.clone(),
config_path: config_path.clone(),
relative_dir: "global".to_string(),
depth: 0,
})
.collect();
run_with_tasks(opts, tasks, false)
}
/// Core matching pipeline shared by `run` and `run_global`.
///
/// Resolution order: CLI pass-through (when allowed) → direct match
/// (name/shortcut/abbreviation) → built-in command → pass-through fallbacks →
/// LM Studio query. Direct matches forward the remaining args to the task;
/// LLM matches do not, and may require an extra confirmation prompt.
fn run_with_tasks(opts: MatchOpts, tasks: Vec<DiscoveredTask>, allow_passthrough: bool) -> Result<()> {
    // Check if this is a CLI subcommand that should bypass matching
    if allow_passthrough && is_cli_subcommand(&opts.args) {
        return passthrough_to_cli(&opts.args);
    }
    // Join args for display/LLM purposes (but task execution uses the preserved args)
    let query_display = opts.args.join(" ");
    // Try direct match first (exact name, shortcut, or abbreviation) - no LLM needed
    let (task_name, task_args, was_direct_match) = if let Some(direct) =
        try_direct_match(&opts.args, &tasks)
    {
        (direct.task_name, direct.args, true)
    } else if let Some(builtin) = find_builtin(&query_display) {
        // No task match, but matches a built-in command
        return run_builtin(builtin, opts.execute);
    } else if tasks.is_empty() {
        if allow_passthrough {
            // No tasks and no built-in match: behave like `f <args>`
            return passthrough_to_cli(&opts.args);
        }
        bail!("No global tasks available to match.");
    } else if allow_passthrough && opts.args.len() == 1 {
        // Single-token queries should behave like `f <arg>` if no direct match.
        return passthrough_to_cli(&opts.args);
    } else {
        // No direct match, use LM Studio
        let prompt = build_matching_prompt(&query_display, &tasks);
        // Query LM Studio (will fail with clear error if not running)
        let response = match lmstudio::quick_prompt(&prompt, opts.model.as_deref(), opts.port) {
            Ok(r) if !r.trim().is_empty() => r,
            Ok(_) => {
                // Empty response - check for built-in before failing
                if let Some(builtin) = find_builtin(&query_display) {
                    return run_builtin(builtin, opts.execute);
                }
                let task_list: Vec<_> = tasks.iter().map(|t| t.task.name.as_str()).collect();
                bail!(
                    "No match for '{}'. LM Studio returned empty response.\n\nAvailable tasks:\n {}",
                    query_display,
                    task_list.join("\n ")
                );
            }
            Err(e) => {
                // LM Studio error - fall back to built-in if available
                if let Some(builtin) = find_builtin(&query_display) {
                    return run_builtin(builtin, opts.execute);
                }
                let task_list: Vec<_> = tasks.iter().map(|t| t.task.name.as_str()).collect();
                bail!(
                    "No direct match for '{}'. LM Studio error: {}\n\nAvailable tasks:\n {}",
                    query_display,
                    e,
                    task_list.join("\n ")
                );
            }
        };
        // Parse the response to get the task name (no args for LLM matches)
        (
            extract_task_name(&response, &tasks)?,
            Vec::new(),
            false,
        )
    };
    // Find the matched task (extract_task_name only returns known names, so
    // this lookup failing indicates an internal inconsistency).
    let matched = tasks
        .iter()
        .find(|t| t.task.name.eq_ignore_ascii_case(&task_name))
        .ok_or_else(|| anyhow::anyhow!("LM Studio returned unknown task: {}", task_name))?;
    // Show what was matched
    if matched.relative_dir.is_empty() {
        println!("Matched: {} – {}", matched.task.name, matched.task.command);
    } else {
        println!(
            "Matched: {} ({}) – {}",
            matched.task.name, matched.relative_dir, matched.task.command
        );
    }
    if opts.execute {
        // Check if confirmation is needed (only for LLM matches on tasks with confirm_on_match)
        let needs_confirm = !was_direct_match && matched.task.confirm_on_match;
        if needs_confirm {
            print!("Press Enter to confirm, Ctrl+C to cancel: ");
            io::stdout().flush()?;
            let mut input = String::new();
            io::stdin().read_line(&mut input)?;
        }
        // Execute the matched task
        let run_opts = TaskRunOpts {
            config: matched.config_path.clone(),
            delegate_to_hub: false,
            hub_host: "127.0.0.1".parse().unwrap(),
            hub_port: 9050,
            name: matched.task.name.clone(),
            args: task_args.clone(),
        };
        tasks::run(run_opts)?;
    }
    Ok(())
}
/// Case-fold (ASCII only) and drop '-'/'_' so spellings like
/// "deploy-prod" and "Deploy_Prod" compare equal.
fn normalize_name(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        if c != '-' && c != '_' {
            out.push(c.to_ascii_lowercase());
        }
    }
    out
}
/// Result of a direct match attempt - includes task name and any extra args
struct DirectMatchResult {
    /// Name of the matched task (canonical spelling from the config).
    task_name: String,
    /// Remaining CLI arguments to forward to the task.
    args: Vec<String>,
}
/// Try to match query directly to a task name, shortcut, or abbreviation.
/// Returns the task name and any remaining arguments.
///
/// Matching priority: exact name (case-insensitive) → explicit shortcut →
/// hyphen/underscore-insensitive name → first-letter abbreviation. The
/// last two only succeed when exactly one task qualifies (unambiguous).
fn try_direct_match(args: &[String], tasks: &[DiscoveredTask]) -> Option<DirectMatchResult> {
    if args.is_empty() {
        return None;
    }
    // First token is the candidate task name; the rest pass through as args.
    let first = args[0].trim();
    let rest: Vec<String> = args[1..].to_vec();
    // Exact name match (case-insensitive)
    if let Some(task) = tasks
        .iter()
        .find(|t| t.task.name.eq_ignore_ascii_case(first))
    {
        return Some(DirectMatchResult {
            task_name: task.task.name.clone(),
            args: rest,
        });
    }
    // Shortcut match
    if let Some(task) = tasks.iter().find(|t| {
        t.task
            .shortcuts
            .iter()
            .any(|s| s.eq_ignore_ascii_case(first))
    }) {
        return Some(DirectMatchResult {
            task_name: task.task.name.clone(),
            args: rest,
        });
    }
    // Normalized match (ignoring hyphens/underscores, only if unambiguous)
    let normalized_query = normalize_name(first);
    let mut normalized_matches: Vec<_> = tasks
        .iter()
        .filter(|t| normalize_name(&t.task.name) == normalized_query)
        .collect();
    if normalized_matches.len() == 1 {
        return Some(DirectMatchResult {
            task_name: normalized_matches.remove(0).task.name.clone(),
            args: rest,
        });
    }
    // Abbreviation match (only if unambiguous). Require at least two
    // characters so single letters never abbreviate a task name.
    let needle = first.to_ascii_lowercase();
    if needle.len() >= 2 {
        let mut matches = tasks.iter().filter(|t| {
            generate_abbreviation(&t.task.name)
                .map(|abbr| abbr == needle)
                .unwrap_or(false)
        });
        if let Some(first_match) = matches.next() {
            // A second hit means the abbreviation is ambiguous; give up.
            if matches.next().is_none() {
                return Some(DirectMatchResult {
                    task_name: first_match.task.name.clone(),
                    args: rest,
                });
            }
        }
    }
    None
}
/// Derive an abbreviation from the first character of each alphanumeric
/// segment of `name` (e.g. "deploy-prod" -> "dp", "run_all_tests" -> "rat").
///
/// Returns `None` when fewer than two characters result, since one-letter
/// abbreviations would be too ambiguous to match on.
fn generate_abbreviation(name: &str) -> Option<String> {
    let abbr: String = name
        .split(|c: char| !c.is_ascii_alphanumeric())
        .filter_map(|segment| segment.chars().next())
        .map(|c| c.to_ascii_lowercase())
        .collect();
    (abbr.len() >= 2).then_some(abbr)
}
/// Build the LM Studio prompt: one bullet per task (name, optional location,
/// description falling back to the raw command), then the user query and an
/// instruction to answer with the bare task name.
fn build_matching_prompt(query: &str, tasks: &[DiscoveredTask]) -> String {
    // write!/writeln! into the String avoid the intermediate allocation that
    // push_str(&format!(...)) performed for every task.
    use std::fmt::Write as _;
    let mut prompt = String::from(
        "You are a task matcher. Given a user query, select the most appropriate task from the list below.\n\n",
    );
    prompt.push_str("Available tasks:\n");
    for task in tasks {
        let location = if task.relative_dir.is_empty() {
            String::new()
        } else {
            format!(" (in {})", task.relative_dir)
        };
        let desc = task
            .task
            .description
            .as_deref()
            .unwrap_or(&task.task.command);
        // Writing to a String is infallible, so the Result can be ignored.
        let _ = writeln!(prompt, "- {}{}: {}", task.task.name, location, desc);
    }
    prompt.push_str("\nRespond with ONLY the exact task name, nothing else. No explanation.\n");
    let _ = write!(prompt, "\nUser query: {}\n", query);
    prompt.push_str("\nTask name:");
    prompt
}
/// Parse a task name out of a raw LM response.
///
/// Tries, in order: exact (case-insensitive) match, case-insensitive
/// substring match, and a match after stripping leading/trailing punctuation
/// the model may have added. Fails with the list of known tasks otherwise.
fn extract_task_name(response: &str, tasks: &[DiscoveredTask]) -> Result<String> {
    let response = response.trim();
    // Try exact match first
    for task in tasks {
        if task.task.name.eq_ignore_ascii_case(response) {
            return Ok(task.task.name.clone());
        }
    }
    // Substring match. Lowercasing the response is hoisted out of the loop
    // so it is computed once instead of reallocated per task.
    let response_lower = response.to_lowercase();
    for task in tasks {
        if response_lower.contains(&task.task.name.to_lowercase()) {
            return Ok(task.task.name.clone());
        }
    }
    // Clean up common LLM artifacts (quotes, bullets, trailing punctuation).
    // trim_* return borrowed slices, so no allocation is needed here.
    let cleaned = response
        .trim_start_matches(|c: char| !c.is_alphanumeric())
        .trim_end_matches(|c: char| !c.is_alphanumeric() && c != '-' && c != '_');
    for task in tasks {
        if task.task.name.eq_ignore_ascii_case(cleaned) {
            return Ok(task.task.name.clone());
        }
    }
    bail!(
        "Could not parse task name from LM response: '{}'\nAvailable tasks: {}",
        response,
        tasks
            .iter()
            .map(|t| t.task.name.as_str())
            .collect::<Vec<_>>()
            .join(", ")
    )
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::TaskConfig;
    /// Build a minimal DiscoveredTask fixture with the given name and
    /// optional description; all other fields use inert defaults.
    fn make_discovered(name: &str, desc: Option<&str>) -> DiscoveredTask {
        DiscoveredTask {
            task: TaskConfig {
                name: name.to_string(),
                command: format!("echo {}", name),
                delegate_to_hub: false,
                activate_on_cd_to_root: false,
                dependencies: Vec::new(),
                description: desc.map(|s| s.to_string()),
                shortcuts: Vec::new(),
                interactive: false,
                confirm_on_match: false,
                on_cancel: None,
            },
            config_path: PathBuf::from("flow.toml"),
            relative_dir: String::new(),
            depth: 0,
        }
    }
    // Exact-name extraction must be case-insensitive and trim whitespace.
    #[test]
    fn extracts_exact_task_name() {
        let tasks = vec![
            make_discovered("build", Some("Build the project")),
            make_discovered("test", Some("Run tests")),
        ];
        assert_eq!(extract_task_name("build", &tasks).unwrap(), "build");
        assert_eq!(extract_task_name("BUILD", &tasks).unwrap(), "build");
        assert_eq!(extract_task_name(" test ", &tasks).unwrap(), "test");
    }
    // Task names embedded in chatty responses (or with trailing punctuation)
    // must still be recovered.
    #[test]
    fn extracts_task_name_from_response() {
        let tasks = vec![
            make_discovered("build", None),
            make_discovered("deploy-prod", None),
        ];
        assert_eq!(
            extract_task_name("The task is: build", &tasks).unwrap(),
            "build"
        );
        assert_eq!(
            extract_task_name("deploy-prod.", &tasks).unwrap(),
            "deploy-prod"
        );
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/servers.rs | src/servers.rs | use std::{
collections::VecDeque,
sync::Arc,
time::{SystemTime, UNIX_EPOCH},
};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use tokio::{
io::{AsyncBufReadExt, BufReader},
process::Command,
sync::{Mutex, RwLock, broadcast, mpsc},
};
use crate::config::ServerConfig;
/// Origin of a log line (stdout or stderr).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LogStream {
    /// Line was read from the child's standard output.
    Stdout,
    /// Line was read from the child's standard error.
    Stderr,
}
/// Single log entry from a managed server process.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogLine {
    /// Name of the server that produced this line.
    pub server: String,
    /// Milliseconds since UNIX epoch when the line was captured.
    pub timestamp_ms: u128,
    /// Which stream the line came from.
    pub stream: LogStream,
    /// The raw text of the log line.
    pub line: String,
}
/// Lifecycle state of the supervised child process.
#[derive(Debug)]
enum ProcessState {
    /// No process has been started yet.
    Idle,
    /// `start` was called; the child is being spawned.
    Starting,
    /// Child is running with the given OS process id.
    Running { pid: u32 },
    /// Child exited; `code` is `None` when killed by a signal.
    Exited { code: Option<i32> },
    /// Waiting on the child failed with the recorded error.
    Failed { error: String },
}
/// Control messages sent from `stop` to the monitor task.
#[derive(Debug, Clone, Copy)]
enum ServerControl {
    /// Kill the child process.
    Terminate,
}
/// Snapshot of the current state of a managed server, suitable for JSON APIs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerSnapshot {
    /// Server name from the config.
    pub name: String,
    /// Executable invoked for this server.
    pub command: String,
    /// Arguments passed to the command.
    pub args: Vec<String>,
    /// TCP port the server is expected to listen on, if configured.
    pub port: Option<u16>,
    /// Working directory the process runs in, if configured.
    pub working_dir: Option<std::path::PathBuf>,
    /// Whether the server is started automatically.
    pub autostart: bool,
    /// Human-readable status string ("idle", "running", "failed: …", …).
    pub status: String,
    /// OS process id, present only while running.
    pub pid: Option<u32>,
    /// Exit code, present only after a normal exit.
    pub exit_code: Option<i32>,
}
/// In-process supervisor for a single child HTTP server defined in the config.
#[derive(Debug)]
pub struct ManagedServer {
    // Static configuration the server was created from.
    cfg: ServerConfig,
    // Current process lifecycle state.
    state: RwLock<ProcessState>,
    // Fan-out channel for live log lines; subscribers may lag and drop.
    log_tx: broadcast::Sender<LogLine>,
    // Ring buffer of the most recent log lines for replay via recent_logs.
    log_buffer: RwLock<VecDeque<LogLine>>,
    // Maximum number of entries retained in log_buffer.
    log_buffer_capacity: usize,
    // Sender used by stop() to ask the monitor task to kill the child;
    // None while no child is running.
    control: Mutex<Option<mpsc::Sender<ServerControl>>>,
}
impl ManagedServer {
pub fn new(cfg: ServerConfig, log_buffer_capacity: usize) -> Arc<Self> {
let (log_tx, _) = broadcast::channel(1024);
Arc::new(Self {
cfg,
state: RwLock::new(ProcessState::Idle),
log_tx,
log_buffer: RwLock::new(VecDeque::with_capacity(log_buffer_capacity)),
log_buffer_capacity,
control: Mutex::new(None),
})
}
pub fn config(&self) -> &ServerConfig {
&self.cfg
}
pub fn subscribe(&self) -> broadcast::Receiver<LogLine> {
self.log_tx.subscribe()
}
pub async fn snapshot(&self) -> ServerSnapshot {
let state = self.state.read().await;
let (status, pid, exit_code) = match &*state {
ProcessState::Idle => ("idle".to_string(), None, None),
ProcessState::Starting => ("starting".to_string(), None, None),
ProcessState::Running { pid } => ("running".to_string(), Some(*pid), None),
ProcessState::Exited { code } => ("exited".to_string(), None, *code),
ProcessState::Failed { error } => (format!("failed: {error}"), None, None),
};
ServerSnapshot {
name: self.cfg.name.clone(),
command: self.cfg.command.clone(),
args: self.cfg.args.clone(),
port: self.cfg.port,
working_dir: self.cfg.working_dir.clone(),
autostart: self.cfg.autostart,
status,
pid,
exit_code,
}
}
pub async fn recent_logs(&self, limit: usize) -> Vec<LogLine> {
let guard = self.log_buffer.read().await;
let len = guard.len();
let start = len.saturating_sub(limit);
guard.iter().skip(start).cloned().collect()
}
/// Spawn the configured process and begin capturing stdout/stderr.
///
/// This method returns immediately after the process has been started; a
/// background task monitors for process exit.
pub async fn start(self: &Arc<Self>) -> Result<()> {
{
let state = self.state.read().await;
if matches!(
*state,
ProcessState::Starting | ProcessState::Running { .. }
) {
return Ok(());
}
}
{
let mut state = self.state.write().await;
*state = ProcessState::Starting;
}
let mut cmd = Command::new(&self.cfg.command);
cmd.args(&self.cfg.args);
if let Some(dir) = &self.cfg.working_dir {
cmd.current_dir(dir);
}
if !self.cfg.env.is_empty() {
cmd.envs(self.cfg.env.clone());
}
cmd.stdout(std::process::Stdio::piped());
cmd.stderr(std::process::Stdio::piped());
let mut child = cmd
.spawn()
.with_context(|| format!("failed to spawn managed server {}", self.cfg.name))?;
{
let pid = child.id().unwrap_or(0);
let mut state = self.state.write().await;
*state = ProcessState::Running { pid };
}
let (control_tx, mut control_rx) = mpsc::channel(1);
{
let mut guard = self.control.lock().await;
*guard = Some(control_tx);
}
let server = Arc::clone(self);
// stdout task
if let Some(stdout) = child.stdout.take() {
Self::spawn_log_task(Arc::clone(&server), stdout, LogStream::Stdout);
}
// stderr task
if let Some(stderr) = child.stderr.take() {
Self::spawn_log_task(server.clone(), stderr, LogStream::Stderr);
}
// wait for exit
tokio::spawn(async move {
let status = tokio::select! {
status = child.wait() => status,
ctrl = control_rx.recv() => {
if matches!(ctrl, Some(ServerControl::Terminate)) {
if let Err(err) = child.kill().await {
tracing::warn!(?err, server = server.cfg.name, "failed to terminate server child");
}
}
child.wait().await
}
};
{
let mut guard = server.control.lock().await;
*guard = None;
}
let mut state = server.state.write().await;
match status {
Ok(status) => {
*state = ProcessState::Exited {
code: status.code(),
}
}
Err(err) => {
*state = ProcessState::Failed {
error: err.to_string(),
}
}
}
});
Ok(())
}
pub async fn stop(&self) -> Result<()> {
let tx = { self.control.lock().await.clone() };
if let Some(tx) = tx {
let _ = tx.send(ServerControl::Terminate).await;
}
Ok(())
}
fn spawn_log_task<R>(server: Arc<Self>, reader: R, stream: LogStream)
where
R: tokio::io::AsyncRead + Unpin + Send + 'static,
{
tokio::spawn(async move {
let mut lines = BufReader::new(reader).lines();
while let Ok(Some(line)) = lines.next_line().await {
let entry = LogLine {
server: server.cfg.name.clone(),
timestamp_ms: current_epoch_ms(),
stream: stream.clone(),
line,
};
server.push_log(entry).await;
}
});
}
async fn push_log(&self, line: LogLine) {
// broadcast; ignore errors if there are no subscribers
let _ = self.log_tx.send(line.clone());
let mut buf = self.log_buffer.write().await;
if buf.len() == self.log_buffer_capacity {
buf.pop_front();
}
buf.push_back(line);
}
}
/// Milliseconds since the UNIX epoch, or 0 if the system clock reads
/// earlier than 1970.
fn current_epoch_ms() -> u128 {
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => 0,
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/cli.rs | src/cli.rs | use clap::{Args, Parser, Subcommand, ValueEnum};
use std::{net::IpAddr, path::PathBuf};
use crate::commit::ReviewModelArg;
/// Command line interface for the flow daemon / CLI hybrid.
// Note: neither a subcommand nor arguments are required, so bare `flow`
// parses successfully with `command == None`. The version string embeds the
// relative build time (see version_with_build_time).
#[derive(Parser, Debug)]
#[command(
    name = "flow",
    version = version_with_build_time(),
    about = "Your second OS",
    subcommand_required = false,
    arg_required_else_help = false
)]
pub struct Cli {
    // None when flow is invoked without a subcommand.
    #[command(subcommand)]
    pub command: Option<Commands>,
}
/// Returns version string with relative build time (e.g., "0.1.0 (built 5m ago)")
fn version_with_build_time() -> &'static str {
    use std::sync::OnceLock;
    static VERSION: OnceLock<String> = OnceLock::new();
    // Including the generated timestamp file forces recompilation when it changes.
    const BUILD_TIMESTAMP_STR: &str =
        include_str!(concat!(env!("OUT_DIR"), "/build_timestamp.txt"));
    VERSION.get_or_init(|| {
        let version = env!("CARGO_PKG_VERSION");
        // An unparsable or zero timestamp means the build time is unknown;
        // fall back to the bare version string.
        let build_timestamp = BUILD_TIMESTAMP_STR.trim().parse::<u64>().unwrap_or(0);
        if build_timestamp == 0 {
            return version.to_string();
        }
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .map_or(0, |d| d.as_secs());
        format!(
            "{version} (built {})",
            format_relative_time(now.saturating_sub(build_timestamp))
        )
    })
}
/// Format an elapsed duration in whole seconds as a coarse
/// "Ns/Nm/Nh/Nd ago" string (largest fitting unit, truncated).
fn format_relative_time(seconds: u64) -> String {
    const MINUTE: u64 = 60;
    const HOUR: u64 = 3600;
    const DAY: u64 = 86400;
    let (value, unit) = match seconds {
        s if s < MINUTE => (s, "s"),
        s if s < HOUR => (s / MINUTE, "m"),
        s if s < DAY => (s / HOUR, "h"),
        s => (s / DAY, "d"),
    };
    format!("{value}{unit} ago")
}
// Top-level flow subcommands. Help text lives in the clap `about`/`long_about`
// attributes (not doc comments, which would alter the generated --help).
#[derive(Subcommand, Debug)]
pub enum Commands {
    // --- discovery & matching ---
    #[command(
        about = "Fuzzy search global commands/tasks without a project flow.toml.",
        long_about = "Browse global commands and tasks from your global flow config (e.g., ~/.config/flow/flow.toml). Useful when you are outside a project directory.",
        alias = "s"
    )]
    Search,
    #[command(
        about = "Run tasks from the global flow config.",
        long_about = "Run tasks defined in ~/.config/flow/flow.toml without project discovery.",
        alias = "g"
    )]
    Global(GlobalCommand),
    #[command(
        about = "Ensure the background hub daemon is running (spawns it if missing).",
        long_about = "Checks the /health endpoint on the configured host/port (defaults to 127.0.0.1:9050). If unreachable, a daemon is launched in the background using the lin runtime recorded via `lin register` (or PATH), then a TUI opens so you can inspect managed servers and aggregated logs."
    )]
    Hub(HubCommand),
    #[command(
        about = "Scaffold a new flow.toml in the current directory.",
        long_about = "Creates a starter flow.toml with stub tasks (setup, dev) so you can fill in commands later."
    )]
    Init(InitOpts),
    #[command(
        about = "Verify required tools and shell integrations.",
        long_about = "Checks for flox (for managed deps), lin (hub helper), and direnv + shell hook presence."
    )]
    Doctor(DoctorOpts),
    // --- task execution ---
    #[command(
        about = "List tasks from the current project flow.toml (name + description).",
        long_about = "Prints the tasks defined in the active flow.toml along with any descriptions, suitable for piping into a launcher."
    )]
    Tasks(TasksOpts),
    /// Execute a specific project task (hidden; used by the palette and task shortcuts).
    #[command(hide = true)]
    Run(TaskRunOpts),
    /// Invoke tasks directly via `f <task>` without typing `run`.
    #[command(external_subcommand)]
    TaskShortcut(Vec<String>),
    #[command(about = "Show the last task input and its output/error.")]
    LastCmd,
    #[command(about = "Show the last task run (command, status, and output) recorded by flow.")]
    LastCmdFull,
    #[command(about = "Re-run the last task executed in this project.")]
    Rerun(RerunOpts),
    // --- process management ---
    #[command(
        about = "List running flow processes for the current project.",
        long_about = "Lists flow-started processes tracked for this project. Use --all to see processes across all projects."
    )]
    Ps(ProcessOpts),
    #[command(
        about = "Stop running flow processes.",
        long_about = "Kill flow-started processes by task name, PID, or all for the project. Sends SIGTERM first, then SIGKILL after timeout."
    )]
    Kill(KillOpts),
    #[command(
        about = "View logs from running or recent tasks.",
        long_about = "Tail the log output of a running task. Use -f to follow in real-time."
    )]
    Logs(TaskLogsOpts),
    #[command(
        about = "List registered projects.",
        long_about = "Shows all projects that have been registered (projects with a 'name' field in flow.toml)."
    )]
    Projects,
    #[command(
        about = "Fuzzy search AI sessions across all projects and copy context.",
        long_about = "Browse AI sessions (Claude, Codex) across all projects. On selection, copies the session context since last checkpoint to clipboard for passing to another session.",
        alias = "ss"
    )]
    Sessions(SessionsOpts),
    #[command(
        about = "Show or set the active project.",
        long_about = "The active project is used as a fallback for commands like `f logs` when not in a project directory."
    )]
    Active(ActiveOpts),
    #[command(
        about = "Start the flow HTTP server for log ingestion and queries.",
        long_about = "Runs an HTTP server with endpoints for log ingestion (/logs/ingest) and queries (/logs/query)."
    )]
    Server(ServerOpts),
    // --- AI-assisted workflows ---
    #[command(
        about = "Match a natural language query to a task using LM Studio.",
        long_about = "Uses a local LM Studio model to intelligently match your query to an available task. Requires LM Studio running on localhost:1234 (or custom port).",
        alias = "m"
    )]
    Match(MatchOpts),
    #[command(
        about = "AI-powered commit with code review and GitEdit sync.",
        long_about = "Stages all changes, runs code review for bugs/security, generates commit message, commits, pushes, and syncs AI sessions to gitedit.dev.",
        alias = "c"
    )]
    Commit(CommitOpts),
    #[command(
        about = "Simple AI commit without code review.",
        long_about = "Stages all changes, uses OpenAI to generate a commit message from the diff, commits, and pushes. No code review.",
        visible_alias = "commitSimple",
        hide = true
    )]
    CommitSimple(CommitOpts),
    #[command(
        about = "AI commit with code review (no GitEdit sync).",
        long_about = "Like 'commit' but without syncing to gitedit.dev.",
        alias = "cc",
        visible_alias = "commitWithCheck",
        hide = true
    )]
    CommitWithCheck(CommitOpts),
    #[command(
        about = "Fix common TOML syntax errors in flow.toml.",
        long_about = "Automatically fixes common issues in flow.toml that can break parsing, such as invalid escape sequences (\\$, \\n in basic strings), unclosed quotes, and other TOML syntax errors."
    )]
    Fixup(FixupOpts),
    #[command(
        about = "Manage background daemons (start, stop, status).",
        long_about = "Start, stop, and monitor background daemons defined in flow.toml. Daemons are long-running processes like sync servers, API servers, or file watchers.",
        alias = "d"
    )]
    Daemon(DaemonCommand),
    #[command(
        about = "Manage AI coding sessions (Claude Code).",
        long_about = "Track, list, and resume Claude Code sessions for the current project. Sessions are stored in .ai/sessions/claude/ and can be named for easy recall."
    )]
    Ai(AiCommand),
    #[command(
        about = "Manage project env vars and 1focus sync.",
        long_about = "With no arguments, lists project env vars for the current environment. Use subcommands to manage env vars via 1focus or run the sync workflow."
    )]
    Env(EnvCommand),
    #[command(
        about = "Manage project todos.",
        long_about = "Create, list, edit, and complete lightweight todos stored in .ai/todos/todos.json."
    )]
    Todo(TodoCommand),
    #[command(
        about = "Manage Codex skills (.ai/skills/).",
        long_about = "Create, list, and manage Codex skills for this project. Skills are stored in .ai/skills/ and help Codex understand project-specific workflows."
    )]
    Skills(SkillsCommand),
    #[command(
        about = "Install or update project dependencies.",
        long_about = "Detects the package manager from lockfiles and runs install/update at the project root."
    )]
    Deps(DepsCommand),
    #[command(
        about = "Manage storage providers (e.g., Jazz).",
        long_about = "Provision storage backends and populate environment variables for services like Jazz."
    )]
    Storage(StorageCommand),
    #[command(
        about = "Manage AI tools (.ai/tools/*.ts).",
        long_about = "Create, list, and run TypeScript tools via Bun. Tools are fast, reusable scripts stored in .ai/tools/. Use 'codify' to generate tools from natural language.",
        alias = "t"
    )]
    Tools(ToolsCommand),
    #[command(
        about = "Send a proposal notification to Lin for approval.",
        long_about = "Sends a proposal to the Lin app widget for user approval. Used for human-in-the-loop AI workflows."
    )]
    Notify(NotifyCommand),
    #[command(
        about = "Browse and analyze git commits with AI session metadata.",
        long_about = "Fuzzy search through git commits, showing attached AI sessions and review metadata. Allows jumping between commits to see the context and reasoning behind changes."
    )]
    Commits(CommitsOpts),
    #[command(
        about = "Bootstrap project with .ai/ folder and checkpoints.",
        long_about = "Creates .ai/ folder structure with public (actions, skills, tools) and internal (sessions, db) folders. Adds .ai/internal/ to .gitignore. Safe to run multiple times."
    )]
    Start,
    #[command(
        about = "Invoke gen AI agents.",
        long_about = "Run gen agents with prompts. Global agents: repos-health, repos-sync, os-health. Subagents: codify, explore, general. Special: flow (flow-aware).",
        alias = "a"
    )]
    Agents(AgentsCommand),
    // --- repo & release workflows ---
    #[command(
        about = "Manage upstream fork workflow.",
        long_about = "Set up and manage upstream forks. Creates a local 'upstream' branch to cleanly track the original repo, making merges easier.",
        alias = "up"
    )]
    Upstream(UpstreamCommand),
    #[command(
        about = "Deploy project to host or cloud platform.",
        long_about = "Deploy your project to a Linux host (via SSH), Cloudflare Workers, or Railway. Automatically detects platform from flow.toml [host], [cloudflare], or [railway] sections."
    )]
    Deploy(DeployCommand),
    #[command(
        about = "Run the project's release task.",
        long_about = "Runs the task configured by flow.release_task in flow.toml. Falls back to a task named 'release' or 'release-build', then to flow.primary_task."
    )]
    Release(ReleaseOpts),
    #[command(
        about = "Publish project to GitHub.",
        long_about = "Create a new GitHub repository and push the current project. Infers repo name from folder, asks for public/private visibility."
    )]
    Publish(PublishOpts),
    #[command(
        about = "Clone repositories into a structured local directory.",
        long_about = "Clone repositories into ~/repos/<owner>/<repo> with SSH URLs and optional upstream setup for forks."
    )]
    Repos(ReposCommand),
    #[command(
        about = "Run tasks in parallel with pretty status display.",
        long_about = "Execute multiple shell commands in parallel with a real-time status display showing spinners, progress, and output. Useful for running independent tasks concurrently.",
        alias = "p"
    )]
    Parallel(ParallelCommand),
    #[command(
        about = "Manage auto-generated documentation in .ai/docs/.",
        long_about = "AI-maintained documentation that stays in sync with the codebase. Docs are stored in .ai/docs/ and can be updated based on recent commits."
    )]
    Docs(DocsCommand),
}
// Options for the long-running daemon (HTTP bind address, frame generation,
// config path). Field doc comments double as clap help text, so they are
// left exactly as-is.
#[derive(Args, Debug, Clone)]
pub struct DaemonOpts {
    /// Address to bind the Axum server to.
    #[arg(long, default_value = "0.0.0.0")]
    pub host: IpAddr,
    /// TCP port for the daemon's HTTP interface.
    #[arg(long, default_value_t = 9050)]
    pub port: u16,
    /// Target FPS for the mock frame generator until a real screen capture backend lands.
    #[arg(long, default_value_t = 5, value_parser = clap::value_parser!(u8).range(1..=120))]
    pub fps: u8,
    /// Buffer size for the broadcast channel that fans screen frames out to connected clients.
    #[arg(long, default_value_t = 512)]
    pub frame_buffer: usize,
    /// Optional path to the flow config TOML (defaults to ~/.config/flow/config.toml).
    #[arg(long)]
    pub config: Option<PathBuf>,
}
// Options for previewing the screen frame stream locally.
#[derive(Args, Debug, Clone)]
pub struct ScreenOpts {
    /// Number of frames to preview before exiting.
    #[arg(long, default_value_t = 10)]
    pub frames: u16,
    /// Frame generation rate for the preview stream.
    #[arg(long, default_value_t = 5, value_parser = clap::value_parser!(u8).range(1..=60))]
    pub fps: u8,
    /// How many frames we keep buffered locally while previewing.
    #[arg(long, default_value_t = 64)]
    pub frame_buffer: usize,
}
// Options for fetching or streaming server logs from a running flowd daemon.
#[derive(Args, Debug, Clone)]
pub struct LogsOpts {
    /// Hostname or IP address of the running flowd daemon.
    #[arg(long, default_value = "127.0.0.1")]
    pub host: IpAddr,
    /// TCP port of the daemon's HTTP interface.
    #[arg(long, default_value_t = 9050)]
    pub port: u16,
    /// Specific server to fetch logs for (omit to dump all servers).
    #[arg(long)]
    pub server: Option<String>,
    /// Number of log lines to fetch per server when not streaming.
    #[arg(long, default_value_t = 200)]
    pub limit: usize,
    /// Stream logs in real-time (requires --server).
    #[arg(long)]
    pub follow: bool,
    /// Disable ANSI color output in log prefixes.
    #[arg(long)]
    pub no_color: bool,
}
// Options for terminal command tracing: default mode streams events live,
// --last-command replays the most recent command's input/output instead.
#[derive(Args, Debug, Clone)]
pub struct TraceOpts {
    /// Show the last command's input/output instead of streaming events.
    #[arg(long)]
    pub last_command: bool,
}
// Connection options for querying a running flowd daemon.
#[derive(Args, Debug, Clone)]
pub struct ServersOpts {
    /// Hostname or IP address of the running flowd daemon.
    #[arg(long, default_value = "127.0.0.1")]
    pub host: IpAddr,
    /// TCP port of the daemon's HTTP interface.
    #[arg(long, default_value_t = 9050)]
    pub port: u16,
}
// Options for listing project tasks; only needs the flow.toml location.
#[derive(Args, Debug, Clone)]
pub struct TasksOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
}
impl Default for TasksOpts {
fn default() -> Self {
Self {
config: PathBuf::from("flow.toml"),
}
}
}
// `global` command surface: either a structured subcommand (action) or the
// shorthand positional task name with trailing args.
#[derive(Args, Debug, Clone)]
pub struct GlobalCommand {
    #[command(subcommand)]
    pub action: Option<GlobalAction>,
    /// Task name to run (omit to list global tasks).
    #[arg(value_name = "TASK")]
    pub task: Option<String>,
    /// List global tasks.
    #[arg(long, short)]
    pub list: bool,
    /// Additional arguments passed to the task command.
    #[arg(value_name = "ARGS", trailing_var_arg = true)]
    pub args: Vec<String>,
}
// Structured subcommands for `global`: list, run a named task, or
// natural-language match a task via LM Studio.
#[derive(Subcommand, Debug, Clone)]
pub enum GlobalAction {
    /// List global tasks.
    List,
    /// Run a global task by name.
    Run {
        /// Task name to run.
        #[arg(value_name = "TASK")]
        task: String,
        /// Additional arguments passed to the task command.
        #[arg(value_name = "ARGS", trailing_var_arg = true)]
        args: Vec<String>,
    },
    /// Match a query against global tasks (LM Studio).
    Match(MatchOpts),
}
// Options for running a single project task, locally or delegated to the hub.
#[derive(Args, Debug, Clone)]
pub struct TaskRunOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
    /// Hand off the task to the hub daemon instead of running it locally.
    #[arg(long)]
    pub delegate_to_hub: bool,
    /// Hub host to delegate tasks to (defaults to the local lin daemon).
    #[arg(long, default_value = "127.0.0.1")]
    pub hub_host: IpAddr,
    /// Hub port to delegate tasks to.
    #[arg(long, default_value_t = 9050)]
    pub hub_port: u16,
    /// Name of the task to execute.
    #[arg(value_name = "TASK")]
    pub name: String,
    /// Additional arguments passed to the task command.
    #[arg(value_name = "ARGS", trailing_var_arg = true)]
    pub args: Vec<String>,
}
// Options for activating a task; only needs the flow.toml location.
#[derive(Args, Debug, Clone)]
pub struct TaskActivateOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
}
// Options for listing flow-managed processes, per project or across all.
#[derive(Args, Debug, Clone)]
pub struct ProcessOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
    /// Show all running flow processes across all projects.
    #[arg(long)]
    pub all: bool,
}
// Options for killing managed processes: target by task name, PID, or all,
// with graceful-shutdown timeout or immediate SIGKILL.
#[derive(Args, Debug, Clone)]
pub struct KillOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
    /// Kill by task name.
    #[arg(value_name = "TASK")]
    pub task: Option<String>,
    /// Kill by PID directly.
    #[arg(long)]
    pub pid: Option<u32>,
    /// Kill all processes for this project.
    #[arg(long)]
    pub all: bool,
    /// Force kill (SIGKILL) without graceful shutdown.
    #[arg(long, short)]
    pub force: bool,
    /// Timeout in seconds before sending SIGKILL (default: 5).
    #[arg(long, default_value_t = 5)]
    pub timeout: u64,
}
// Options for viewing task log files: select by task/project/hub task id,
// tail or follow, list available logs, or dump across all projects.
#[derive(Args, Debug, Clone)]
pub struct TaskLogsOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
    /// Task name to view logs for.
    #[arg(value_name = "TASK")]
    pub task: Option<String>,
    /// Follow the log in real-time (like tail -f).
    #[arg(long, short)]
    pub follow: bool,
    /// Number of lines to show from the end.
    #[arg(long, short = 'n', default_value_t = 50)]
    pub lines: usize,
    /// Show logs for all projects.
    #[arg(long)]
    pub all: bool,
    /// List available log files instead of showing content.
    #[arg(long, short)]
    pub list: bool,
    /// Look up logs by registered project name instead of config path.
    #[arg(long, short)]
    pub project: Option<String>,
    /// Suppress headers, output only log content.
    #[arg(long, short)]
    pub quiet: bool,
    /// Hub task ID to fetch logs for (from delegated tasks).
    #[arg(long)]
    pub task_id: Option<String>,
}
// `doctor` takes no options; the empty struct exists for uniform CLI wiring.
#[derive(Args, Debug, Clone, Default)]
pub struct DoctorOpts {}
// Options for re-running the previous task; only needs the flow.toml location.
#[derive(Args, Debug, Clone)]
pub struct RerunOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
}
// Options for getting/setting/clearing the active project.
#[derive(Args, Debug, Clone, Default)]
pub struct ActiveOpts {
    /// Project name to set as active.
    #[arg(value_name = "PROJECT")]
    pub project: Option<String>,
    /// Clear the active project.
    #[arg(long, short)]
    pub clear: bool,
}
// Options for working with AI sessions: filter by provider, copy N exchanges,
// list-only mode, full-context mode, or summarization of stale sessions.
#[derive(Args, Debug, Clone, Default)]
pub struct SessionsOpts {
    /// Filter by provider (claude, codex, or all).
    #[arg(long, short, default_value = "all")]
    pub provider: String,
    /// Number of exchanges to copy (default: all since checkpoint).
    #[arg(long, short)]
    pub count: Option<usize>,
    /// Show sessions but don't copy to clipboard.
    #[arg(long, short)]
    pub list: bool,
    /// Get full session context, ignoring checkpoints.
    #[arg(long, short)]
    pub full: bool,
    /// Generate summaries for stale sessions (uses Gemini).
    #[arg(long)]
    pub summarize: bool,
}
// HTTP server options plus an optional lifecycle subcommand (foreground/stop).
#[derive(Args, Debug, Clone)]
pub struct ServerOpts {
    /// Host to bind the server to.
    #[arg(long, default_value = "127.0.0.1")]
    pub host: String,
    /// Port for the HTTP server.
    #[arg(long, default_value_t = 9060)]
    pub port: u16,
    #[command(subcommand)]
    pub action: Option<ServerAction>,
}
// Lifecycle subcommands for the HTTP server.
#[derive(Subcommand, Debug, Clone, PartialEq, Eq)]
pub enum ServerAction {
    #[command(about = "Start the server in the foreground")]
    Foreground,
    #[command(about = "Stop the background server")]
    Stop,
}
// Options for scaffolding a new flow.toml.
#[derive(Args, Debug, Clone)]
pub struct InitOpts {
    /// Where to write the scaffolded flow.toml (defaults to ./flow.toml).
    #[arg(long)]
    pub path: Option<PathBuf>,
}
// `hub` command surface: optional lifecycle action plus flattened connection
// options shared by every action.
#[derive(Args, Debug, Clone)]
pub struct HubCommand {
    #[command(subcommand)]
    pub action: Option<HubAction>,
    #[command(flatten)]
    pub opts: HubOpts,
}
// Connection/behavior options for the hub daemon; all marked global so they
// are accepted before or after the subcommand.
#[derive(Args, Debug, Clone)]
pub struct HubOpts {
    /// Hostname or IP address of the hub daemon.
    #[arg(long, default_value = "127.0.0.1", global = true)]
    pub host: IpAddr,
    /// TCP port for the daemon's HTTP interface.
    #[arg(long, default_value_t = 9050, global = true)]
    pub port: u16,
    /// Optional path to the lin hub config (defaults to lin's built-in lookup).
    #[arg(long, global = true)]
    pub config: Option<PathBuf>,
    /// Skip launching the hub TUI after ensuring the daemon is running.
    #[arg(long, global = true)]
    pub no_ui: bool,
}
// Lifecycle subcommands for the hub daemon.
#[derive(Subcommand, Debug, Clone, PartialEq, Eq)]
pub enum HubAction {
    #[command(about = "Start or ensure the hub daemon is running")]
    Start,
    #[command(about = "Stop the hub daemon if it was started by flow")]
    Stop,
}
// `secrets` command surface; the action subcommand is required.
#[derive(Args, Debug, Clone)]
pub struct SecretsCommand {
    #[command(subcommand)]
    pub action: SecretsAction,
}
// Subcommands for secrets management.
#[derive(Subcommand, Debug, Clone)]
pub enum SecretsAction {
    #[command(about = "List configured secret environments")]
    List(SecretsListOpts),
    #[command(about = "Fetch secrets for a specific environment")]
    Pull(SecretsPullOpts),
}
// Options for listing secret environments; only needs the flow.toml location.
#[derive(Args, Debug, Clone)]
pub struct SecretsListOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
}
// Options for pulling secrets for one environment, with hub/output/format
// overrides.
#[derive(Args, Debug, Clone)]
pub struct SecretsPullOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
    /// Environment name defined in the storage config.
    #[arg(value_name = "ENV")]
    pub env: String,
    /// Optional override for the storage hub URL (default flow.1focus.ai).
    #[arg(long)]
    pub hub: Option<String>,
    /// Optional file to write secrets to (defaults to stdout).
    #[arg(long)]
    pub output: Option<PathBuf>,
    /// Output format for rendered secrets.
    #[arg(long, default_value_t = SecretsFormat::Shell, value_enum)]
    pub format: SecretsFormat,
}
// `storage` command surface; the action subcommand is required.
#[derive(Args, Debug, Clone)]
pub struct StorageCommand {
    #[command(subcommand)]
    pub action: StorageAction,
}
// Storage backends; currently only Jazz is implemented.
#[derive(Subcommand, Debug, Clone)]
pub enum StorageAction {
    /// Jazz worker accounts and env wiring.
    Jazz(JazzStorageCommand),
}
// `storage jazz` command surface; the action subcommand is required.
#[derive(Args, Debug, Clone)]
pub struct JazzStorageCommand {
    #[command(subcommand)]
    pub action: JazzStorageAction,
}
// Jazz storage subcommands; currently only worker-account creation.
#[derive(Subcommand, Debug, Clone)]
pub enum JazzStorageAction {
    /// Create a new Jazz worker account and store env vars.
    New {
        /// What the worker account will be used for.
        #[arg(long, value_enum, default_value = "mirror")]
        kind: JazzStorageKind,
        /// Optional name for the worker account.
        #[arg(long)]
        name: Option<String>,
        /// Optional sync server (peer) URL.
        #[arg(long)]
        peer: Option<String>,
        /// Optional Jazz API key (used when constructing the default peer).
        #[arg(long)]
        api_key: Option<String>,
        /// Environment to store in (dev, staging, production).
        #[arg(short, long, default_value = "production")]
        environment: String,
    },
}
// Purpose of a Jazz worker account, selectable via --kind.
#[derive(Debug, Clone, Copy, ValueEnum)]
pub enum JazzStorageKind {
    /// Mirror worker account (gitedit-style mirror sync).
    Mirror,
    /// Env store worker account (1focus env storage).
    EnvStore,
}
// Output format for rendered secrets (shell exports vs .env lines).
#[derive(Debug, Clone, Copy, ValueEnum)]
pub enum SecretsFormat {
    Shell,
    Dotenv,
}
// Options for project setup; only needs the flow.toml location.
#[derive(Args, Debug, Clone)]
pub struct SetupOpts {
    /// Path to the project flow config (flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
}
// Options for running the codanna indexer and snapshotting into SQLite.
#[derive(Args, Debug, Clone)]
pub struct IndexOpts {
    /// Codanna binary to execute (defaults to looking up 'codanna' in PATH).
    #[arg(long, default_value = "codanna")]
    pub binary: String,
    /// Directory to index; defaults to the current working directory.
    #[arg(long)]
    pub project_root: Option<PathBuf>,
    /// SQLite destination for snapshots (defaults to ~/.db/flow/flow.sqlite).
    #[arg(long)]
    pub database: Option<PathBuf>,
}
// Options for natural-language task matching via a local LM Studio instance.
#[derive(Args, Debug, Clone)]
pub struct MatchOpts {
    /// Natural language query describing the task you want to run.
    #[arg(value_name = "QUERY", trailing_var_arg = true, num_args = 1..)]
    pub query: Vec<String>,
    /// LM Studio model to use (defaults to qwen3-8b).
    #[arg(long)]
    pub model: Option<String>,
    /// LM Studio API port (defaults to 1234).
    #[arg(long, default_value_t = 1234)]
    pub port: u16,
    /// Only show the match without running the task.
    #[arg(long, short = 'n')]
    pub dry_run: bool,
}
// Options for the commit workflow: push behavior, hub delegation, AI review
// model selection, and session-context inclusion.
#[derive(Args, Debug, Clone)]
pub struct CommitOpts {
    /// Skip pushing after commit.
    #[arg(long, short = 'n')]
    pub no_push: bool,
    /// Run synchronously (don't delegate to hub).
    #[arg(long, visible_alias = "no-hub")]
    pub sync: bool,
    /// Include AI session context in code review (default: off).
    #[arg(long)]
    pub context: bool,
    /// Dry run: show context that would be passed to review without committing.
    #[arg(long)]
    pub dry: bool,
    /// Use Codex instead of Claude for code review (default: Claude).
    #[arg(long)]
    pub codex: bool,
    /// Choose a specific review model (claude-opus, codex-high, codex-mini).
    #[arg(long, value_enum)]
    pub review_model: Option<ReviewModelArg>,
    /// Custom message to include in commit (appended after author line).
    #[arg(long, short = 'm')]
    pub message: Option<String>,
    /// Max tokens for AI session context (default: 1000).
    #[arg(long, short = 't', default_value = "1000")]
    pub tokens: usize,
}
// Options for fixing up a flow.toml, with a preview-only dry-run mode.
#[derive(Args, Debug, Clone)]
pub struct FixupOpts {
    /// Path to the flow.toml to fix (defaults to ./flow.toml).
    #[arg(long, default_value = "flow.toml")]
    pub config: PathBuf,
    /// Only show what would be fixed without making changes.
    #[arg(long, short = 'n')]
    pub dry_run: bool,
}
// `daemon` command surface; action is optional (behavior without one is
// decided by the handler).
#[derive(Args, Debug, Clone)]
pub struct DaemonCommand {
    #[command(subcommand)]
    pub action: Option<DaemonAction>,
}
// Lifecycle subcommands for named daemons.
#[derive(Subcommand, Debug, Clone)]
pub enum DaemonAction {
    /// Start a daemon by name.
    Start {
        /// Name of the daemon to start.
        name: String,
    },
    /// Stop a running daemon.
    Stop {
        /// Name of the daemon to stop.
        name: String,
    },
    /// Restart a daemon (stop then start).
    Restart {
        /// Name of the daemon to restart.
        name: String,
    },
    /// Show status of all configured daemons.
    Status,
    /// List available daemons.
    #[command(alias = "ls")]
    List,
}
// `ai` command surface; action is optional (behavior without one is decided
// by the handler).
#[derive(Args, Debug, Clone)]
pub struct AiCommand {
    #[command(subcommand)]
    pub action: Option<AiAction>,
}
// AI-session subcommands spanning both providers (Claude Code and Codex):
// listing, resuming, bookmarking, notes, import, clipboard copy, and
// context extraction.
#[derive(Subcommand, Debug, Clone)]
pub enum AiAction {
    /// List all AI sessions for this project (Claude + Codex).
    #[command(alias = "ls")]
    List,
    /// Claude Code sessions only.
    Claude {
        #[command(subcommand)]
        action: Option<ProviderAiAction>,
    },
    /// Codex sessions only.
    Codex {
        #[command(subcommand)]
        action: Option<ProviderAiAction>,
    },
    /// Resume an AI session by name or ID.
    Resume {
        /// Session name or ID to resume.
        session: Option<String>,
    },
    /// Save/bookmark the current or most recent session with a name.
    Save {
        /// Name for the session.
        name: String,
        /// Session ID to save (defaults to most recent).
        #[arg(long)]
        id: Option<String>,
    },
    /// Open or create notes for a session.
    Notes {
        /// Session name or ID.
        session: String,
    },
    /// Remove a saved session from tracking (doesn't delete the actual session).
    Remove {
        /// Session name or ID to remove.
        session: String,
    },
    /// Initialize .ai folder structure in current project.
    Init,
    /// Import all existing sessions for this project.
    Import,
    /// Copy session history to clipboard (fuzzy search to select).
    Copy {
        /// Session name or ID to copy (if not provided, shows fuzzy search).
        session: Option<String>,
    },
    /// Copy last prompt and response from a session to clipboard (for context passing).
    /// Usage: f ai context [session] [path] [count]
    Context {
        /// Session name or ID (if not provided, shows fuzzy search).
        session: Option<String>,
        /// Path to project directory (default: current directory).
        path: Option<String>,
        /// Number of exchanges to include (default: 1).
        #[arg(default_value = "1")]
        count: usize,
    },
}
/// Provider-specific AI actions (for claude/codex subcommands).
// Subset of AiAction scoped to one provider: list, resume, copy, context.
#[derive(Subcommand, Debug, Clone)]
pub enum ProviderAiAction {
    /// List sessions for this provider.
    #[command(alias = "ls")]
    List,
    /// Resume a session.
    Resume {
        /// Session name or ID to resume.
        session: Option<String>,
    },
    /// Copy session history to clipboard.
    Copy {
        /// Session name or ID to copy.
        session: Option<String>,
    },
    /// Copy last prompt and response to clipboard (for context passing).
    /// Usage: f ai claude context [session] [path] [count]
    Context {
        /// Session name or ID to copy.
        session: Option<String>,
        /// Path to project directory (default: current directory).
        path: Option<String>,
        /// Number of exchanges to include (default: 1).
        #[arg(default_value = "1")]
        count: usize,
    },
}
// `env` command surface; action is optional (behavior without one is decided
// by the handler).
#[derive(Args, Debug, Clone)]
pub struct EnvCommand {
    #[command(subcommand)]
    pub action: Option<EnvAction>,
}
#[derive(Subcommand, Debug, Clone)]
pub enum EnvAction {
/// Sync project settings and set up autonomous agent workflow.
Sync,
/// Unlock env read access (Touch ID on macOS).
Unlock,
/// Authenticate with 1focus to fetch env vars.
Login,
/// Fetch env vars from 1focus and write to .env.
Pull {
/// Environment to fetch (dev, staging, production).
#[arg(short, long, default_value = "production")]
environment: String,
},
/// Push local .env to 1focus.
Push {
/// Environment to push to (dev, staging, production).
#[arg(short, long, default_value = "production")]
environment: String,
},
/// Guided prompt to set required env vars from flow.toml.
Guide {
/// Environment to set in (dev, staging, production).
#[arg(short, long, default_value = "production")]
environment: String,
},
/// Apply env vars from 1focus to the configured Cloudflare worker.
Apply,
/// Bootstrap Cloudflare secrets from flow.toml (interactive).
Bootstrap,
/// Interactive env setup (uses flow.toml when configured).
Setup {
/// Optional .env file path to preselect.
#[arg(short = 'f', long)]
env_file: Option<PathBuf>,
/// Optional environment to preselect.
#[arg(short, long)]
environment: Option<String>,
},
/// List env vars for this project.
#[command(alias = "ls")]
List {
/// Environment to list (dev, staging, production).
#[arg(short, long, default_value = "production")]
environment: String,
},
/// Set a single env var.
Set {
/// KEY=VALUE pair to set.
pair: String,
/// Environment to set in (dev, staging, production).
#[arg(short, long, default_value = "production")]
environment: String,
/// Optional description for this env var.
#[arg(short, long)]
description: Option<String>,
},
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/trace.rs | src/trace.rs | use std::{
collections::HashMap,
fs::{self, File},
io::{BufRead, BufReader, Seek, SeekFrom},
path::{Path, PathBuf},
sync::mpsc,
time::Duration,
};
use anyhow::{Context, Result, bail};
use base64::{Engine, engine::general_purpose};
use notify::{RecursiveMode, Watcher};
use crate::cli::TraceOpts;
// Locations (relative to $HOME) of the terminal-tracing artifacts:
// tty-meta holds per-pane command start/end metadata lines; tmux-logs holds
// the raw pane output that extract_command_output() searches.
const META_DIR_SUFFIX: &str = ".flow/tty-meta";
const TTY_LOG_DIR_SUFFIX: &str = ".flow/tmux-logs";
/// Entry point for tracing: either replay the last command's output or stream
/// live command events, depending on `--last-command`.
pub fn run(opts: TraceOpts) -> Result<()> {
    if opts.last_command {
        print_last_command()
    } else {
        stream_operations()
    }
}
/// Stream command start/end events from the meta dir until interrupted.
///
/// Prints the backlog of every existing log first, then combines filesystem
/// notifications with a 500ms polling fallback that picks up newly created
/// log files the watcher might miss.
fn stream_operations() -> Result<()> {
    let meta_dir = meta_dir();
    if !meta_dir.exists() {
        bail!(
            "no meta dir at {}; enable trace_terminal_io and open a new terminal",
            meta_dir.display()
        );
    }
    // Byte offset already printed, per log file.
    let mut positions = HashMap::new();
    bootstrap_existing(&meta_dir, &mut positions)?;
    let (tx, rx) = mpsc::channel();
    // Forward watcher callbacks onto a channel so the main loop can poll
    // with a timeout instead of blocking inside the watcher.
    let mut watcher = notify::recommended_watcher(move |res| {
        let _ = tx.send(res);
    })
    .context("failed to start watcher on tty meta dir")?;
    watcher
        .watch(&meta_dir, RecursiveMode::NonRecursive)
        .with_context(|| format!("failed to watch {}", meta_dir.display()))?;
    println!("# streaming command events (Ctrl+C to stop)");
    loop {
        match rx.recv_timeout(Duration::from_millis(500)) {
            Ok(Ok(event)) => {
                for path in event.paths {
                    // Only .log files carry command metadata.
                    if path.extension().and_then(|s| s.to_str()) != Some("log") {
                        continue;
                    }
                    // Best-effort: an unreadable file must not kill the stream.
                    let _ = print_new_lines(&path, &mut positions);
                }
            }
            Ok(Err(err)) => {
                eprintln!("watch error: {err}");
            }
            Err(mpsc::RecvTimeoutError::Timeout) => {
                // poll for new files
                let _ = bootstrap_existing(&meta_dir, &mut positions);
            }
            Err(mpsc::RecvTimeoutError::Disconnected) => break,
        }
    }
    Ok(())
}
fn bootstrap_existing(meta_dir: &Path, positions: &mut HashMap<PathBuf, u64>) -> Result<()> {
for entry in
fs::read_dir(meta_dir).with_context(|| format!("failed to read {}", meta_dir.display()))?
{
let entry = entry?;
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("log") {
continue;
}
if !positions.contains_key(&path) {
positions.insert(path.clone(), 0);
print_new_lines(&path, positions)?;
}
}
Ok(())
}
/// Print any lines appended to `path` since its recorded offset, advancing
/// the offset so repeated calls only emit new content.
fn print_new_lines(path: &Path, positions: &mut HashMap<PathBuf, u64>) -> Result<()> {
    let mut file =
        File::open(path).with_context(|| format!("failed to open {}", path.display()))?;
    // Files seen for the first time start at offset 0 (full backlog).
    let pos = positions.entry(path.to_path_buf()).or_insert(0);
    file.seek(SeekFrom::Start(*pos))
        .with_context(|| format!("failed to seek {}", path.display()))?;
    let mut reader = BufReader::new(file);
    let mut buf = String::new();
    while reader.read_line(&mut buf)? != 0 {
        // Advance by byte length (not char count) so the next seek lands on
        // the correct byte boundary.
        *pos += buf.len() as u64;
        if let Some(evt) = parse_meta_line(buf.trim_end()) {
            println!("{}", format_event(evt, path));
        }
        buf.clear();
    }
    Ok(())
}
/// Print the most recent command's metadata and captured output.
///
/// Finds the newest event across all meta logs, looks up the matching `start`
/// event for command line and cwd, then extracts the output from the tmux
/// pane logs by marker.
fn print_last_command() -> Result<()> {
    let meta_dir = meta_dir();
    let tty_dir = tty_dir();
    if !meta_dir.exists() {
        bail!(
            "no meta data found at {}; enable trace_terminal_io and run commands inside tmux",
            meta_dir.display()
        );
    }
    if !tty_dir.exists() {
        bail!(
            "no tmux logs at {}; ensure shells run inside tmux",
            tty_dir.display()
        );
    }
    let (last_evt, start_map) = latest_event(&meta_dir)?;
    let Some(evt) = last_evt else {
        bail!("no commands recorded yet");
    };
    // The latest event may be an `end`; the paired `start` carries cmd/cwd.
    let cmd = start_map.get(&evt.id).cloned();
    let output = extract_command_output(&evt.id, &tty_dir)
        .with_context(|| format!("failed to find output for command {}", evt.id))?;
    if let Some(start) = cmd {
        println!(
            "command: {}",
            start.cmd.unwrap_or_else(|| "<unknown>".to_string())
        );
        if let Some(cwd) = start.cwd {
            println!("cwd: {cwd}");
        }
    } else {
        println!("command: <unknown>");
    }
    if let Some(status) = evt.status {
        println!("status: {status}");
    }
    println!("--- output ---");
    print!("{output}");
    Ok(())
}
/// Locate the captured output of command `id` in the tmux pane logs.
///
/// Commands are bracketed in the raw tty logs by `flow-cmd-start;<id>` and
/// `flow-cmd-end;<id>` markers; the text between them is the output.
fn extract_command_output(id: &str, tty_dir: &Path) -> Result<String> {
    let start_marker = format!("flow-cmd-start;{id}");
    let end_marker = format!("flow-cmd-end;{id}");
    for entry in
        fs::read_dir(tty_dir).with_context(|| format!("failed to read {}", tty_dir.display()))?
    {
        let entry = entry?;
        let path = entry.path();
        if path.extension().and_then(|s| s.to_str()) != Some("log") {
            continue;
        }
        let content = fs::read_to_string(&path)
            .with_context(|| format!("failed to read tty log {}", path.display()))?;
        if let Some(start_pos) = content.find(&start_marker) {
            // Skip past the BEL (\x07) terminating the marker's escape
            // sequence when present; otherwise fall back to the marker start.
            let after_start = content[start_pos..]
                .find('\x07')
                .map(|idx| start_pos + idx + 1)
                .unwrap_or(start_pos);
            if let Some(end_pos) = content[after_start..].find(&end_marker) {
                let end_idx = after_start + end_pos;
                let slice = &content[after_start..end_idx];
                return Ok(slice.trim_matches(|c| c == '\n' || c == '\r').to_string());
            }
        }
    }
    bail!("command id {id} not found in tty logs; ensure command ran inside tmux")
}
// One parsed entry from a tty meta log line.
#[derive(Clone)]
struct MetaEvent {
    ts: String,          // event timestamp, exactly as written in the log
    id: String,          // command id linking start/end pairs
    kind: MetaKind,      // which half of the lifecycle this line describes
    cmd: Option<String>, // decoded command line (start events only)
    cwd: Option<String>, // decoded working directory (start events only)
    status: Option<i32>, // exit status (end events only)
}
// Lifecycle phase recorded by a meta line.
#[derive(Clone)]
enum MetaKind {
    Start, // command began executing
    End,   // command finished
}
/// Parse one meta log line.
///
/// Expected shapes: `start <ts> <id> <cwd-b64> <cmd-b64>` or
/// `end <ts> <id> <status>`. Returns `None` for malformed lines or unknown
/// event kinds.
fn parse_meta_line(line: &str) -> Option<MetaEvent> {
    let mut fields = line.split_whitespace();
    let kind = fields.next()?;
    let ts = fields.next()?.to_string();
    if kind == "start" {
        let id = fields.next()?.to_string();
        // Missing base64 fields degrade to empty input (decoded as "").
        let cwd = decode_b64(fields.next().unwrap_or(""));
        let cmd = decode_b64(fields.next().unwrap_or(""));
        Some(MetaEvent {
            ts,
            id,
            kind: MetaKind::Start,
            cmd,
            cwd,
            status: None,
        })
    } else if kind == "end" {
        let id = fields.next()?.to_string();
        // A missing or non-numeric status is recorded as None.
        let status = fields.next().and_then(|raw| raw.parse::<i32>().ok());
        Some(MetaEvent {
            ts,
            id,
            kind: MetaKind::End,
            cmd: None,
            cwd: None,
            status,
        })
    } else {
        None
    }
}
/// Render a parsed event as a single human-readable line, tagged with the
/// pane name derived from the log file's stem.
fn format_event(evt: MetaEvent, path: &Path) -> String {
    let pane = path.file_stem().and_then(|s| s.to_str()).unwrap_or("pane");
    match evt.kind {
        MetaKind::Start => {
            let cmd = evt.cmd.unwrap_or_else(|| "<unknown>".to_string());
            let cwd = evt.cwd.unwrap_or_else(|| "?".to_string());
            format!("[{} {}] start {} (cwd: {})", pane, evt.ts, cmd, cwd)
        }
        MetaKind::End => {
            let status = evt
                .status
                .map(|s| s.to_string())
                .unwrap_or_else(|| "?".to_string());
            format!("[{} {}] end status={}", pane, evt.ts, status)
        }
    }
}
/// Scan every meta log and return the event with the greatest timestamp plus
/// a map from command id to its `start` event (for cmd/cwd lookup).
fn latest_event(meta_dir: &Path) -> Result<(Option<MetaEvent>, HashMap<String, MetaEvent>)> {
    let mut last: Option<MetaEvent> = None;
    let mut starts: HashMap<String, MetaEvent> = HashMap::new();
    for entry in
        fs::read_dir(meta_dir).with_context(|| format!("failed to read {}", meta_dir.display()))?
    {
        let entry = entry?;
        let path = entry.path();
        if path.extension().and_then(|s| s.to_str()) != Some("log") {
            continue;
        }
        let file =
            File::open(&path).with_context(|| format!("failed to open {}", path.display()))?;
        let reader = BufReader::new(file);
        for line in reader.lines() {
            // Skip unreadable lines rather than aborting the whole scan.
            let line = match line {
                Ok(l) => l,
                Err(_) => continue,
            };
            if let Some(evt) = parse_meta_line(&line) {
                if matches!(evt.kind, MetaKind::Start) {
                    starts.insert(evt.id.clone(), evt.clone());
                }
                // Timestamps compare lexically; NOTE(review): assumes the
                // writer emits a sortable timestamp format — confirm.
                if last.as_ref().map_or(true, |prev| evt.ts > prev.ts) {
                    last = Some(evt);
                }
            }
        }
    }
    Ok((last, starts))
}
/// Decode a standard-alphabet base64 string into UTF-8 text; `None` when
/// either the base64 or the UTF-8 decoding fails.
fn decode_b64(input: &str) -> Option<String> {
    let bytes = general_purpose::STANDARD.decode(input.as_bytes()).ok()?;
    String::from_utf8(bytes).ok()
}
/// Absolute path of the command-metadata log directory (~/.flow/tty-meta).
fn meta_dir() -> PathBuf {
    home_dir().join(META_DIR_SUFFIX)
}
/// Absolute path of the raw tmux pane log directory (~/.flow/tmux-logs).
fn tty_dir() -> PathBuf {
    home_dir().join(TTY_LOG_DIR_SUFFIX)
}
/// The user's home directory from $HOME, falling back to the current
/// directory when the variable is unset.
fn home_dir() -> PathBuf {
    match std::env::var_os("HOME") {
        Some(home) => PathBuf::from(home),
        None => PathBuf::from("."),
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/env.rs | src/env.rs | //! Environment variable management via 1focus.
//!
//! Fetches, sets, and manages environment variables for projects
//! using the 1focus API.
use std::collections::{HashMap, HashSet};
use std::fs;
use std::io::{self, IsTerminal, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
use anyhow::{Context, Result, bail};
use chrono::{DateTime, Local, TimeZone, Utc};
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use crate::cli::EnvAction;
use crate::config;
use crate::deploy;
use crate::env_setup::{EnvSetupDefaults, run_env_setup};
use crate::storage::{create_jazz_worker_account, get_project_name as storage_project_name, sanitize_name};
use crate::sync;
use uuid::Uuid;
// Base URL of the 1focus API; presumably overridable via auth.toml's api_url.
const DEFAULT_API_URL: &str = "https://1focus.ai";
/// Auth config stored in ~/.config/flow/auth.toml
#[derive(Debug, Serialize, Deserialize, Default)]
struct AuthConfig {
    // API token; absent on disk when the token lives in the macOS Keychain.
    token: Option<String>,
    // Optional API base URL; None presumably falls back to DEFAULT_API_URL.
    api_url: Option<String>,
    // Where the token is stored; "keychain" means fetch from macOS Keychain.
    token_source: Option<String>,
}
// On-disk record of the env-read unlock window (env_read_unlock.json).
#[derive(Debug, Serialize, Deserialize)]
struct EnvReadUnlock {
    // Unix timestamp (seconds, UTC) after which a new unlock is required.
    expires_at: i64,
}
/// An env var with optional description.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct EnvVar {
    // The variable's value.
    pub value: String,
    // Human-readable description; omitted from serialization when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Response from /api/env/:projectName
#[derive(Debug, Deserialize)]
struct EnvResponse {
    // Key/value env vars for the requested project and environment.
    env: HashMap<String, String>,
    // Per-key descriptions; defaults to empty when the server omits them.
    #[serde(default)]
    descriptions: HashMap<String, String>,
    project: String,
    environment: String,
}
/// Response from POST /api/env/:projectName
// Fields mirror the API payload; not all are read, hence allow(dead_code).
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct SetEnvResponse {
    success: bool,
    project: String,
    environment: String,
}
/// Response from /api/env/personal
#[derive(Debug, Deserialize)]
struct PersonalEnvResponse {
    // Key/value env vars scoped to the authenticated user.
    env: HashMap<String, String>,
}
/// Get the auth config path.
fn get_auth_config_path() -> PathBuf {
let config_dir = dirs::config_dir()
.unwrap_or_else(|| PathBuf::from("."))
.join("flow");
config_dir.join("auth.toml")
}
/// Load auth config.
fn load_auth_config_raw() -> Result<AuthConfig> {
let path = get_auth_config_path();
if !path.exists() {
return Ok(AuthConfig::default());
}
let content =
fs::read_to_string(&path).with_context(|| format!("failed to read {}", path.display()))?;
toml::from_str(&content).context("failed to parse auth.toml")
}
/// Load auth config and hydrate token from Keychain on macOS when configured.
///
/// When no token is present but `token_source` is "keychain", this first
/// requires a valid env-read unlock (Touch ID) and then reads the token from
/// the Keychain entry for the active API URL.
fn load_auth_config() -> Result<AuthConfig> {
    let mut auth = load_auth_config_raw()?;
    if auth.token.is_none()
        && auth
            .token_source
            .as_deref()
            .map(|source| source == "keychain")
            .unwrap_or(false)
    {
        require_env_read_unlock()?;
        if let Some(token) = get_keychain_token(&get_api_url(&auth))? {
            auth.token = Some(token);
        }
    }
    Ok(auth)
}
/// Save auth config.
fn save_auth_config(config: &AuthConfig) -> Result<()> {
let path = get_auth_config_path();
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
let content = toml::to_string_pretty(config)?;
fs::write(&path, content)?;
Ok(())
}
/// Keychain service name for the 1focus token, namespaced by API URL so
/// multiple endpoints can hold distinct tokens.
fn keychain_service(api_url: &str) -> String {
    let mut service = String::from("flow-1focus-token:");
    service.push_str(api_url);
    service
}
/// Store `token` in the macOS Keychain via the `security` CLI.
///
/// Uses `-U` so an existing entry for the same service/account is updated
/// in place. Fails when the command cannot be spawned or exits non-zero.
fn set_keychain_token(api_url: &str, token: &str) -> Result<()> {
    let service = keychain_service(api_url);
    let status = Command::new("security")
        .args([
            "add-generic-password",
            "-a",
            "flow",
            "-s",
            service.as_str(),
            "-w",
            token,
            "-U",
        ])
        .status()
        .context("failed to store token in Keychain")?;
    if status.success() {
        Ok(())
    } else {
        bail!("failed to store token in Keychain")
    }
}
/// Read the stored 1focus token from the macOS Keychain.
///
/// Returns Ok(None) on non-macOS platforms, when no matching entry exists,
/// or when the stored value is empty; any other `security` failure is an
/// error carrying the tool's stderr.
fn get_keychain_token(api_url: &str) -> Result<Option<String>> {
    if !cfg!(target_os = "macos") {
        return Ok(None);
    }
    let service = keychain_service(api_url);
    let output = Command::new("security")
        .args([
            "find-generic-password",
            "-a",
            "flow",
            "-s",
            &service,
            "-w",
        ])
        .output()
        .context("failed to read token from Keychain")?;
    if output.status.success() {
        let token = String::from_utf8_lossy(&output.stdout).trim().to_string();
        if token.is_empty() {
            return Ok(None);
        }
        return Ok(Some(token));
    }
    let stderr = String::from_utf8_lossy(&output.stderr);
    // These messages indicate a missing entry, not a hard failure.
    if stderr.contains("could not be found") || stderr.contains("SecKeychainSearchCopyNext") {
        return Ok(None);
    }
    bail!("failed to read token from Keychain: {}", stderr.trim());
}
/// Record a freshly obtained token in `auth`.
///
/// On macOS the token goes into the Keychain and auth.toml records only
/// token_source = "keychain"; if the Keychain write fails (or on other
/// platforms) the token is kept in auth.toml itself. Note: this mutates
/// `auth` only — the caller is responsible for persisting it.
fn store_auth_token(auth: &mut AuthConfig, token: String) -> Result<()> {
    let api_url = get_api_url(auth);
    if cfg!(target_os = "macos") {
        if let Err(err) = set_keychain_token(&api_url, &token) {
            // Degrade gracefully to plaintext storage rather than failing login.
            eprintln!("⚠ Failed to store token in Keychain: {}", err);
            eprintln!("  Falling back to auth.toml storage.");
            auth.token = Some(token);
            auth.token_source = None;
            return Ok(());
        }
        auth.token = None;
        auth.token_source = Some("keychain".to_string());
    } else {
        auth.token = Some(token);
        auth.token_source = None;
    }
    Ok(())
}
/// Path of the JSON file recording the current env-read unlock window
/// (~/.config/flow/env_read_unlock.json).
fn get_env_unlock_path() -> PathBuf {
    dirs::config_dir()
        .unwrap_or_else(|| PathBuf::from("."))
        .join("flow")
        .join("env_read_unlock.json")
}
/// Load the persisted unlock record; `None` when the file is missing or
/// unparseable (treated as "locked").
fn load_env_unlock() -> Option<EnvReadUnlock> {
    let raw = fs::read_to_string(get_env_unlock_path()).ok()?;
    serde_json::from_str(&raw).ok()
}
/// Convert the stored unix timestamp to UTC; `None` when the value is out of
/// range (`timestamp_opt` yields no unique result).
fn unlock_expires_at(entry: &EnvReadUnlock) -> Option<DateTime<Utc>> {
    Utc.timestamp_opt(entry.expires_at, 0).single()
}
/// Persist the unlock expiry to env_read_unlock.json, creating parent
/// directories as needed.
fn save_env_unlock(expires_at: DateTime<Utc>) -> Result<()> {
    let path = get_env_unlock_path();
    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir)?;
    }
    let record = EnvReadUnlock {
        expires_at: expires_at.timestamp(),
    };
    fs::write(&path, serde_json::to_string_pretty(&record)?)?;
    Ok(())
}
/// Compute the upcoming local midnight, expressed in UTC.
///
/// Used as the unlock expiry: env-read access lasts until the end of the
/// current local day. `single()` covers the normal case; `earliest()` is the
/// fallback when the local time is ambiguous (e.g. around DST transitions).
fn next_local_midnight_utc() -> Result<DateTime<Utc>> {
    let now = Local::now();
    let tomorrow = now
        .date_naive()
        .succ_opt()
        .ok_or_else(|| anyhow::anyhow!("failed to calculate next day"))?;
    let naive = tomorrow
        .and_hms_opt(0, 0, 0)
        .ok_or_else(|| anyhow::anyhow!("failed to build midnight time"))?;
    let local_dt = Local
        .from_local_datetime(&naive)
        .single()
        .or_else(|| Local.from_local_datetime(&naive).earliest())
        .ok_or_else(|| anyhow::anyhow!("failed to resolve local midnight"))?;
    Ok(local_dt.with_timezone(&Utc))
}
/// Show a Touch ID prompt by running a JXA (osascript) snippet that calls the
/// LocalAuthentication framework.
///
/// The script spins the run loop until the async biometric callback fires,
/// then exits with: 0 = authenticated, 1 = failed/cancelled, 2 = biometrics
/// unavailable. The `reason` string is escaped for safe embedding in the
/// script's string literal.
fn prompt_touch_id() -> Result<()> {
    if !cfg!(target_os = "macos") {
        bail!("Touch ID is not available on this OS");
    }
    let reason = "Flow needs Touch ID to read env vars.";
    let reason = reason.replace('\\', "\\\\").replace('"', "\\\"");
    let script = format!(
        r#"ObjC.import('stdlib');
ObjC.import('Foundation');
ObjC.import('LocalAuthentication');
const context = $.LAContext.alloc.init;
const policy = $.LAPolicyDeviceOwnerAuthenticationWithBiometrics;
const error = Ref();
if (!context.canEvaluatePolicyError(policy, error)) {{
$.exit(2);
}}
let ok = false;
let done = false;
context.evaluatePolicyLocalizedReasonReply(policy, "{reason}", function(success, err) {{
ok = success;
done = true;
}});
const runLoop = $.NSRunLoop.currentRunLoop;
while (!done) {{
runLoop.runUntilDate($.NSDate.dateWithTimeIntervalSinceNow(0.1));
}}
$.exit(ok ? 0 : 1);"#
    );
    let status = Command::new("osascript")
        .args(["-l", "JavaScript", "-e", &script])
        .status()
        .context("failed to launch Touch ID prompt")?;
    // Map the script's exit codes onto user-facing errors.
    match status.code() {
        Some(0) => Ok(()),
        Some(1) => bail!("Touch ID verification failed"),
        Some(2) => bail!("Touch ID is not available on this device"),
        _ => bail!("Touch ID verification failed"),
    }
}
/// Interactive unlock: if a valid unlock window already exists, report it;
/// otherwise require Touch ID and persist a window lasting until the next
/// local midnight. Non-macOS platforms are a no-op.
fn unlock_env_read() -> Result<()> {
    if !cfg!(target_os = "macos") {
        println!("Touch ID unlock is not available on this OS.");
        return Ok(());
    }
    // Short-circuit when an unexpired unlock record is already on disk.
    if let Some(entry) = load_env_unlock() {
        if let Some(expires_at) = unlock_expires_at(&entry) {
            if expires_at > Utc::now() {
                let local_expiry = expires_at.with_timezone(&Local);
                println!(
                    "Env read access already unlocked until {}",
                    local_expiry.format("%Y-%m-%d %H:%M %Z")
                );
                return Ok(());
            }
        }
    }
    println!("Touch ID required to read env vars.");
    prompt_touch_id()?;
    // New window: valid until the start of tomorrow, local time.
    let expires_at = next_local_midnight_utc()?;
    save_env_unlock(expires_at)?;
    let local_expiry = expires_at.with_timezone(&Local);
    println!(
        "✓ Env read access unlocked until {}",
        local_expiry.format("%Y-%m-%d %H:%M %Z")
    );
    Ok(())
}
/// Ensure an env-read unlock window is currently active, triggering the
/// interactive Touch ID flow when it is not. Non-macOS platforms are
/// always considered unlocked.
fn require_env_read_unlock() -> Result<()> {
    if !cfg!(target_os = "macos") {
        return Ok(());
    }
    let still_valid = load_env_unlock()
        .and_then(|entry| unlock_expires_at(&entry))
        .map(|expires_at| expires_at > Utc::now())
        .unwrap_or(false);
    if still_valid {
        Ok(())
    } else {
        unlock_env_read()
    }
}
/// Resolve the project name: prefer `project_name` from `./flow.toml`,
/// otherwise fall back to the current directory's name ("unnamed" if that
/// cannot be determined).
fn get_project_name() -> Result<String> {
    let cwd = std::env::current_dir()?;
    let flow_toml = cwd.join("flow.toml");
    if flow_toml.exists() {
        if let Some(name) = config::load(&flow_toml)?.project_name {
            return Ok(name);
        }
    }
    // No configured name: use the directory name instead.
    Ok(cwd
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or("unnamed")
        .to_string())
}
/// API base URL from the auth config, defaulting to `DEFAULT_API_URL`.
fn get_api_url(auth: &AuthConfig) -> String {
    match &auth.api_url {
        Some(url) => url.clone(),
        None => DEFAULT_API_URL.to_string(),
    }
}
/// Walk from `start` up through its ancestors and return the first
/// `flow.toml` found, or `None` when no ancestor (up to the filesystem
/// root) contains one.
///
/// Takes `&Path` instead of `&PathBuf` (clippy `ptr_arg`); existing
/// `&PathBuf` call sites keep compiling via deref coercion.
fn find_flow_toml(start: &Path) -> Option<PathBuf> {
    // `ancestors()` yields `start` itself first, then each parent — the
    // same directories the original manual pop-loop visited.
    start
        .ancestors()
        .map(|dir| dir.join("flow.toml"))
        .find(|candidate| candidate.exists())
}
/// True when the configured env source refers to 1focus, case-insensitively
/// (accepts the aliases "1f" and "onefocus").
fn is_1focus_source(source: Option<&str>) -> bool {
    match source {
        Some(s) => {
            let lowered = s.to_ascii_lowercase();
            lowered == "1focus" || lowered == "1f" || lowered == "onefocus"
        }
        None => false,
    }
}
/// Render a default value for display in a prompt (currently verbatim).
fn format_default_hint(value: &str) -> String {
    value.to_owned()
}
/// Fetch a single personal env var from the 1focus API.
///
/// Returns `Ok(None)` when not logged in, when the token is rejected
/// (401), or when the key is absent from the response; other non-success
/// statuses become errors. Requires an active env-read unlock window.
pub fn get_personal_env_var(key: &str) -> Result<Option<String>> {
    let auth = load_auth_config()?;
    // No token: silently report "not found" rather than erroring.
    let token = match auth.token.as_ref() {
        Some(t) => t,
        None => return Ok(None),
    };
    require_env_read_unlock()?;
    let api_url = get_api_url(&auth);
    let url = format!("{}/api/env/personal?keys={}", api_url, key);
    let client = Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()?;
    let resp = client
        .get(&url)
        .header("Authorization", format!("Bearer {}", token))
        .send()
        .context("failed to connect to 1focus")?;
    // Invalid/expired token is treated the same as "no value".
    if resp.status() == 401 {
        return Ok(None);
    }
    if !resp.status().is_success() {
        let status = resp.status();
        let body = resp.text().unwrap_or_default();
        bail!("API error {}: {}", status, body);
    }
    let data: PersonalEnvResponse = resp.json().context("failed to parse response")?;
    Ok(data.env.get(key).cloned())
}
/// Run the env subcommand.
///
/// With no action: list production env vars when logged in, otherwise show
/// status. Otherwise dispatch to the matching handler; `Apply` and
/// `Bootstrap` additionally locate the project root via `flow.toml`.
pub fn run(action: Option<EnvAction>) -> Result<()> {
    // No action = list env vars (or show status if not logged in)
    let Some(action) = action else {
        let auth = load_auth_config()?;
        if auth.token.is_some() {
            return list("production");
        }
        return status();
    };
    match action {
        EnvAction::Sync => sync::run()?,
        EnvAction::Unlock => unlock_env_read()?,
        EnvAction::Login => login()?,
        EnvAction::Pull { environment } => pull(&environment)?,
        EnvAction::Push { environment } => push(&environment)?,
        EnvAction::Guide { environment } => guide(&environment)?,
        EnvAction::Apply => {
            // Resolve the project root from the nearest flow.toml.
            let cwd = std::env::current_dir()?;
            let flow_path = find_flow_toml(&cwd)
                .ok_or_else(|| anyhow::anyhow!("flow.toml not found. Run `f init` first."))?;
            let project_root = flow_path
                .parent()
                .map(|p| p.to_path_buf())
                .unwrap_or(cwd);
            let flow_config = config::load(&flow_path)?;
            deploy::apply_cloudflare_env(&project_root, Some(&flow_config))?;
        }
        EnvAction::Bootstrap => {
            // Same root resolution as Apply, then interactive bootstrap.
            let cwd = std::env::current_dir()?;
            let flow_path = find_flow_toml(&cwd)
                .ok_or_else(|| anyhow::anyhow!("flow.toml not found. Run `f init` first."))?;
            let project_root = flow_path
                .parent()
                .map(|p| p.to_path_buf())
                .unwrap_or(cwd);
            let flow_config = config::load(&flow_path)?;
            bootstrap_cloudflare_secrets(&project_root, &flow_config)?;
        }
        EnvAction::Keys => {
            show_keys()?;
        }
        EnvAction::Setup { env_file, environment } => setup(env_file, environment)?,
        EnvAction::List { environment } => list(&environment)?,
        EnvAction::Set { pair, environment, description } => set_var(&pair, &environment, description.as_deref())?,
        EnvAction::Delete { keys, environment } => delete_vars(&keys, &environment)?,
        EnvAction::Status => status()?,
        EnvAction::Get { keys, personal, environment, format } => {
            get_vars(&keys, personal, &environment, &format)?
        }
        EnvAction::Run { personal, environment, keys, command } => {
            run_with_env(personal, &environment, &keys, &command)?
        }
    }
    Ok(())
}
/// Login / set token.
///
/// Interactive flow: optionally override the API base URL, then read an
/// API token from stdin and persist it (Keychain when available per
/// `store_auth_token`, otherwise auth.toml).
fn login() -> Result<()> {
    let mut auth = load_auth_config_raw()?;
    println!("1focus Environment Manager");
    println!("─────────────────────────────");
    println!();
    println!("To get a token:");
    println!("  1. Go to {} and sign in", DEFAULT_API_URL);
    println!("  2. Go to Settings → API Tokens");
    println!("  3. Create a new token");
    println!();
    let api_url = prompt_line_default("API base URL", Some(DEFAULT_API_URL))?;
    if let Some(api_url) = api_url {
        auth.api_url = Some(api_url);
    }
    print!("Enter your API token: ");
    io::stdout().flush()?;
    let mut token = String::new();
    io::stdin().read_line(&mut token)?;
    let token = token.trim().to_string();
    if token.is_empty() {
        bail!("Token cannot be empty");
    }
    // Tokens issued by 1focus start with "1f_"; warn but still accept
    // anything else.
    if !token.starts_with("1f_") {
        println!("Warning: Token doesn't start with '1f_' - are you sure this is correct?");
    }
    store_auth_token(&mut auth, token)?;
    save_auth_config(&auth)?;
    println!();
    // token_source is set by store_auth_token to record where it landed.
    if auth.token_source.as_deref() == Some("keychain") {
        println!("✓ Token saved to Keychain");
    } else {
        println!("✓ Token saved to {}", get_auth_config_path().display());
    }
    println!();
    println!("You can now use:");
    println!("  f env pull   - Fetch env vars for this project");
    println!("  f env push   - Push local .env to 1focus");
    println!("  f env list   - List env vars");
    Ok(())
}
/// Pull env vars from 1focus and write to .env.
///
/// Requires a stored token and an active env-read unlock window. The .env
/// file in the current directory is overwritten with the fetched vars,
/// sorted by key, each value double-quoted.
fn pull(environment: &str) -> Result<()> {
    let auth = load_auth_config()?;
    let token = auth
        .token
        .as_ref()
        .ok_or_else(|| anyhow::anyhow!("Not logged in. Run `f env login` first."))?;
    require_env_read_unlock()?;
    let project = get_project_name()?;
    let api_url = get_api_url(&auth);
    println!("Fetching envs for '{}' ({})...", project, environment);
    let client = Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()?;
    let url = format!(
        "{}/api/env/{}?environment={}",
        api_url, project, environment
    );
    let resp = client
        .get(&url)
        .header("Authorization", format!("Bearer {}", token))
        .send()
        .context("failed to connect to 1focus")?;
    if resp.status() == 401 {
        bail!("Unauthorized. Check your token with `f env login`.");
    }
    if resp.status() == 404 {
        bail!(
            "Project '{}' not found. Create it with `f env push` first.",
            project
        );
    }
    if !resp.status().is_success() {
        let status = resp.status();
        let body = resp.text().unwrap_or_default();
        bail!("API error {}: {}", status, body);
    }
    let data: EnvResponse = resp.json().context("failed to parse response")?;
    if data.env.is_empty() {
        println!("No env vars found for '{}' ({})", project, environment);
        return Ok(());
    }
    // Write to .env
    let mut content = String::new();
    content.push_str(&format!(
        "# Environment: {} (pulled from 1focus)\n",
        environment
    ));
    content.push_str(&format!("# Project: {}\n", project));
    content.push_str("#\n");
    // Sort keys so the file is deterministic across pulls.
    let mut keys: Vec<_> = data.env.keys().collect();
    keys.sort();
    for key in keys {
        let value = &data.env[key];
        // Escape quotes in value
        // NOTE(review): only double quotes are escaped here; a value that
        // contains a backslash may not round-trip cleanly — confirm against
        // parse_env_file's unescaping rules.
        let escaped = value.replace('\"', "\\\"");
        content.push_str(&format!("{}=\"{}\"\n", key, escaped));
    }
    let env_path = std::env::current_dir()?.join(".env");
    fs::write(&env_path, &content)?;
    println!("✓ Wrote {} env vars to .env", data.env.len());
    Ok(())
}
/// Push the `.env` file in the current directory to 1focus; errors when the
/// file is missing and is a no-op (with a message) when it parses empty.
fn push(environment: &str) -> Result<()> {
    let env_path = std::env::current_dir()?.join(".env");
    if !env_path.exists() {
        bail!(".env file not found");
    }
    let content = fs::read_to_string(&env_path)?;
    match parse_env_file(&content) {
        vars if vars.is_empty() => {
            println!("No env vars found in .env");
            Ok(())
        }
        vars => push_vars(environment, vars),
    }
}
/// POST a set of env vars to the 1focus API for the current project and the
/// given environment. Requires a stored token; no-op when `vars` is empty.
fn push_vars(environment: &str, vars: HashMap<String, String>) -> Result<()> {
    if vars.is_empty() {
        println!("No env vars selected.");
        return Ok(());
    }
    let auth = load_auth_config()?;
    let token = auth
        .token
        .as_ref()
        .ok_or_else(|| anyhow::anyhow!("Not logged in. Run `f env login` first."))?;
    let project = get_project_name()?;
    let api_url = get_api_url(&auth);
    println!(
        "Pushing {} env vars to '{}' ({})...",
        vars.len(),
        project,
        environment
    );
    let client = Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()?;
    let url = format!("{}/api/env/{}", api_url, project);
    let body = serde_json::json!({
        "vars": vars,
        "environment": environment,
    });
    let resp = client
        .post(&url)
        .header("Authorization", format!("Bearer {}", token))
        .json(&body)
        .send()
        .context("failed to connect to 1focus")?;
    if resp.status() == 401 {
        bail!("Unauthorized. Check your token with `f env login`.");
    }
    if !resp.status().is_success() {
        let status = resp.status();
        let body = resp.text().unwrap_or_default();
        bail!("API error {}: {}", status, body);
    }
    // Response body is parsed only to validate its shape.
    let _: SetEnvResponse = resp.json().context("failed to parse response")?;
    println!("✓ Pushed {} env vars to 1focus", vars.len());
    Ok(())
}
/// Interactive check-and-fill for the env keys a project requires.
///
/// Reads `cloudflare.env_keys` + `cloudflare.env_vars` from flow.toml,
/// compares against what the env store already has, then prompts for each
/// missing value (hidden prompt for keys not listed in env_vars) and
/// stores any entered values.
fn guide(environment: &str) -> Result<()> {
    let cwd = std::env::current_dir()?;
    let flow_path = find_flow_toml(&cwd)
        .ok_or_else(|| anyhow::anyhow!("flow.toml not found. Run `f init` first."))?;
    let cfg = config::load(&flow_path)?;
    let cf_cfg = cfg
        .cloudflare
        .as_ref()
        .context("No [cloudflare] section in flow.toml")?;
    // Union of env_keys and env_vars, deduplicated, preserving order.
    let mut required = Vec::new();
    let mut seen = HashSet::new();
    for key in cf_cfg.env_keys.iter().chain(cf_cfg.env_vars.iter()) {
        if seen.insert(key.clone()) {
            required.push(key.clone());
        }
    }
    if required.is_empty() {
        bail!("No env keys configured. Add cloudflare.env_keys or cloudflare.env_vars to flow.toml.");
    }
    println!("Checking required env vars for '{}'...", environment);
    let existing = match fetch_project_env_vars(environment, &required) {
        Ok(vars) => vars,
        Err(err) => {
            // A missing project is fine: treat as "no vars set yet".
            let msg = format!("{err:#}");
            if msg.contains("Project not found.") {
                println!("  (project not found yet; will create on first set)");
                HashMap::new()
            } else {
                return Err(err);
            }
        }
    };
    // Keys in env_vars are non-secret; everything else is prompted hidden.
    let var_keys: HashSet<String> = cf_cfg.env_vars.iter().cloned().collect();
    let mut missing = Vec::new();
    for key in &required {
        // Present only when the stored value is non-empty after trimming.
        if existing.get(key).map(|v| !v.trim().is_empty()).unwrap_or(false) {
            println!("  ✓ {}", key);
        } else {
            println!("  ✗ {} (missing)", key);
            missing.push(key.clone());
        }
    }
    if missing.is_empty() {
        println!("✓ All required env vars are set.");
        return Ok(());
    }
    println!();
    println!("Enter missing values (leave empty to skip).");
    for key in missing {
        let default_value = cf_cfg.env_defaults.get(&key).map(|value| value.as_str());
        let is_secret = !var_keys.contains(&key);
        let value = prompt_value(&key, default_value, is_secret)?;
        if let Some(value) = value {
            set_project_env_var(&key, &value, environment, None)?;
        }
    }
    Ok(())
}
/// Interactive first-time setup of a project's Cloudflare secrets.
///
/// Prompts for every key in `cloudflare.bootstrap_secrets`, with special
/// handling for: Jazz worker accounts (offer to generate them), and
/// ENV_API_TOKEN / FLOW_ENV_TOKEN (auto-generate when left empty). The
/// collected values are pushed to Cloudflare; the API token (if any) is
/// stored in the local auth config, and keys that also appear in
/// env_keys/env_vars are mirrored into the env store.
fn bootstrap_cloudflare_secrets(project_root: &Path, cfg: &config::Config) -> Result<()> {
    let cf_cfg = cfg
        .cloudflare
        .as_ref()
        .context("No [cloudflare] section in flow.toml")?;
    if cf_cfg.bootstrap_secrets.is_empty() {
        bail!("No bootstrap secrets configured. Add cloudflare.bootstrap_secrets to flow.toml.");
    }
    println!("Bootstrap Cloudflare secrets");
    println!("─────────────────────────────");
    println!("Enter values (leave empty to skip).");
    let mut values = HashMap::new();
    // Remembers an auto-generated API token so it can be reported later
    // and reused if both token keys are requested.
    let mut generated_env_token: Option<String> = None;
    let needs_env_account = cf_cfg.bootstrap_secrets.iter().any(|key| {
        key == "JAZZ_WORKER_ACCOUNT" || key == "JAZZ_WORKER_SECRET"
    });
    let needs_auth_account = cf_cfg.bootstrap_secrets.iter().any(|key| {
        key == "JAZZ_AUTH_WORKER_ACCOUNT_ID" || key == "JAZZ_AUTH_WORKER_ACCOUNT_SECRET"
    });
    if needs_env_account || needs_auth_account {
        let project = storage_project_name()?;
        let default_env_name = format!("{}-jazz-env", sanitize_name(&project));
        let default_auth_name = format!("{}-jazz-auth", sanitize_name(&project));
        let default_peer = "wss://cloud.jazz.tools/?key=1focus@1focus.app";
        if needs_env_account {
            // Offer to create the env-store worker account instead of
            // asking the user to paste credentials.
            if prompt_confirm("Generate a new Jazz env-store account now? (y/N): ")? {
                println!("Creating Jazz env-store account...");
                let name = cf_cfg
                    .bootstrap_jazz_name
                    .as_deref()
                    .unwrap_or(&default_env_name);
                let peer = cf_cfg
                    .bootstrap_jazz_peer
                    .as_deref()
                    .unwrap_or(default_peer);
                let creds = create_jazz_worker_account(peer, name)?;
                values.insert("JAZZ_WORKER_ACCOUNT".to_string(), creds.account_id);
                values.insert("JAZZ_WORKER_SECRET".to_string(), creds.agent_secret);
                println!("✓ Jazz env-store account created");
            }
        }
        if needs_auth_account {
            if prompt_confirm("Generate a new Jazz auth account now? (y/N): ")? {
                println!("Creating Jazz auth account...");
                let name = cf_cfg
                    .bootstrap_jazz_auth_name
                    .as_deref()
                    .unwrap_or(&default_auth_name);
                let peer = cf_cfg
                    .bootstrap_jazz_auth_peer
                    .as_deref()
                    .unwrap_or(default_peer);
                let creds = create_jazz_worker_account(peer, name)?;
                values.insert(
                    "JAZZ_AUTH_WORKER_ACCOUNT_ID".to_string(),
                    creds.account_id,
                );
                values.insert(
                    "JAZZ_AUTH_WORKER_ACCOUNT_SECRET".to_string(),
                    creds.agent_secret,
                );
                println!("✓ Jazz auth account created");
            }
        }
    }
    // Prompt for every remaining key that wasn't auto-generated above.
    for key in &cf_cfg.bootstrap_secrets {
        if values.contains_key(key) {
            continue;
        }
        if key == "ENV_API_TOKEN" || key == "FLOW_ENV_TOKEN" {
            let value = prompt_secret(&format!("{} (leave empty to auto-generate): ", key))?;
            let value = match value {
                Some(value) => value,
                None => {
                    // Reuse one generated token for both aliases.
                    if let Some(existing) = generated_env_token.clone() {
                        existing
                    } else {
                        let token = generate_env_api_token();
                        generated_env_token = Some(token.clone());
                        token
                    }
                }
            };
            values.insert(key.clone(), value);
            continue;
        }
        let value = prompt_secret(&format!("{}: ", key))?;
        if let Some(value) = value {
            values.insert(key.clone(), value);
        }
    }
    // Drop blank entries so we never push empty secrets.
    values.retain(|_, value| !value.trim().is_empty());
    if values.is_empty() {
        println!("No secrets provided; nothing to set.");
        return Ok(());
    }
    println!("Setting Cloudflare secrets...");
    deploy::set_cloudflare_secrets(project_root, Some(cfg), &values)?;
    println!("✓ Cloudflare secrets updated");
    // Store the API token locally so later `f env` calls are authenticated.
    let mut auth = load_auth_config_raw()?;
    let bootstrap_token = values
        .get("ENV_API_TOKEN")
        .or_else(|| values.get("FLOW_ENV_TOKEN"))
        .cloned();
    if let Some(token) = bootstrap_token {
        store_auth_token(&mut auth, token)?;
        // Reset a workers.dev (or unset) API URL to the canonical default.
        let needs_default_api = auth
            .api_url
            .as_deref()
            .map(|url| url.contains("workers.dev"))
            .unwrap_or(true);
        if needs_default_api {
            auth.api_url = Some(DEFAULT_API_URL.to_string());
        }
        save_auth_config(&auth)?;
    }
    let env_name = cf_cfg
        .environment
        .clone()
        .unwrap_or_else(|| "production".to_string());
    // Mirror values that are also declared env keys into the env store.
    let mut env_key_set: HashSet<String> = HashSet::new();
    for key in cf_cfg.env_keys.iter().chain(cf_cfg.env_vars.iter()) {
        env_key_set.insert(key.clone());
    }
    for (key, value) in &values {
        if env_key_set.contains(key) {
            // Best-effort: a failed mirror is reported but not fatal.
            if let Err(err) = set_project_env_var(key, value, &env_name, None) {
                eprintln!("⚠ Failed to store {} in env store: {}", key, err);
            }
        }
    }
    if generated_env_token.is_some() {
        if auth.token_source.as_deref() == Some("keychain") {
            println!("✓ Saved ENV_API_TOKEN to Keychain");
        } else {
            println!("✓ Saved ENV_API_TOKEN to {}", get_auth_config_path().display());
        }
    }
    Ok(())
}
/// Print `label`, read one line from stdin, and return the trimmed text
/// (`None` when the user entered nothing).
fn prompt_line(label: &str) -> Result<Option<String>> {
    print!("{}", label);
    io::stdout().flush()?;
    let mut buf = String::new();
    io::stdin().read_line(&mut buf)?;
    let trimmed = buf.trim();
    if trimmed.is_empty() {
        Ok(None)
    } else {
        Ok(Some(trimmed.to_string()))
    }
}
/// Prompt for a line, showing `default_value` in the label when present;
/// an empty answer falls back to the default.
fn prompt_line_default(key: &str, default_value: Option<&str>) -> Result<Option<String>> {
    let label = match default_value {
        Some(default) => format!("{} [{}]: ", key, default),
        None => format!("{}: ", key),
    };
    match prompt_line(&label)? {
        Some(value) => Ok(Some(value)),
        None => Ok(default_value.map(str::to_string)),
    }
}
/// Prompt for a single env value.
///
/// Secret keys use a hidden prompt with no default; plain keys show a
/// non-empty default in the label and fall back to it when the user just
/// presses enter.
fn prompt_value(
    key: &str,
    default_value: Option<&str>,
    secret: bool,
) -> Result<Option<String>> {
    if secret {
        return prompt_secret(&format!("{}: ", key));
    }
    // Treat an all-whitespace default as no default at all.
    let default_value = match default_value.map(str::trim) {
        Some("") | None => None,
        Some(trimmed) => Some(trimmed),
    };
    let label = match default_value {
        Some(default) => format!("{} [{}]: ", key, default),
        None => format!("{}: ", key),
    };
    match prompt_line(&label)? {
        Some(value) => Ok(Some(value)),
        None => Ok(default_value.map(str::to_string)),
    }
}
/// Ask a yes/no question. On a TTY, a single raw-mode keypress answers
/// ('y'/'Y' = yes); otherwise (or if raw mode fails) fall back to reading
/// a line and accepting "y"/"yes" case-insensitively.
fn prompt_confirm(label: &str) -> Result<bool> {
    print!("{}", label);
    io::stdout().flush()?;
    if std::io::stdin().is_terminal() {
        if let Ok(()) = crossterm::terminal::enable_raw_mode() {
            let read = crossterm::event::read();
            // Always restore the terminal, even when the read failed.
            let _ = crossterm::terminal::disable_raw_mode();
            if let Ok(crossterm::event::Event::Key(key)) = read {
                println!();
                return Ok(matches!(key.code, crossterm::event::KeyCode::Char('y' | 'Y')));
            }
        }
    }
    // Line-based fallback for non-TTY stdin or raw-mode failure.
    let value = prompt_line("")?;
    Ok(matches!(
        value.unwrap_or_default().trim().to_ascii_lowercase().as_str(),
        "y" | "yes"
    ))
}
/// Mint a fresh API token in the `1f_<uuid>` format used by 1focus.
fn generate_env_api_token() -> String {
    let id = Uuid::new_v4().simple();
    format!("1f_{}", id)
}
/// Read a secret without echoing it; returns `None` for an empty (after
/// trimming) answer.
fn prompt_secret(label: &str) -> Result<Option<String>> {
    let raw = rpassword::prompt_password(label)?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        Ok(None)
    } else {
        Ok(Some(trimmed.to_string()))
    }
}
/// Interactive env setup for a project.
///
/// When no env file is given and the project's env source is 1focus,
/// delegate to `guide`. Otherwise run the setup UI, then push the selected
/// keys from the chosen env file; each early-exit condition prints a
/// message and returns Ok.
fn setup(env_file: Option<PathBuf>, environment: Option<String>) -> Result<()> {
    let cwd = std::env::current_dir()?;
    let flow_path = find_flow_toml(&cwd);
    // Project root = directory of flow.toml; fall back to cwd with no config.
    let (project_root, flow_cfg) = if let Some(path) = flow_path {
        let cfg = config::load(&path)?;
        let root = path.parent().unwrap_or(&cwd).to_path_buf();
        (root, Some(cfg))
    } else {
        (cwd, None)
    };
    let cf_cfg = flow_cfg.as_ref().and_then(|cfg| cfg.cloudflare.as_ref());
    // CLI flag wins over the flow.toml environment.
    let default_env = environment
        .clone()
        .or_else(|| cf_cfg.and_then(|cfg| cfg.environment.clone()));
    if env_file.is_none() {
        if let Some(cfg) = cf_cfg {
            // 1focus-managed projects use the guided check-and-fill flow.
            if is_1focus_source(cfg.env_source.as_deref()) {
                let env = default_env.unwrap_or_else(|| "production".to_string());
                return guide(&env);
            }
        }
    }
    let defaults = EnvSetupDefaults {
        env_file,
        environment: default_env,
    };
    // None = the user backed out of the setup UI.
    let Some(result) = run_env_setup(&project_root, defaults)? else {
        return Ok(());
    };
    if !result.apply {
        println!("Env setup canceled.");
        return Ok(());
    }
    let Some(env_file) = result.env_file else {
        println!("No env file selected; nothing to push.");
        return Ok(());
    };
    let content = fs::read_to_string(&env_file)
        .with_context(|| format!("failed to read {}", env_file.display()))?;
    let vars = parse_env_file(&content);
    if vars.is_empty() {
        println!("No env vars found in {}", env_file.display());
        return Ok(());
    }
    if result.selected_keys.is_empty() {
        println!("No keys selected; nothing to push.");
        return Ok(());
    }
    // Keep only selected keys that actually exist in the env file.
    let mut selected = HashMap::new();
    for key in result.selected_keys {
        if let Some(value) = vars.get(&key) {
            selected.insert(key, value.clone());
        }
    }
    if selected.is_empty() {
        println!("No matching keys found in {}", env_file.display());
        return Ok(());
    }
    push_vars(&result.environment, selected)
}
fn show_keys() -> Result<()> {
let cwd = std::env::current_dir()?;
let flow_path = find_flow_toml(&cwd)
.ok_or_else(|| anyhow::anyhow!("flow.toml not found. Run `f init` first."))?;
let cfg = config::load(&flow_path)?;
let cf_cfg = cfg
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/repos.rs | src/repos.rs | //! Repository management commands.
//!
//! Supports cloning repos into a structured local directory.
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use anyhow::{Context, Result, bail};
use serde::Deserialize;
use crate::cli::{ReposAction, ReposCloneOpts, ReposCommand};
use crate::{config, upstream};
/// Run the repos subcommand.
pub fn run(cmd: ReposCommand) -> Result<()> {
match cmd.action {
Some(ReposAction::Clone(opts)) => {
clone_repo(opts)?;
Ok(())
}
None => {
println!("Usage: f repos clone <url>");
Ok(())
}
}
}
/// An `owner/repo` pair parsed from a GitHub URL or shorthand.
#[derive(Debug, Clone)]
pub(crate) struct RepoRef {
    /// GitHub account or organization name.
    pub(crate) owner: String,
    /// Repository name (any trailing `.git` stripped by the parser).
    pub(crate) repo: String,
}
/// Subset of the `gh api repos/{owner}/{repo}` response used for upstream
/// detection; all other fields are ignored by serde.
#[derive(Debug, Deserialize)]
struct RepoInfo {
    /// Whether this repository is a fork.
    fork: bool,
    /// Immediate parent of the fork, when present.
    parent: Option<RepoParent>,
    /// Fork-network root; used as a fallback when `parent` is absent.
    source: Option<RepoParent>,
}
#[derive(Debug, Deserialize)]
struct RepoParent {
#[serde(rename = "ssh_url")]
ssh_url: String,
}
/// Clone a GitHub repo into `<root>/<owner>/<repo>` and return that path.
///
/// Clones shallowly unless `--full` was given; when shallow, the full
/// history is fetched in a detached background process. Unless
/// `--no-upstream`, an upstream remote is configured from `--upstream-url`
/// or, for forks, from the GitHub API.
pub(crate) fn clone_repo(opts: ReposCloneOpts) -> Result<PathBuf> {
    let repo_ref = parse_github_repo(&opts.url)?;
    let root = normalize_root(&opts.root)?;
    let owner_dir = root.join(&repo_ref.owner);
    let target_dir = owner_dir.join(&repo_ref.repo);
    if target_dir.exists() {
        bail!("target already exists: {}", target_dir.display());
    }
    fs::create_dir_all(&owner_dir)
        .with_context(|| format!("failed to create {}", owner_dir.display()))?;
    // Always clone over SSH regardless of the input URL form.
    let clone_url = format!("git@github.com:{}/{}.git", repo_ref.owner, repo_ref.repo);
    let shallow = !opts.full;
    let fetch_depth = if shallow { Some(1) } else { None };
    run_git_clone(&clone_url, &target_dir, shallow)?;
    println!("✓ cloned to {}", target_dir.display());
    if opts.no_upstream {
        // Still backfill history in the background for shallow clones.
        if shallow {
            spawn_background_history_fetch(&target_dir, false)?;
        }
        return Ok(target_dir);
    }
    // Explicit --upstream-url wins; otherwise ask GitHub (forks only).
    let upstream_url = if let Some(url) = opts.upstream_url {
        Some(url)
    } else {
        resolve_upstream_url(&repo_ref)?
    };
    let (upstream_url, upstream_is_origin) = match upstream_url {
        Some(url) => {
            // Track whether upstream is just origin again, so the
            // background fetch can skip a redundant remote.
            let is_origin = url.trim() == clone_url.as_str();
            (url, is_origin)
        }
        None => {
            println!("No fork detected; using origin as upstream.");
            (clone_url.clone(), true)
        }
    };
    configure_upstream(&target_dir, &upstream_url, fetch_depth)?;
    if shallow {
        spawn_background_history_fetch(&target_dir, !upstream_is_origin)?;
    }
    Ok(target_dir)
}
/// Parse an owner/repo pair out of a GitHub SSH URL, HTTPS URL, or bare
/// `owner/repo` shorthand. Query strings, fragments, extra path segments,
/// and a trailing `.git` suffix are ignored.
pub(crate) fn parse_github_repo(input: &str) -> Result<RepoRef> {
    let trimmed = input.trim();
    if trimmed.is_empty() {
        bail!("missing repository URL");
    }
    // Reduce the input to the part after the host, or take it verbatim
    // for the `owner/repo` shorthand.
    let path = match trimmed.strip_prefix("git@github.com:") {
        Some(rest) => rest,
        None => match trimmed.find("github.com/") {
            Some(idx) => &trimmed[idx + "github.com/".len()..],
            None => trimmed,
        },
    };
    // Drop any query string / fragment and surrounding slashes.
    let path = path
        .trim_start_matches('/')
        .split(&['?', '#'][..])
        .next()
        .unwrap_or(path)
        .trim_end_matches('/');
    let mut segments = path.split('/');
    let owner = segments.next().unwrap_or("").trim();
    let repo_raw = segments.next().unwrap_or("").trim();
    if owner.is_empty() || repo_raw.is_empty() {
        bail!("unable to parse GitHub repo from: {}", input);
    }
    let repo = repo_raw.strip_suffix(".git").unwrap_or(repo_raw);
    if repo.is_empty() {
        bail!("unable to parse GitHub repo from: {}", input);
    }
    Ok(RepoRef {
        owner: owner.to_string(),
        repo: repo.to_string(),
    })
}
/// Expand `raw` through `config::expand_path` and anchor relative results
/// at the current working directory so the caller always gets an absolute
/// path.
pub(crate) fn normalize_root(raw: &str) -> Result<PathBuf> {
    let expanded = config::expand_path(raw);
    if expanded.is_absolute() {
        Ok(expanded)
    } else {
        let cwd = std::env::current_dir().context("failed to resolve current directory")?;
        Ok(cwd.join(expanded))
    }
}
fn run_git_clone(url: &str, target_dir: &Path, shallow: bool) -> Result<()> {
let mut cmd = Command::new("git");
cmd.arg("clone");
if shallow {
cmd.args(["--depth", "1"]);
}
let status = cmd
.arg(url)
.arg(target_dir)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.status()
.context("failed to run git clone")?;
if !status.success() {
bail!("git clone failed");
}
Ok(())
}
/// Ask the GitHub API (via the `gh` CLI) whether the repo is a fork and,
/// if so, return its parent's SSH URL.
///
/// Returns `Ok(None)` — with a printed explanation — when `gh` is missing,
/// the API call fails, or the repo is not a fork; only an unparseable
/// response is an error.
fn resolve_upstream_url(repo_ref: &RepoRef) -> Result<Option<String>> {
    let output = match Command::new("gh")
        .args(["api", &format!("repos/{}/{}", repo_ref.owner, repo_ref.repo)])
        .output()
    {
        Ok(output) => output,
        Err(err) => {
            // gh not installed (or not spawnable): degrade gracefully.
            println!("gh not available; skipping upstream auto-setup ({})", err);
            return Ok(None);
        }
    };
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        let message = stderr.trim();
        if message.is_empty() {
            println!("gh api failed; skipping upstream auto-setup");
        } else {
            println!("gh api failed; skipping upstream auto-setup: {}", message);
        }
        println!("Authenticate with: gh auth login");
        return Ok(None);
    }
    let info: RepoInfo =
        serde_json::from_slice(&output.stdout).context("failed to parse gh api response")?;
    if !info.fork {
        return Ok(None);
    }
    // Prefer the immediate parent, falling back to the fork-network root.
    let parent = info
        .parent
        .or(info.source)
        .map(|parent| parent.ssh_url);
    Ok(parent)
}
/// Configure an upstream remote for a freshly cloned repo by temporarily
/// changing the working directory (the `upstream` module operates on the
/// current directory) and restoring it afterwards.
///
/// NOTE(review): `set_current_dir` is process-global; this is not safe if
/// other threads depend on the cwd concurrently — confirm this only runs
/// on the single CLI thread.
fn configure_upstream(repo_dir: &Path, upstream_url: &str, depth: Option<u32>) -> Result<()> {
    println!("Setting up upstream: {}", upstream_url);
    let cwd = std::env::current_dir().context("failed to capture current directory")?;
    std::env::set_current_dir(repo_dir)
        .with_context(|| format!("failed to enter {}", repo_dir.display()))?;
    let result = upstream::setup_upstream_with_depth(
        Some(upstream_url),
        None,
        depth,
    );
    // Restore the cwd regardless of whether setup succeeded.
    if let Err(err) = std::env::set_current_dir(&cwd) {
        println!("warning: failed to restore working directory: {}", err);
    }
    result
}
/// Kick off a detached shell that unshallows the clone (and fetches the
/// upstream remote when one exists). The child is deliberately not waited
/// on and all its stdio is discarded; failures in the fetch go unnoticed.
fn spawn_background_history_fetch(repo_dir: &Path, has_upstream: bool) -> Result<()> {
    let mut command = String::from("git fetch --unshallow --tags origin");
    if has_upstream {
        command.push_str(" && git fetch --tags upstream");
    }
    let _child = Command::new("sh")
        .arg("-c")
        .arg(command)
        .current_dir(repo_dir)
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .spawn()
        .context("failed to spawn background history fetch")?;
    println!("Fetching full history in background...");
    Ok(())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/todo.rs | src/todo.rs | use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result, bail};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::ai;
use crate::cli::{TodoAction, TodoCommand, TodoStatusArg};
/// A single todo entry as persisted in `.ai/todos/todos.json`.
#[derive(Debug, Serialize, Deserialize, Clone)]
struct TodoItem {
    /// UUID (simple hex form) generated at creation; matched by prefix.
    id: String,
    /// Trimmed, non-empty title.
    title: String,
    /// One of the strings produced by `status_to_string`.
    status: String,
    /// RFC 3339 creation timestamp (UTC).
    created_at: String,
    /// RFC 3339 timestamp of the last edit, if any.
    updated_at: Option<String>,
    /// Optional free-form note.
    note: Option<String>,
    /// Optional reference to an AI session.
    session: Option<String>,
}
/// Entry point for `f todo`: defaults to listing active todos when no
/// action is given, otherwise dispatches to the matching handler.
pub fn run(cmd: TodoCommand) -> Result<()> {
    let action = cmd.action.unwrap_or(TodoAction::List { all: false });
    match action {
        TodoAction::Add {
            title,
            note,
            session,
            no_session,
            status,
        } => add(&title, note.as_deref(), session.as_deref(), no_session, status),
        TodoAction::List { all } => list(all),
        // `done` is shorthand for setting the completed status.
        TodoAction::Done { id } => set_status(&id, TodoStatusArg::Completed),
        TodoAction::Edit { id, title, status, note } => edit(&id, title.as_deref(), status, note),
        TodoAction::Remove { id } => remove(&id),
    }
}
/// Create and persist a new todo.
///
/// The title must be non-empty after trimming; the note is trimmed and
/// dropped when empty; the session reference is resolved from the explicit
/// flag, the `--no-session` opt-out, or the latest recorded session.
fn add(
    title: &str,
    note: Option<&str>,
    session: Option<&str>,
    no_session: bool,
    status: TodoStatusArg,
) -> Result<()> {
    let trimmed = title.trim();
    if trimmed.is_empty() {
        bail!("todo title cannot be empty");
    }
    let (path, mut items) = load_items()?;
    let session_ref = resolve_session_ref(session, no_session)?;
    let now = Utc::now().to_rfc3339();
    let item = TodoItem {
        // Simple (dash-free) UUID so short prefixes are easy to type.
        id: Uuid::new_v4().simple().to_string(),
        title: trimmed.to_string(),
        status: status_to_string(status).to_string(),
        created_at: now,
        updated_at: None,
        note: note.map(|n| n.trim().to_string()).filter(|n| !n.is_empty()),
        session: session_ref,
    };
    items.push(item.clone());
    save_items(&path, &items)?;
    println!("✓ Added {} [{}]", item.id, item.title);
    Ok(())
}
/// Print the todo list to stdout. Completed items are hidden unless
/// `show_all` is set; notes and session refs are indented under each item.
fn list(show_all: bool) -> Result<()> {
    let (_path, items) = load_items()?;
    if items.is_empty() {
        println!("No todos yet.");
        return Ok(());
    }
    let done = status_to_string(TodoStatusArg::Completed);
    let mut shown = 0;
    for item in items.iter().filter(|it| show_all || it.status != done) {
        shown += 1;
        println!("[{}] {} {}", item.status, item.id, item.title);
        if let Some(note) = item.note.as_deref() {
            println!("  - {}", note);
        }
        if let Some(session) = item.session.as_deref() {
            println!("  @ {}", session);
        }
    }
    if shown == 0 {
        println!("No active todos.");
    }
    Ok(())
}
/// Apply partial edits to the todo matching `id` (full id or unique
/// prefix): a non-empty trimmed title replaces the old one, a status is
/// overwritten directly, and an empty note clears the existing note.
/// `updated_at` is stamped on every edit.
fn edit(id: &str, title: Option<&str>, status: Option<TodoStatusArg>, note: Option<String>) -> Result<()> {
    let (path, mut items) = load_items()?;
    let idx = find_item_index(&items, id)?;
    // Scope the mutable borrow so `items` can be saved afterwards.
    let item_id = {
        let item = &mut items[idx];
        if let Some(title) = title {
            let title = title.trim();
            // An all-whitespace title is ignored rather than applied.
            if !title.is_empty() {
                item.title = title.to_string();
            }
        }
        if let Some(status) = status {
            item.status = status_to_string(status).to_string();
        }
        if let Some(note) = note {
            let note = note.trim().to_string();
            // Passing an empty note clears the field.
            item.note = if note.is_empty() { None } else { Some(note) };
        }
        item.updated_at = Some(Utc::now().to_rfc3339());
        item.id.clone()
    };
    save_items(&path, &items)?;
    println!("✓ Updated {}", item_id);
    Ok(())
}
/// Set the status of the todo matching `id` (full id or unique prefix),
/// stamp `updated_at`, and persist the list.
fn set_status(id: &str, status: TodoStatusArg) -> Result<()> {
    let (path, mut items) = load_items()?;
    let idx = find_item_index(&items, id)?;
    let item = &mut items[idx];
    item.status = status_to_string(status).to_string();
    item.updated_at = Some(Utc::now().to_rfc3339());
    let item_id = item.id.clone();
    let item_status = item.status.clone();
    save_items(&path, &items)?;
    println!("✓ {} -> {}", item_id, item_status);
    Ok(())
}
/// Delete the todo matching `id` (full id or unique prefix) and persist
/// the shortened list.
fn remove(id: &str) -> Result<()> {
    let (path, mut items) = load_items()?;
    let idx = find_item_index(&items, id)?;
    let removed = items.remove(idx);
    save_items(&path, &items)?;
    println!("✓ Removed {}", removed.id);
    Ok(())
}
/// Map a CLI status argument to its stable on-disk string form; these
/// strings are what `TodoItem.status` stores and compares against.
fn status_to_string(status: TodoStatusArg) -> &'static str {
    match status {
        TodoStatusArg::Pending => "pending",
        TodoStatusArg::InProgress => "in_progress",
        TodoStatusArg::Completed => "completed",
        TodoStatusArg::Blocked => "blocked",
    }
}
/// Load the todo list from `<project_root>/.ai/todos/todos.json`.
///
/// Returns the file path (for later saving) along with the items; a
/// missing or blank file yields an empty list, while an unreadable or
/// malformed file is an error.
fn load_items() -> Result<(PathBuf, Vec<TodoItem>)> {
    let root = project_root();
    let dir = root.join(".ai").join("todos");
    let path = dir.join("todos.json");
    if !path.exists() {
        return Ok((path, Vec::new()));
    }
    let content = fs::read_to_string(&path)
        .with_context(|| format!("failed to read {}", path.display()))?;
    // Treat an empty/whitespace-only file as "no todos" rather than a
    // parse error.
    if content.trim().is_empty() {
        return Ok((path, Vec::new()));
    }
    let items = serde_json::from_str(&content)
        .with_context(|| format!("failed to parse {}", path.display()))?;
    Ok((path, items))
}
/// Write the full todo list back to `path` as pretty-printed JSON,
/// creating parent directories as needed.
fn save_items(path: &Path, items: &[TodoItem]) -> Result<()> {
    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir)?;
    }
    fs::write(path, serde_json::to_string_pretty(items)?)?;
    Ok(())
}
/// Locate a todo by full id or id prefix; errors when the id matches
/// nothing or matches more than one item.
fn find_item_index(items: &[TodoItem], id: &str) -> Result<usize> {
    let matches: Vec<usize> = items
        .iter()
        .enumerate()
        .filter(|(_, item)| item.id == id || item.id.starts_with(id))
        .map(|(idx, _)| idx)
        .collect();
    match matches.as_slice() {
        [] => bail!("Todo '{}' not found", id),
        [only] => Ok(*only),
        _ => bail!("Todo id '{}' is ambiguous", id),
    }
}
/// Decide which AI session (if any) a new todo should reference.
///
/// Precedence: `--no-session` forces none; an explicit session value is
/// used verbatim after trimming (empty treated as none); otherwise fall
/// back to the latest recorded session for the project root, if any.
fn resolve_session_ref(session: Option<&str>, no_session: bool) -> Result<Option<String>> {
    if no_session {
        return Ok(None);
    }
    if let Some(session) = session {
        let trimmed = session.trim();
        return Ok((!trimmed.is_empty()).then(|| trimmed.to_string()));
    }
    let root = project_root();
    // The previous `match Some(x) => Ok(Some(x)), None => Ok(None)` was an
    // identity re-wrap; propagate the call's result directly (the `?`
    // keeps the original error conversion).
    Ok(ai::get_latest_session_ref_for_path(&root)?)
}
/// Project root: the directory holding the nearest `flow.toml`, falling
/// back to the current working directory (or "." when even that fails).
fn project_root() -> PathBuf {
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    match find_flow_toml(&cwd) {
        Some(flow_path) => flow_path.parent().unwrap_or(&cwd).to_path_buf(),
        None => cwd,
    }
}
/// Walk from `start` up through its ancestors and return the first
/// `flow.toml` found, or `None` when no ancestor (up to the filesystem
/// root) contains one.
///
/// Takes `&Path` instead of `&PathBuf` (clippy `ptr_arg`); the existing
/// `&PathBuf` call site keeps compiling via deref coercion.
fn find_flow_toml(start: &Path) -> Option<PathBuf> {
    // `ancestors()` visits `start` itself and then each parent — the same
    // directories the original manual pop-loop checked.
    start
        .ancestors()
        .map(|dir| dir.join("flow.toml"))
        .find(|candidate| candidate.exists())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/log_server.rs | src/log_server.rs | use std::fs;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;
use std::sync::atomic::{AtomicI64, Ordering};
use std::time::Duration;
use anyhow::{Context, Result, bail};
use axum::{
Router,
extract::Query,
http::{Method, StatusCode},
response::{
IntoResponse, Json,
sse::{Event, KeepAlive, Sse},
},
routing::{get, post},
};
use futures::stream::{self, Stream, StreamExt};
use reqwest::blocking::Client;
use serde::Deserialize;
use serde_json::json;
use tower_http::cors::{Any, CorsLayer};
use crate::cli::{ServerAction, ServerOpts};
use crate::log_store::{self, LogEntry, LogQuery};
/// Run the flow HTTP server for log ingestion.
pub fn run(opts: ServerOpts) -> Result<()> {
let host = opts.host.clone();
let port = opts.port;
match opts.action {
Some(ServerAction::Stop) => stop_server(),
Some(ServerAction::Foreground) => run_foreground(&host, port),
None => ensure_server(&host, port),
}
}
/// Ensure server is running in background, start if not
fn ensure_server(host: &str, port: u16) -> Result<()> {
if server_healthy(host, port) {
println!("Flow server already running at http://{}:{}", host, port);
return Ok(());
}
// Kill stale process if exists
if let Some(pid) = load_server_pid()? {
if process_alive(pid) {
terminate_process(pid).ok();
}
remove_server_pid().ok();
}
// Start in background
let exe = std::env::current_exe().context("failed to get current exe")?;
let mut cmd = Command::new(exe);
cmd.arg("server")
.arg("--host")
.arg(host)
.arg("--port")
.arg(port.to_string())
.arg("foreground")
.stdin(std::process::Stdio::null())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null());
let child = cmd.spawn().context("failed to start server process")?;
persist_server_pid(child.id())?;
// Wait for health
for _ in 0..20 {
std::thread::sleep(Duration::from_millis(100));
if server_healthy(host, port) {
println!("Flow server started at http://{}:{}", host, port);
return Ok(());
}
}
println!(
"Flow server starting at http://{}:{} (may take a moment)",
host, port
);
Ok(())
}
/// Run server in foreground (used by background process)
fn run_foreground(host: &str, port: u16) -> Result<()> {
// Initialize database and schema on startup
let conn = log_store::open_log_db().context("failed to initialize log database")?;
drop(conn);
let addr: SocketAddr = format!("{}:{}", host, port)
.parse()
.context("invalid host:port")?;
let rt = tokio::runtime::Runtime::new().context("failed to create tokio runtime")?;
rt.block_on(async {
let cors = CorsLayer::new()
.allow_origin(Any)
.allow_methods([Method::GET, Method::POST, Method::OPTIONS])
.allow_headers(Any);
let router = Router::new()
.route("/health", get(health))
.route("/logs/ingest", post(logs_ingest))
.route("/logs/query", get(logs_query))
.route("/logs/errors/stream", get(logs_errors_stream))
.layer(cors);
let listener = tokio::net::TcpListener::bind(addr)
.await
.context("failed to bind server")?;
axum::serve(listener, router)
.await
.context("server error")?;
Ok(())
})
}
fn stop_server() -> Result<()> {
if let Some(pid) = load_server_pid()? {
terminate_process(pid).ok();
remove_server_pid().ok();
println!("Flow server stopped");
} else {
println!("Flow server not running");
}
Ok(())
}
fn server_pid_path() -> PathBuf {
std::env::var_os("HOME")
.map(PathBuf::from)
.unwrap_or_else(|| PathBuf::from("."))
.join(".config/flow/server.pid")
}
fn load_server_pid() -> Result<Option<u32>> {
let path = server_pid_path();
if !path.exists() {
return Ok(None);
}
let contents = fs::read_to_string(&path)?;
let pid: u32 = contents.trim().parse().unwrap_or(0);
Ok(if pid == 0 { None } else { Some(pid) })
}
fn persist_server_pid(pid: u32) -> Result<()> {
let path = server_pid_path();
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
fs::write(&path, pid.to_string())?;
Ok(())
}
fn remove_server_pid() -> Result<()> {
let path = server_pid_path();
if path.exists() {
fs::remove_file(path).ok();
}
Ok(())
}
fn server_healthy(host: &str, port: u16) -> bool {
let url = format!("http://{}:{}/health", host, port);
Client::builder()
.timeout(Duration::from_millis(500))
.build()
.ok()
.and_then(|c| c.get(&url).send().ok())
.map(|r| r.status().is_success())
.unwrap_or(false)
}
fn process_alive(pid: u32) -> bool {
Command::new("kill")
.arg("-0")
.arg(pid.to_string())
.status()
.map(|s| s.success())
.unwrap_or(false)
}
fn terminate_process(pid: u32) -> Result<()> {
let status = Command::new("kill")
.arg(pid.to_string())
.status()
.context("failed to kill process")?;
if status.success() {
Ok(())
} else {
bail!("kill failed")
}
}
async fn health() -> impl IntoResponse {
Json(json!({ "status": "ok" }))
}
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum IngestRequest {
Single(LogEntry),
Batch(Vec<LogEntry>),
}
async fn logs_ingest(Json(payload): Json<IngestRequest>) -> impl IntoResponse {
let result = tokio::task::spawn_blocking(move || {
let mut conn = match log_store::open_log_db() {
Ok(c) => c,
Err(e) => return Err(e),
};
match payload {
IngestRequest::Single(entry) => {
let id = log_store::insert_log(&conn, &entry)?;
Ok(json!({ "inserted": 1, "ids": [id] }))
}
IngestRequest::Batch(entries) => {
let ids = log_store::insert_logs(&mut conn, &entries)?;
Ok(json!({ "inserted": ids.len(), "ids": ids }))
}
}
})
.await;
match result {
Ok(Ok(response)) => (StatusCode::OK, Json(response)).into_response(),
Ok(Err(err)) => {
tracing::error!(?err, "log ingest failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": err.to_string() })),
)
.into_response()
}
Err(err) => {
tracing::error!(?err, "log ingest task panicked");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "internal error" })),
)
.into_response()
}
}
}
async fn logs_query(Query(query): Query<LogQuery>) -> impl IntoResponse {
let result = tokio::task::spawn_blocking(move || {
let conn = log_store::open_log_db()?;
log_store::query_logs(&conn, &query)
})
.await;
match result {
Ok(Ok(entries)) => (StatusCode::OK, Json(entries)).into_response(),
Ok(Err(err)) => {
tracing::error!(?err, "log query failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": err.to_string() })),
)
.into_response()
}
Err(err) => {
tracing::error!(?err, "log query task panicked");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "internal error" })),
)
.into_response()
}
}
}
/// SSE stream of error logs - polls DB and emits new errors
async fn logs_errors_stream() -> Sse<impl Stream<Item = Result<Event, std::convert::Infallible>>> {
let last_id = Arc::new(AtomicI64::new(0));
// Get current max ID to start from
if let Ok(conn) = log_store::open_log_db() {
if let Ok(entries) = log_store::query_logs(
&conn,
&LogQuery {
log_type: Some("error".to_string()),
limit: 1,
..Default::default()
},
) {
if let Some(entry) = entries.first() {
last_id.store(entry.id, Ordering::SeqCst);
}
}
}
let stream = stream::unfold(last_id, |last_id| async move {
tokio::time::sleep(Duration::from_millis(500)).await;
let current_last = last_id.load(Ordering::SeqCst);
let new_errors = tokio::task::spawn_blocking(move || {
let conn = match log_store::open_log_db() {
Ok(c) => c,
Err(_) => return Vec::new(),
};
log_store::query_logs(
&conn,
&LogQuery {
log_type: Some("error".to_string()),
limit: 100,
..Default::default()
},
)
.unwrap_or_default()
.into_iter()
.filter(|e| e.id > current_last)
.collect::<Vec<_>>()
})
.await
.unwrap_or_default();
let events: Vec<Result<Event, std::convert::Infallible>> = new_errors
.into_iter()
.map(|entry| {
last_id.store(
entry.id.max(last_id.load(Ordering::SeqCst)),
Ordering::SeqCst,
);
let data = serde_json::to_string(&entry).unwrap_or_default();
Ok(Event::default().data(data))
})
.collect();
Some((stream::iter(events), last_id))
})
.flatten();
Sse::new(stream).keep_alive(KeepAlive::default())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/storage.rs | src/storage.rs | use std::io::Read;
use std::path::PathBuf;
use std::process::{Command, Output, Stdio};
use std::thread;
use std::time::{Duration, Instant};
use anyhow::{Context, Result, bail};
use serde::Deserialize;
use crate::cli::{JazzStorageAction, JazzStorageKind, StorageAction, StorageCommand};
use crate::{config, env};
const DEFAULT_JAZZ_API_KEY_MIRROR: &str = "jazz-gitedit-prod";
const DEFAULT_JAZZ_PEER_MIRROR: &str = "wss://cloud.jazz.tools/?key=jazz-gitedit-prod";
const DEFAULT_JAZZ_API_KEY_ENV: &str = "1focus@1focus.app";
const DEFAULT_JAZZ_PEER_ENV: &str = "wss://cloud.jazz.tools/?key=1focus@1focus.app";
#[derive(Debug, Deserialize)]
pub(crate) struct JazzCreateOutput {
#[serde(rename = "accountID")]
pub(crate) account_id: String,
#[serde(rename = "agentSecret")]
pub(crate) agent_secret: String,
}
pub fn run(cmd: StorageCommand) -> Result<()> {
match cmd.action {
StorageAction::Jazz(jazz) => run_jazz(jazz.action),
}
}
fn run_jazz(action: JazzStorageAction) -> Result<()> {
match action {
JazzStorageAction::New {
kind,
name,
peer,
api_key,
environment,
} => jazz_new(kind, name, peer, api_key, &environment),
}
}
fn jazz_new(
kind: JazzStorageKind,
name: Option<String>,
peer: Option<String>,
api_key: Option<String>,
environment: &str,
) -> Result<()> {
let project = get_project_name()?;
let default_name = match kind {
JazzStorageKind::Mirror => format!("{}-jazz-mirror", sanitize_name(&project)),
JazzStorageKind::EnvStore => format!("{}-jazz-env", sanitize_name(&project)),
};
let name = name.unwrap_or(default_name);
let default_peer = match kind {
JazzStorageKind::Mirror => DEFAULT_JAZZ_PEER_MIRROR,
JazzStorageKind::EnvStore => DEFAULT_JAZZ_PEER_ENV,
};
let peer = match (peer, api_key.as_deref()) {
(Some(peer), _) => peer,
(None, Some(key)) => format!("wss://cloud.jazz.tools/?key={}", key),
(None, None) => default_peer.to_string(),
};
let creds = create_jazz_worker_account(&peer, &name)?;
match kind {
JazzStorageKind::Mirror => {
env::set_project_env_var(
"JAZZ_MIRROR_ACCOUNT_ID",
&creds.account_id,
environment,
Some("Jazz mirror worker account ID"),
)?;
env::set_project_env_var(
"JAZZ_MIRROR_ACCOUNT_SECRET",
&creds.agent_secret,
environment,
Some("Jazz mirror worker account secret"),
)?;
}
JazzStorageKind::EnvStore => {
env::set_project_env_var(
"JAZZ_WORKER_ACCOUNT",
&creds.account_id,
environment,
Some("Jazz worker account ID"),
)?;
env::set_project_env_var(
"JAZZ_WORKER_SECRET",
&creds.agent_secret,
environment,
Some("Jazz worker account secret"),
)?;
}
}
if api_key.is_some() {
let key = api_key.unwrap_or_else(|| match kind {
JazzStorageKind::Mirror => DEFAULT_JAZZ_API_KEY_MIRROR.to_string(),
JazzStorageKind::EnvStore => DEFAULT_JAZZ_API_KEY_ENV.to_string(),
});
env::set_project_env_var(
"JAZZ_API_KEY",
&key,
environment,
Some("Jazz API key for cloud sync"),
)?;
}
if peer != default_peer {
let (key, desc) = match kind {
JazzStorageKind::Mirror => (
"JAZZ_MIRROR_SYNC_SERVER",
"Custom Jazz sync server for mirror worker",
),
JazzStorageKind::EnvStore => (
"JAZZ_SYNC_SERVER",
"Custom Jazz sync server for env worker",
),
};
env::set_project_env_var(key, &peer, environment, Some(desc))?;
}
println!("✓ Jazz storage initialized for {}", project);
Ok(())
}
pub(crate) fn create_jazz_worker_account(peer: &str, name: &str) -> Result<JazzCreateOutput> {
let redacted_peer = redact_peer(peer);
println!(
"Creating Jazz worker account '{}' via {} (this can take a minute)...",
name, redacted_peer
);
let output = if let Some(path) = find_in_path("jazz-run") {
println!(
"Running: {} account create --peer {} --name {} --json",
path.display(),
redacted_peer,
name
);
{
let mut cmd = Command::new(path);
cmd.args([
"account",
"create",
"--peer",
peer,
"--name",
name,
"--json",
]);
run_command_with_output(cmd)
}
.context("failed to spawn jazz-run")?
} else {
println!(
"Running: npx --yes jazz-run account create --peer {} --name {} --json",
redacted_peer, name
);
{
let mut cmd = Command::new("npx");
cmd.args([
"--yes",
"jazz-run",
"account",
"create",
"--peer",
peer,
"--name",
name,
"--json",
]);
run_command_with_output(cmd)
}
.context("failed to spawn npx")?
};
if !output.status.success() {
let stdout = String::from_utf8_lossy(&output.stdout);
let stderr = String::from_utf8_lossy(&output.stderr);
bail!(
"jazz account create failed: {}{}",
stdout.trim(),
stderr.trim()
);
}
let stdout = String::from_utf8_lossy(&output.stdout);
let json = extract_json_object(&stdout)
.ok_or_else(|| anyhow::anyhow!("jazz-run did not return JSON output"))?;
let creds: JazzCreateOutput =
serde_json::from_str(json).context("failed to parse jazz-run JSON output")?;
Ok(creds)
}
fn run_command_with_output(mut cmd: Command) -> Result<Output> {
let mut child = cmd
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
let mut stdout = child
.stdout
.take()
.ok_or_else(|| anyhow::anyhow!("failed to capture stdout"))?;
let mut stderr = child
.stderr
.take()
.ok_or_else(|| anyhow::anyhow!("failed to capture stderr"))?;
let stdout_handle = thread::spawn(move || {
let mut buf = Vec::new();
let _ = stdout.read_to_end(&mut buf);
buf
});
let stderr_handle = thread::spawn(move || {
let mut buf = Vec::new();
let _ = stderr.read_to_end(&mut buf);
buf
});
let start = Instant::now();
let mut next_log = Duration::from_secs(10);
let status = loop {
if let Some(status) = child.try_wait()? {
break status;
}
let elapsed = start.elapsed();
if elapsed >= next_log {
println!(
"... still creating Jazz worker account ({}s)",
elapsed.as_secs()
);
next_log += Duration::from_secs(10);
}
thread::sleep(Duration::from_millis(200));
};
let stdout = stdout_handle.join().unwrap_or_default();
let stderr = stderr_handle.join().unwrap_or_default();
Ok(Output {
status,
stdout,
stderr,
})
}
fn redact_peer(peer: &str) -> String {
if let Some(idx) = peer.find("key=") {
let start = idx + 4;
let end = peer[start..]
.find('&')
.map(|offset| start + offset)
.unwrap_or(peer.len());
let mut redacted = peer.to_string();
if start < end && end <= redacted.len() {
redacted.replace_range(start..end, "***");
}
return redacted;
}
peer.to_string()
}
fn extract_json_object(output: &str) -> Option<&str> {
let start = output.find('{')?;
let end = output.rfind('}')?;
if end <= start {
return None;
}
Some(output[start..=end].trim())
}
fn find_in_path(binary: &str) -> Option<PathBuf> {
let path = std::env::var_os("PATH")?;
for dir in std::env::split_paths(&path) {
let candidate = dir.join(binary);
if candidate.is_file() {
return Some(candidate);
}
}
None
}
pub(crate) fn sanitize_name(name: &str) -> String {
let mut out = String::new();
let mut last_dash = false;
for ch in name.chars() {
let ch = ch.to_ascii_lowercase();
if ch.is_ascii_alphanumeric() {
out.push(ch);
last_dash = false;
} else if !last_dash {
out.push('-');
last_dash = true;
}
}
let trimmed = out.trim_matches('-').to_string();
if trimmed.is_empty() {
"flow-jazz-mirror".to_string()
} else {
trimmed
}
}
pub(crate) fn get_project_name() -> Result<String> {
let cwd = std::env::current_dir()?;
let flow_toml = cwd.join("flow.toml");
if flow_toml.exists() {
if let Ok(cfg) = config::load(&flow_toml) {
if let Some(name) = cfg.project_name {
return Ok(name);
}
}
}
Ok(cwd
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("flow")
.to_string())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/watchers.rs | src/watchers.rs | use std::{
path::{Path, PathBuf},
process::Command,
sync::mpsc::{self, Receiver, Sender},
thread,
time::{Duration, Instant},
};
use anyhow::{Context, Result};
use notify::RecursiveMode;
use notify_debouncer_mini::{DebouncedEvent, new_debouncer};
use crate::config::{WatcherConfig, WatcherDriver, expand_path};
pub struct WatchManager {
handles: Vec<WatcherHandle>,
}
impl WatchManager {
pub fn start(configs: &[WatcherConfig]) -> Result<Option<Self>> {
if configs.is_empty() {
return Ok(None);
}
let mut handles = Vec::new();
for cfg in configs.iter().cloned() {
match WatcherHandle::spawn(cfg) {
Ok(handle) => handles.push(handle),
Err(err) => {
tracing::error!(?err, "failed to start watcher");
}
}
}
if handles.is_empty() {
Ok(None)
} else {
Ok(Some(Self { handles }))
}
}
}
impl Drop for WatchManager {
fn drop(&mut self) {
self.handles.clear();
}
}
pub struct WatcherHandle {
shutdown: Option<Sender<()>>,
join: Option<thread::JoinHandle<()>>,
}
impl WatcherHandle {
fn spawn(cfg: WatcherConfig) -> Result<Self> {
match cfg.driver {
WatcherDriver::Shell => Self::spawn_shell(cfg),
WatcherDriver::Poltergeist => Self::spawn_poltergeist(cfg),
}
}
fn spawn_shell(cfg: WatcherConfig) -> Result<Self> {
let (shutdown_tx, shutdown_rx) = mpsc::channel();
let handle = thread::spawn(move || {
if let Err(err) = run_shell_watcher(cfg, shutdown_rx) {
tracing::error!(?err, "watcher exited with error");
}
});
Ok(Self {
shutdown: Some(shutdown_tx),
join: Some(handle),
})
}
fn spawn_poltergeist(cfg: WatcherConfig) -> Result<Self> {
let (shutdown_tx, shutdown_rx) = mpsc::channel();
let handle = thread::spawn(move || {
if let Err(err) = run_poltergeist_watcher(cfg, shutdown_rx) {
tracing::error!(?err, "poltergeist watcher exited with error");
}
});
Ok(Self {
shutdown: Some(shutdown_tx),
join: Some(handle),
})
}
}
impl Drop for WatcherHandle {
fn drop(&mut self) {
if let Some(tx) = self.shutdown.take() {
let _ = tx.send(());
}
if let Some(handle) = self.join.take() {
let _ = handle.join();
}
}
}
fn run_shell_watcher(cfg: WatcherConfig, shutdown: Receiver<()>) -> Result<()> {
let watch_path = expand_path(&cfg.path);
if !watch_path.exists() {
anyhow::bail!(
"watch path {} does not exist (watcher {})",
watch_path.display(),
cfg.name
);
}
let workdir = if watch_path.is_dir() {
watch_path.clone()
} else {
watch_path
.parent()
.map(Path::to_path_buf)
.unwrap_or_else(|| PathBuf::from("."))
};
if cfg.run_on_start {
run_command(&cfg, &workdir);
}
let debounce = Duration::from_millis(cfg.debounce_ms.max(50));
let (event_tx, event_rx) = mpsc::channel();
let mut debouncer =
new_debouncer(debounce, event_tx).context("failed to initialize file watcher")?;
debouncer
.watcher()
.watch(&watch_path, RecursiveMode::Recursive)
.with_context(|| format!("failed to watch path {}", watch_path.display()))?;
tracing::info!(
name = cfg.name,
path = %watch_path.display(),
"watcher started"
);
loop {
if shutdown.try_recv().is_ok() {
break;
}
match event_rx.recv_timeout(Duration::from_millis(200)) {
Ok(Ok(events)) => {
if matches_filter(&events, cfg.filter.as_deref()) {
run_command(&cfg, &workdir);
}
}
Ok(Err(err)) => {
tracing::warn!(?err, watcher = cfg.name, "watcher error");
}
Err(mpsc::RecvTimeoutError::Timeout) => {}
Err(mpsc::RecvTimeoutError::Disconnected) => break,
}
}
tracing::info!(name = cfg.name, "watcher stopped");
Ok(())
}
fn matches_filter(events: &[DebouncedEvent], filter: Option<&str>) -> bool {
match filter {
None => true,
Some(target) => events.iter().any(|event| {
event
.path
.file_name()
.and_then(|name| name.to_str())
.map(|name| name == target || name.contains(target))
.unwrap_or(false)
}),
}
}
fn run_poltergeist_watcher(cfg: WatcherConfig, shutdown: Receiver<()>) -> Result<()> {
let watch_path = expand_path(&cfg.path);
if !watch_path.exists() {
anyhow::bail!(
"watch path {} does not exist (watcher {})",
watch_path.display(),
cfg.name
);
}
let workdir = if watch_path.is_dir() {
watch_path.clone()
} else {
watch_path
.parent()
.map(Path::to_path_buf)
.unwrap_or_else(|| PathBuf::from("."))
};
let poltergeist = cfg.poltergeist.clone().unwrap_or_default();
tracing::info!(
name = cfg.name,
path = %workdir.display(),
mode = %poltergeist.mode.as_subcommand(),
binary = %poltergeist.binary,
"starting poltergeist watcher"
);
let mut command = Command::new(&poltergeist.binary);
command.arg(poltergeist.mode.as_subcommand());
if !poltergeist.args.is_empty() {
command.args(&poltergeist.args);
}
command.current_dir(&workdir);
command.envs(cfg.env.iter().map(|(k, v)| (k, v)));
command.stdout(std::process::Stdio::inherit());
command.stderr(std::process::Stdio::inherit());
let mut child = command
.spawn()
.with_context(|| format!("failed to launch poltergeist for {}", cfg.name))?;
loop {
if shutdown.try_recv().is_ok() {
tracing::info!(name = cfg.name, "stopping poltergeist watcher");
if let Err(err) = child.kill() {
tracing::warn!(
?err,
watcher = cfg.name,
"failed to kill poltergeist process"
);
}
let _ = child.wait();
break;
}
match child.try_wait() {
Ok(Some(status)) => {
if status.success() {
tracing::info!(name = cfg.name, ?status, "poltergeist watcher exited");
} else {
tracing::warn!(
name = cfg.name,
?status,
"poltergeist watcher exited with error"
);
}
break;
}
Ok(None) => {
thread::sleep(Duration::from_millis(500));
}
Err(err) => {
tracing::error!(
?err,
name = cfg.name,
"failed to query poltergeist watcher status"
);
break;
}
}
}
Ok(())
}
fn run_command(cfg: &WatcherConfig, workdir: &Path) {
let Some(command) = cfg
.command
.as_deref()
.map(str::trim)
.filter(|cmd| !cmd.is_empty())
else {
tracing::warn!(name = cfg.name, "watcher missing command; skipping");
return;
};
tracing::info!(
name = cfg.name,
command = command,
"running watcher command"
);
let start = Instant::now();
let mut cmd = Command::new("/bin/sh");
cmd.arg("-c").arg(command).current_dir(workdir);
cmd.envs(cfg.env.iter().map(|(k, v)| (k, v)));
cmd.stdout(std::process::Stdio::null());
cmd.stderr(std::process::Stdio::piped());
match cmd.spawn() {
Ok(mut child) => {
let _ = child.wait();
tracing::info!(name = cfg.name, ?workdir, elapsed = ?start.elapsed(), "watcher command finished");
}
Err(err) => {
tracing::error!(?err, name = cfg.name, "failed to execute watcher command");
}
}
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/deploy.rs | src/deploy.rs | //! Deploy projects to hosts and cloud platforms.
//!
//! Supports:
//! - Linux hosts via SSH (with systemd + nginx)
//! - Cloudflare Workers
//! - Railway
use std::collections::{HashMap, HashSet};
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use anyhow::{Context, Result, bail};
use serde::{Deserialize, Serialize};
use crate::cli::{DeployAction, DeployCommand, EnvAction};
use crate::config::Config;
use crate::deploy_setup::{
CloudflareSetupDefaults,
CloudflareSetupResult,
discover_wrangler_configs,
run_cloudflare_setup,
};
use crate::env::parse_env_file;
/// Host configuration stored globally at ~/.config/flow/deploy.json
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct DeployConfig {
/// SSH user@host:port for linux host deployments.
pub host: Option<HostConnection>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HostConnection {
pub user: String,
pub host: String,
pub port: u16,
}
impl HostConnection {
/// Parse connection string like "user@host:port" or "user@host".
pub fn parse(s: &str) -> Result<Self> {
let (user_host, port) = if let Some((uh, p)) = s.rsplit_once(':') {
(uh, p.parse::<u16>().unwrap_or(22))
} else {
(s, 22)
};
let (user, host) = user_host
.split_once('@')
.context("connection string must be user@host[:port]")?;
Ok(Self {
user: user.to_string(),
host: host.to_string(),
port,
})
}
/// Format as user@host for SSH commands.
pub fn ssh_target(&self) -> String {
format!("{}@{}", self.user, self.host)
}
}
/// Host deployment config from flow.toml [host] section.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct HostConfig {
/// Remote destination path (e.g., /opt/myapp).
pub dest: Option<String>,
/// Setup script to run after syncing.
pub setup: Option<String>,
/// Command to run the service.
pub run: Option<String>,
/// Port the service listens on.
pub port: Option<u16>,
/// Systemd service name.
pub service: Option<String>,
/// Path to .env file for secrets.
pub env_file: Option<String>,
/// Public domain for nginx.
pub domain: Option<String>,
/// Enable SSL via Let's Encrypt.
#[serde(default)]
pub ssl: bool,
}
/// Cloudflare deployment config from flow.toml [cloudflare] section.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct CloudflareConfig {
/// Path to worker directory (relative to project root).
pub path: Option<String>,
/// Path to .env file for secrets.
pub env_file: Option<String>,
/// Env source for secrets ("1focus" or "file").
pub env_source: Option<String>,
/// Specific env keys to fetch when env_source = "1focus".
#[serde(default)]
pub env_keys: Vec<String>,
/// Env keys to set as non-secret vars when env_source = "1focus".
#[serde(default)]
pub env_vars: Vec<String>,
/// Default values for env vars (key/value).
#[serde(default)]
pub env_defaults: HashMap<String, String>,
/// Secret keys to bootstrap directly in Cloudflare.
#[serde(default)]
pub bootstrap_secrets: Vec<String>,
/// Optional Jazz sync peer for bootstrap (env store).
pub bootstrap_jazz_peer: Option<String>,
/// Optional Jazz worker account name for bootstrap (env store).
pub bootstrap_jazz_name: Option<String>,
/// Optional Jazz sync peer for bootstrap (auth store).
pub bootstrap_jazz_auth_peer: Option<String>,
/// Optional Jazz worker account name for bootstrap (auth store).
pub bootstrap_jazz_auth_name: Option<String>,
/// Env apply mode: "always", "auto", or "never".
pub env_apply: Option<String>,
/// Wrangler environment name (e.g., staging).
#[serde(default, alias = "env")]
pub environment: Option<String>,
/// Custom deploy command.
pub deploy: Option<String>,
/// Custom dev command.
pub dev: Option<String>,
/// URL for health checks (e.g., https://my-worker.workers.dev).
pub url: Option<String>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EnvApplyMode {
Always,
Auto,
Never,
}
fn env_apply_mode_from_str(value: Option<&str>) -> EnvApplyMode {
match value.map(|s| s.to_ascii_lowercase()) {
Some(ref v) if v == "always" => EnvApplyMode::Always,
Some(ref v) if v == "auto" => EnvApplyMode::Auto,
Some(ref v) if v == "never" => EnvApplyMode::Never,
_ => EnvApplyMode::Never,
}
}
fn is_tls_connect_error(err: &anyhow::Error) -> bool {
let msg = format!("{err:#}");
msg.contains("certificate was not trusted")
|| msg.contains("client error (Connect)")
|| msg.contains("failed to connect to 1focus")
}
/// Railway deployment config from flow.toml [railway] section.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct RailwayConfig {
/// Railway project ID.
pub project: Option<String>,
/// Service name.
pub service: Option<String>,
/// Environment (production, staging).
pub environment: Option<String>,
/// Start command.
pub start: Option<String>,
/// Path to .env file.
pub env_file: Option<String>,
}
/// Get the deploy config file path.
fn deploy_config_path() -> PathBuf {
dirs::config_dir()
.unwrap_or_else(|| PathBuf::from("."))
.join("flow")
.join("deploy.json")
}
/// Load global deploy config.
pub fn load_deploy_config() -> Result<DeployConfig> {
let path = deploy_config_path();
if path.exists() {
let content = fs::read_to_string(&path)?;
Ok(serde_json::from_str(&content).unwrap_or_default())
} else {
Ok(DeployConfig::default())
}
}
/// Save global deploy config.
pub fn save_deploy_config(config: &DeployConfig) -> Result<()> {
let path = deploy_config_path();
if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?;
}
let content = serde_json::to_string_pretty(config)?;
fs::write(&path, content)?;
Ok(())
}
/// Run the deploy command.
pub fn run(cmd: DeployCommand) -> Result<()> {
let project_root = std::env::current_dir()?;
let config_path = project_root.join("flow.toml");
let flow_config = if config_path.exists() {
Some(crate::config::load(&config_path)?)
} else {
None
};
match cmd.action {
None => {
// Auto-detect platform from flow.toml
auto_deploy(&project_root, flow_config.as_ref())
}
Some(DeployAction::Host { remote_build, setup }) => {
deploy_host(&project_root, flow_config.as_ref(), remote_build, setup)
}
Some(DeployAction::Cloudflare { secrets, dev }) => {
deploy_cloudflare(&project_root, flow_config.as_ref(), secrets, dev)
}
Some(DeployAction::Setup) => setup_cloudflare(&project_root, flow_config.as_ref()),
Some(DeployAction::Railway) => deploy_railway(&project_root, flow_config.as_ref()),
Some(DeployAction::Status) => show_status(&project_root, flow_config.as_ref()),
Some(DeployAction::Logs { follow, lines }) => {
show_logs(&project_root, flow_config.as_ref(), follow, lines)
}
Some(DeployAction::Restart) => restart_service(&project_root, flow_config.as_ref()),
Some(DeployAction::Stop) => stop_service(&project_root, flow_config.as_ref()),
Some(DeployAction::Shell) => open_shell(),
Some(DeployAction::SetHost { connection }) => set_host(&connection),
Some(DeployAction::ShowHost) => show_host(),
Some(DeployAction::Health { url, status }) => {
check_health(&project_root, flow_config.as_ref(), url, status)
}
}
}
/// Auto-detect platform and deploy.
fn auto_deploy(project_root: &Path, config: Option<&Config>) -> Result<()> {
let config = config.context("No flow.toml found. Run 'f init' first.")?;
// Check which platform configs exist
if config.host.is_some() {
println!("Detected [host] config, deploying to Linux host...");
return deploy_host(project_root, Some(config), false, false);
}
if config.cloudflare.is_some() {
println!("Detected [cloudflare] config, deploying to Cloudflare...");
return deploy_cloudflare(project_root, Some(config), false, false);
}
if config.railway.is_some() {
println!("Detected [railway] config, deploying to Railway...");
return deploy_railway(project_root, Some(config));
}
bail!(
"No deployment config found in flow.toml.\n\n\
Add one of:\n\
[host]\n\
dest = \"/opt/myapp\"\n\
run = \"./server\"\n\n\
[cloudflare]\n\
path = \"worker\"\n\n\
[railway]\n\
project = \"my-project\"\n\n\
Or run:\n\
f deploy setup"
);
}
/// Deploy to a Linux host via SSH.
fn deploy_host(
project_root: &Path,
config: Option<&Config>,
_remote_build: bool,
force_setup: bool,
) -> Result<()> {
let deploy_config = load_deploy_config()?;
let conn = deploy_config
.host
.as_ref()
.context("No host configured. Run: f deploy set-host user@host:port")?;
let host_cfg = config
.and_then(|c| c.host.as_ref())
.context("No [host] section in flow.toml")?;
let dest = host_cfg.dest.as_deref().unwrap_or("/opt/app");
let service_name = host_cfg
.service
.as_deref()
.unwrap_or_else(|| project_root.file_name().unwrap().to_str().unwrap());
println!("Deploying to {}:{}", conn.ssh_target(), dest);
// 1. Sync files via rsync
println!("\n==> Syncing files...");
rsync_upload(project_root, conn, dest)?;
// 2. Copy env file if specified
if let Some(env_file) = &host_cfg.env_file {
let local_env = project_root.join(env_file);
if local_env.exists() {
println!("==> Copying {}...", env_file);
let remote_env = format!("{}/.env", dest);
scp_file(&local_env, conn, &remote_env)?;
}
}
// 3. Run setup script if needed
if let Some(setup) = &host_cfg.setup {
if force_setup || !service_exists(conn, service_name)? {
println!("==> Running setup...");
ssh_run(conn, &format!("cd {} && {}", dest, setup))?;
}
}
// 4. Create/update systemd service
if let Some(run_cmd) = &host_cfg.run {
println!("==> Configuring systemd service: {}", service_name);
create_systemd_service(conn, service_name, dest, run_cmd, host_cfg)?;
}
// 5. Configure nginx if domain specified
if let Some(domain) = &host_cfg.domain {
if let Some(port) = host_cfg.port {
println!("==> Configuring nginx for {}", domain);
setup_nginx(conn, domain, port, host_cfg.ssl)?;
}
}
// 6. Restart service
println!("==> Starting service...");
ssh_run(conn, &format!("systemctl restart {}", service_name))?;
println!("\n✓ Deployed successfully!");
if let Some(domain) = &host_cfg.domain {
let scheme = if host_cfg.ssl { "https" } else { "http" };
println!(" URL: {}://{}", scheme, domain);
}
Ok(())
}
/// Deploy to Cloudflare Workers.
///
/// Resolves the worker directory from `[cloudflare].path` (defaulting to the
/// project root), optionally syncs env vars/secrets (from 1focus or an env
/// file), then runs the configured deploy command — or the dev command when
/// `dev_mode` is set.
///
/// `set_secrets` forces env application regardless of the configured
/// `env_apply` mode.
fn deploy_cloudflare(
    project_root: &Path,
    config: Option<&Config>,
    set_secrets: bool,
    dev_mode: bool,
) -> Result<()> {
    let default_cf = CloudflareConfig::default();
    let cf_cfg = config
        .and_then(|c| c.cloudflare.as_ref())
        .unwrap_or(&default_cf);
    let worker_path = cf_cfg
        .path
        .as_ref()
        .map(|p| project_root.join(p))
        .unwrap_or_else(|| project_root.to_path_buf());
    ensure_wrangler_config(&worker_path)?;
    let env_name = cf_cfg.environment.as_deref();
    let env_apply_mode = if set_secrets {
        EnvApplyMode::Always
    } else {
        env_apply_mode_from_str(cf_cfg.env_apply.as_deref())
    };
    let should_apply = matches!(env_apply_mode, EnvApplyMode::Always | EnvApplyMode::Auto);
    let use_1focus = is_1focus_source(cf_cfg.env_source.as_deref());
    let onefocus_env = env_name.unwrap_or("production");
    let mut onefocus_vars: HashMap<String, String> = HashMap::new();
    let mut onefocus_loaded = false;
    if use_1focus {
        let keys = collect_cloudflare_env_keys(cf_cfg);
        // Seed with configured defaults so keys missing from the remote store
        // still get a value; blank defaults are ignored.
        if !cf_cfg.env_defaults.is_empty() {
            for key in &keys {
                if let Some(value) = cf_cfg.env_defaults.get(key) {
                    if !value.trim().is_empty() {
                        onefocus_vars.insert(key.clone(), value.clone());
                    }
                }
            }
        }
        if !keys.is_empty() {
            match crate::env::fetch_project_env_vars(onefocus_env, &keys) {
                Ok(vars) => {
                    if !vars.is_empty() {
                        onefocus_loaded = true;
                    }
                    onefocus_vars.extend(vars);
                }
                Err(err) => {
                    // Env sync is best-effort: warn and continue with defaults.
                    // (The former Auto / Always / fallback arms printed the
                    // same generic message except for the Auto+TLS hint, so
                    // they are collapsed into one branch here.)
                    if env_apply_mode == EnvApplyMode::Auto && is_tls_connect_error(&err) {
                        eprintln!(
                            "⚠ Unable to reach 1focus (TLS/connect). Skipping env sync for now."
                        );
                    } else {
                        eprintln!("⚠ Env sync skipped: {err}");
                    }
                }
            }
        }
    }
    if should_apply {
        if use_1focus {
            if onefocus_loaded {
                apply_cloudflare_env_map(project_root, cf_cfg, &onefocus_vars)?;
            } else if env_apply_mode == EnvApplyMode::Always {
                eprintln!(
                    "⚠ No env vars found in 1focus for environment '{}' (using defaults only).",
                    onefocus_env
                );
            }
        } else if let Some(env_file) = &cf_cfg.env_file {
            let env_path = project_root.join(env_file);
            if env_path.exists() {
                println!("==> Setting secrets from {}...", env_file);
                set_wrangler_secrets(&worker_path, &env_path, env_name, None)?;
            }
        }
    }
    // Deploy or dev
    let cmd = if dev_mode {
        cf_cfg.dev.as_deref().unwrap_or("wrangler dev")
    } else {
        cf_cfg.deploy.as_deref().unwrap_or("wrangler deploy")
    };
    let cmd = append_env_arg(cmd, env_name);
    println!("==> Running: {}", cmd);
    let mut deploy_cmd = Command::new("sh");
    deploy_cmd
        .arg("-c")
        .arg(cmd)
        .current_dir(&worker_path)
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit());
    // Also expose fetched vars to the deploy process environment.
    if use_1focus && !onefocus_vars.is_empty() {
        deploy_cmd.envs(&onefocus_vars);
    }
    let status = deploy_cmd.status()?;
    if !status.success() {
        bail!("Cloudflare deployment failed");
    }
    println!("\n✓ Deployed to Cloudflare!");
    Ok(())
}
/// Apply 1focus-sourced env vars to the configured Cloudflare worker.
/// Fails when flow.toml has no `[cloudflare]` section.
pub fn apply_cloudflare_env(project_root: &Path, config: Option<&Config>) -> Result<()> {
    let cf_cfg = config
        .and_then(|cfg| cfg.cloudflare.as_ref())
        .context("No [cloudflare] section in flow.toml")?;
    apply_cloudflare_env_from_config(project_root, cf_cfg)
}
/// Write explicit key/value secrets to the configured Cloudflare worker.
///
/// Secrets are applied in sorted key order so output is deterministic.
pub fn set_cloudflare_secrets(
    project_root: &Path,
    config: Option<&Config>,
    secrets: &HashMap<String, String>,
) -> Result<()> {
    let cf_cfg = config
        .and_then(|c| c.cloudflare.as_ref())
        .context("No [cloudflare] section in flow.toml")?;
    let worker_path = match cf_cfg.path.as_ref() {
        Some(p) => project_root.join(p),
        None => project_root.to_path_buf(),
    };
    ensure_wrangler_config(&worker_path)?;
    let env_name = cf_cfg.environment.as_deref();
    let mut entries: Vec<(&String, &String)> = secrets.iter().collect();
    entries.sort_by(|a, b| a.0.cmp(b.0));
    for (key, value) in entries {
        println!("  Setting secret {}...", key);
        set_wrangler_secret_value(&worker_path, env_name, key, value)?;
    }
    Ok(())
}
/// Fetch all configured env keys from 1focus and apply them to the worker.
/// Fails when `env_source` is not 1focus or when the store has no values.
fn apply_cloudflare_env_from_config(project_root: &Path, cf_cfg: &CloudflareConfig) -> Result<()> {
    if !is_1focus_source(cf_cfg.env_source.as_deref()) {
        bail!("cloudflare.env_source must be set to \"1focus\" to apply envs");
    }
    let onefocus_env = cf_cfg.environment.as_deref().unwrap_or("production");
    let keys = collect_cloudflare_env_keys(cf_cfg);
    let vars = crate::env::fetch_project_env_vars(onefocus_env, &keys)?;
    if vars.is_empty() {
        bail!("No env vars found in 1focus for environment '{}'", onefocus_env);
    }
    apply_cloudflare_env_map(project_root, cf_cfg, &vars)
}
/// Union of `env_keys` and `env_vars` from the config, de-duplicated with
/// first occurrence winning and original order preserved.
fn collect_cloudflare_env_keys(cf_cfg: &CloudflareConfig) -> Vec<String> {
    let mut seen = HashSet::new();
    cf_cfg
        .env_keys
        .iter()
        .chain(cf_cfg.env_vars.iter())
        .filter(|key| seen.insert((*key).clone()))
        .cloned()
        .collect()
}
/// Apply a resolved env map to the worker: keys listed in `env_vars` become
/// plain wrangler vars, everything else becomes a secret.
fn apply_cloudflare_env_map(
    project_root: &Path,
    cf_cfg: &CloudflareConfig,
    vars: &HashMap<String, String>,
) -> Result<()> {
    let worker_path = match cf_cfg.path.as_ref() {
        Some(p) => project_root.join(p),
        None => project_root.to_path_buf(),
    };
    ensure_wrangler_config(&worker_path)?;
    let wrangler_env = cf_cfg.environment.as_deref();
    let var_keys: HashSet<String> = cf_cfg.env_vars.iter().cloned().collect();
    println!("==> Applying {} env var(s) from 1focus...", vars.len());
    set_wrangler_env_map(&worker_path, wrangler_env, vars, &var_keys)
}
/// Fail with a helpful message unless one of the wrangler config variants
/// (toml, jsonc, json) exists in `worker_path`.
fn ensure_wrangler_config(worker_path: &Path) -> Result<()> {
    let candidates = ["wrangler.toml", "wrangler.jsonc", "wrangler.json"];
    if candidates.iter().any(|name| worker_path.join(name).exists()) {
        return Ok(());
    }
    bail!(
        "No wrangler config found in {}.\n\
         Create a wrangler.toml or run: npx wrangler init",
        worker_path.display()
    );
}
/// Build a `Command` for invoking wrangler inside `worker_path`.
///
/// Preference order: the project-local node_modules binary, then
/// `pnpm exec wrangler` when a package.json is present, then a global
/// `wrangler` resolved from PATH. The working directory is always set.
fn wrangler_command(worker_path: &Path) -> Command {
    let local_bin = worker_path.join("node_modules").join(".bin").join("wrangler");
    let mut cmd = if local_bin.exists() {
        Command::new(local_bin)
    } else if worker_path.join("package.json").exists() {
        let mut pnpm = Command::new("pnpm");
        pnpm.args(["exec", "wrangler"]);
        pnpm
    } else {
        Command::new("wrangler")
    };
    cmd.current_dir(worker_path);
    cmd
}
/// True when the configured env source names the 1focus store:
/// "1focus", "1f", or "onefocus" (ASCII case-insensitive).
fn is_1focus_source(source: Option<&str>) -> bool {
    match source {
        Some(s) => {
            let lowered = s.to_ascii_lowercase();
            lowered == "1focus" || lowered == "1f" || lowered == "onefocus"
        }
        None => false,
    }
}
/// Push a map of env values to wrangler: keys listed in `var_keys` are set as
/// plain vars, everything else as secrets.
///
/// Keys are processed in sorted order so output and wrangler invocations are
/// deterministic (previously this iterated the HashMap directly, giving a
/// random order per run, unlike `set_cloudflare_secrets` which sorts).
fn set_wrangler_env_map(
    worker_path: &Path,
    env_name: Option<&str>,
    vars: &HashMap<String, String>,
    var_keys: &HashSet<String>,
) -> Result<()> {
    let mut keys: Vec<&String> = vars.keys().collect();
    keys.sort();
    for key in keys {
        let value = &vars[key];
        if var_keys.contains(key) {
            println!("    Setting var {}...", key);
            set_wrangler_var_value(worker_path, env_name, key, value)?;
        } else {
            println!("    Setting secret {}...", key);
            set_wrangler_secret_value(worker_path, env_name, key, value)?;
        }
    }
    Ok(())
}
/// Set a plain (non-secret) worker variable via `wrangler vars set`.
/// The value is passed on the command line; wrangler output is shown.
fn set_wrangler_var_value(
    worker_path: &Path,
    env_name: Option<&str>,
    key: &str,
    value: &str,
) -> Result<()> {
    let mut cmd = wrangler_command(worker_path);
    cmd.args(["vars", "set", key, value]);
    if let Some(env) = env_name {
        cmd.args(["--env", env]);
    }
    cmd.stdin(Stdio::null())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit());
    if cmd.status()?.success() {
        Ok(())
    } else {
        bail!("Failed to set wrangler var {}", key)
    }
}
/// Set a worker secret by piping the value into `wrangler secret put` on
/// stdin, so it never appears on the command line or in process listings.
fn set_wrangler_secret_value(
    worker_path: &Path,
    env_name: Option<&str>,
    key: &str,
    value: &str,
) -> Result<()> {
    let mut cmd = wrangler_command(worker_path);
    cmd.args(["secret", "put", key]);
    if let Some(env) = env_name {
        cmd.args(["--env", env]);
    }
    cmd.stdin(Stdio::piped())
        .stdout(Stdio::null())
        .stderr(Stdio::null());
    let mut child = cmd.spawn()?;
    if let Some(mut stdin) = child.stdin.take() {
        writeln!(stdin, "{}", value)?;
    }
    if child.wait()?.success() {
        Ok(())
    } else {
        bail!("Failed to set wrangler secret {}", key)
    }
}
/// Interactive setup for Cloudflare deployments.
///
/// With `env_source = "1focus"` this is non-interactive: it locates the worker
/// config, optionally bootstraps secrets, probes the env store, and runs the
/// guide/apply flow. Otherwise it runs the setup wizard and persists the
/// answers to flow.toml.
fn setup_cloudflare(project_root: &Path, config: Option<&Config>) -> Result<()> {
    let default_cf = CloudflareConfig::default();
    let cf_cfg = config
        .and_then(|c| c.cloudflare.as_ref())
        .unwrap_or(&default_cf);
    if is_1focus_source(cf_cfg.env_source.as_deref()) {
        let worker_path = if let Some(path) = cf_cfg.path.as_ref() {
            project_root.join(path)
        } else {
            // No explicit path: auto-discover, but refuse to guess between
            // multiple workers.
            let workers = discover_wrangler_configs(project_root)?;
            if workers.is_empty() {
                println!("No Cloudflare Worker config found (wrangler.toml/json).");
                println!("Run `wrangler init` first, then try: f deploy setup");
                return Ok(());
            }
            if workers.len() > 1 {
                bail!(
                    "Multiple Cloudflare worker configs found. Set [cloudflare].path in flow.toml."
                );
            }
            workers[0].clone()
        };
        ensure_wrangler_config(&worker_path)?;
        println!("Using Cloudflare worker: {}", worker_path.display());
        if !cf_cfg.bootstrap_secrets.is_empty() {
            println!("==> Bootstrapping secrets (optional)...");
            crate::env::run(Some(EnvAction::Bootstrap))?;
        }
        let env_name = cf_cfg
            .environment
            .clone()
            .unwrap_or_else(|| "production".to_string());
        let keys = collect_cloudflare_env_keys(cf_cfg);
        // Probe the env store first so we can degrade gracefully when 1focus
        // is unreachable.
        let env_store_ok = if keys.is_empty() {
            true
        } else {
            match crate::env::fetch_project_env_vars(&env_name, &keys) {
                Ok(vars) => !vars.is_empty(),
                Err(err) => {
                    eprintln!("⚠ Env store unavailable: {err}");
                    false
                }
            }
        };
        if env_store_ok {
            crate::env::run(Some(EnvAction::Guide { environment: env_name }))?;
            crate::env::run(Some(EnvAction::Apply))?;
        } else {
            eprintln!("⚠ Skipping env guide/apply (1focus unavailable).");
        }
        println!("\n✓ Cloudflare deploy setup complete.");
        return Ok(());
    }
    // From here on env_source is NOT 1focus (the branch above always returns
    // for the 1focus case), so the former is_1focus_source() re-checks below
    // were dead code and have been removed.
    let defaults = CloudflareSetupDefaults {
        worker_path: cf_cfg
            .path
            .as_ref()
            .map(|p| project_root.join(p)),
        env_file: cf_cfg.env_file.as_ref().map(|p| project_root.join(p)),
        environment: cf_cfg.environment.clone(),
    };
    let result = run_cloudflare_setup(project_root, defaults)?;
    let Some(result) = result else {
        return Ok(());
    };
    let flow_path = project_root.join("flow.toml");
    if !flow_path.exists() {
        bail!("flow.toml not found. Run `f init` first.");
    }
    update_flow_toml_cloudflare(&flow_path, project_root, &result)?;
    if result.apply_secrets {
        if let Some(env_file) = result.env_file.as_ref() {
            let env_name = result.environment.as_deref();
            set_wrangler_secrets(
                &result.worker_path,
                env_file,
                env_name,
                Some(&result.selected_keys),
            )?;
        }
    }
    println!("\n✓ Cloudflare deploy setup complete.");
    Ok(())
}
/// Deploy to Railway.
///
/// Requires the Railway CLI. Optionally links a project/environment and
/// pushes env vars from a local file before running `railway up --detach`.
fn deploy_railway(project_root: &Path, config: Option<&Config>) -> Result<()> {
    let default_rail = RailwayConfig::default();
    let rail_cfg = config
        .and_then(|c| c.railway.as_ref())
        .unwrap_or(&default_rail);
    // The CLI is a hard requirement for everything below.
    if which::which("railway").is_err() {
        bail!("Railway CLI not found. Install: npm install -g @railway/cli");
    }
    // Link only when both a project and an environment are configured.
    if let (Some(project), Some(env)) = (&rail_cfg.project, &rail_cfg.environment) {
        println!("==> Linking to Railway project...");
        let link_status = Command::new("railway")
            .args(["link", project, "--environment", env])
            .current_dir(project_root)
            .status()?;
        if !link_status.success() {
            bail!("Failed to link Railway project");
        }
    }
    // Push env vars when an env file is configured and actually exists.
    if let Some(env_file) = &rail_cfg.env_file {
        let env_path = project_root.join(env_file);
        if env_path.exists() {
            println!("==> Setting environment variables...");
            set_railway_env(&env_path)?;
        }
    }
    println!("==> Deploying to Railway...");
    let up_status = Command::new("railway")
        .args(["up", "--detach"])
        .current_dir(project_root)
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .status()?;
    if !up_status.success() {
        bail!("Railway deployment failed");
    }
    println!("\n✓ Deployed to Railway!");
    Ok(())
}
/// Show deployment status: the configured host, plus the systemd unit's
/// active state when a service name is configured.
fn show_status(_project_root: &Path, config: Option<&Config>) -> Result<()> {
    let deploy_config = load_deploy_config()?;
    println!("Deployment Status\n");
    match &deploy_config.host {
        Some(conn) => {
            println!("Host: {}@{}:{}", conn.user, conn.host, conn.port);
            let service = config
                .and_then(|c| c.host.as_ref())
                .and_then(|h| h.service.as_ref());
            if let Some(service) = service {
                // `|| echo inactive` keeps the remote command from failing
                // when the unit does not exist.
                let output = ssh_capture(conn, &format!("systemctl is-active {} 2>/dev/null || echo inactive", service))?;
                println!("  Service '{}': {}", service, output.trim());
            }
        }
        None => println!("Host: not configured"),
    }
    Ok(())
}
/// Show deployment logs: wrangler tail for Cloudflare projects, otherwise
/// journalctl for the configured systemd service over SSH.
fn show_logs(project_root: &Path, config: Option<&Config>, follow: bool, lines: usize) -> Result<()> {
    // Cloudflare projects stream logs through wrangler, not SSH.
    if let Some(cf_cfg) = config.and_then(|c| c.cloudflare.as_ref()) {
        return show_cloudflare_logs(project_root, cf_cfg, follow, lines);
    }
    let deploy_config = load_deploy_config()?;
    let conn = deploy_config.host.as_ref().context("No host configured")?;
    let host_cfg = config.and_then(|c| c.host.as_ref());
    let service = host_cfg
        .and_then(|h| h.service.as_ref())
        .context("No service name in [host] config")?;
    let follow_flag = if follow { "-f" } else { "" };
    let remote_cmd = format!(
        "journalctl -u {} -n {} {} --no-pager",
        service, lines, follow_flag
    );
    ssh_run(conn, &remote_cmd)
}
/// Tail Cloudflare Worker logs via `wrangler tail`.
///
/// `lines` is accepted for interface symmetry but unused: wrangler tail has
/// no backlog — it only streams new events until interrupted.
fn show_cloudflare_logs(
    project_root: &Path,
    cf_cfg: &CloudflareConfig,
    follow: bool,
    lines: usize,
) -> Result<()> {
    let worker_path = match cf_cfg.path.as_ref() {
        Some(p) => project_root.join(p),
        None => project_root.to_path_buf(),
    };
    ensure_wrangler_config(&worker_path)?;
    if !follow {
        eprintln!("Note: wrangler tail streams logs until you stop it (Ctrl+C).");
        let _ = lines;
    }
    let mut cmd = wrangler_command(&worker_path);
    cmd.arg("tail").args(["--format", "pretty"]);
    if let Some(env) = cf_cfg.environment.as_deref() {
        cmd.args(["--env", env]);
    }
    cmd.stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit());
    if cmd.status()?.success() {
        Ok(())
    } else {
        bail!("Cloudflare log tail failed")
    }
}
/// Restart the deployed systemd service over SSH.
fn restart_service(_project_root: &Path, config: Option<&Config>) -> Result<()> {
    let deploy_config = load_deploy_config()?;
    let conn = deploy_config.host.as_ref().context("No host configured")?;
    let host_cfg = config.and_then(|c| c.host.as_ref());
    let service = host_cfg.and_then(|h| h.service.as_ref()).context("No service name")?;
    println!("Restarting {}...", service);
    let remote_cmd = format!("systemctl restart {}", service);
    ssh_run(conn, &remote_cmd)?;
    println!("✓ Restarted");
    Ok(())
}
/// Stop the deployed systemd service over SSH.
fn stop_service(_project_root: &Path, config: Option<&Config>) -> Result<()> {
    let deploy_config = load_deploy_config()?;
    let conn = deploy_config.host.as_ref().context("No host configured")?;
    let host_cfg = config.and_then(|c| c.host.as_ref());
    let service = host_cfg.and_then(|h| h.service.as_ref()).context("No service name")?;
    println!("Stopping {}...", service);
    let remote_cmd = format!("systemctl stop {}", service);
    ssh_run(conn, &remote_cmd)?;
    println!("✓ Stopped");
    Ok(())
}
/// Open an interactive SSH shell to the configured host.
fn open_shell() -> Result<()> {
    let deploy_config = load_deploy_config()?;
    let conn = deploy_config.host.as_ref().context("No host configured")?;
    println!("Connecting to {}...", conn.ssh_target());
    let port = conn.port.to_string();
    let target = conn.ssh_target();
    let status = Command::new("ssh")
        .args(["-p", &port, &target])
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .status()?;
    if status.success() {
        Ok(())
    } else {
        bail!("SSH connection failed")
    }
}
/// Parse and persist the deploy host connection string (user@host:port).
fn set_host(connection: &str) -> Result<()> {
    let conn = HostConnection::parse(connection)?;
    let mut deploy_config = load_deploy_config()?;
    deploy_config.host = Some(conn.clone());
    save_deploy_config(&deploy_config)?;
    println!("✓ Host set: {}@{}:{}", conn.user, conn.host, conn.port);
    println!("\nTest connection: f deploy shell");
    Ok(())
}
/// Print the currently configured deploy host, or a hint for setting one.
fn show_host() -> Result<()> {
    let config = load_deploy_config()?;
    match &config.host {
        Some(conn) => println!("Host: {}@{}:{}", conn.user, conn.host, conn.port),
        None => {
            println!("No host configured.");
            println!("Set one with: f deploy set-host user@host:port");
        }
    }
    Ok(())
}
// ─────────────────────────────────────────────────────────────
// SSH/rsync helpers
// ─────────────────────────────────────────────────────────────
/// Run a remote command over SSH with stdio attached to the terminal.
/// Fails if SSH cannot start or the remote command exits non-zero.
fn ssh_run(conn: &HostConnection, cmd: &str) -> Result<()> {
    let port = conn.port.to_string();
    let target = conn.ssh_target();
    // accept-new avoids interactive host-key prompts on first connect while
    // still rejecting changed keys.
    let status = Command::new("ssh")
        .args(["-p", &port, "-o", "StrictHostKeyChecking=accept-new", &target, cmd])
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .status()
        .context("Failed to run SSH")?;
    if status.success() {
        Ok(())
    } else {
        bail!("SSH command failed: {}", cmd)
    }
}
/// Run a remote command over SSH and return its stdout (lossy UTF-8).
///
/// NOTE(review): the exit status is deliberately not checked — callers embed
/// shell fallbacks like `|| echo inactive`; confirm before reusing this for
/// commands that can fail silently.
fn ssh_capture(conn: &HostConnection, cmd: &str) -> Result<String> {
    let port = conn.port.to_string();
    let target = conn.ssh_target();
    let output = Command::new("ssh")
        .args(["-p", &port, "-o", "StrictHostKeyChecking=accept-new", &target, cmd])
        .output()
        .context("Failed to run SSH")?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}
/// Sync directory via rsync.
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/commit.rs | src/commit.rs | //! AI-powered git commit command using OpenAI.
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::io::{self, Write};
use std::net::IpAddr;
use std::env;
use std::path::Path;
use std::process::{Command, Stdio};
use std::time::Duration;
use anyhow::{Context, Result, bail};
use clap::ValueEnum;
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tempfile::NamedTempFile;
use tracing::{debug, info};
use crate::ai;
use crate::config;
use crate::hub;
use crate::notify;
/// OpenAI model used for commit-message generation.
const MODEL: &str = "gpt-4.1-nano";
/// Maximum diff characters included in the prompt before truncation.
const MAX_DIFF_CHARS: usize = 12_000;
/// Loopback address where the local flow hub is probed.
const HUB_HOST: IpAddr = IpAddr::V4(std::net::Ipv4Addr::new(127, 0, 0, 1));
/// TCP port of the local flow hub.
const HUB_PORT: u16 = 9050;
/// Patterns for files that likely contain secrets and shouldn't be committed.
///
/// Matching (see `check_sensitive_files`) is case-insensitive and compares
/// each staged path's file name exactly, as a suffix, and as a `/pattern`
/// substring of the full path.
const SENSITIVE_PATTERNS: &[&str] = &[
    // dotenv-style environment files
    ".env",
    ".env.local",
    ".env.production",
    ".env.development",
    ".env.staging",
    // credential/secret JSON blobs
    "credentials.json",
    "secrets.json",
    "service-account.json",
    // key material and keystores
    ".pem",
    ".key",
    ".p12",
    ".pfx",
    ".keystore",
    "id_rsa",
    "id_ed25519",
    "id_ecdsa",
    "id_dsa",
    // tool configs that often embed auth tokens
    ".npmrc",
    ".pypirc",
    ".netrc",
    // password files
    "htpasswd",
    ".htpasswd",
    "shadow",
    "passwd",
];
/// System prompt for the commit-message completion: imperative mood,
/// <72-char subject, optional bulleted body, no surrounding quotes, and an
/// explicit ban on echoing secrets that may appear in the diff.
const SYSTEM_PROMPT: &str = "You are an expert software engineer who writes clear, concise git commit messages. Use imperative mood, keep the subject line under 72 characters, and include an optional body with bullet points if helpful. Never wrap the message in quotes. Never include secrets, credentials, or file contents from .env files, environment variables, keys, or other sensitive data—even if they appear in the diff.";
/// Review model override parsed from the CLI (clap `ValueEnum`).
#[derive(Copy, Clone, Debug, ValueEnum)]
pub enum ReviewModelArg {
    /// Use Claude Opus 1 for review.
    ClaudeOpus,
    /// Use Codex high-capacity review (gpt-5.1-codex-max).
    CodexHigh,
    /// Use Codex mini review model (gpt-5.1-codex-mini).
    CodexMini,
}
impl ReviewModelArg {
fn as_arg(&self) -> &'static str {
match self {
ReviewModelArg::ClaudeOpus => "claude-opus",
ReviewModelArg::CodexHigh => "codex-high",
ReviewModelArg::CodexMini => "codex-mini",
}
}
}
/// Codex review model tier.
#[derive(Copy, Clone, Debug)]
pub enum CodexModel {
    /// High-capacity tier (maps to gpt-5.1-codex-max).
    High,
    /// Mini tier (maps to gpt-5.1-codex-mini).
    Mini,
}
impl CodexModel {
fn as_codex_arg(&self) -> &'static str {
match self {
CodexModel::High => "gpt-5.1-codex-max",
CodexModel::Mini => "gpt-5.1-codex-mini",
}
}
}
/// Claude review model choice.
#[derive(Copy, Clone, Debug)]
pub enum ClaudeModel {
    /// Maps to claude-sonnet-4-20250514.
    Sonnet,
    /// Maps to claude-opus-1.
    Opus,
}
impl ClaudeModel {
fn as_claude_arg(&self) -> &'static str {
match self {
ClaudeModel::Sonnet => "claude-sonnet-4-20250514",
ClaudeModel::Opus => "claude-opus-1",
}
}
}
/// Resolved choice of review backend: a Codex tier or a Claude model.
#[derive(Copy, Clone, Debug)]
pub enum ReviewSelection {
    Codex(CodexModel),
    Claude(ClaudeModel),
}
impl ReviewSelection {
    /// True when a Claude model was selected.
    fn is_claude(&self) -> bool {
        matches!(self, Self::Claude(_))
    }
    /// True when a Codex model was selected.
    fn is_codex(&self) -> bool {
        matches!(self, Self::Codex(_))
    }
    /// Map back to the CLI override value, if one exists. Claude Sonnet is
    /// the implicit default and has no CLI variant, hence `None`.
    fn review_model_arg(&self) -> Option<ReviewModelArg> {
        match *self {
            Self::Codex(CodexModel::High) => Some(ReviewModelArg::CodexHigh),
            Self::Codex(CodexModel::Mini) => Some(ReviewModelArg::CodexMini),
            Self::Claude(ClaudeModel::Opus) => Some(ReviewModelArg::ClaudeOpus),
            Self::Claude(ClaudeModel::Sonnet) => None,
        }
    }
    /// Concrete model identifier for display/logging.
    fn model_label(&self) -> &'static str {
        match *self {
            Self::Codex(model) => model.as_codex_arg(),
            Self::Claude(model) => model.as_claude_arg(),
        }
    }
}
/// Check staged files for potentially sensitive content and warn the user.
/// Returns list of sensitive files found.
///
/// Best-effort: any git failure yields an empty list rather than an error.
fn check_sensitive_files(repo_root: &Path) -> Vec<String> {
    let output = match Command::new("git")
        .args(["diff", "--cached", "--name-only"])
        .current_dir(repo_root)
        .output()
    {
        Ok(out) if out.status.success() => out,
        _ => return Vec::new(),
    };
    let files = String::from_utf8_lossy(&output.stdout);
    files
        .lines()
        .filter(|file| {
            let file_lower = file.to_lowercase();
            let file_name = Path::new(file)
                .file_name()
                .and_then(|n| n.to_str())
                .unwrap_or(file)
                .to_lowercase();
            // A file is sensitive if any pattern matches its name exactly,
            // as a suffix, or as a path component prefix ("/pattern").
            SENSITIVE_PATTERNS.iter().any(|pattern| {
                let pattern_lower = pattern.to_lowercase();
                file_name == pattern_lower
                    || file_name.ends_with(&pattern_lower)
                    || file_lower.contains(&format!("/{}", pattern_lower))
            })
        })
        .map(|file| file.to_string())
        .collect()
}
/// Warn about sensitive files and optionally abort.
///
/// Advisory only: prints the list plus remediation hints and always returns
/// Ok — it never blocks the commit.
fn warn_sensitive_files(files: &[String]) -> Result<()> {
    if files.is_empty() {
        return Ok(());
    }
    println!("\n⚠️  Warning: Potentially sensitive files detected:");
    for file in files {
        println!("  - {}", file);
    }
    println!();
    println!("These files may contain secrets. Consider:");
    println!("  - Adding them to .gitignore");
    println!("  - Using `git reset HEAD <file>` to unstage");
    println!();
    // For now just warn, don't block
    // In the future, could add --force flag to skip this check
    Ok(())
}
/// Threshold for "large" file changes (lines added + removed).
/// Files at or above this are flagged as likely generated/lock files.
const LARGE_DIFF_THRESHOLD: usize = 500;
/// Check for files with unusually large diffs.
/// Returns list of (filename, lines_changed) for files over threshold,
/// sorted descending by size. Best-effort: git errors yield an empty list.
fn check_large_diffs(repo_root: &Path) -> Vec<(String, usize)> {
    let output = match Command::new("git")
        .args(["diff", "--cached", "--numstat"])
        .current_dir(repo_root)
        .output()
    {
        Ok(out) if out.status.success() => out,
        _ => return Vec::new(),
    };
    let stats = String::from_utf8_lossy(&output.stdout);
    let mut large_files: Vec<(String, usize)> = stats
        .lines()
        .filter_map(|line| {
            // numstat format: added<TAB>removed<TAB>filename
            // Binary files report "-" which parses to 0 here.
            let parts: Vec<&str> = line.split('\t').collect();
            if parts.len() < 3 {
                return None;
            }
            let added: usize = parts[0].parse().unwrap_or(0);
            let removed: usize = parts[1].parse().unwrap_or(0);
            let total = added + removed;
            (total >= LARGE_DIFF_THRESHOLD).then(|| (parts[2].to_string(), total))
        })
        .collect();
    large_files.sort_by(|a, b| b.1.cmp(&a.1));
    large_files
}
/// Warn about files with large diffs.
///
/// Advisory only: prints the offenders with line counts plus remediation
/// hints and always returns Ok.
fn warn_large_diffs(files: &[(String, usize)]) -> Result<()> {
    if files.is_empty() {
        return Ok(());
    }
    println!("⚠️  Warning: Files with large diffs ({}+ lines):", LARGE_DIFF_THRESHOLD);
    for (file, lines) in files {
        println!("  - {} ({} lines)", file, lines);
    }
    println!();
    println!("These might be generated/lock files. Consider:");
    println!("  - Adding them to .gitignore if generated");
    println!("  - Using `git reset HEAD <file>` to unstage");
    println!();
    Ok(())
}
/// Resolve the review backend from the legacy CLI flags.
///
/// An explicit override always wins; otherwise `use_claude` selects Claude
/// Sonnet, and the fallback default is Codex High.
pub fn resolve_review_selection(
    use_claude: bool,
    override_model: Option<ReviewModelArg>,
) -> ReviewSelection {
    match override_model {
        Some(ReviewModelArg::ClaudeOpus) => ReviewSelection::Claude(ClaudeModel::Opus),
        Some(ReviewModelArg::CodexHigh) => ReviewSelection::Codex(CodexModel::High),
        Some(ReviewModelArg::CodexMini) => ReviewSelection::Codex(CodexModel::Mini),
        None if use_claude => ReviewSelection::Claude(ClaudeModel::Sonnet),
        None => ReviewSelection::Codex(CodexModel::High),
    }
}
/// New default: Claude is default, --codex flag to use Codex.
///
/// An explicit override always wins; otherwise `use_codex` selects Codex
/// High, and the fallback default is Claude Sonnet.
pub fn resolve_review_selection_v2(
    use_codex: bool,
    override_model: Option<ReviewModelArg>,
) -> ReviewSelection {
    match override_model {
        Some(ReviewModelArg::ClaudeOpus) => ReviewSelection::Claude(ClaudeModel::Opus),
        Some(ReviewModelArg::CodexHigh) => ReviewSelection::Codex(CodexModel::High),
        Some(ReviewModelArg::CodexMini) => ReviewSelection::Codex(CodexModel::Mini),
        None if use_codex => ReviewSelection::Codex(CodexModel::High),
        None => ReviewSelection::Claude(ClaudeModel::Sonnet),
    }
}
/// JSON payload expected back from the review model.
#[derive(Debug, Deserialize)]
struct ReviewJson {
    // true when the reviewer flagged at least one problem
    issues_found: bool,
    #[serde(default)]
    issues: Vec<String>,
    #[serde(default)]
    summary: Option<String>,
}
/// Request body sent to the remote review service.
#[derive(Debug, Serialize)]
struct RemoteReviewRequest {
    // diff text to review
    diff: String,
    // optional session context accompanying the diff
    context: Option<String>,
    model: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    review_instructions: Option<String>,
}
/// Response body from the remote review service.
#[derive(Debug, Deserialize)]
struct RemoteReviewResponse {
    // reviewer output text
    output: String,
    #[serde(default)]
    stderr: String,
}
/// Normalized outcome of a review run.
#[derive(Debug)]
struct ReviewResult {
    issues_found: bool,
    issues: Vec<String>,
    summary: Option<String>,
    // true when the review was cut short (presumably by a timeout)
    timed_out: bool,
}
/// Snapshot of the staged state; `patch_path` points at a saved patch file
/// when one was captured (usage not visible in this chunk).
#[derive(Debug)]
struct StagedSnapshot {
    patch_path: Option<std::path::PathBuf>,
}
/// OpenAI chat-completions request body.
#[derive(Debug, Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<Message>,
    temperature: f32,
}
/// A single chat message (role + content) in the OpenAI API shape.
#[derive(Debug, Serialize)]
struct Message {
    role: String,
    content: String,
}
/// OpenAI chat-completions response body (only the fields we read).
#[derive(Debug, Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}
/// One completion choice in the OpenAI response.
#[derive(Debug, Deserialize)]
struct Choice {
    message: Option<ResponseMessage>,
}
/// Message payload within a completion choice.
#[derive(Debug, Deserialize)]
struct ResponseMessage {
    content: String,
}
/// Dry run: show the context that would be passed to Codex without committing.
///
/// Prints, in order: the last commit checkpoint, the AI session context
/// collected since that checkpoint, and the (possibly truncated) diff that
/// would be reviewed. Makes no repository changes except `git add --dry-run`
/// when no diff exists.
pub fn dry_run_context() -> Result<()> {
    println!("Dry run: showing context that would be passed to Codex\n");
    // Ensure we're in a git repo
    ensure_git_repo()?;
    // Show checkpoint info
    let cwd = std::env::current_dir()?;
    let checkpoints = ai::load_checkpoints(&cwd).unwrap_or_default();
    println!("────────────────────────────────────────");
    println!("COMMIT CHECKPOINT");
    println!("────────────────────────────────────────");
    if let Some(ref checkpoint) = checkpoints.last_commit {
        println!("Last commit: {}", checkpoint.timestamp);
        if let Some(ref ts) = checkpoint.last_entry_timestamp {
            println!("Last entry included: {}", ts);
        }
        if let Some(ref sid) = checkpoint.session_id {
            // Show only a short session-id prefix; min() guards short ids.
            println!("Session: {}...", &sid[..8.min(sid.len())]);
        }
    } else {
        println!("No previous checkpoint (first commit with context)");
    }
    // Get diff: prefer staged changes, fall back to the working tree.
    let diff = git_capture(&["diff", "--cached"]).or_else(|_| git_capture(&["diff"]))?;
    if diff.trim().is_empty() {
        println!("\nNo changes to show (no staged or unstaged diff)");
        println!("\nTrying to show what would be staged with 'git add .'...");
        git_run(&["add", "--dry-run", "."])?;
    }
    // Get AI session context since checkpoint
    println!("\n────────────────────────────────────────");
    println!("AI SESSION CONTEXT (since checkpoint)");
    println!("────────────────────────────────────────");
    match ai::get_context_since_checkpoint() {
        Ok(Some(context)) => {
            println!(
                "Context length: {} chars, {} lines\n",
                context.len(),
                context.lines().count()
            );
            println!("{}", context);
        }
        Ok(None) => {
            println!("No new AI session context since last checkpoint.");
            println!("\nThis could mean:");
            println!("  - No exchanges since last commit");
            println!("  - No Claude Code or Codex session in this project");
        }
        Err(e) => {
            println!("Error getting context: {}", e);
        }
    }
    println!("────────────────────────────────────────");
    println!("\nDiff that would be reviewed:");
    println!("────────────────────────────────────────");
    let (diff_for_prompt, truncated) = truncate_diff(&diff);
    println!("{}", diff_for_prompt);
    if truncated {
        println!("\n[Diff truncated to {} chars]", MAX_DIFF_CHARS);
    }
    println!("────────────────────────────────────────");
    Ok(())
}
/// Run the commit workflow: stage, generate message, commit, push.
/// If hub is running, delegates to it for async execution.
pub fn run(push: bool) -> Result<()> {
    if hub::hub_healthy(HUB_HOST, HUB_PORT) {
        // Hub is alive: let it execute the workflow asynchronously.
        delegate_to_hub(push)
    } else {
        run_sync(push)
    }
}
/// Run commit synchronously (called directly or by hub).
///
/// Pipeline: stage everything, warn about sensitive/large files, generate an
/// AI commit message from the staged diff, commit, then optionally push with
/// an automatic `pull --rebase` retry when the remote is ahead. Finally syncs
/// to gitedit when the mirror is enabled.
///
/// Fix: the loop over commit-message paragraphs iterated `¶graphs`, an
/// HTML-entity mangling of `&paragraphs` that does not compile; restored.
pub fn run_sync(push: bool) -> Result<()> {
    info!(push = push, "starting commit workflow");
    // Ensure we're in a git repo
    ensure_git_repo()?;
    debug!("verified git repository");
    // Get API key
    let api_key = get_openai_key()?;
    debug!("got OpenAI API key");
    // Stage all changes
    print!("Staging changes... ");
    io::stdout().flush()?;
    git_run(&["add", "."])?;
    println!("done");
    debug!("staged all changes");
    // Check for sensitive files before proceeding
    let cwd = std::env::current_dir()?;
    let sensitive_files = check_sensitive_files(&cwd);
    warn_sensitive_files(&sensitive_files)?;
    // Check for files with large diffs
    let large_diffs = check_large_diffs(&cwd);
    warn_large_diffs(&large_diffs)?;
    // Get diff
    let diff = git_capture(&["diff", "--cached"])?;
    if diff.trim().is_empty() {
        bail!("No staged changes to commit");
    }
    debug!(diff_len = diff.len(), "got cached diff");
    // Get status
    let status = git_capture(&["status", "--short"]).unwrap_or_default();
    debug!(status_lines = status.lines().count(), "got git status");
    // Truncate diff if needed (keeps the prompt under MAX_DIFF_CHARS)
    let (diff_for_prompt, truncated) = truncate_diff(&diff);
    debug!(
        truncated = truncated,
        prompt_len = diff_for_prompt.len(),
        "prepared diff for prompt"
    );
    // Generate commit message
    print!("Generating commit message... ");
    io::stdout().flush()?;
    info!(model = MODEL, "calling OpenAI API");
    let message = generate_commit_message(&api_key, &diff_for_prompt, &status, truncated)?;
    println!("done\n");
    debug!(message_len = message.len(), "got commit message");
    // Show the message
    println!("Commit message:");
    println!("────────────────────────────────────────");
    println!("{}", message);
    println!("────────────────────────────────────────\n");
    // Commit: pass each paragraph as its own -m so git keeps the body intact.
    let paragraphs = split_paragraphs(&message);
    debug!(
        paragraphs = paragraphs.len(),
        "split message into paragraphs"
    );
    let mut args = vec!["commit"];
    for p in &paragraphs {
        args.push("-m");
        args.push(p);
    }
    git_run(&args)?;
    println!("✓ Committed");
    info!("created commit");
    // Push if requested
    if push {
        print!("Pushing... ");
        io::stdout().flush()?;
        match git_try(&["push"]) {
            Ok(_) => {
                println!("done");
                info!("pushed to remote");
            }
            Err(_) => {
                // Push failed, likely remote has new commits
                println!("failed (remote ahead)");
                print!("Pulling with rebase... ");
                io::stdout().flush()?;
                match git_try(&["pull", "--rebase"]) {
                    Ok(_) => {
                        println!("done");
                        print!("Pushing... ");
                        io::stdout().flush()?;
                        git_run(&["push"])?;
                        println!("done");
                        info!("pulled and pushed to remote");
                    }
                    Err(_) => {
                        println!("conflict!");
                        println!();
                        println!("Rebase conflict detected. Resolve manually:");
                        println!("  1. Fix conflicts in the listed files");
                        println!("  2. git add <files>");
                        println!("  3. git rebase --continue");
                        println!("  4. git push");
                        println!();
                        println!("Or abort with: git rebase --abort");
                        bail!("Rebase conflict - manual resolution required");
                    }
                }
            }
        }
    }
    // Sync to gitedit if enabled
    let cwd = std::env::current_dir().unwrap_or_default();
    if gitedit_mirror_enabled() {
        sync_to_gitedit(&cwd, "commit", &[], None, None);
    }
    Ok(())
}
/// Run commit with code review: stage, review with Codex or Claude, generate message, commit, push.
/// If hub is running, delegates to it for async execution.
pub fn run_with_check(
    push: bool,
    include_context: bool,
    review_selection: ReviewSelection,
    author_message: Option<&str>,
    max_tokens: usize,
) -> Result<()> {
    let delegate = commit_with_check_async_enabled() && hub::hub_healthy(HUB_HOST, HUB_PORT);
    if delegate {
        return delegate_to_hub_with_check(
            "commitWithCheck",
            push,
            include_context,
            review_selection,
            author_message,
            max_tokens,
        );
    }
    // Hub unavailable or async disabled: run inline, GitEdit sync off.
    run_with_check_sync(
        push,
        include_context,
        review_selection,
        author_message,
        max_tokens,
        false,
    )
}
/// Run commitWithCheck and always sync AI sessions to GitEdit, ignoring the
/// configured mirroring setting.
///
/// Delegates to the hub when async mode is enabled and the hub is reachable.
pub fn run_with_check_with_gitedit(
    push: bool,
    include_context: bool,
    review_selection: ReviewSelection,
    author_message: Option<&str>,
    max_tokens: usize,
) -> Result<()> {
    let use_hub = commit_with_check_async_enabled() && hub::hub_healthy(HUB_HOST, HUB_PORT);
    if use_hub {
        return delegate_to_hub_with_check(
            "commit", // CLI command name
            push,
            include_context,
            review_selection,
            author_message,
            max_tokens,
        );
    }
    // `true`: force the GitEdit sync regardless of configuration.
    run_with_check_sync(
        push,
        include_context,
        review_selection,
        author_message,
        max_tokens,
        true,
    )
}
/// Whether commitWithCheck may run asynchronously via the hub.
///
/// A local `flow.toml` in the cwd takes precedence over the global config;
/// a missing or unreadable setting defaults to enabled (`true`).
fn commit_with_check_async_enabled() -> bool {
    if let Ok(cwd) = std::env::current_dir() {
        let local = cwd.join("flow.toml");
        if local.exists() {
            // Local config exists: it decides, even if it fails to parse.
            return match config::load(&local) {
                Ok(cfg) => cfg.options.commit_with_check_async.unwrap_or(true),
                Err(_) => true,
            };
        }
    }
    let global = config::default_config_path();
    if global.exists() {
        if let Ok(cfg) = config::load(&global) {
            return cfg.options.commit_with_check_async.unwrap_or(true);
        }
    }
    true
}
/// Whether commitWithCheck should operate from the git repository root
/// instead of the current directory.
///
/// Local `flow.toml` wins over the global config; default is `true`.
fn commit_with_check_use_repo_root() -> bool {
    if let Ok(cwd) = std::env::current_dir() {
        let local = cwd.join("flow.toml");
        if local.exists() {
            return match config::load(&local) {
                Ok(cfg) => cfg.options.commit_with_check_use_repo_root.unwrap_or(true),
                Err(_) => true,
            };
        }
    }
    let global = config::default_config_path();
    if global.exists() {
        if let Ok(cfg) = config::load(&global) {
            return cfg.options.commit_with_check_use_repo_root.unwrap_or(true);
        }
    }
    true
}
/// Resolve the working root for commitWithCheck.
///
/// Returns the git toplevel directory by default, or the current directory
/// when `commit_with_check_use_repo_root` is disabled. Fails when git cannot
/// report a toplevel (not a repo, or empty output).
fn resolve_commit_with_check_root() -> Result<std::path::PathBuf> {
    if !commit_with_check_use_repo_root() {
        return std::env::current_dir().context("failed to get current directory");
    }
    let output = Command::new("git")
        .args(["rev-parse", "--show-toplevel"])
        .output()
        .context("failed to run git rev-parse --show-toplevel")?;
    if !output.status.success() {
        bail!("failed to resolve git repo root");
    }
    // Trim the trailing newline git prints after the path.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let root = stdout.trim();
    if root.is_empty() {
        bail!("git repo root was empty");
    }
    Ok(std::path::PathBuf::from(root))
}
/// Review timeout for commitWithCheck, in seconds.
///
/// Local `flow.toml` wins over the global config; default is 120 seconds.
fn commit_with_check_timeout_secs() -> u64 {
    const DEFAULT_TIMEOUT: u64 = 120;
    if let Ok(cwd) = std::env::current_dir() {
        let local = cwd.join("flow.toml");
        if local.exists() {
            return match config::load(&local) {
                Ok(cfg) => cfg
                    .options
                    .commit_with_check_timeout_secs
                    .unwrap_or(DEFAULT_TIMEOUT),
                Err(_) => DEFAULT_TIMEOUT,
            };
        }
    }
    let global = config::default_config_path();
    if global.exists() {
        if let Ok(cfg) = config::load(&global) {
            return cfg
                .options
                .commit_with_check_timeout_secs
                .unwrap_or(DEFAULT_TIMEOUT);
        }
    }
    DEFAULT_TIMEOUT
}
/// Optional review endpoint URL for commitWithCheck.
///
/// Resolution order: `FLOW_REVIEW_URL` env var, local `flow.toml`, then the
/// global config. Blank (whitespace-only) values are treated as unset at
/// every level, falling through to the next source.
fn commit_with_check_review_url() -> Option<String> {
    if let Ok(url) = env::var("FLOW_REVIEW_URL") {
        let trimmed = url.trim();
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    if let Ok(cwd) = std::env::current_dir() {
        let local = cwd.join("flow.toml");
        if local.exists() {
            if let Ok(cfg) = config::load(&local) {
                if let Some(url) = cfg.options.commit_with_check_review_url {
                    let trimmed = url.trim();
                    if !trimmed.is_empty() {
                        return Some(trimmed.to_string());
                    }
                }
            }
        }
    }
    let global = config::default_config_path();
    if global.exists() {
        if let Ok(cfg) = config::load(&global) {
            if let Some(url) = cfg.options.commit_with_check_review_url {
                let trimmed = url.trim();
                if !trimmed.is_empty() {
                    return Some(trimmed.to_string());
                }
            }
        }
    }
    None
}
/// Optional review auth token for commitWithCheck.
///
/// Same resolution order as the review URL: `FLOW_REVIEW_TOKEN` env var,
/// local `flow.toml`, then the global config; blank values count as unset.
fn commit_with_check_review_token() -> Option<String> {
    if let Ok(token) = env::var("FLOW_REVIEW_TOKEN") {
        let trimmed = token.trim();
        if !trimmed.is_empty() {
            return Some(trimmed.to_string());
        }
    }
    if let Ok(cwd) = std::env::current_dir() {
        let local = cwd.join("flow.toml");
        if local.exists() {
            if let Ok(cfg) = config::load(&local) {
                if let Some(token) = cfg.options.commit_with_check_review_token {
                    let trimmed = token.trim();
                    if !trimmed.is_empty() {
                        return Some(trimmed.to_string());
                    }
                }
            }
        }
    }
    let global = config::default_config_path();
    if global.exists() {
        if let Ok(cfg) = config::load(&global) {
            if let Some(token) = cfg.options.commit_with_check_review_token {
                let trimmed = token.trim();
                if !trimmed.is_empty() {
                    return Some(trimmed.to_string());
                }
            }
        }
    }
    None
}
/// Ask a yes/no question on stdout and read one line from stdin.
///
/// Returns `true` only for `y`/`yes` (case-insensitive); anything else —
/// including an empty line — counts as "no", matching the `[y/N]` default.
fn prompt_yes_no(message: &str) -> Result<bool> {
    print!("{} [y/N]: ", message);
    io::stdout().flush()?;
    let mut line = String::new();
    io::stdin().read_line(&mut line)?;
    let reply = line.trim().to_ascii_lowercase();
    Ok(matches!(reply.as_str(), "y" | "yes"))
}
/// Run commit with code review synchronously (called directly or by hub).
/// If `include_context` is true, AI session context is passed for better understanding.
/// `review_selection` determines whether Claude or Codex runs and which model is used.
/// If `author_message` is provided, it's appended to the commit message.
pub fn run_with_check_sync(
push: bool,
include_context: bool,
review_selection: ReviewSelection,
author_message: Option<&str>,
max_tokens: usize,
force_gitedit: bool,
) -> Result<()> {
// Convert tokens to chars (roughly 4 chars per token)
let max_context = max_tokens * 4;
info!(
push = push,
include_context = include_context,
review_model = review_selection.model_label(),
max_tokens = max_tokens,
"starting commit with check workflow"
);
// Ensure we're in a git repo
ensure_git_repo()?;
let repo_root = resolve_commit_with_check_root()?;
// Capture current staged changes so we can restore if we cancel.
let staged_snapshot = capture_staged_snapshot_in(&repo_root)?;
// Run pre-commit fixers if configured
if let Ok(fixed) = run_fixers(&repo_root) {
if fixed {
println!();
}
}
// Stage all changes
print!("Staging changes... ");
io::stdout().flush()?;
git_run_in(&repo_root, &["add", "."])?;
println!("done");
// Check for sensitive files before proceeding
let sensitive_files = check_sensitive_files(&repo_root);
warn_sensitive_files(&sensitive_files)?;
// Check for files with large diffs
let large_diffs = check_large_diffs(&repo_root);
warn_large_diffs(&large_diffs)?;
// Get diff
let diff = git_capture_in(&repo_root, &["diff", "--cached"])?;
if diff.trim().is_empty() {
println!("\nnotify: No staged changes to commit");
bail!("No staged changes to commit");
}
// Get AI session context since last checkpoint (if enabled)
let session_context = if include_context {
ai::get_context_since_checkpoint_for_path(&repo_root)
.ok()
.flatten()
.map(|context| truncate_context(&context, max_context))
} else {
None
};
if let Some(context) = session_context.as_ref() {
let line_count = context.lines().count();
println!(
"Using AI session context ({} chars, {} lines) since last checkpoint",
context.len(),
line_count
);
if should_show_review_context() {
println!("--- AI session context ---");
println!("{}", context);
println!("--- End AI session context ---");
}
}
// Get custom review instructions from [commit] config
let review_instructions = get_review_instructions(&repo_root);
// Run code review (Codex or Claude)
if review_selection.is_claude() {
println!("\nRunning Claude code review...");
} else {
println!("\nRunning Codex code review...");
}
println!("Model: {}", review_selection.model_label());
if session_context.is_some() {
println!("(with AI session context)");
}
if review_instructions.is_some() {
println!("(with custom review instructions)");
}
println!("────────────────────────────────────────");
let review = match review_selection {
ReviewSelection::Claude(model) => {
run_claude_review(&diff, session_context.as_deref(), review_instructions.as_deref(), &repo_root, model)
}
ReviewSelection::Codex(model) => {
run_codex_review(&diff, session_context.as_deref(), review_instructions.as_deref(), &repo_root, model)
}
};
let review = match review {
Ok(review) => review,
Err(err) => {
restore_staged_snapshot_in(&repo_root, &staged_snapshot)?;
return Err(err);
}
};
println!("────────────────────────────────────────\n");
// Log review result for async tracking
let context_chars = session_context.as_ref().map(|c| c.len()).unwrap_or(0);
ai::log_review_result(
&repo_root,
review.issues_found,
&review.issues,
context_chars,
0, // TODO: track actual review time
);
if review.timed_out {
println!(
"⚠ Review timed out after {}s, proceeding anyway",
commit_with_check_timeout_secs()
);
}
// Show review results (informational only, never blocks)
if review.issues_found {
if let Some(summary) = review.summary.as_ref() {
if !summary.trim().is_empty() {
println!("Summary: {}", summary.trim());
println!();
}
}
if !review.issues.is_empty() {
println!("Issues found:");
for issue in &review.issues {
println!("- {}", issue);
}
println!();
// Send notification for critical issues (secrets, security)
let critical_issues: Vec<_> = review
.issues
.iter()
.filter(|i| {
let lower = i.to_lowercase();
lower.contains("secret")
|| lower.contains(".env")
|| lower.contains("credential")
|| lower.contains("api key")
|| lower.contains("password")
|| lower.contains("token")
|| lower.contains("security")
|| lower.contains("vulnerability")
})
.collect();
if !critical_issues.is_empty() {
let alert_msg = format!(
"⚠️ Review found {} critical issue(s): {}",
critical_issues.len(),
critical_issues
.iter()
.map(|s| s.as_str())
.collect::<Vec<_>>()
.join("; ")
);
// Truncate if too long
let alert_msg = if alert_msg.len() > 200 {
format!("{}...", &alert_msg[..200])
} else {
alert_msg
};
let _ = notify::send_warning(&alert_msg);
// Also try to POST to 1focus
send_to_1focus(&repo_root, &review.issues, review.summary.as_deref());
}
}
println!("Proceeding with commit...");
} else if !review.timed_out {
println!("✓ Review passed");
}
// Continue with normal commit flow
let api_key = get_openai_key()?;
// Get status
let status = git_capture_in(&repo_root, &["status", "--short"]).unwrap_or_default();
// Truncate diff if needed
let (diff_for_prompt, truncated) = truncate_diff(&diff);
// Generate commit message
print!("Generating commit message... ");
io::stdout().flush()?;
let message = generate_commit_message(&api_key, &diff_for_prompt, &status, truncated)?;
println!("done\n");
let mut gitedit_sessions: Vec<ai::GitEditSessionData> = Vec::new();
let mut gitedit_session_hash: Option<String> = None;
let gitedit_enabled =
force_gitedit || gitedit_mirror_enabled_for_commit_with_check(&repo_root);
if gitedit_enabled {
match ai::get_sessions_for_gitedit(&repo_root) {
Ok(sessions) => {
if !sessions.is_empty() {
// Get owner/repo for the hash
if let Some((owner, repo)) = get_gitedit_project(&repo_root) {
gitedit_session_hash = gitedit_sessions_hash(&owner, &repo, &sessions);
}
gitedit_sessions = sessions;
}
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | true |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/parallel.rs | src/parallel.rs | //! Parallel task runner with pretty status display.
use std::io::{self, Write};
use std::process::Stdio;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use anyhow::{Result, bail};
use crossterm::terminal;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio::sync::{Mutex, Semaphore};
// ANSI escape codes
const RESET: &str = "\x1b[0m"; // reset all text attributes
const BOLD: &str = "\x1b[1m";
const DIM: &str = "\x1b[2m";
const RED: &str = "\x1b[31m";
const GREEN: &str = "\x1b[32m";
const BLUE: &str = "\x1b[34m";
const MAGENTA: &str = "\x1b[35m";
const CYAN: &str = "\x1b[36m";
const CLEAR_LINE: &str = "\x1b[2K"; // erase the entire current line
const HIDE_CURSOR: &str = "\x1b[?25l";
const SHOW_CURSOR: &str = "\x1b[?25h";
// Spinner frames
// Braille-dot animation frames; the color list cycles independently of the
// frame list, so the spinner shifts hue as it spins.
const SPINNER_FRAMES: &[&str] = &["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
const SPINNER_COLORS: &[&str] = &[CYAN, BLUE, MAGENTA, BLUE];
/// Lifecycle state of one task in the parallel runner.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TaskStatus {
    /// Queued; has not started yet.
    Pending,
    /// Currently executing.
    Running,
    /// Process exited with code 0.
    Success,
    /// Process exited non-zero or failed to spawn.
    Failure,
    /// Never started because an earlier failure stopped the run (fail-fast).
    Skipped,
}
/// One shell command tracked by the parallel runner.
#[derive(Debug, Clone)]
pub struct Task {
    // Display name shown in the live status list.
    pub label: String,
    // Shell command; executed via `sh -c`.
    pub command: String,
    // Current lifecycle state.
    pub status: TaskStatus,
    // Most recent output line, shown next to the spinner while running.
    pub last_line: String,
    // Exit code once finished; -1 on spawn failure or when no code is available.
    pub exit_code: Option<i32>,
    // Captured stdout/stderr lines (each stored with a trailing newline).
    pub output: Vec<String>,
    // Wall-clock runtime, set when the process finishes.
    pub duration: Option<Duration>,
}
impl Task {
pub fn new(label: impl Into<String>, command: impl Into<String>) -> Self {
Self {
label: label.into(),
command: command.into(),
status: TaskStatus::Pending,
last_line: String::new(),
exit_code: None,
output: Vec::new(),
duration: None,
}
}
}
/// Runs a set of shell tasks concurrently with a live terminal status display.
pub struct ParallelRunner {
    // Shared task list; mutated by worker tasks and read by the renderer.
    tasks: Arc<Mutex<Vec<Task>>>,
    // Maximum number of tasks running at once (semaphore permit count).
    max_jobs: usize,
    // When true, the first failure causes remaining unstarted tasks to be skipped.
    fail_fast: bool,
    // Frame counter driving the animated spinner.
    spinner_index: AtomicUsize,
    // How many status lines the last render printed (for cursor repositioning).
    lines_printed: AtomicUsize,
    // Set on fail-fast failure (and at the end of the run) to stop workers/spinner.
    should_stop: AtomicBool,
    // Exit code of the first task that failed; becomes the overall result.
    first_failure_code: Arc<Mutex<Option<i32>>>,
}
impl ParallelRunner {
pub fn new(tasks: Vec<Task>, max_jobs: usize, fail_fast: bool) -> Self {
Self {
tasks: Arc::new(Mutex::new(tasks)),
max_jobs,
fail_fast,
spinner_index: AtomicUsize::new(0),
lines_printed: AtomicUsize::new(0),
should_stop: AtomicBool::new(false),
first_failure_code: Arc::new(Mutex::new(None)),
}
}
fn get_spinner(&self) -> String {
let idx = self.spinner_index.load(Ordering::Relaxed);
let frame = SPINNER_FRAMES[idx % SPINNER_FRAMES.len()];
let color = SPINNER_COLORS[idx % SPINNER_COLORS.len()];
format!("{}{}{}", color, frame, RESET)
}
fn terminal_width() -> usize {
terminal::size().map(|(w, _)| w as usize).unwrap_or(80)
}
fn truncate_line(text: &str, max_width: usize) -> String {
if text.len() <= max_width {
return text.to_string();
}
if max_width <= 1 {
return text.chars().take(max_width).collect();
}
format!("{}…", &text[..max_width - 1])
}
fn strip_ansi(text: &str) -> String {
let mut result = String::with_capacity(text.len());
let mut chars = text.chars().peekable();
while let Some(c) = chars.next() {
if c == '\x1b' {
// Skip escape sequence
if chars.peek() == Some(&'[') {
chars.next();
while let Some(&next) = chars.peek() {
chars.next();
if next.is_ascii_alphabetic() {
break;
}
}
}
} else {
result.push(c);
}
}
result
}
fn format_task_line(&self, task: &Task, label_width: usize) -> String {
let term_width = Self::terminal_width();
let icon = match task.status {
TaskStatus::Pending => format!("{}○{}", DIM, RESET),
TaskStatus::Running => self.get_spinner(),
TaskStatus::Success => format!("{}✓{}", GREEN, RESET),
TaskStatus::Failure => format!("{}✗{}", RED, RESET),
TaskStatus::Skipped => format!("{}○{}", DIM, RESET),
};
let label = format!("{:width$}", task.label, width = label_width);
let prefix = format!("{} {}{}{}", icon, BOLD, label, RESET);
let prefix_len = 1 + 1 + label_width;
match task.status {
TaskStatus::Success => {
if let Some(dur) = task.duration {
format!("{} {}({:.1}s){}", prefix, DIM, dur.as_secs_f64(), RESET)
} else {
prefix
}
}
TaskStatus::Failure => {
format!("{} {}(exit {}){}", prefix, DIM, task.exit_code.unwrap_or(-1), RESET)
}
TaskStatus::Skipped => {
format!("{} {}(skipped){}", prefix, DIM, RESET)
}
TaskStatus::Pending => prefix,
TaskStatus::Running => {
if !task.last_line.is_empty() {
let clean = Self::strip_ansi(&task.last_line)
.chars()
.filter(|c| c.is_ascii_graphic() || *c == ' ')
.collect::<String>();
let available = term_width.saturating_sub(prefix_len + 3);
if available > 0 {
let truncated = Self::truncate_line(&clean, available);
format!("{} {}{}{}", prefix, DIM, truncated, RESET)
} else {
prefix
}
} else {
prefix
}
}
}
}
async fn render_display(&self) {
let tasks = self.tasks.lock().await;
let lines_printed = self.lines_printed.load(Ordering::Relaxed);
// Move cursor up
if lines_printed > 0 {
print!("\x1b[{}A", lines_printed);
}
let label_width = tasks.iter().map(|t| t.label.len()).max().unwrap_or(0);
for task in tasks.iter() {
let line = self.format_task_line(task, label_width);
println!("{}{}", CLEAR_LINE, line);
}
self.lines_printed.store(tasks.len(), Ordering::Relaxed);
let _ = io::stdout().flush();
}
async fn run_task(&self, task_idx: usize, semaphore: Arc<Semaphore>) {
let _permit = semaphore.acquire().await.unwrap();
if self.should_stop.load(Ordering::Relaxed) {
let mut tasks = self.tasks.lock().await;
tasks[task_idx].status = TaskStatus::Skipped;
return;
}
let command = {
let mut tasks = self.tasks.lock().await;
tasks[task_idx].status = TaskStatus::Running;
tasks[task_idx].command.clone()
};
let start = Instant::now();
let mut child = match Command::new("sh")
.arg("-c")
.arg(&command)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
{
Ok(c) => c,
Err(e) => {
let mut tasks = self.tasks.lock().await;
tasks[task_idx].status = TaskStatus::Failure;
tasks[task_idx].exit_code = Some(-1);
tasks[task_idx].output.push(format!("Failed to spawn: {}", e));
tasks[task_idx].duration = Some(start.elapsed());
if self.fail_fast {
self.should_stop.store(true, Ordering::Relaxed);
let mut first = self.first_failure_code.lock().await;
if first.is_none() {
*first = Some(-1);
}
}
return;
}
};
// Read stdout and stderr
let stdout = child.stdout.take();
let stderr = child.stderr.take();
let tasks_clone = Arc::clone(&self.tasks);
let idx = task_idx;
let stdout_handle = if let Some(stdout) = stdout {
let tasks = Arc::clone(&tasks_clone);
Some(tokio::spawn(async move {
let mut reader = BufReader::new(stdout).lines();
while let Ok(Some(line)) = reader.next_line().await {
let mut tasks = tasks.lock().await;
tasks[idx].output.push(format!("{}\n", line));
tasks[idx].last_line = line;
}
}))
} else {
None
};
let stderr_handle = if let Some(stderr) = stderr {
let tasks = Arc::clone(&tasks_clone);
Some(tokio::spawn(async move {
let mut reader = BufReader::new(stderr).lines();
while let Ok(Some(line)) = reader.next_line().await {
let mut tasks = tasks.lock().await;
tasks[idx].output.push(format!("{}\n", line));
if tasks[idx].last_line.is_empty() {
tasks[idx].last_line = line;
}
}
}))
} else {
None
};
// Wait for process
let status = child.wait().await;
let duration = start.elapsed();
// Wait for output readers
if let Some(h) = stdout_handle {
let _ = h.await;
}
if let Some(h) = stderr_handle {
let _ = h.await;
}
let exit_code = status.map(|s| s.code().unwrap_or(-1)).unwrap_or(-1);
{
let mut tasks = self.tasks.lock().await;
tasks[task_idx].exit_code = Some(exit_code);
tasks[task_idx].duration = Some(duration);
if exit_code == 0 {
tasks[task_idx].status = TaskStatus::Success;
} else {
tasks[task_idx].status = TaskStatus::Failure;
if self.fail_fast {
self.should_stop.store(true, Ordering::Relaxed);
}
let mut first = self.first_failure_code.lock().await;
if first.is_none() {
*first = Some(exit_code);
}
}
}
}
pub async fn run(self: Arc<Self>) -> i32 {
// Hide cursor
print!("{}", HIDE_CURSOR);
let _ = io::stdout().flush();
let semaphore = Arc::new(Semaphore::new(self.max_jobs));
let task_count = self.tasks.lock().await.len();
// Spawn all tasks
let mut handles = Vec::new();
for i in 0..task_count {
let sem = Arc::clone(&semaphore);
let runner = Arc::clone(&self);
handles.push(tokio::spawn(async move {
runner.run_task(i, sem).await;
}));
}
// Spinner loop
let spinner_handle = {
let runner = Arc::clone(&self);
tokio::spawn(async move {
loop {
if runner.should_stop.load(Ordering::Relaxed) {
let tasks = runner.tasks.lock().await;
if tasks.iter().all(|t| {
matches!(t.status, TaskStatus::Success | TaskStatus::Failure | TaskStatus::Skipped)
}) {
break;
}
}
runner.spinner_index.fetch_add(1, Ordering::Relaxed);
runner.render_display().await;
tokio::time::sleep(Duration::from_millis(80)).await;
let tasks = runner.tasks.lock().await;
if tasks.iter().all(|t| {
matches!(t.status, TaskStatus::Success | TaskStatus::Failure | TaskStatus::Skipped)
}) {
break;
}
}
})
};
// Wait for all tasks
for h in handles {
let _ = h.await;
}
self.should_stop.store(true, Ordering::Relaxed);
let _ = spinner_handle.await;
// Final render
self.render_display().await;
// Print failures
let tasks = self.tasks.lock().await;
let failed: Vec<_> = tasks.iter().filter(|t| t.status == TaskStatus::Failure).collect();
if !failed.is_empty() {
println!();
for task in failed {
println!("{}{}━━━ {} (exit {}) ━━━{}", RED, BOLD, task.label, task.exit_code.unwrap_or(-1), RESET);
let output = task.output.join("");
if !output.trim().is_empty() {
print!("{}", output);
}
println!();
}
}
// Show cursor
print!("{}", SHOW_CURSOR);
let _ = io::stdout().flush();
self.first_failure_code.lock().await.unwrap_or(0)
}
}
/// Run `(label, command)` pairs in parallel with pretty output.
///
/// Returns `Ok(())` when every task succeeds. When any task fails, the
/// process exits immediately with the first failure's exit code (so shell
/// callers see the real code rather than a generic error).
pub async fn run_parallel(
    tasks: Vec<(&str, &str)>,
    max_jobs: usize,
    fail_fast: bool,
) -> Result<()> {
    if tasks.is_empty() {
        bail!("No tasks specified");
    }
    let mut prepared = Vec::with_capacity(tasks.len());
    for (label, command) in tasks {
        prepared.push(Task::new(label, command));
    }
    let runner = Arc::new(ParallelRunner::new(prepared, max_jobs, fail_fast));
    let code = runner.run().await;
    if code == 0 {
        Ok(())
    } else {
        // Propagate the failing task's exit code to the shell.
        std::process::exit(code);
    }
}
/// CLI entry point for `f parallel`.
///
/// Accepts each task either as `label:command` or as a bare command, in which
/// case the label falls back to the command's first word (or `taskN` for an
/// empty/whitespace-only command). Job count defaults to the machine's
/// available parallelism (or 4 when that cannot be determined).
pub fn run(cmd: crate::cli::ParallelCommand) -> Result<()> {
    use tokio::runtime::Runtime;
    if cmd.tasks.is_empty() {
        bail!("No tasks specified. Usage: f parallel 'echo hello' 'echo world' or 'label:command'");
    }
    // Parse tasks: either "label:command" or just "command" (auto-labeled)
    let tasks: Vec<(String, String)> = cmd
        .tasks
        .iter()
        .enumerate()
        .map(|(i, t)| {
            if let Some((label, command)) = t.split_once(':') {
                (label.to_string(), command.to_string())
            } else {
                // Auto-generate label from the command's first word, or use
                // the index. `unwrap_or_else` keeps the format! allocation off
                // the common path (and avoids borrowing a temporary String).
                let label = t
                    .split_whitespace()
                    .next()
                    .map(str::to_string)
                    .unwrap_or_else(|| format!("task{}", i + 1));
                (label, t.to_string())
            }
        })
        .collect();
    let max_jobs = cmd.jobs.unwrap_or_else(|| {
        std::thread::available_parallelism()
            .map(|n| n.get())
            .unwrap_or(4)
    });
    let rt = Runtime::new()?;
    rt.block_on(async {
        let task_refs: Vec<(&str, &str)> = tasks.iter().map(|(l, c)| (l.as_str(), c.as_str())).collect();
        run_parallel(task_refs, max_jobs, cmd.fail_fast).await
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Two commands that both succeed yield an overall exit code of 0.
    #[tokio::test]
    async fn test_parallel_success() {
        let tasks = vec![
            Task::new("echo1", "echo hello"),
            Task::new("echo2", "echo world"),
        ];
        let runner = Arc::new(ParallelRunner::new(tasks, 4, false));
        let code = runner.run().await;
        assert_eq!(code, 0);
    }
    /// The first failing task's exit code (1 here) becomes the overall
    /// result, even when another task succeeds and fail-fast is off.
    #[tokio::test]
    async fn test_parallel_failure() {
        let tasks = vec![
            Task::new("fail", "exit 1"),
            Task::new("pass", "echo ok"),
        ];
        let runner = Arc::new(ParallelRunner::new(tasks, 4, false));
        let code = runner.run().await;
        assert_eq!(code, 1);
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/deps.rs | src/deps.rs | use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use std::process::Command;
use anyhow::{Context, Result, bail};
use ignore::WalkBuilder;
use serde::Deserialize;
use toml::Value;
use toml::map::Map;
use crate::cli::{DepsAction, DepsCommand, DepsManager, ReposCloneOpts};
use crate::{config, repos, upstream};
/// Entry point for the `deps` command.
///
/// `Pick` (or no action) opens the fuzzy dependency picker; `Repo` links a
/// repository; any other action is translated into a package-manager command
/// and executed in the project root. The manager is auto-detected from
/// lockfiles when not given explicitly.
pub fn run(cmd: DepsCommand) -> Result<()> {
    let root = project_root()?;
    let manager = cmd.manager.unwrap_or_else(|| detect_manager(&root));
    match cmd.action {
        None | Some(DepsAction::Pick) => pick_dependency(&root)?,
        Some(DepsAction::Repo { repo, root: repos_root, private }) => {
            link_repo_dependency(&root, &repo, &repos_root, private)?
        }
        Some(other) => {
            let (program, args) = build_command(manager, &root, &other)?;
            let status = Command::new(program)
                .args(&args)
                .current_dir(&root)
                .status()
                .with_context(|| format!("failed to run {}", program))?;
            if !status.success() {
                bail!("dependency command failed");
            }
        }
    }
    Ok(())
}
/// Translate a deps action into a package-manager invocation.
///
/// Returns the program name plus its argument list. pnpm gets a leading `-r`
/// when the project is a workspace so commands run recursively; other
/// managers take no default flags. `Repo`/`Pick` are not package-manager
/// commands and produce an error.
fn build_command(
    manager: DepsManager,
    project_root: &Path,
    action: &DepsAction,
) -> Result<(&'static str, Vec<String>)> {
    let program = match manager {
        DepsManager::Pnpm => "pnpm",
        DepsManager::Yarn => "yarn",
        DepsManager::Bun => "bun",
        DepsManager::Npm => "npm",
    };
    let mut args: Vec<String> = Vec::new();
    if matches!(manager, DepsManager::Pnpm) && is_workspace(project_root) {
        args.push("-r".to_string());
    }
    match action {
        DepsAction::Install { args: extra } => {
            args.push("install".to_string());
            args.extend(extra.clone());
        }
        DepsAction::Update { args: extra } => {
            // Subcommand spelling differs per manager; pnpm also bumps to the
            // latest versions rather than respecting the current ranges.
            let subcommand: &[&str] = match manager {
                DepsManager::Pnpm => &["up", "--latest"],
                DepsManager::Yarn => &["up"],
                DepsManager::Bun | DepsManager::Npm => &["update"],
            };
            args.extend(subcommand.iter().map(|s| s.to_string()));
            args.extend(extra.clone());
        }
        DepsAction::Repo { .. } | DepsAction::Pick => {
            bail!("dependency action is not a package manager command");
        }
    }
    Ok((program, args))
}
/// Detect the package manager from lockfiles (and pnpm's workspace manifest)
/// present in the project root, falling back to npm when nothing matches.
fn detect_manager(project_root: &Path) -> DepsManager {
    let has = |name: &str| project_root.join(name).exists();
    if has("pnpm-lock.yaml") || has("pnpm-workspace.yaml") {
        DepsManager::Pnpm
    } else if has("bun.lockb") || has("bun.lock") {
        DepsManager::Bun
    } else if has("yarn.lock") {
        DepsManager::Yarn
    } else {
        DepsManager::Npm
    }
}
/// A project counts as a workspace when it contains a pnpm workspace manifest.
fn is_workspace(project_root: &Path) -> bool {
    let manifest = project_root.join("pnpm-workspace.yaml");
    manifest.exists()
}
/// Resolve the project root: the directory holding the nearest `flow.toml`
/// (searching upward from the cwd), or the cwd itself when none exists.
fn project_root() -> Result<PathBuf> {
    let cwd = std::env::current_dir().context("failed to read current directory")?;
    match find_flow_toml(&cwd) {
        Some(flow_path) => Ok(flow_path.parent().unwrap_or(&cwd).to_path_buf()),
        None => Ok(cwd),
    }
}
/// Walk from `start` up through its ancestors looking for a `flow.toml`.
///
/// Returns the path of the first manifest found (checking `start` itself
/// first), or `None` when the search reaches the filesystem root without a
/// hit. Takes `&Path` rather than `&String`-style `&PathBuf` — existing
/// `&PathBuf` call sites still work via deref coercion.
fn find_flow_toml(start: &Path) -> Option<PathBuf> {
    // `ancestors()` yields `start` first, then each parent up to the root,
    // replacing the previous clone-and-pop loop.
    start
        .ancestors()
        .map(|dir| dir.join("flow.toml"))
        .find(|candidate| candidate.exists())
}
/// What selecting an entry in the `deps` fuzzy picker should do.
#[derive(Debug)]
enum DepPickAction {
    // Link/clone the repository identified by `repo` into the repos root.
    RepoLink { repo: String },
    // Open a known `owner/repo` locally (cloning it first if missing).
    RepoOpen { owner: String, repo: String },
    // A local project directory discovered by scanning manifests; prints its path.
    Project { path: PathBuf },
    // Informational entry; selecting it just prints `text`.
    Message { text: String },
}
/// One selectable line in the `deps` fuzzy picker.
#[derive(Debug)]
struct DepPickEntry {
    // Text shown in (and matched back from) the fzf list.
    display: String,
    // Action performed when this entry is selected.
    action: DepPickAction,
}
/// Shape of the optional `.ai/repos.toml` manifest.
#[derive(Debug, Deserialize)]
struct RepoManifest {
    // Root directory where repos are cloned (defaults to "~/repos" when absent).
    root: Option<String>,
    // Repositories explicitly linked to this project.
    repos: Option<Vec<RepoManifestEntry>>,
}
/// One linked repository record inside `.ai/repos.toml`.
#[derive(Debug, Deserialize)]
struct RepoManifestEntry {
    owner: String,
    repo: String,
    // Optional explicit URL; falls back to "owner/repo" where used.
    url: Option<String>,
}
/// Interactive dependency picker: gather candidate entries (linked repos,
/// local projects, js/crate dependencies), let the user choose one via fzf,
/// and perform the selected entry's action.
///
/// Falls back to a plain listing when fzf is not installed; a cancelled or
/// empty selection is a no-op.
fn pick_dependency(project_root: &Path) -> Result<()> {
    let manifest = load_repo_manifest(project_root)?;
    // Repos root comes from the manifest when present, else "~/repos".
    let default_root = manifest
        .as_ref()
        .and_then(|m| m.root.clone())
        .unwrap_or_else(|| "~/repos".to_string());
    let root_path = repos::normalize_root(&default_root)?;
    let entries = build_pick_entries(project_root, &root_path, manifest.as_ref())?;
    if entries.is_empty() {
        println!("No linked repos or dependency metadata found.");
        return Ok(());
    }
    if which::which("fzf").is_err() {
        // No fzf: print the entries instead of selecting interactively.
        println!("fzf not found on PATH – install it to use fuzzy selection.");
        for entry in &entries {
            println!("  {}", entry.display);
        }
        return Ok(());
    }
    let Some(entry) = run_deps_fzf(&entries)? else {
        return Ok(());
    };
    match &entry.action {
        DepPickAction::RepoLink { repo } => {
            link_repo_dependency(project_root, repo, &default_root, false)?
        }
        DepPickAction::RepoOpen { owner, repo } => {
            let repo_ref = repos::RepoRef {
                owner: owner.clone(),
                repo: repo.clone(),
            };
            let repo_path = root_path.join(&repo_ref.owner).join(&repo_ref.repo);
            // Clone/link the repo first when it isn't on disk yet.
            if !repo_path.exists() {
                let repo_id = format!("{}/{}", repo_ref.owner, repo_ref.repo);
                link_repo_dependency(project_root, &repo_id, &default_root, false)?;
            }
            open_in_zed(&repo_path)?;
        }
        DepPickAction::Project { path } => {
            println!("Project path: {}", path.display());
            println!("Hint: cd {}", path.display());
        }
        DepPickAction::Message { text } => {
            println!("{}", text);
        }
    }
    Ok(())
}
fn run_deps_fzf<'a>(entries: &'a [DepPickEntry]) -> Result<Option<&'a DepPickEntry>> {
use std::io::Write;
use std::process::Stdio;
let mut child = Command::new("fzf")
.arg("--prompt")
.arg("deps> ")
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.context("failed to spawn fzf")?;
{
let stdin = child.stdin.as_mut().context("failed to open fzf stdin")?;
for entry in entries {
writeln!(stdin, "{}", entry.display)?;
}
}
let output = child.wait_with_output()?;
if !output.status.success() {
return Ok(None);
}
let selection = String::from_utf8(output.stdout).context("fzf output was not valid UTF-8")?;
let selection = selection.trim();
if selection.is_empty() {
return Ok(None);
}
Ok(entries.iter().find(|entry| entry.display == selection))
}
/// Assemble the picker's entry list, in display order:
/// 1. repos linked via `.ai/repos.toml`,
/// 2. local sub-projects discovered by scanning manifests,
/// 3. JS dependencies from package.json files,
/// 4. crate dependencies from Cargo.toml files.
///
/// Dependencies whose repository URL can be resolved get a link/open action;
/// the rest become informational `Message` entries.
fn build_pick_entries(
    project_root: &Path,
    root_path: &Path,
    manifest: Option<&RepoManifest>,
) -> Result<Vec<DepPickEntry>> {
    let mut entries = Vec::new();
    // Manifest-linked repos first; "(local)" marks ones already on disk.
    if let Some(manifest) = manifest {
        if let Some(repos) = &manifest.repos {
            for repo in repos {
                let repo_id = format!("{}/{}", repo.owner, repo.repo);
                let is_local = root_path.join(&repo.owner).join(&repo.repo).exists();
                let _repo_url = repo.url.clone().unwrap_or_else(|| repo_id.clone());
                entries.push(DepPickEntry {
                    display: format!("[linked] {}{}", repo_id, if is_local { " (local)" } else { "" }),
                    action: DepPickAction::RepoOpen {
                        owner: repo.owner.clone(),
                        repo: repo.repo.clone(),
                    },
                });
            }
        }
    }
    // Scan for package.json / Cargo.toml manifests, collecting both the
    // sub-projects they define and the dependencies they declare.
    let scan = scan_project_files(project_root)?;
    let mut js_deps = BTreeSet::new();
    let mut cargo_deps = BTreeSet::new();
    let mut project_entries = Vec::new();
    for path in scan {
        if path.file_name().and_then(|n| n.to_str()) == Some("package.json") {
            if let Ok(info) = parse_package_json(&path) {
                if let Some(name) = info.name {
                    let dir = path.parent().unwrap_or(&path);
                    // The root project itself is not listed as a sub-project.
                    if !is_project_root(project_root, dir) {
                        project_entries.push(DepPickEntry {
                            display: format!(
                                "[project] {} ({})",
                                name,
                                path_relative(project_root, dir)
                            ),
                            action: DepPickAction::Project {
                                path: dir.to_path_buf(),
                            },
                        });
                    }
                }
                // JS deps keep their manifest dir so the repo URL can be
                // resolved relative to it later.
                for dep in info.deps {
                    js_deps.insert((dep, path.parent().unwrap_or(&path).to_path_buf()));
                }
            }
        } else if path.file_name().and_then(|n| n.to_str()) == Some("Cargo.toml") {
            if let Ok(info) = parse_cargo_toml(&path) {
                if let Some(name) = info.name {
                    let dir = path.parent().unwrap_or(&path);
                    if !is_project_root(project_root, dir) {
                        project_entries.push(DepPickEntry {
                            display: format!(
                                "[project] {} ({})",
                                name,
                                path_relative(project_root, dir)
                            ),
                            action: DepPickAction::Project {
                                path: dir.to_path_buf(),
                            },
                        });
                    }
                }
                for dep in info.deps {
                    cargo_deps.insert(dep);
                }
            }
        }
    }
    entries.extend(project_entries);
    let cargo_lock = load_cargo_lock(project_root).unwrap_or_default();
    // JS dependencies: resolve each to a repo URL; locally present repos get
    // an open action, known-but-absent ones a link action.
    for (dep, base_dir) in js_deps {
        let repo_url = resolve_js_repo(project_root, &base_dir, &dep);
        if let Some(repo_url) = repo_url {
            let is_local = local_repo_is_present(root_path, &repo_url);
            let label = if is_local { "[linked-js]" } else { "[js]" };
            let display = display_repo(&repo_url);
            let action = if is_local {
                match repos::parse_github_repo(&repo_url) {
                    Ok(repo_ref) => DepPickAction::RepoOpen {
                        owner: repo_ref.owner,
                        repo: repo_ref.repo,
                    },
                    Err(_) => DepPickAction::RepoLink { repo: repo_url.clone() },
                }
            } else {
                DepPickAction::RepoLink { repo: repo_url.clone() }
            };
            entries.push(DepPickEntry {
                display: format!("{} {} -> {}", label, dep, display),
                action,
            });
        } else {
            entries.push(DepPickEntry {
                display: format!("[js] {} (no repo found)", dep),
                action: DepPickAction::Message {
                    text: format!("No repository URL found for {}", dep),
                },
            });
        }
    }
    // Crate dependencies follow the same pattern, resolved via Cargo.lock.
    for dep in cargo_deps {
        let repo_url = resolve_cargo_repo(&cargo_lock, &dep);
        if let Some(repo_url) = repo_url {
            let is_local = local_repo_is_present(root_path, &repo_url);
            let label = if is_local { "[linked-crate]" } else { "[crate]" };
            let display = display_repo(&repo_url);
            let action = if is_local {
                match repos::parse_github_repo(&repo_url) {
                    Ok(repo_ref) => DepPickAction::RepoOpen {
                        owner: repo_ref.owner,
                        repo: repo_ref.repo,
                    },
                    Err(_) => DepPickAction::RepoLink { repo: repo_url.clone() },
                }
            } else {
                DepPickAction::RepoLink { repo: repo_url.clone() }
            };
            entries.push(DepPickEntry {
                display: format!("{} {} -> {}", label, dep, display),
                action,
            });
        } else {
            entries.push(DepPickEntry {
                display: format!("[crate] {} (no repo found)", dep),
                action: DepPickAction::Message {
                    text: format!("No repository URL found for {}", dep),
                },
            });
        }
    }
    Ok(entries)
}
/// Load `.ai/repos.toml` from the project root, if present.
///
/// Returns `Ok(None)` when the manifest file does not exist; read or parse
/// failures are errors.
fn load_repo_manifest(project_root: &Path) -> Result<Option<RepoManifest>> {
    let path = project_root.join(".ai").join("repos.toml");
    if !path.exists() {
        return Ok(None);
    }
    let contents = std::fs::read_to_string(&path)
        .with_context(|| format!("failed to read {}", path.display()))?;
    let manifest: RepoManifest =
        toml::from_str(&contents).context("failed to parse .ai/repos.toml")?;
    Ok(Some(manifest))
}
/// Walk the project tree and collect every `package.json` and `Cargo.toml`,
/// skipping VCS metadata and common build/dependency output directories.
/// Unreadable entries are silently skipped.
fn scan_project_files(root: &Path) -> Result<Vec<PathBuf>> {
    const SKIP_DIRS: [&str; 8] = [
        ".git", ".ai", "node_modules", "target", "dist", "build", ".next", ".turbo",
    ];
    let mut builder = WalkBuilder::new(root);
    builder.hidden(false);
    builder.filter_entry(|entry| {
        let name = entry.file_name().to_string_lossy();
        !SKIP_DIRS.contains(&name.as_ref())
    });
    let mut manifests = Vec::new();
    for entry in builder.build().flatten() {
        let is_file = entry.file_type().map_or(false, |t| t.is_file());
        if !is_file {
            continue;
        }
        let name = entry.file_name().to_string_lossy();
        if matches!(name.as_ref(), "package.json" | "Cargo.toml") {
            manifests.push(entry.into_path());
        }
    }
    Ok(manifests)
}
/// Fields extracted from a parsed `package.json`.
struct PackageJsonInfo {
    // Top-level "name" field, when present.
    name: Option<String>,
    // Sorted, de-duplicated union of all dependency-section keys.
    deps: Vec<String>,
}
/// Parse a `package.json`, returning the package name and the union of all
/// dependency names across the regular/dev/optional/peer dependency maps.
fn parse_package_json(path: &Path) -> Result<PackageJsonInfo> {
    let raw = std::fs::read_to_string(path)
        .with_context(|| format!("failed to read {}", path.display()))?;
    let json: serde_json::Value = serde_json::from_str(&raw)
        .with_context(|| format!("failed to parse {}", path.display()))?;
    let name = json
        .get("name")
        .and_then(serde_json::Value::as_str)
        .map(str::to_string);
    let sections = [
        "dependencies",
        "devDependencies",
        "optionalDependencies",
        "peerDependencies",
    ];
    // BTreeSet both de-duplicates and keeps the final list sorted.
    let mut deps = BTreeSet::new();
    for section in sections {
        let Some(map) = json.get(section).and_then(|v| v.as_object()) else {
            continue;
        };
        deps.extend(map.keys().cloned());
    }
    Ok(PackageJsonInfo {
        name,
        deps: deps.into_iter().collect(),
    })
}
/// Fields extracted from a parsed `Cargo.toml`.
struct CargoTomlInfo {
    // `[package].name`, when present (absent for virtual workspaces).
    name: Option<String>,
    // Sorted, de-duplicated union of dependency-table keys.
    deps: Vec<String>,
}
/// Parse a `Cargo.toml`, returning the crate name and the union of
/// dependency names across normal, dev, and build dependency tables.
fn parse_cargo_toml(path: &Path) -> Result<CargoTomlInfo> {
    let raw = std::fs::read_to_string(path)
        .with_context(|| format!("failed to read {}", path.display()))?;
    let doc: toml::Value = toml::from_str(&raw)
        .with_context(|| format!("failed to parse {}", path.display()))?;
    let name = doc
        .get("package")
        .and_then(Value::as_table)
        .and_then(|pkg| pkg.get("name"))
        .and_then(Value::as_str)
        .map(str::to_string);
    // BTreeSet de-duplicates across sections and yields a sorted list.
    let mut deps = BTreeSet::new();
    for section in ["dependencies", "dev-dependencies", "build-dependencies"] {
        if let Some(table) = doc.get(section).and_then(Value::as_table) {
            deps.extend(table.keys().cloned());
        }
    }
    Ok(CargoTomlInfo {
        name,
        deps: deps.into_iter().collect(),
    })
}
/// Index built from `Cargo.lock`: crate name -> version and, for
/// non-registry packages, crate name -> normalized GitHub source URL.
#[derive(Default)]
struct CargoLockIndex {
    // First-seen version per crate name.
    versions: BTreeMap<String, String>,
    // Normalized GitHub URL for crates with a git (non-registry) source.
    sources: BTreeMap<String, String>,
}
/// Build a `CargoLockIndex` from the project's `Cargo.lock`. Returns an
/// empty index when there is no lockfile.
fn load_cargo_lock(project_root: &Path) -> Result<CargoLockIndex> {
    let lock_path = project_root.join("Cargo.lock");
    if !lock_path.exists() {
        return Ok(CargoLockIndex::default());
    }
    let raw = std::fs::read_to_string(&lock_path)
        .with_context(|| format!("failed to read {}", lock_path.display()))?;
    let doc: toml::Value = toml::from_str(&raw)
        .with_context(|| format!("failed to parse {}", lock_path.display()))?;
    let mut index = CargoLockIndex::default();
    let packages = doc
        .get("package")
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default();
    for pkg in packages {
        let Some(table) = pkg.as_table() else { continue };
        let Some(name) = table.get("name").and_then(Value::as_str) else {
            continue;
        };
        let name = name.to_string();
        if let Some(version) = table.get("version").and_then(Value::as_str) {
            index
                .versions
                .entry(name.clone())
                .or_insert_with(|| version.to_string());
        }
        let Some(source) = table.get("source").and_then(Value::as_str) else {
            continue;
        };
        // Registry entries point at crates.io, not a browsable repository.
        if source.starts_with("registry+") {
            continue;
        }
        if let Some(url) = normalize_github_url(source) {
            index.sources.entry(name).or_insert(url);
        }
    }
    Ok(index)
}
/// Find the GitHub repository URL for a JS dependency by reading its
/// installed `package.json` from the package's own `node_modules` first,
/// then the project-root `node_modules`.
fn resolve_js_repo(project_root: &Path, base_dir: &Path, dep: &str) -> Option<String> {
    let candidates = [base_dir, project_root]
        .into_iter()
        .map(|dir| dir.join("node_modules"))
        .filter(|dir| dir.exists());
    for base in candidates {
        let manifest = join_node_modules(&base, dep).join("package.json");
        if !manifest.exists() {
            continue;
        }
        let Ok(contents) = std::fs::read_to_string(&manifest) else {
            continue;
        };
        let Ok(json) = serde_json::from_str::<serde_json::Value>(&contents) else {
            continue;
        };
        // Only return once we have a URL that normalizes to a GitHub repo.
        if let Some(url) = extract_repo_url(&json).and_then(|repo| normalize_github_url(&repo)) {
            return Some(url);
        }
    }
    None
}
fn resolve_cargo_repo(index: &CargoLockIndex, dep: &str) -> Option<String> {
if let Some(url) = index.sources.get(dep) {
return Some(url.clone());
}
let version = index.versions.get(dep)?;
let cargo_home = cargo_home();
let registry_src = cargo_home.join("registry").join("src");
let entries = std::fs::read_dir(®istry_src).ok()?;
for entry in entries.flatten() {
let candidate = entry
.path()
.join(format!("{}-{}", dep, version))
.join("Cargo.toml");
if candidate.exists() {
if let Ok(contents) = std::fs::read_to_string(&candidate) {
if let Ok(value) = toml::from_str::<toml::Value>(&contents) {
if let Some(repo) = value
.get("package")
.and_then(Value::as_table)
.and_then(|pkg| pkg.get("repository"))
.and_then(Value::as_str)
{
if let Some(url) = normalize_github_url(repo) {
return Some(url);
}
}
}
}
}
}
None
}
/// Locate the Cargo home directory: `$CARGO_HOME` when set, otherwise the
/// tilde-expanded default `~/.cargo`.
fn cargo_home() -> PathBuf {
    let raw = match std::env::var("CARGO_HOME") {
        Ok(value) => value,
        Err(_) => "~/.cargo".to_string(),
    };
    config::expand_path(&raw)
}
/// Join a dependency name onto a `node_modules` directory, splitting scoped
/// packages (`@scope/name`) into two path components.
fn join_node_modules(base: &Path, dep: &str) -> PathBuf {
    match dep.split_once('/') {
        Some((scope, name)) if scope.starts_with('@') => base.join(scope).join(name),
        _ => base.join(dep),
    }
}
fn extract_repo_url(value: &serde_json::Value) -> Option<String> {
match value.get("repository") {
Some(serde_json::Value::String(url)) => Some(url.to_string()),
Some(serde_json::Value::Object(map)) => map
.get("url")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
_ => None,
}
}
/// Canonicalize a repository URL to `https://github.com/{owner}/{repo}`.
/// Returns `None` for crates.io registry indexes and anything that does not
/// parse as a GitHub repo reference.
fn normalize_github_url(raw: &str) -> Option<String> {
    // Drop the npm-style "git+" scheme prefix and trailing "/" / ".git".
    let cleaned = raw
        .trim()
        .trim_start_matches("git+")
        .trim_end_matches('/')
        .trim_end_matches(".git");
    if cleaned.contains("crates.io-index") {
        return None;
    }
    let repo_ref = repos::parse_github_repo(cleaned).ok()?;
    Some(format!(
        "https://github.com/{}/{}",
        repo_ref.owner, repo_ref.repo
    ))
}
/// Render a repository URL as short `owner/repo` form when it parses as a
/// GitHub reference; otherwise echo the URL unchanged.
fn display_repo(url: &str) -> String {
    match repos::parse_github_repo(url) {
        Ok(repo_ref) => format!("{}/{}", repo_ref.owner, repo_ref.repo),
        Err(_) => url.to_string(),
    }
}
/// Check whether a clone of the repo already exists at `{root}/{owner}/{repo}`.
fn local_repo_is_present(root_path: &Path, url: &str) -> bool {
    repos::parse_github_repo(url)
        .map(|r| root_path.join(r.owner).join(r.repo).exists())
        .unwrap_or(false)
}
/// Open `path` in the Zed editor via the macOS `open -a` launcher.
fn open_in_zed(path: &Path) -> Result<()> {
    let status = Command::new("open")
        .args(["-a", "/Applications/Zed.app"])
        .arg(path)
        .status()
        .context("failed to launch Zed")?;
    if !status.success() {
        bail!("failed to open {}", path.display());
    }
    Ok(())
}
/// Display `path` relative to `root`, falling back to the path as-is when
/// it is not underneath `root`.
fn path_relative(root: &Path, path: &Path) -> String {
    let shown = path.strip_prefix(root).unwrap_or(path);
    shown.display().to_string()
}
/// Compare two directories for identity after canonicalization; when a path
/// cannot be canonicalized (e.g. it does not exist), the raw path is used.
fn is_project_root(root: &Path, candidate: &Path) -> bool {
    let canon = |p: &Path| p.canonicalize().unwrap_or_else(|_| p.to_path_buf());
    canon(root) == canon(candidate)
}
/// Clone (if needed) and symlink a dependency repository into the project's
/// `.ai/repos/{owner}/{repo}`, then record it in `.ai/repos.toml`.
///
/// `repo` may be a URL/`owner/repo` reference or a bare name resolved
/// against the clone root. Side effects run in a fixed order:
/// clone -> upstream remote -> optional private origin -> symlink -> manifest.
fn link_repo_dependency(
    project_root: &Path,
    repo: &str,
    root: &str,
    private_origin: bool,
) -> Result<()> {
    let ai_dir = project_root.join(".ai");
    let repos_dir = ai_dir.join("repos");
    std::fs::create_dir_all(&repos_dir)
        .with_context(|| format!("failed to create {}", repos_dir.display()))?;
    let root_path = repos::normalize_root(root)?;
    // Bare names (no slash/URL markers) are resolved by scanning the clone root.
    let repo_ref = if looks_like_repo_ref(repo) {
        repos::parse_github_repo(repo)?
    } else {
        resolve_repo_by_name(&root_path, repo)?
    };
    let target_dir = root_path.join(&repo_ref.owner).join(&repo_ref.repo);
    let mut cloned = false;
    if !target_dir.exists() {
        let opts = ReposCloneOpts {
            url: repo.to_string(),
            root: root.to_string(),
            full: false,
            no_upstream: false,
            upstream_url: None,
        };
        repos::clone_repo(opts)?;
        cloned = true;
    } else {
        println!("✓ found repo at {}", target_dir.display());
    }
    let origin_url = format!("git@github.com:{}/{}.git", repo_ref.owner, repo_ref.repo);
    // Upstream is only configured on a fresh clone; existing checkouts keep
    // whatever remotes the user set up. Failures here are non-fatal.
    if cloned {
        if let Err(err) = ensure_upstream(&target_dir, &origin_url) {
            println!("⚠ upstream setup skipped: {}", err);
        }
    }
    if private_origin {
        if let Err(err) = maybe_setup_private_origin(&target_dir, &repo_ref, &origin_url) {
            println!("⚠ private origin setup skipped: {}", err);
        }
    }
    let owner_dir = repos_dir.join(&repo_ref.owner);
    std::fs::create_dir_all(&owner_dir)
        .with_context(|| format!("failed to create {}", owner_dir.display()))?;
    let link_path = owner_dir.join(&repo_ref.repo);
    if link_path.exists() {
        println!("✓ link already exists: {}", link_path.display());
    } else {
        // Symlink creation is platform-specific; both arms link target_dir.
        #[cfg(unix)]
        {
            std::os::unix::fs::symlink(&target_dir, &link_path)
                .with_context(|| format!("failed to link {}", link_path.display()))?;
        }
        #[cfg(windows)]
        {
            std::os::windows::fs::symlink_dir(&target_dir, &link_path)
                .with_context(|| format!("failed to link {}", link_path.display()))?;
        }
        println!("✓ linked {}", link_path.display());
    }
    let manifest_path = ai_dir.join("repos.toml");
    upsert_repo_manifest(&manifest_path, root, &repo_ref, repo)?;
    Ok(())
}
/// Heuristic: does `input` look like a repo reference (URL, SSH spec, or
/// `owner/repo`) rather than a bare repository name?
fn looks_like_repo_ref(input: &str) -> bool {
    let s = input.trim();
    s.starts_with("git@github.com:")
        || s.contains("github.com/")
        || s.contains('/')
        || s.ends_with(".git")
}
/// Resolve a bare repository name by scanning `{root}/{owner}/{name}`
/// directories. Errors when the name is missing or ambiguous across owners.
fn resolve_repo_by_name(root: &Path, name: &str) -> Result<repos::RepoRef> {
    let root_entries = std::fs::read_dir(root)
        .with_context(|| format!("failed to read {}", root.display()))?;
    let mut matches = Vec::new();
    for owner_entry in root_entries.flatten() {
        let is_dir = owner_entry.file_type().map_or(false, |t| t.is_dir());
        if !is_dir {
            continue;
        }
        if owner_entry.path().join(name).is_dir() {
            let owner = owner_entry.file_name().to_string_lossy().to_string();
            matches.push(repos::RepoRef {
                owner,
                repo: name.to_string(),
            });
        }
    }
    match matches.len() {
        0 => bail!(
            "repo '{}' not found under {}. Use owner/repo or run: f repos clone <url>",
            name,
            root.display()
        ),
        1 => Ok(matches.remove(0)),
        _ => {
            let options = matches
                .iter()
                .map(|repo| format!("{}/{}", repo.owner, repo.repo))
                .collect::<Vec<_>>()
                .join(", ");
            bail!("multiple matches for '{}': {}. Use owner/repo.", name, options)
        }
    }
}
/// Add a repo entry to `.ai/repos.toml` (creating the file if needed) and
/// rewrite it pretty-printed. An existing entry for the same owner/repo is
/// left untouched.
fn upsert_repo_manifest(path: &Path, root: &str, repo: &repos::RepoRef, url: &str) -> Result<()> {
    let mut doc = if path.exists() {
        let contents = std::fs::read_to_string(path)
            .with_context(|| format!("failed to read {}", path.display()))?;
        // A corrupt manifest is replaced with a fresh table rather than failing.
        toml::from_str::<Value>(&contents).unwrap_or(Value::Table(Map::new()))
    } else {
        Value::Table(Map::new())
    };
    let table = doc
        .as_table_mut()
        .ok_or_else(|| anyhow::anyhow!("invalid repos.toml"))?;
    table
        .entry("root".to_string())
        .or_insert_with(|| Value::String(root.to_string()));
    let repos_array = table
        .entry("repos".to_string())
        .or_insert_with(|| Value::Array(Vec::new()))
        .as_array_mut()
        .ok_or_else(|| anyhow::anyhow!("invalid repos list"))?;
    let already_listed = repos_array.iter().any(|entry| {
        entry.get("owner").and_then(Value::as_str) == Some(repo.owner.as_str())
            && entry.get("repo").and_then(Value::as_str) == Some(repo.repo.as_str())
    });
    if !already_listed {
        let mut entry = Map::new();
        entry.insert("owner".to_string(), Value::String(repo.owner.clone()));
        entry.insert("repo".to_string(), Value::String(repo.repo.clone()));
        entry.insert("url".to_string(), Value::String(url.to_string()));
        repos_array.push(Value::Table(entry));
    }
    let rendered = toml::to_string_pretty(&doc)?;
    std::fs::write(path, rendered).with_context(|| format!("failed to write {}", path.display()))?;
    println!("✓ updated {}", path.display());
    Ok(())
}
/// When the repo belongs to another GitHub account, mirror it into the
/// user's own account: create a private repo if needed, point `origin` at
/// it, and keep the original as `upstream`.
///
/// Silently no-ops when: gh is missing, gh is unauthenticated, the user
/// already owns the repo, the directory is not a git checkout, or origin
/// already points at the user's account.
fn maybe_setup_private_origin(
    repo_dir: &Path,
    repo_ref: &repos::RepoRef,
    origin_url: &str,
) -> Result<()> {
    if !gh_available() {
        return Ok(());
    }
    if !gh_authenticated()? {
        println!("gh not authenticated; skipping private origin setup");
        println!("Authenticate with: gh auth login");
        return Ok(());
    }
    let gh_user = gh_username()?;
    // Owning the upstream repo already means origin needs no rewrite.
    if gh_user.is_empty() || repo_ref.owner == gh_user {
        return Ok(());
    }
    if !repo_dir.join(".git").exists() {
        return Ok(());
    }
    let origin_remote = git_remote_get(repo_dir, "origin")?;
    if let Some(origin_remote) = origin_remote {
        // Matches both SSH (github.com:user/) and HTTPS (github.com/user/) forms.
        if origin_remote.contains(&format!("github.com:{}/", gh_user))
            || origin_remote.contains(&format!("github.com/{}/", gh_user))
        {
            return Ok(());
        }
    }
    let private_repo = format!("{}/{}", gh_user, repo_ref.repo);
    let private_url = format!("git@github.com:{}.git", private_repo);
    if !gh_repo_exists(&private_repo)? {
        println!("Creating private repo: {}", private_repo);
        let status = Command::new("gh")
            .args(["repo", "create", &private_repo, "--private"])
            .status()
            .context("failed to create private repo")?;
        if !status.success() {
            bail!("failed to create private repo {}", private_repo);
        }
    }
    set_origin_remote(repo_dir, &private_url)?;
    // Preserve the original repo as `upstream`; warn when a different one exists.
    let upstream_remote = git_remote_get(repo_dir, "upstream")?;
    if upstream_remote.is_none() {
        configure_upstream(repo_dir, origin_url)?;
    } else if upstream_remote.as_deref() != Some(origin_url) {
        println!(
            "⚠ upstream already set to {} (expected {})",
            upstream_remote.unwrap_or_default(),
            origin_url
        );
    }
    println!("✓ origin -> {}", private_repo);
    Ok(())
}
/// Configure the `upstream` remote on a git checkout unless one already
/// exists (non-git directories are ignored).
fn ensure_upstream(repo_dir: &Path, origin_url: &str) -> Result<()> {
    let is_git_repo = repo_dir.join(".git").exists();
    if is_git_repo && git_remote_get(repo_dir, "upstream")?.is_none() {
        configure_upstream(repo_dir, origin_url)?;
    }
    Ok(())
}
/// True when the `gh` CLI can be spawned (spawn failure means it is not on
/// PATH); the exit status of `gh --version` itself is not inspected.
fn gh_available() -> bool {
    let probe = Command::new("gh").arg("--version").output();
    probe.is_ok()
}
/// True when `gh auth status` exits successfully, i.e. an authenticated
/// session exists.
fn gh_authenticated() -> Result<bool> {
    let output = Command::new("gh").args(["auth", "status"]).output()?;
    Ok(output.status.success())
}
/// Fetch the authenticated GitHub login via `gh api user`. Returns an empty
/// string when gh prints nothing (callers treat that as "unknown user").
fn gh_username() -> Result<String> {
    let output = Command::new("gh")
        .args(["api", "user", "-q", ".login"])
        .output()
        .context("failed to get GitHub username")?;
    let login = String::from_utf8_lossy(&output.stdout);
    Ok(login.trim().to_string())
}
/// True when `gh repo view owner/name` succeeds, i.e. the repo exists and
/// is visible to the authenticated user.
fn gh_repo_exists(full_name: &str) -> Result<bool> {
    Command::new("gh")
        .args(["repo", "view", full_name])
        .output()
        .context("failed to check repo")
        .map(|out| out.status.success())
}
/// Return the URL of the named git remote, or `None` when the remote is
/// missing or git cannot be run in `repo_dir`.
fn git_remote_get(repo_dir: &Path, name: &str) -> Result<Option<String>> {
    let result = Command::new("git")
        .args(["remote", "get-url", name])
        .current_dir(repo_dir)
        .output();
    match result {
        Ok(out) if out.status.success() => {
            let url = String::from_utf8_lossy(&out.stdout).trim().to_string();
            Ok(Some(url))
        }
        _ => Ok(None),
    }
}
fn set_origin_remote(repo_dir: &Path, url: &str) -> Result<()> {
if git_remote_get(repo_dir, "origin")?.is_some() {
Command::new("git")
.args(["remote", "set-url", "origin", url])
.current_dir(repo_dir)
.status()
.context("failed to set origin")?;
} else {
Command::new("git")
.args(["remote", "add", "origin", url])
.current_dir(repo_dir)
.status()
.context("failed to add origin")?;
}
Ok(())
}
/// Run upstream setup from inside `repo_dir`.
///
/// `upstream::setup_upstream_with_depth` operates on the process's current
/// directory, so we temporarily chdir into the repo and restore the previous
/// cwd afterwards even when setup fails. NOTE(review): changing cwd is
/// process-global and not thread-safe — assumes single-threaded use here.
fn configure_upstream(repo_dir: &Path, upstream_url: &str) -> Result<()> {
    let cwd = std::env::current_dir().context("failed to capture current directory")?;
    std::env::set_current_dir(repo_dir)
        .with_context(|| format!("failed to enter {}", repo_dir.display()))?;
    let result = upstream::setup_upstream_with_depth(Some(upstream_url), None, None);
    // Restore the original cwd before propagating any setup error.
    if let Err(err) = std::env::set_current_dir(&cwd) {
        println!("warning: failed to restore working directory: {}", err);
    }
    result
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/projects.rs | src/projects.rs | use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use rusqlite::{Connection, params};
use serde::{Deserialize, Serialize};
use crate::cli::ActiveOpts;
use crate::{db, running};
/// Single project record.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectEntry {
    // Unique project name (primary key in the `projects` table).
    pub name: String,
    // Directory containing the project's flow.toml.
    pub project_root: PathBuf,
    // Canonicalized path to the flow.toml itself.
    pub config_path: PathBuf,
    // Last-registered timestamp, milliseconds since the Unix epoch.
    pub updated_ms: u128,
}
/// Persist the project name -> path mapping. Idempotent: re-registering an
/// existing name updates its paths and timestamp.
pub fn register_project(name: &str, config_path: &Path) -> Result<()> {
    // Canonicalize both paths, falling back to the raw paths when
    // canonicalization fails (e.g. the file does not exist yet).
    let canonical_config = config_path
        .canonicalize()
        .unwrap_or_else(|_| config_path.to_path_buf());
    let parent = config_path.parent().unwrap_or(Path::new("."));
    let project_root = parent
        .canonicalize()
        .unwrap_or_else(|_| parent.to_path_buf());
    let conn = open_db()?;
    create_schema(&conn)?;
    conn.execute(
        r#"
        INSERT INTO projects (name, project_root, config_path, updated_ms)
        VALUES (?1, ?2, ?3, ?4)
        ON CONFLICT(name) DO UPDATE SET
            project_root=excluded.project_root,
            config_path=excluded.config_path,
            updated_ms=excluded.updated_ms
        "#,
        params![
            name,
            project_root.to_string_lossy(),
            canonical_config.to_string_lossy(),
            running::now_ms() as i64
        ],
    )
    .context("failed to upsert project")?;
    Ok(())
}
/// Return the most recent entry for a given project name, if present.
pub fn resolve_project(name: &str) -> Result<Option<ProjectEntry>> {
    let conn = open_db()?;
    create_schema(&conn)?;
    let mut stmt = conn.prepare(
        "SELECT name, project_root, config_path, updated_ms FROM projects WHERE name = ?1",
    )?;
    let mut rows = stmt.query([name])?;
    let row = match rows.next()? {
        Some(row) => row,
        None => return Ok(None),
    };
    Ok(Some(ProjectEntry {
        name: row.get(0)?,
        project_root: PathBuf::from(row.get::<_, String>(1)?),
        config_path: PathBuf::from(row.get::<_, String>(2)?),
        updated_ms: row.get::<_, i64>(3)? as u128,
    }))
}
/// List all registered projects, ordered by most recently updated.
pub fn list_projects() -> Result<Vec<ProjectEntry>> {
    let conn = open_db()?;
    create_schema(&conn)?;
    let mut stmt = conn.prepare(
        "SELECT name, project_root, config_path, updated_ms FROM projects ORDER BY updated_ms DESC",
    )?;
    let entries = stmt
        .query_map([], |row| {
            Ok(ProjectEntry {
                name: row.get(0)?,
                project_root: PathBuf::from(row.get::<_, String>(1)?),
                config_path: PathBuf::from(row.get::<_, String>(2)?),
                updated_ms: row.get::<_, i64>(3)? as u128,
            })
        })?
        .collect::<Result<Vec<_>, _>>()?;
    Ok(entries)
}
/// Print all registered projects.
pub fn show_projects() -> Result<()> {
    let projects = list_projects()?;
    if projects.is_empty() {
        println!("No registered projects.");
        println!("Projects are registered when you run a task in a flow.toml with a 'name' field.");
        return Ok(());
    }
    println!("Registered projects:\n");
    for entry in &projects {
        println!(" {} ({})", entry.name, format_age(entry.updated_ms));
        println!(" {}", entry.project_root.display());
    }
    Ok(())
}
/// Render a millisecond timestamp as a coarse "Ns/Nm/Nh/Nd ago" age string
/// relative to now (clamped to 0 for future timestamps).
fn format_age(timestamp_ms: u128) -> String {
    let elapsed = ((running::now_ms().saturating_sub(timestamp_ms)) / 1000) as u64;
    match elapsed {
        s if s < 60 => format!("{}s ago", s),
        s if s < 3600 => format!("{}m ago", s / 60),
        s if s < 86400 => format!("{}h ago", s / 3600),
        s => format!("{}d ago", s / 86400),
    }
}
// Open the shared flow SQLite database (thin delegate to the `db` module).
fn open_db() -> Result<Connection> {
    db::open_db()
}
/// Create the `projects` table if it does not already exist. Idempotent;
/// called before every query so first use bootstraps the schema.
fn create_schema(conn: &Connection) -> Result<()> {
    const DDL: &str = r#"
        CREATE TABLE IF NOT EXISTS projects (
            name TEXT PRIMARY KEY,
            project_root TEXT NOT NULL,
            config_path TEXT NOT NULL,
            updated_ms INTEGER NOT NULL
        );
        "#;
    conn.execute_batch(DDL).context("failed to create schema")?;
    Ok(())
}
// ============================================================================
// Active Project
// ============================================================================
/// Path of the file storing the active project name:
/// `$HOME/.config/flow/active_project` (relative to `.` when HOME is unset).
fn active_project_path() -> PathBuf {
    let home = std::env::var_os("HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("."));
    home.join(".config/flow/active_project")
}
/// Set the active project name.
pub fn set_active_project(name: &str) -> Result<()> {
    let path = active_project_path();
    if let Some(dir) = path.parent() {
        fs::create_dir_all(dir).context("failed to create config dir")?;
    }
    fs::write(&path, name).context("failed to write active project")
}
/// Get the current active project name, if set (empty/whitespace-only
/// contents count as unset).
pub fn get_active_project() -> Option<String> {
    let contents = fs::read_to_string(active_project_path()).ok()?;
    let name = contents.trim().to_string();
    if name.is_empty() { None } else { Some(name) }
}
/// Clear the active project (a missing marker file is already "cleared").
pub fn clear_active_project() -> Result<()> {
    let path = active_project_path();
    if !path.exists() {
        return Ok(());
    }
    fs::remove_file(&path).context("failed to remove active project")
}
/// Handle the `f active` command: `--clear` resets, a project argument sets
/// the active project (after verifying it is registered), and no argument
/// prints the current one.
pub fn handle_active(opts: ActiveOpts) -> Result<()> {
    if opts.clear {
        clear_active_project()?;
        println!("Active project cleared.");
        return Ok(());
    }
    let Some(name) = opts.project else {
        // No argument: report the current state.
        match get_active_project() {
            Some(name) => println!("{}", name),
            None => println!("No active project set."),
        }
        return Ok(());
    };
    // Refuse to activate a project that was never registered.
    if resolve_project(&name)?.is_none() {
        anyhow::bail!(
            "Project '{}' not found. Use `f projects` to see registered projects.",
            name
        );
    }
    set_active_project(&name)?;
    println!("Active project set to: {}", name);
    Ok(())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/history.rs | src/history.rs | use std::{
fs::{self, OpenOptions},
io::Write,
path::{Path, PathBuf},
time::{SystemTime, UNIX_EPOCH},
};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
/// One recorded task invocation, serialized as a line of JSONL history.
#[derive(Serialize, Deserialize)]
pub struct InvocationRecord {
    // Wall-clock start time, milliseconds since the Unix epoch.
    pub timestamp_ms: u128,
    // Total run time in milliseconds (0 until the task finishes).
    pub duration_ms: u128,
    // Project root the task ran in (compared canonicalized on lookup).
    pub project_root: String,
    // Optional project name; `default` keeps older records deserializable.
    #[serde(default)]
    pub project_name: Option<String>,
    pub config_path: String,
    pub task_name: String,
    // Shell command that was executed.
    pub command: String,
    // Raw user input that triggered the task; `default` for older records.
    #[serde(default)]
    pub user_input: String,
    // Process exit code, when one was available.
    pub status: Option<i32>,
    pub success: bool,
    // Whether the task ran inside a flox environment.
    pub used_flox: bool,
    // Captured output of the invocation.
    pub output: String,
    // Flow version that wrote this record.
    pub flow_version: String,
}
impl InvocationRecord {
pub fn new(
project_root: impl Into<String>,
config_path: impl Into<String>,
project_name: Option<&str>,
task_name: impl Into<String>,
command: impl Into<String>,
user_input: impl Into<String>,
used_flox: bool,
) -> Self {
Self {
timestamp_ms: now_ms(),
duration_ms: 0,
project_root: project_root.into(),
project_name: project_name.map(|s| s.to_string()),
config_path: config_path.into(),
task_name: task_name.into(),
command: command.into(),
user_input: user_input.into(),
status: None,
success: false,
used_flox,
output: String::new(),
flow_version: env!("CARGO_PKG_VERSION").to_string(),
}
}
}
/// Append one invocation as a JSON line to the history file, creating the
/// directory and file on first use.
pub fn record(invocation: InvocationRecord) -> Result<()> {
    let path = history_path();
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create history dir {}", parent.display()))?;
    }
    let mut history_file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&path)
        .with_context(|| format!("failed to open history file {}", path.display()))?;
    let serialized =
        serde_json::to_string(&invocation).context("failed to serialize invocation")?;
    writeln!(history_file, "{serialized}").context("failed to write invocation to history")?;
    Ok(())
}
/// Print the most recent invocation with only the user input and the resulting output or error.
pub fn print_last_record() -> Result<()> {
    let path = history_path();
    let Some(rec) = load_last_record(&path)? else {
        if path.exists() {
            println!("No valid history entries found in {}", path.display());
        } else {
            println!("No history found at {}", path.display());
        }
        return Ok(());
    };
    // Fall back to the task name when no raw user input was captured.
    let user_input = if rec.user_input.trim().is_empty() {
        rec.task_name.clone()
    } else {
        rec.user_input.clone()
    };
    println!("{user_input}");
    if rec.output.trim().is_empty() {
        // No output: only report an error line for failed runs.
        if !rec.success {
            let status = rec
                .status
                .map(|s| s.to_string())
                .unwrap_or_else(|| "unknown".to_string());
            println!("error (status: {status})");
        }
        return Ok(());
    }
    print!("{}", rec.output);
    if !rec.output.ends_with('\n') {
        println!();
    }
    Ok(())
}
/// Print the most recent invocation with output and status.
pub fn print_last_record_full() -> Result<()> {
    let path = history_path();
    let Some(rec) = load_last_record(&path)? else {
        if path.exists() {
            println!("No valid history entries found in {}", path.display());
        } else {
            println!("No history found at {}", path.display());
        }
        return Ok(());
    };
    let outcome = if rec.success { "success" } else { "failure" };
    let code = rec
        .status
        .map(|s| s.to_string())
        .unwrap_or_else(|| "unknown".to_string());
    println!("task: {}", rec.task_name);
    println!("command: {}", rec.command);
    println!("project: {}", rec.project_root);
    if let Some(name) = rec.project_name.as_deref() {
        println!("project_name: {name}");
    }
    println!("config: {}", rec.config_path);
    println!("status: {} (code: {})", outcome, code);
    println!("duration_ms: {}", rec.duration_ms);
    println!("flow_version: {}", rec.flow_version);
    println!("--- output ---");
    print!("{}", rec.output);
    Ok(())
}
/// Scan the history file backwards and return the newest line that parses
/// as an `InvocationRecord`; blank and malformed lines are skipped.
fn load_last_record(path: &Path) -> Result<Option<InvocationRecord>> {
    if !path.exists() {
        return Ok(None);
    }
    let contents = fs::read_to_string(path)
        .with_context(|| format!("failed to read history at {}", path.display()))?;
    let newest = contents
        .lines()
        .rev()
        .filter(|line| !line.trim().is_empty())
        .find_map(|line| serde_json::from_str::<InvocationRecord>(line).ok());
    Ok(newest)
}
/// Load the last invocation record for a specific project root.
pub fn load_last_record_for_project(project_root: &Path) -> Result<Option<InvocationRecord>> {
    let path = history_path();
    if !path.exists() {
        return Ok(None);
    }
    // Records store the canonical root, so canonicalize before comparing.
    let canonical_root = project_root
        .canonicalize()
        .unwrap_or_else(|_| project_root.to_path_buf());
    let wanted = canonical_root.to_string_lossy();
    let contents = fs::read_to_string(&path)
        .with_context(|| format!("failed to read history at {}", path.display()))?;
    let found = contents
        .lines()
        .rev()
        .filter(|line| !line.trim().is_empty())
        .filter_map(|line| serde_json::from_str::<InvocationRecord>(line).ok())
        .find(|rec| rec.project_root == wanted);
    Ok(found)
}
/// Location of the JSONL history file: `$HOME/.config/flow/history.jsonl`
/// (relative to `.` when HOME is unset).
fn history_path() -> PathBuf {
    let home = std::env::var_os("HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("."));
    home.join(".config").join("flow").join("history.jsonl")
}
/// Milliseconds since the Unix epoch; 0 if the system clock reads before 1970.
fn now_ms() -> u128 {
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => 0,
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/terminal.rs | src/terminal.rs | use std::{
env, fs,
path::{Path, PathBuf},
process::Command,
};
use anyhow::{Context, Result, bail};
use which::which;
use crate::config::OptionsConfig;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
// Where tmux pane output logs accumulate (relative to $HOME).
const LOG_DIR_SUFFIX: &str = ".flow/tmux-logs";
// Where fish command start/end metadata lines are written (relative to $HOME).
const META_DIR_SUFFIX: &str = ".flow/tty-meta";
// Helper script that turns on pipe-pane logging for a tmux pane.
const SCRIPT_PATH_SUFFIX: &str = ".config/flow/tmux-enable-tracing.sh";
// Auto-loaded fish config that installs the tracing hooks.
const FISH_CONF_SUFFIX: &str = ".config/fish/conf.d/flow-trace.fish";
/// Enable terminal I/O tracing when the config asks for it. Both setup
/// steps are best-effort: failures are logged as warnings, never propagated.
pub fn maybe_enable_terminal_tracing(options: &OptionsConfig) {
    if !options.trace_terminal_io {
        return;
    }
    let tmux_result = enforce_tmux_logging();
    if let Err(err) = tmux_result {
        tracing::warn!(?err, "failed to enable tmux-based terminal tracing");
    }
    let fish_result = install_fish_hooks();
    if let Err(err) = fish_result {
        tracing::warn!(?err, "failed to install fish tracing hooks");
    }
}
/// Turn on tmux-based terminal output logging.
///
/// Steps run in order: verify tmux exists, create the log directory, write
/// the pipe-pane helper script, start the tmux server, install hooks for
/// future panes, then prime logging on panes that already exist.
fn enforce_tmux_logging() -> Result<()> {
    if which("tmux").is_err() {
        tracing::info!("tmux not found on PATH; skipping terminal IO tracing");
        return Ok(());
    }
    let home = home_dir();
    let log_dir = home.join(LOG_DIR_SUFFIX);
    fs::create_dir_all(&log_dir)
        .with_context(|| format!("failed to create tmux log dir {}", log_dir.display()))?;
    let script_path = home.join(SCRIPT_PATH_SUFFIX);
    write_enable_script(&script_path, &log_dir)?;
    // Hooks can only be installed once the server is running.
    run_tmux(&["start-server"], "start tmux server for tracing")?;
    install_hooks(&script_path)?;
    prime_existing_panes(&script_path)?;
    tracing::info!(dir = %log_dir.display(), "tmux terminal tracing enabled");
    Ok(())
}
/// Write the fish tracing config and its metadata directory. No-op when
/// fish is not installed.
fn install_fish_hooks() -> Result<()> {
    if which("fish").is_err() {
        tracing::debug!("fish not found on PATH; skipping fish hook installation");
        return Ok(());
    }
    let home = home_dir();
    let meta_dir = home.join(META_DIR_SUFFIX);
    fs::create_dir_all(&meta_dir)
        .with_context(|| format!("failed to create fish meta dir {}", meta_dir.display()))?;
    write_fish_conf(&home.join(FISH_CONF_SUFFIX), &meta_dir)
}
/// Register the enable script on tmux hooks that fire when panes/sessions
/// appear, so newly created panes start logging automatically.
fn install_hooks(script_path: &Path) -> Result<()> {
    let hooks = ["pane-add", "client-session-changed", "session-created"];
    let script_cmd = format!("run-shell {}", sh_quote(script_path));
    for hook in hooks {
        run_tmux(
            &["set-hook", "-g", hook, &script_cmd],
            "install tmux tracing hook",
        )?;
    }
    Ok(())
}
/// Turn on logging for panes that already exist; later panes are covered by
/// the installed hooks. A failure to list panes is non-fatal.
fn prime_existing_panes(script_path: &Path) -> Result<()> {
    let listing = Command::new("tmux")
        .args(["list-panes", "-a", "-F", "#{pane_id}"])
        .output();
    let output = match listing {
        Ok(out) if out.status.success() => out,
        Ok(_) => return Ok(()), // No panes yet; hooks will handle future ones.
        Err(err) => {
            tracing::warn!(?err, "unable to list tmux panes for tracing bootstrap");
            return Ok(());
        }
    };
    let script_cmd = sh_quote(script_path);
    let stdout = String::from_utf8_lossy(&output.stdout);
    for pane in stdout.lines().map(str::trim).filter(|p| !p.is_empty()) {
        let run_shell_cmd = format!("{script_cmd} {pane}");
        run_tmux(&["run-shell", &run_shell_cmd], "prime tmux pane tracing")?;
    }
    Ok(())
}
/// Write the shell helper that enables tmux `pipe-pane` logging for a pane.
///
/// The script appends the pane's output to a per-pane log file under
/// `log_dir`; the target pane id comes from the first script argument.
fn write_enable_script(script_path: &Path, log_dir: &Path) -> Result<()> {
    if let Some(parent) = script_path.parent() {
        fs::create_dir_all(parent).with_context(|| {
            format!(
                "failed to create directory for tmux tracing script {}",
                parent.display()
            )
        })?;
    }
    // `{log_dir}` is shell-quoted; `{{...}}` renders literal braces so the
    // shell/tmux `${...}` and `#{...}` expansions survive `format!`.
    let contents = format!(
        r#"#!/bin/sh
set -e
LOG_DIR={log_dir}
mkdir -p "$LOG_DIR"
TARGET="${{1:-!}}"
tmux pipe-pane -o -t "$TARGET" "cat >>${{LOG_DIR}}/pane-#{{session_name}}-#{{window_index}}-#{{pane_index}}.log"
"#,
        log_dir = sh_quote(log_dir)
    );
    fs::write(script_path, contents).with_context(|| {
        format!(
            "failed to write tmux tracing helper to {}",
            script_path.display()
        )
    })?;
    // Scripts invoked via tmux run-shell must be executable.
    #[cfg(unix)]
    fs::set_permissions(script_path, fs::Permissions::from_mode(0o755)).with_context(|| {
        format!(
            "failed to mark tmux tracing script executable at {}",
            script_path.display()
        )
    })?;
    Ok(())
}
/// Install the fish tracing hooks config at `conf_path`.
///
/// The generated config (a) auto-execs interactive non-tmux shells into a
/// tmux session unless `FLOW_SKIP_AUTO_TMUX` is set, and (b) logs command
/// start/end events (with escape-sequence markers) into per-pane files under
/// `meta_dir`. The file is only rewritten when its contents change.
fn write_fish_conf(conf_path: &Path, meta_dir: &Path) -> Result<()> {
    // %META_DIR% is substituted with the real metadata directory below.
    const CONTENTS: &str = r#"if status --is-interactive
if not set -q TMUX
if not set -q FLOW_SKIP_AUTO_TMUX
if type -q tmux
set -l __flow_trace_tmux_session "flow"
if set -q FLOW_AUTO_TMUX_SESSION
set __flow_trace_tmux_session $FLOW_AUTO_TMUX_SESSION
end
exec tmux new-session -A -s $__flow_trace_tmux_session
end
end
end
end
set -g __flow_trace_meta_dir "%META_DIR%"
mkdir -p $__flow_trace_meta_dir
function __flow_trace_preexec --on-event fish_preexec
set -l id (uuidgen)
set -gx FLOW_CMD_ID $id
set -l ts (date -Ins)
set -l cmd (string join ' ' $argv)
set -l pane (set -q TMUX_PANE; and echo $TMUX_PANE; or echo "nopane")
set -l cwd (pwd)
set -l cwd_b64 (printf "%s" $cwd | base64)
set -l cmd_b64 (printf "%s" $cmd | base64)
printf "\e]133;A;flow-cmd-start;%s\a" $id
printf "start %s %s %s %s\n" $ts $id $cwd_b64 $cmd_b64 >> $__flow_trace_meta_dir/$pane.log
end
function __flow_trace_postexec --on-event fish_postexec
set -l ts (date -Ins)
set -l pane (set -q TMUX_PANE; and echo $TMUX_PANE; or echo "nopane")
printf "\e]133;B;flow-cmd-end;%s;%s\a" $FLOW_CMD_ID $status
printf "end %s %s %s\n" $ts $FLOW_CMD_ID $status >> $__flow_trace_meta_dir/$pane.log
end
"#;
    let rendered = CONTENTS.replace("%META_DIR%", &meta_dir.to_string_lossy());
    if let Some(parent) = conf_path.parent() {
        fs::create_dir_all(parent).with_context(|| {
            format!(
                "failed to create directory for fish tracing conf {}",
                parent.display()
            )
        })?;
    }
    // Avoid rewriting if unchanged to keep user shells happy.
    if let Ok(existing) = fs::read_to_string(conf_path) {
        if existing == rendered {
            return Ok(());
        }
    }
    fs::write(conf_path, rendered).with_context(|| {
        format!(
            "failed to write fish tracing hooks to {}",
            conf_path.display()
        )
    })
}
/// Run a tmux subcommand, converting both spawn failures and non-zero exits
/// into errors tagged with `context` (a short description of the attempt).
fn run_tmux(args: &[&str], context: &str) -> Result<()> {
    let status = Command::new("tmux")
        .args(args)
        .status()
        .with_context(|| format!("failed to execute tmux to {context}"))?;
    if !status.success() {
        bail!(
            "tmux exited with status {} while attempting to {context}",
            status.code().unwrap_or(-1)
        );
    }
    Ok(())
}
/// Render `path` as a single-quoted POSIX shell word, escaping embedded
/// single quotes with the standard `'\''` idiom.
fn sh_quote(path: &Path) -> String {
    let raw = path.to_string_lossy();
    format!("'{}'", raw.replace('\'', r"'\''"))
}
/// Best-effort home directory: `$HOME` when set, otherwise the current
/// directory (`.`) as a fallback.
fn home_dir() -> PathBuf {
    match env::var_os("HOME") {
        Some(home) => PathBuf::from(home),
        None => PathBuf::from("."),
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/flox.rs | src/flox.rs | use std::{
collections::BTreeMap,
fs,
path::{Path, PathBuf},
process::{Command, Stdio},
};
use anyhow::{Context, Result, bail};
use serde::Serialize;
use crate::config::FloxInstallSpec;
/// Schema version written into the generated flox `manifest.toml`.
const MANIFEST_VERSION: u8 = 1;
/// Schema version written into the generated `env.json` pointer files.
const ENV_VERSION: u8 = 1;
/// Paths needed to invoke `flox activate` for a generated manifest.
#[derive(Clone, Debug)]
pub struct FloxEnv {
    /// Project directory whose `.flox/` subtree holds the environment.
    pub project_root: PathBuf,
    /// Location of the rendered `manifest.toml`.
    pub manifest_path: PathBuf,
    /// Location of the `manifest.lock` produced by `flox lock-manifest`.
    pub lockfile_path: PathBuf,
}
/// Serialized shape of `manifest.toml`: a version plus the install table.
#[derive(Serialize)]
struct ManifestFile {
    version: u8,
    install: BTreeMap<String, FloxInstallSpec>,
}
/// Serialized shape of `env.json`, pointing flox at a manifest/lockfile pair.
#[derive(Serialize)]
struct EnvJson {
    version: u8,
    manifest: String,
    lockfile: String,
}
/// Ensure a flox manifest exists for the given packages and return the paths to use.
///
/// Renders `manifest.toml` under `<project_root>/.flox/env/`, regenerates
/// the lockfile via `flox lock-manifest` when the manifest changed (or the
/// lock is missing), and refreshes both `env.json` pointer files.
/// Errors when `packages` is empty or `flox` is not on PATH.
pub fn ensure_env(project_root: &Path, packages: &[(String, FloxInstallSpec)]) -> Result<FloxEnv> {
    if packages.is_empty() {
        bail!("flox environment requested without any packages");
    }
    let flox_bin = which::which("flox")
        .context("flox is required to use [deps]; install flox and ensure it is on PATH")?;
    // Layout: <root>/.flox/env/{manifest.toml,manifest.lock}
    let env_dir = project_root.join(".flox").join("env");
    let manifest_path = env_dir.join("manifest.toml");
    let lockfile_path = env_dir.join("manifest.lock");
    fs::create_dir_all(&env_dir)
        .with_context(|| format!("failed to create flox env directory {}", env_dir.display()))?;
    let manifest_toml = render_manifest(packages)?;
    // Only rewrite the manifest when its contents actually changed.
    let manifest_changed = write_if_changed(&manifest_path, &manifest_toml)?;
    // Produce a lockfile so flox activations don't need to mutate state.
    if manifest_changed || !lockfile_path.exists() {
        // `flox lock-manifest` emits the lock contents on stdout.
        let output = Command::new(&flox_bin)
            .arg("lock-manifest")
            .arg(&manifest_path)
            .output()
            .with_context(|| "failed to run 'flox lock-manifest'")?;
        if output.status.success() {
            write_if_changed(
                &lockfile_path,
                String::from_utf8_lossy(&output.stdout).as_ref(),
            )?;
        } else {
            let stderr = String::from_utf8_lossy(&output.stderr);
            bail!("flox lock-manifest failed: {}", stderr.trim());
        }
    }
    // Keep both env.json pointer files in sync with the manifest pair.
    write_env_json(project_root, &manifest_path, &lockfile_path)?;
    Ok(FloxEnv {
        project_root: project_root.to_path_buf(),
        manifest_path,
        lockfile_path,
    })
}
/// Run a shell command inside the prepared flox environment.
pub fn run_in_env(env: &FloxEnv, workdir: &Path, command: &str) -> Result<()> {
write_env_json(&env.project_root, &env.manifest_path, &env.lockfile_path)?;
let flox_bin = which::which("flox").context("flox is required to run tasks with flox deps")?;
let status = Command::new(&flox_bin)
.arg("activate")
.arg("-d")
.arg(&env.project_root)
.arg("--")
.arg("/bin/sh")
.arg("-c")
.arg(command)
.current_dir(workdir)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.status()
.with_context(|| "failed to spawn flox activate for task")?;
if status.success() {
return Ok(());
}
tracing::debug!(
status = ?status.code(),
"flox activate failed; running task with host PATH"
);
run_on_host(workdir, command)
}
/// Write the two `env.json` pointer files flox expects: one at the top of
/// `.flox/` (relative paths) and one next to the manifest (absolute paths).
/// Both writes are skipped when the on-disk contents already match.
fn write_env_json(project_root: &Path, manifest_path: &Path, lockfile_path: &Path) -> Result<()> {
    let flox_root = project_root.join(".flox");
    let top_level = flox_root.join("env.json");
    let nested = flox_root.join("env").join("env.json");
    // Nested env.json carries the concrete (absolute) file locations.
    let nested_json = EnvJson {
        version: ENV_VERSION,
        manifest: manifest_path.to_string_lossy().to_string(),
        lockfile: lockfile_path.to_string_lossy().to_string(),
    };
    // top-level env.json with relative paths for flox CLI expectations
    let top_level_json = EnvJson {
        version: ENV_VERSION,
        manifest: "env/manifest.toml".to_string(),
        lockfile: "env/manifest.lock".to_string(),
    };
    if let Some(parent) = top_level.parent() {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create {}", parent.display()))?;
    }
    if let Some(parent) = nested.parent() {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create {}", parent.display()))?;
    }
    let top_level_contents = serde_json::to_string_pretty(&top_level_json)
        .context("failed to render top-level env.json")?;
    let nested_contents =
        serde_json::to_string_pretty(&nested_json).context("failed to render nested env.json")?;
    // write_if_changed avoids churning mtimes when nothing changed.
    write_if_changed(&top_level, &top_level_contents)?;
    write_if_changed(&nested, &nested_contents)?;
    Ok(())
}
/// Execute `command` via `/bin/sh -c` from `workdir` with no managed
/// environment; stdio is inherited from the parent process.
fn run_on_host(workdir: &Path, command: &str) -> Result<()> {
    let host_status = Command::new("/bin/sh")
        .arg("-c")
        .arg(command)
        .current_dir(workdir)
        .status()
        .with_context(|| "failed to spawn command without managed env")?;
    // Bail early on a non-zero exit; success falls through.
    if !host_status.success() {
        bail!(
            "command exited with status {}",
            host_status.code().unwrap_or(-1)
        );
    }
    Ok(())
}
/// Serialize the `(name, spec)` pairs into flox `manifest.toml` TOML text.
/// Entries land in a `BTreeMap`, so the output is sorted by package name.
fn render_manifest(packages: &[(String, FloxInstallSpec)]) -> Result<String> {
    // Collect straight into the ordered map instead of inserting in a loop.
    let install: BTreeMap<String, FloxInstallSpec> = packages.iter().cloned().collect();
    let manifest = ManifestFile {
        version: MANIFEST_VERSION,
        install,
    };
    toml::to_string_pretty(&manifest).context("failed to render flox manifest")
}
fn write_if_changed(path: &Path, contents: &str) -> Result<bool> {
let needs_write = fs::read_to_string(path).map_or(true, |existing| existing != contents);
if needs_write {
fs::write(path, contents).with_context(|| format!("failed to write {}", path.display()))?;
}
Ok(needs_write)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Renders a fully-populated install spec and spot-checks the TOML output;
    // the asserts pin the expected kebab-case key names.
    #[test]
    fn manifest_renders_with_full_descriptor() {
        let deps = vec![(
            "ripgrep".to_string(),
            FloxInstallSpec {
                pkg_path: "ripgrep".into(),
                pkg_group: Some("tools".into()),
                version: Some("14".into()),
                systems: Some(vec!["x86_64-darwin".into()]),
                priority: Some(10),
            },
        )];
        let rendered = render_manifest(&deps).expect("render manifest");
        assert!(rendered.contains("version = 1"));
        assert!(rendered.contains("[install.ripgrep]"));
        assert!(rendered.contains(r#"pkg-path = "ripgrep""#));
        assert!(rendered.contains(r#"pkg-group = "tools""#));
        assert!(rendered.contains(r#"version = "14""#));
        assert!(rendered.contains(r#"priority = 10"#));
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/upstream.rs | src/upstream.rs | //! Upstream fork management.
//!
//! Provides automated workflows for managing forks with upstream repositories.
//! - `f upstream setup` - Configure upstream remote and local tracking branch
//! - `f upstream pull` - Pull changes from upstream into local branch
//! - `f upstream sync` - Full sync: pull upstream, merge to dev, merge to main, push
use std::process::{Command, Stdio};
use anyhow::{Context, Result, bail};
use crate::cli::{UpstreamAction, UpstreamCommand};
/// Run the upstream subcommand.
pub fn run(cmd: UpstreamCommand) -> Result<()> {
    // Default to showing status when no action was given on the CLI.
    let action = cmd.action.unwrap_or(UpstreamAction::Status);
    match action {
        UpstreamAction::Status => show_status(),
        UpstreamAction::Setup { upstream_url, upstream_branch } => {
            setup_upstream(upstream_url.as_deref(), upstream_branch.as_deref())
        }
        UpstreamAction::Pull { branch } => pull_upstream(branch.as_deref()),
        UpstreamAction::Sync { no_push } => sync_upstream(!no_push),
    }
}
/// Set up upstream remote and local tracking branch, with optional fetch depth.
pub fn setup_upstream_with_depth(
    upstream_url: Option<&str>,
    upstream_branch: Option<&str>,
    depth: Option<u32>,
) -> Result<()> {
    setup_upstream_internal(upstream_url, upstream_branch, depth)
}
/// Show current upstream configuration status.
///
/// Purely informational: prints remote URLs, whether the local 'upstream'
/// branch exists, the current branch, and suggested next commands.
fn show_status() -> Result<()> {
    println!("Upstream Fork Status\n");
    // Check for upstream remote
    let upstream_url = git_capture(&["remote", "get-url", "upstream"]).ok();
    let origin_url = git_capture(&["remote", "get-url", "origin"]).ok();
    if let Some(url) = &upstream_url {
        println!("✓ upstream remote: {}", url.trim());
    } else {
        println!("✗ upstream remote: not configured");
    }
    if let Some(url) = &origin_url {
        println!("✓ origin remote: {}", url.trim());
    }
    // Check for local upstream branch
    let has_upstream_branch = git_capture(&["rev-parse", "--verify", "refs/heads/upstream"]).is_ok();
    if has_upstream_branch {
        let tracking = git_capture(&["config", "--get", "branch.upstream.remote"])
            .ok()
            .map(|s| s.trim().to_string());
        println!("✓ local 'upstream' branch: exists (tracks {:?})", tracking);
    } else {
        println!("✗ local 'upstream' branch: not created");
    }
    // Current branch
    let current = git_capture(&["rev-parse", "--abbrev-ref", "HEAD"])
        .ok()
        .map(|s| s.trim().to_string())
        .unwrap_or_else(|| "unknown".to_string());
    println!("\nCurrent branch: {}", current);
    // Show divergence if upstream exists
    if upstream_url.is_some() {
        println!("\nTo set up: f upstream setup");
        println!("To pull: f upstream pull");
        println!("To sync: f upstream sync");
    } else {
        println!("\nTo set up upstream:");
        println!(" f upstream setup --url <upstream-repo-url>");
        println!(" f upstream setup --url https://github.com/original/repo");
    }
    Ok(())
}
/// Set up upstream remote and local tracking branch.
/// Thin wrapper over `setup_upstream_internal` with no fetch-depth limit.
fn setup_upstream(upstream_url: Option<&str>, upstream_branch: Option<&str>) -> Result<()> {
    setup_upstream_internal(upstream_url, upstream_branch, None)
}
/// Core of `f upstream setup`: ensure the 'upstream' remote exists, fetch
/// it (optionally shallow via `depth`), pick the upstream branch to track
/// (explicit arg > remote HEAD > main > master), then create or force-update
/// a local 'upstream' branch pointing at it and record tracking config.
fn setup_upstream_internal(
    upstream_url: Option<&str>,
    upstream_branch: Option<&str>,
    depth: Option<u32>,
) -> Result<()> {
    // Check if upstream remote exists
    let has_upstream = git_capture(&["remote", "get-url", "upstream"]).is_ok();
    if !has_upstream {
        if let Some(url) = upstream_url {
            println!("Adding upstream remote: {}", url);
            git_run(&["remote", "add", "upstream", url])?;
        } else {
            // Try to detect from origin
            if let Ok(origin_url) = git_capture(&["remote", "get-url", "origin"]) {
                // No URL supplied: explain how to provide one and stop cleanly.
                println!("No upstream remote configured.");
                println!("Current origin: {}", origin_url.trim());
                println!("\nTo add upstream, run:");
                println!(" f upstream setup --url <original-repo-url>");
                return Ok(());
            }
            bail!("No upstream remote. Use: f upstream setup --url <upstream-repo-url>");
        }
    } else {
        let url = git_capture(&["remote", "get-url", "upstream"])?;
        println!("✓ upstream remote exists: {}", url.trim());
    }
    // Fetch upstream
    println!("\nFetching upstream...");
    if let Some(depth) = depth {
        let depth_str = depth.to_string();
        git_run(&["fetch", "upstream", "--prune", "--depth", &depth_str])?;
    } else {
        git_run(&["fetch", "upstream", "--prune"])?;
    }
    // Determine upstream branch (explicit > HEAD > main > master)
    let upstream_branch = if let Some(branch) = upstream_branch {
        branch.to_string()
    } else if let Ok(head_ref) = git_capture(&["symbolic-ref", "refs/remotes/upstream/HEAD"]) {
        head_ref.trim().replace("refs/remotes/upstream/", "")
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/main"]).is_ok() {
        "main".to_string()
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/master"]).is_ok() {
        "master".to_string()
    } else {
        // List available branches
        let branches = git_capture(&["branch", "-r", "--list", "upstream/*"])?;
        println!("Cannot auto-detect upstream branch.");
        println!("Available upstream branches:");
        for line in branches.lines() {
            println!(" {}", line.trim());
        }
        bail!("Specify branch with: f upstream setup --branch <branch-name>");
    };
    // Check if upstream branch exists on remote
    let remote_ref = format!("refs/remotes/upstream/{}", upstream_branch);
    if git_capture(&["rev-parse", "--verify", &remote_ref]).is_err() {
        let branches = git_capture(&["branch", "-r", "--list", "upstream/*"])?;
        println!("Branch 'upstream/{}' not found.", upstream_branch);
        println!("Available upstream branches:");
        for line in branches.lines() {
            println!(" {}", line.trim());
        }
        bail!("Specify branch with: f upstream setup --branch <branch-name>");
    }
    // Create or update local upstream branch
    let local_upstream_exists = git_capture(&["rev-parse", "--verify", "refs/heads/upstream"]).is_ok();
    let upstream_ref = format!("upstream/{}", upstream_branch);
    if local_upstream_exists {
        println!("Updating local 'upstream' branch to match {}...", upstream_ref);
        let current = git_capture(&["rev-parse", "--abbrev-ref", "HEAD"])?;
        let current = current.trim();
        if current == "upstream" {
            // Already on upstream, just reset
            git_run(&["reset", "--hard", &upstream_ref])?;
        } else {
            // Update without switching
            git_run(&["branch", "-f", "upstream", &upstream_ref])?;
        }
    } else {
        println!("Creating local 'upstream' branch tracking {}...", upstream_ref);
        git_run(&["branch", "upstream", &upstream_ref])?;
    }
    // Set up tracking
    git_run(&["config", "branch.upstream.remote", "upstream"])?;
    git_run(&["config", "branch.upstream.merge", &format!("refs/heads/{}", upstream_branch)])?;
    println!("\n✓ Upstream setup complete!");
    println!("\nWorkflow:");
    println!(" 1. f upstream pull - Pull latest from upstream into 'upstream' branch");
    println!(" 2. f upstream sync - Pull, merge to dev/main, and push");
    println!("\nThe local 'upstream' branch is a clean snapshot of the original repo.");
    println!("Your changes stay on dev/main, making merges cleaner.");
    Ok(())
}
/// Pull changes from upstream into the local upstream branch.
///
/// Fetches the upstream remote, refreshes the local 'upstream' snapshot
/// branch, and — when `target_branch` is given — merges 'upstream' into it
/// (fast-forward preferred). Dirty working trees are auto-stashed and
/// restored afterwards.
fn pull_upstream(target_branch: Option<&str>) -> Result<()> {
    // Check upstream remote exists
    if git_capture(&["remote", "get-url", "upstream"]).is_err() {
        bail!("No upstream remote. Run: f upstream setup --url <url>");
    }
    // Fetch upstream
    println!("Fetching upstream...");
    git_run(&["fetch", "upstream", "--prune"])?;
    // Determine the upstream branch to track (check config, then HEAD, then try main/master)
    let upstream_branch = if let Ok(merge_ref) = git_capture(&["config", "--get", "branch.upstream.merge"]) {
        merge_ref.trim().replace("refs/heads/", "")
    } else if let Ok(head_ref) = git_capture(&["symbolic-ref", "refs/remotes/upstream/HEAD"]) {
        // Parse "refs/remotes/upstream/master" -> "master"
        head_ref.trim().replace("refs/remotes/upstream/", "")
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/main"]).is_ok() {
        "main".to_string()
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/master"]).is_ok() {
        "master".to_string()
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/dev"]).is_ok() {
        "dev".to_string()
    } else {
        bail!("Cannot determine upstream branch. Run: f upstream setup --branch <branch>");
    };
    let upstream_ref = format!("upstream/{}", upstream_branch);
    // Update local upstream branch
    let current = git_capture(&["rev-parse", "--abbrev-ref", "HEAD"])?;
    let current = current.trim();
    // Check for uncommitted changes and stash if needed
    let mut stashed = false;
    let stash_count_before = git_capture(&["stash", "list"])
        .map(|s| s.lines().count())
        .unwrap_or(0);
    let status = git_capture(&["status", "--porcelain"])?;
    if !status.trim().is_empty() {
        println!("Stashing local changes...");
        let _ = git_run(&["stash", "push", "-m", "upstream-pull auto-stash"]);
        // Check if stash actually added an entry
        let stash_count_after = git_capture(&["stash", "list"])
            .map(|s| s.lines().count())
            .unwrap_or(0);
        stashed = stash_count_after > stash_count_before;
    }
    // Update local upstream branch
    let local_upstream_exists = git_capture(&["rev-parse", "--verify", "refs/heads/upstream"]).is_ok();
    if local_upstream_exists {
        if current == "upstream" {
            // Sitting on 'upstream': hard-reset it to the remote ref.
            git_run(&["reset", "--hard", &upstream_ref])?;
        } else {
            // Otherwise force-move the branch pointer without switching.
            git_run(&["branch", "-f", "upstream", &upstream_ref])?;
        }
        println!("✓ Updated local 'upstream' branch to {}", upstream_ref);
    } else {
        git_run(&["branch", "upstream", &upstream_ref])?;
        println!("✓ Created local 'upstream' branch from {}", upstream_ref);
    }
    // Optionally merge into target branch
    if let Some(target) = target_branch {
        println!("\nMerging upstream into {}...", target);
        if current != target {
            git_run(&["checkout", target])?;
        }
        if git_run(&["merge", "--ff-only", "upstream"]).is_err() {
            println!("Fast-forward failed, trying regular merge...");
            if let Err(e) = git_run(&["merge", "upstream", "--no-edit"]) {
                // Merge conflict: tell the user where their stash went.
                if stashed {
                    println!("Your changes are stashed. Run 'git stash pop' after resolving.");
                }
                return Err(e);
            }
        }
        println!("✓ Merged upstream into {}", target);
        // Return to original branch if different
        if current != target && current != "upstream" {
            git_run(&["checkout", current])?;
        }
    }
    // Restore stashed changes
    if stashed {
        println!("Restoring stashed changes...");
        git_run(&["stash", "pop"])?;
    }
    // Show what changed
    let behind = git_capture(&["rev-list", "--count", &format!("HEAD..{}", upstream_ref)])
        .ok()
        .and_then(|s| s.trim().parse::<u32>().ok())
        .unwrap_or(0);
    if behind > 0 {
        println!("\nYour branch is {} commit(s) behind upstream.", behind);
        println!("Run 'f upstream sync' to merge and push.");
    } else {
        println!("\n✓ Up to date with upstream!");
    }
    Ok(())
}
/// Full sync: pull upstream, merge to dev, merge to main, push.
///
/// Flow: fetch upstream → refresh the local 'upstream' snapshot branch →
/// merge through whichever of dev/main exist (upstream→dev→main, or
/// upstream→main, or upstream→current) → optionally push to origin
/// (auto-creating the repo via `gh` on first push) → restore the original
/// branch and any auto-stashed changes.
fn sync_upstream(push: bool) -> Result<()> {
    // Check upstream remote exists
    if git_capture(&["remote", "get-url", "upstream"]).is_err() {
        bail!("No upstream remote. Run: f upstream setup --url <url>");
    }
    let current = git_capture(&["rev-parse", "--abbrev-ref", "HEAD"])?;
    let current = current.trim().to_string();
    // Check for uncommitted changes and stash if needed
    let mut stashed = false;
    let stash_count_before = git_capture(&["stash", "list"])
        .map(|s| s.lines().count())
        .unwrap_or(0);
    let status = git_capture(&["status", "--porcelain"])?;
    if !status.trim().is_empty() {
        println!("Stashing local changes...");
        let _ = git_run(&["stash", "push", "-m", "upstream-sync auto-stash"]);
        // Check if stash actually added an entry
        let stash_count_after = git_capture(&["stash", "list"])
            .map(|s| s.lines().count())
            .unwrap_or(0);
        stashed = stash_count_after > stash_count_before;
    }
    // Fetch upstream
    println!("==> Fetching upstream...");
    git_run(&["fetch", "upstream", "--prune"])?;
    // Determine upstream branch (check config, then HEAD, then try main/master)
    let upstream_branch = if let Ok(merge_ref) = git_capture(&["config", "--get", "branch.upstream.merge"]) {
        merge_ref.trim().replace("refs/heads/", "")
    } else if let Ok(head_ref) = git_capture(&["symbolic-ref", "refs/remotes/upstream/HEAD"]) {
        // Parse "refs/remotes/upstream/master" -> "master"
        head_ref.trim().replace("refs/remotes/upstream/", "")
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/main"]).is_ok() {
        "main".to_string()
    } else if git_capture(&["rev-parse", "--verify", "refs/remotes/upstream/master"]).is_ok() {
        "master".to_string()
    } else {
        "main".to_string()
    };
    let upstream_ref = format!("upstream/{}", upstream_branch);
    // Update local upstream branch
    println!("==> Updating local 'upstream' branch...");
    let local_upstream_exists = git_capture(&["rev-parse", "--verify", "refs/heads/upstream"]).is_ok();
    if local_upstream_exists {
        git_run(&["branch", "-f", "upstream", &upstream_ref])?;
    } else {
        git_run(&["branch", "upstream", &upstream_ref])?;
    }
    // Detect branch structure (dev+main or just main)
    let has_dev = git_capture(&["rev-parse", "--verify", "refs/heads/dev"]).is_ok();
    let has_main = git_capture(&["rev-parse", "--verify", "refs/heads/main"]).is_ok();
    if has_dev {
        // Merge upstream -> dev -> main
        println!("==> Merging upstream into dev...");
        git_run(&["checkout", "dev"])?;
        merge_branch("upstream", "dev")?;
        if has_main {
            println!("==> Merging dev into main...");
            git_run(&["checkout", "main"])?;
            merge_branch("dev", "main")?;
        }
    } else if has_main {
        // Just merge upstream -> main
        println!("==> Merging upstream into main...");
        git_run(&["checkout", "main"])?;
        merge_branch("upstream", "main")?;
    } else {
        // Merge into current branch.
        // FIX: these call sites previously contained the mojibake token
        // `¤t` (an HTML-entity-mangled `&current`), which is not
        // valid Rust; restored to `&current`.
        println!("==> Merging upstream into {}...", current);
        git_run(&["checkout", &current])?;
        merge_branch("upstream", &current)?;
    }
    // Push if requested
    if push {
        println!("==> Pushing to origin...");
        // Try push, auto-create repo if it doesn't exist
        let branches_to_push: Vec<&str> = if has_dev && has_main {
            vec!["dev", "main"]
        } else if has_main {
            vec!["main"]
        } else if has_dev {
            vec!["dev"]
        } else {
            vec![current.as_str()]
        };
        for branch in &branches_to_push {
            if let Err(e) = git_run(&["push", "origin", branch]) {
                // Check if it's a repo-not-found error
                if try_create_origin_repo()? {
                    // Repo created, retry push
                    git_run(&["push", "-u", "origin", branch])?;
                } else {
                    return Err(e);
                }
            }
        }
    }
    // Return to original branch
    if current != "main" && current != "dev" {
        git_run(&["checkout", &current])?;
    }
    // Restore stashed changes
    if stashed {
        println!("Restoring stashed changes...");
        git_run(&["stash", "pop"])?;
    }
    println!("\n✓ Sync complete!");
    if has_dev && has_main {
        println!(" upstream, dev, and main are updated.");
    } else if has_main {
        println!(" upstream and main are updated.");
    }
    Ok(())
}
/// Merge `source` into the currently checked-out branch; `target` is only
/// used to name the branch in the conflict error message.
///
/// Tries a fast-forward first (keeps history linear), falls back to a
/// regular merge, and surfaces manual-resolution steps on conflict.
fn merge_branch(source: &str, target: &str) -> Result<()> {
    // Try fast-forward first
    if git_run(&["merge", "--ff-only", source]).is_ok() {
        return Ok(());
    }
    println!("Fast-forward failed, trying regular merge...");
    // `if let Err(_) = …` replaced with the clippy-clean `is_err()` form.
    if git_run(&["merge", source, "--no-edit"]).is_err() {
        bail!(
            "Merge conflicts in {}. Resolve manually:\n git status\n # fix conflicts\n git add . && git commit",
            target
        );
    }
    Ok(())
}
/// Run a git command and capture stdout.
///
/// On failure the error message carries the joined arguments plus git's
/// trimmed stderr.
fn git_capture(args: &[&str]) -> Result<String> {
    let output = Command::new("git")
        .args(args)
        .output()
        .context("failed to run git")?;
    if output.status.success() {
        Ok(String::from_utf8_lossy(&output.stdout).into_owned())
    } else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        bail!("git {} failed: {}", args.join(" "), stderr.trim())
    }
}
/// Run a git command with inherited stdio.
fn git_run(args: &[&str]) -> Result<()> {
let status = Command::new("git")
.args(args)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.status()
.context("failed to run git")?;
if !status.success() {
bail!("git {} failed", args.join(" "));
}
Ok(())
}
/// Try to create the origin repo on GitHub if it doesn't exist.
/// Returns true if repo was created, false if it already exists or creation failed.
fn try_create_origin_repo() -> Result<bool> {
    // Without an origin remote there is nothing to create.
    let Ok(origin_url) = git_capture(&["remote", "get-url", "origin"]) else {
        return Ok(false);
    };
    let origin_url = origin_url.trim().to_string();
    // Extract "user/repo" from either remote format:
    //   SSH:   git@github.com:user/repo.git
    //   HTTPS: https://github.com/user/repo.git
    let repo_path = if let Some(rest) = origin_url.strip_prefix("git@github.com:") {
        Some(rest.strip_suffix(".git").unwrap_or(rest))
    } else if let Some(rest) = origin_url.split("github.com/").nth(1) {
        Some(rest.strip_suffix(".git").unwrap_or(rest))
    } else {
        None
    };
    let Some(repo_path) = repo_path else {
        println!("Cannot parse origin URL for auto-creation: {}", origin_url);
        return Ok(false);
    };
    println!("\nOrigin repo doesn't exist. Creating: {}", repo_path);
    // Delegate the actual creation to the GitHub CLI.
    let status = Command::new("gh")
        .args(["repo", "create", repo_path, "--private", "--source=."])
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .status();
    match status {
        Ok(s) if s.success() => {
            println!("✓ Created GitHub repo: {}", repo_path);
            Ok(true)
        }
        Ok(_) => {
            println!("Failed to create repo. Is `gh` installed and authenticated?");
            println!(" Run: gh auth login");
            Ok(false)
        }
        Err(e) => {
            println!("Failed to run gh CLI: {}", e);
            println!(" Install with: brew install gh");
            Ok(false)
        }
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/hub.rs | src/hub.rs | use std::{
fs,
net::IpAddr,
path::{Path, PathBuf},
process::Command,
time::Duration,
};
use anyhow::{Context, Result, bail};
use reqwest::blocking::Client;
use crate::{
cli::{HubAction, HubCommand, HubOpts},
config, doctor,
lin_runtime::{self, LinRuntime},
};
/// Flow acts as a thin launcher that makes sure the lin hub daemon is running.
pub fn run(cmd: HubCommand) -> Result<()> {
    // Default to starting the daemon when no action was given.
    let action = cmd.action.unwrap_or(HubAction::Start);
    let opts = cmd.opts;
    let runtime = ensure_hub_runtime()?;
    match action {
        HubAction::Start => ensure_daemon(opts, &runtime),
        HubAction::Stop => stop_daemon(&runtime),
    }
}
/// Ensure a healthy lin daemon is listening on the requested host/port,
/// killing a stale recorded process and starting a fresh one when needed.
fn ensure_daemon(opts: HubOpts, runtime: &LinRuntime) -> Result<()> {
    let host = opts.host;
    let port = opts.port;
    // Resolve a relative --config path through flow's path expansion.
    let lin_config = opts.config.as_ref().map(|path| {
        if path.is_absolute() {
            path.clone()
        } else {
            config::expand_path(&path.to_string_lossy())
        }
    });
    // Fast path: daemon already answering its /health endpoint.
    if hub_healthy(host, port) {
        if !opts.no_ui {
            println!(
                "Lin watcher daemon already running at {}",
                format_addr(host, port)
            );
        }
        return Ok(());
    }
    // Unhealthy but a pid is recorded: kill the stale process (best-effort)
    // and forget its pid before starting fresh.
    if let Some(pid) = load_lin_pid()? {
        if process_alive(pid)? {
            terminate_process(pid).ok();
        }
        remove_lin_pid().ok();
    }
    println!(
        "Starting lin watcher daemon using {}{}",
        runtime.binary.display(),
        lin_config
            .as_ref()
            .map(|p| format!(" (config: {})", p.display()))
            .unwrap_or_default()
    );
    start_lin_process(&runtime.binary, host, port, lin_config.as_deref())?;
    if !opts.no_ui {
        println!("Lin watcher daemon ensured at {}", format_addr(host, port));
    }
    Ok(())
}
/// Best-effort stop of the recorded daemon process.
fn stop_daemon(runtime: &LinRuntime) -> Result<()> {
    stop_lin_process().ok();
    println!(
        "Lin hub stopped (if it was running) [{}]",
        runtime.binary.display()
    );
    Ok(())
}
/// Load the persisted lin runtime, or locate a binary interactively and
/// persist the resulting runtime for next time.
fn ensure_hub_runtime() -> Result<LinRuntime> {
    if let Some(existing) = lin_runtime::load_runtime()? {
        return Ok(existing);
    }
    let binary = doctor::ensure_lin_available_interactive()?;
    let runtime = LinRuntime {
        version: lin_runtime::detect_binary_version(&binary),
        binary,
    };
    lin_runtime::persist_runtime(&runtime)?;
    Ok(runtime)
}
/// Spawn the lin daemon detached (all stdio → null) and record its pid.
fn start_lin_process(
    binary: &Path,
    host: IpAddr,
    port: u16,
    config_path: Option<&Path>,
) -> Result<()> {
    let mut cmd = Command::new(binary);
    cmd.arg("daemon")
        .arg("--host")
        .arg(host.to_string())
        .arg("--port")
        .arg(port.to_string());
    if let Some(path) = config_path {
        cmd.arg("--config").arg(path);
    }
    // Detach from our stdio so the daemon runs quietly in the background.
    cmd.stdin(std::process::Stdio::null())
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null());
    let child = cmd
        .spawn()
        .with_context(|| format!("failed to start lin from {}", binary.display()))?;
    // Remember the pid so a later stop/restart can find the process.
    persist_lin_pid(child.id())?;
    Ok(())
}
/// Best-effort stop of the recorded daemon process; clears the pid file.
fn stop_lin_process() -> Result<()> {
    if let Some(pid) = load_lin_pid()? {
        terminate_process(pid).ok();
        remove_lin_pid().ok();
    }
    Ok(())
}
/// Location of the lin daemon pid file: `$HOME/.config/flow/lin.pid`,
/// or the same path relative to the working directory when HOME is unset.
fn lin_pid_path() -> PathBuf {
    let base = std::env::var_os("HOME")
        .map(PathBuf::from)
        .unwrap_or_default();
    base.join(".config/flow/lin.pid")
}
/// Read the recorded daemon pid, if any.
/// Returns `Ok(None)` when the file is absent or holds an unparseable/zero pid.
fn load_lin_pid() -> Result<Option<u32>> {
    let path = lin_pid_path();
    if !path.exists() {
        return Ok(None);
    }
    let contents =
        fs::read_to_string(&path).with_context(|| format!("failed to read {}", path.display()))?;
    // 0 doubles as the parse-failure sentinel and is mapped to None below.
    let pid: u32 = contents.trim().parse().ok().unwrap_or(0);
    if pid == 0 { Ok(None) } else { Ok(Some(pid)) }
}
/// Persist the daemon pid, creating the config directory when needed.
fn persist_lin_pid(pid: u32) -> Result<()> {
    let path = lin_pid_path();
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create pid dir {}", parent.display()))?;
    }
    fs::write(&path, pid.to_string())
        .with_context(|| format!("failed to write {}", path.display()))?;
    Ok(())
}
/// Delete the pid file if present (removal errors ignored).
fn remove_lin_pid() -> Result<()> {
    let path = lin_pid_path();
    if path.exists() {
        fs::remove_file(path).ok();
    }
    Ok(())
}
/// Check if the hub is healthy and responding.
///
/// Issues a GET against `/health` with a 750 ms timeout; any transport
/// error, non-success HTTP status, or client-build failure counts as
/// unhealthy.
pub fn hub_healthy(host: IpAddr, port: u16) -> bool {
    let Ok(client) = Client::builder()
        .timeout(Duration::from_millis(750))
        .build()
    else {
        return false;
    };
    client
        .get(format_health_url(host, port))
        .send()
        .and_then(|resp| resp.error_for_status())
        .is_ok()
}
/// Render `http://host:port`, bracketing IPv6 hosts as URLs require.
fn format_addr(host: IpAddr, port: u16) -> String {
    if host.is_ipv6() {
        format!("http://[{host}]:{port}")
    } else {
        format!("http://{host}:{port}")
    }
}
/// Render the `/health` probe URL, bracketing IPv6 hosts as URLs require.
fn format_health_url(host: IpAddr, port: u16) -> String {
    if host.is_ipv6() {
        format!("http://[{host}]:{port}/health")
    } else {
        format!("http://{host}:{port}/health")
    }
}
/// Check whether a process with `pid` is currently running.
fn process_alive(pid: u32) -> Result<bool> {
    #[cfg(unix)]
    {
        // `kill -0` probes for existence without delivering a signal.
        let status = Command::new("kill").arg("-0").arg(pid.to_string()).status();
        return Ok(status.map(|s| s.success()).unwrap_or(false));
    }
    #[cfg(windows)]
    {
        // No `kill -0` equivalent: scan tasklist output for the pid.
        // NOTE(review): a plain substring match can false-positive on other
        // columns containing the same digits — confirm this is acceptable.
        let output = Command::new("tasklist")
            .output()
            .context("failed to invoke tasklist")?;
        if !output.status.success() {
            return Ok(false);
        }
        let needle = pid.to_string();
        let body = String::from_utf8_lossy(&output.stdout);
        Ok(body.lines().any(|line| line.contains(&needle)))
    }
}
/// Terminate the process with `pid`: plain `kill` (SIGTERM by default) on
/// unix, forced `taskkill /F` on Windows.
fn terminate_process(pid: u32) -> Result<()> {
    #[cfg(unix)]
    {
        let status = Command::new("kill")
            .arg(format!("{pid}"))
            .stderr(std::process::Stdio::null())
            .status()
            .context("failed to invoke kill command")?;
        if status.success() {
            return Ok(());
        }
        bail!(
            "kill command exited with status {}",
            status.code().unwrap_or(-1)
        );
    }
    #[cfg(windows)]
    {
        let status = Command::new("taskkill")
            .args(["/PID", &pid.to_string(), "/F"])
            .status()
            .context("failed to invoke taskkill")?;
        if status.success() {
            return Ok(());
        }
        bail!(
            "taskkill exited with status {}",
            status.code().unwrap_or(-1)
        );
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/secrets.rs | src/secrets.rs | use std::{
collections::HashMap,
env, fs,
path::{Path, PathBuf},
};
use anyhow::{Context, Result, bail};
use reqwest::blocking::Client;
use crate::{
cli::{SecretsAction, SecretsCommand, SecretsFormat, SecretsListOpts, SecretsPullOpts},
config::{self, Config, StorageConfig, StorageEnvConfig},
};
/// Dispatch the `secrets` subcommand to its list/pull handler.
pub fn run(cmd: SecretsCommand) -> Result<()> {
    match cmd.action {
        SecretsAction::List(opts) => list(opts),
        SecretsAction::Pull(opts) => pull(opts),
    }
}
/// Print the secret environments declared in the config's [storage] block,
/// including each environment's declared variables and defaults.
fn list(opts: SecretsListOpts) -> Result<()> {
    let (config_path, cfg) = load_config(opts.config)?;
    let secrets = cfg.storage.ok_or_else(|| {
        anyhow::anyhow!("no [storage] block defined in {}", config_path.display())
    })?;
    if secrets.envs.is_empty() {
        println!(
            "No secret environments defined in {}",
            config_path.display()
        );
        return Ok(());
    }
    println!(
        "Environments defined in {} (provider: {}):",
        config_path.display(),
        secrets.provider
    );
    for env_cfg in &secrets.envs {
        println!("\n- {}", env_cfg.name);
        if let Some(desc) = &env_cfg.description {
            println!(" Description: {}", desc);
        }
        if env_cfg.variables.is_empty() {
            println!(" Variables: (unspecified)");
        } else {
            // One-line summary per variable, annotating defaults when set.
            let summary: Vec<String> = env_cfg
                .variables
                .iter()
                .map(|var| match &var.default {
                    Some(default) if !default.is_empty() => {
                        format!("{} (default: {})", var.key, default)
                    }
                    Some(_) => format!("{} (default: empty)", var.key),
                    None => var.key.clone(),
                })
                .collect();
            println!(" Variables: {}", summary.join(", "));
        }
    }
    Ok(())
}
/// Fetch one environment's secrets from the hub and print or save them
/// in the requested format.
fn pull(opts: SecretsPullOpts) -> Result<()> {
    let (config_path, cfg) = load_config(opts.config)?;
    let secrets = cfg.storage.ok_or_else(|| {
        anyhow::anyhow!("no [storage] block defined in {}", config_path.display())
    })?;
    // The requested env must be declared; the error lists the valid names.
    let env_cfg = secrets
        .envs
        .iter()
        .find(|env| env.name == opts.env)
        .ok_or_else(|| {
            anyhow::anyhow!(
                "unknown storage environment '{}' (available: {})",
                opts.env,
                secrets
                    .envs
                    .iter()
                    .map(|env| env.name.as_str())
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        })?;
    let values = fetch_remote_secrets(&secrets, env_cfg, opts.hub.clone())?;
    // Declared variables first, then extras, then render in chosen format.
    let ordered = order_variables(env_cfg, &values);
    let rendered = render_secrets(&ordered, opts.format);
    if let Some(path) = opts.output {
        write_output(&path, &rendered)?;
        println!("Saved {} secrets to {}", env_cfg.name, path.display());
    } else {
        println!("{}", rendered);
    }
    Ok(())
}
/// Fetch the secret values for `env_cfg` from the storage hub.
///
/// Authenticates with a bearer token read from the env var named by
/// `cfg.env_var`; `hub_override` takes precedence over `cfg.hub_url`.
/// Declared variables missing from the response fall back to their
/// configured default; a missing variable without a default is an error.
fn fetch_remote_secrets(
    cfg: &StorageConfig,
    env_cfg: &StorageEnvConfig,
    hub_override: Option<String>,
) -> Result<HashMap<String, String>> {
    let api_key = env::var(&cfg.env_var).with_context(|| {
        format!(
            "environment variable {} is not set; required to authenticate with secrets provider",
            cfg.env_var
        )
    })?;
    // CLI override wins, otherwise the configured hub URL. (This replaces an
    // `.or_else(|| Some(..)).unwrap_or_else(..)` chain whose final hard-coded
    // fallback was unreachable dead code, since the or_else always produced
    // Some.)
    let base_url = hub_override.unwrap_or_else(|| cfg.hub_url.clone());
    let base = base_url.trim_end_matches('/');
    let url = format!("{}/api/secrets/{}/{}", base, cfg.provider, env_cfg.name);
    let client = Client::builder()
        .build()
        .context("failed to build HTTP client")?;
    let response = client
        .get(url)
        .bearer_auth(api_key)
        .send()
        .with_context(|| "failed to call storage hub")?
        .error_for_status()
        .with_context(|| "storage hub returned an error response")?;
    let mut body: HashMap<String, String> = response
        .json()
        .with_context(|| "failed to parse storage hub response")?;
    // Backfill declared variables missing from the response from their
    // defaults; a missing variable without a default is fatal.
    for var in &env_cfg.variables {
        if body.contains_key(&var.key) {
            continue;
        }
        if let Some(default) = &var.default {
            body.insert(var.key.clone(), default.clone());
        } else {
            bail!(
                "storage hub response missing required variable '{}' for environment '{}'",
                var.key,
                env_cfg.name
            );
        }
    }
    Ok(body)
}
/// Returns `(key, value)` pairs with the configured variables first, in
/// config-declaration order, followed by any extra keys the hub returned.
fn order_variables(
    env_cfg: &StorageEnvConfig,
    values: &HashMap<String, String>,
) -> Vec<(String, String)> {
    // Declared keys first, keeping their order from the config file;
    // keys absent from `values` are simply skipped.
    let declared = env_cfg.variables.iter().filter_map(|var| {
        values
            .get(&var.key)
            .map(|value| (var.key.clone(), value.clone()))
    });
    // Then everything the hub returned that the config did not declare.
    let extras = values
        .iter()
        .filter(|(key, _)| !env_cfg.variables.iter().any(|v| v.key == **key))
        .map(|(key, value)| (key.clone(), value.clone()));
    declared.chain(extras).collect()
}
/// Renders the ordered key/value pairs as newline-joined lines in the
/// requested output format (shell `export` lines or dotenv lines).
fn render_secrets(vars: &[(String, String)], format: SecretsFormat) -> String {
    // Pick the per-line renderer once, then apply it to every pair.
    let render_line: fn(&String, &String) -> String = match format {
        SecretsFormat::Shell => |k, v| format!("export {}={}", k, shell_quote(v)),
        SecretsFormat::Dotenv => |k, v| format!("{}={}", k, dotenv_quote(v)),
    };
    vars.iter()
        .map(|(k, v)| render_line(k, v))
        .collect::<Vec<_>>()
        .join("\n")
}
/// Quotes a value for safe use in `export KEY=VALUE` shell output.
///
/// Empty values become `''`; values made only of "safe" characters are
/// emitted as-is; everything else is single-quoted with the standard
/// `'\''` escape for embedded single quotes.
fn shell_quote(value: &str) -> String {
    let is_plain = |c: char| c.is_ascii_alphanumeric() || matches!(c, '_' | '-' | '.' | '/');
    if value.is_empty() {
        return "''".to_string();
    }
    if value.chars().all(is_plain) {
        return value.to_string();
    }
    // Close the quote, emit an escaped quote, reopen: ' -> '\''
    format!("'{}'", value.replace('\'', "'\\''"))
}
/// Quotes a value for dotenv output: plain alphanumeric-ish values pass
/// through untouched, anything else is double-quoted with `\` and `"`
/// escaped.
fn dotenv_quote(value: &str) -> String {
    let plain = value
        .bytes()
        .all(|b| b.is_ascii_alphanumeric() || matches!(b, b'_' | b'.' | b'-' | b'/'));
    if plain {
        value.to_string()
    } else {
        // Escape backslashes first so inserted quote-escapes survive.
        let escaped = value.replace('\\', "\\\\").replace('"', "\\\"");
        format!("\"{}\"", escaped)
    }
}
/// Writes `contents` to `path`, creating parent directories as needed.
fn write_output(path: &Path, contents: &str) -> Result<()> {
    // Create the parent directory unless the path is a bare filename
    // (whose parent is the empty path).
    match path.parent() {
        Some(parent) if !parent.as_os_str().is_empty() => {
            fs::create_dir_all(parent)
                .with_context(|| format!("failed to create directory {}", parent.display()))?;
        }
        _ => {}
    }
    fs::write(path, contents.as_bytes())
        .with_context(|| format!("failed to write secrets to {}", path.display()))
}
fn load_config(path: PathBuf) -> Result<(PathBuf, Config)> {
let config_path = resolve_path(path)?;
let cfg = config::load(&config_path).with_context(|| {
format!(
"failed to load configuration from {}",
config_path.display()
)
})?;
Ok((config_path, cfg))
}
/// Makes `path` absolute; relative paths are joined onto the current
/// working directory (which can fail, hence the `Result`).
fn resolve_path(path: PathBuf) -> Result<PathBuf> {
    if path.is_absolute() {
        return Ok(path);
    }
    let cwd = env::current_dir()?;
    Ok(cwd.join(path))
}
#[cfg(test)]
mod tests {
    use super::*;
    use mockito::Server;
    use std::path::PathBuf;
    // Resolves a fixture path relative to the crate root so the test does
    // not depend on the working directory.
    fn fixture_path(relative: &str) -> PathBuf {
        PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(relative)
    }
    // RAII guard that sets an env var and restores the previous value
    // (or removes the var) on drop.
    struct EnvVarGuard {
        key: String,
        previous: Option<String>,
    }
    impl EnvVarGuard {
        fn set(key: &str, value: &str) -> Self {
            let previous = env::var(key).ok();
            // set_var/remove_var are unsafe in edition 2024 because they are
            // not thread-safe. NOTE(review): cargo runs tests in parallel by
            // default — confirm no other test touches the same variable.
            unsafe {
                env::set_var(key, value);
            }
            Self {
                key: key.to_string(),
                previous,
            }
        }
    }
    impl Drop for EnvVarGuard {
        fn drop(&mut self) {
            if let Some(value) = &self.previous {
                unsafe {
                    env::set_var(&self.key, value);
                }
            } else {
                unsafe {
                    env::remove_var(&self.key);
                }
            }
        }
    }
    #[test]
    fn project_config_fixture_is_loadable_and_fetches_mocked_secrets() {
        let cfg = config::load(fixture_path("test-data/project-config/flow.toml"))
            .expect("project config fixture should parse");
        assert_eq!(cfg.tasks.len(), 3, "fixture defines three tasks");
        let commit = cfg
            .tasks
            .iter()
            .find(|task| task.name == "commit")
            .expect("commit task should exist");
        assert_eq!(
            commit.dependencies,
            ["github.com/1focus-ai/fast"],
            "commit task should depend on fast"
        );
        let storage = cfg
            .storage
            .clone()
            .expect("fixture should define a storage provider");
        assert_eq!(storage.provider, "1focus.ai");
        let env_cfg = storage
            .envs
            .iter()
            .find(|env| env.name == "local")
            .expect("local storage env should exist");
        // Provide the auth token the fetch path requires.
        let _guard = EnvVarGuard::set(&storage.env_var, "test-token");
        // Mock the hub endpoint and assert the bearer token is sent.
        let mut server = Server::new();
        let endpoint = format!("/api/secrets/{}/{}", storage.provider, env_cfg.name);
        let mock = server
            .mock("GET", endpoint.as_str())
            .match_header("authorization", "Bearer test-token")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(
                r#"{
  "DATABASE_URL": "postgres://localhost/flow"
}"#,
            )
            .create();
        let values =
            fetch_remote_secrets(&storage, env_cfg, Some(server.url())).expect("mock fetch works");
        mock.assert();
        assert_eq!(
            values.get("DATABASE_URL").map(String::as_str),
            Some("postgres://localhost/flow")
        );
        // Not in the mock response, so it must come from the configured default.
        assert_eq!(values.get("OPENAI_API_KEY").map(String::as_str), Some(""));
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/lmstudio.rs | src/lmstudio.rs | //! Simple LM Studio API client for task matching.
use anyhow::{Context, Result};
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
/// Port LM Studio's local server listens on by default.
const DEFAULT_PORT: u16 = 1234;
/// Model used when the caller does not specify one.
const DEFAULT_MODEL: &str = "qwen3-8b";
/// Request payload for the OpenAI-compatible `/v1/chat/completions` endpoint.
#[derive(Debug, Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<ChatMessage>,
    temperature: f32,
}
/// One chat message; this client only ever sends role "user".
#[derive(Debug, Serialize)]
struct ChatMessage {
    role: String,
    content: String,
}
/// Minimal response shape — only the choices' message content is read.
#[derive(Debug, Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}
#[derive(Debug, Deserialize)]
struct Choice {
    message: Option<ResponseMessage>,
}
#[derive(Debug, Deserialize)]
struct ResponseMessage {
    content: String,
}
/// Send a prompt to LM Studio and get a response.
///
/// `model` and `port` fall back to [`DEFAULT_MODEL`] / [`DEFAULT_PORT`].
/// Returns the trimmed content of the first choice's message, or an empty
/// string when the response carries no choices/message. Errors on
/// connection failure, non-success HTTP status, or an unparseable body.
pub fn quick_prompt(prompt: &str, model: Option<&str>, port: Option<u16>) -> Result<String> {
    let prompt = prompt.trim();
    let model = model.unwrap_or(DEFAULT_MODEL);
    let port = port.unwrap_or(DEFAULT_PORT);
    // 30s timeout: generation can be slow on local models.
    let client = Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .context("failed to create HTTP client")?;
    let url = format!("http://localhost:{port}/v1/chat/completions");
    let body = ChatRequest {
        model: model.to_string(),
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: prompt.to_string(),
        }],
        temperature: 0.1, // Low temperature for deterministic task matching
    };
    let resp = client
        .post(&url)
        .json(&body)
        .send()
        .with_context(|| format!("failed to connect to LM Studio at localhost:{port}"))?;
    if !resp.status().is_success() {
        anyhow::bail!(
            "LM Studio returned status {}: {}",
            resp.status(),
            resp.text().unwrap_or_default()
        );
    }
    // Read the body as text first so a parse failure is distinguishable
    // from a transport failure.
    let text_body = resp.text().context("failed to read LM Studio response")?;
    let parsed: ChatResponse =
        serde_json::from_str(&text_body).context("failed to parse LM Studio response")?;
    // Empty string (not an error) when the model returned nothing usable.
    let text = parsed
        .choices
        .first()
        .and_then(|c| c.message.as_ref())
        .map(|m| m.content.trim().to_string())
        .unwrap_or_default();
    Ok(text)
}
/// Check if LM Studio is running and accessible.
///
/// Probes `GET /v1/models` on `port` (default 1234) with a 2-second
/// timeout; any failure — client build, connect, or non-2xx status —
/// reports `false`.
#[allow(dead_code)]
pub fn is_available(port: Option<u16>) -> bool {
    let port = port.unwrap_or(DEFAULT_PORT);
    let Ok(client) = Client::builder()
        .timeout(std::time::Duration::from_secs(2))
        .build()
    else {
        return false;
    };
    let url = format!("http://localhost:{port}/v1/models");
    match client.get(&url).send() {
        Ok(resp) => resp.status().is_success(),
        Err(_) => false,
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/init.rs | src/init.rs | use std::{fs, path::PathBuf};
use anyhow::{Context, Result, bail};
use crate::cli::InitOpts;
/// Starter flow.toml written by `init::run`; task commands are intentionally
/// blank stubs for the user to fill in.
const TEMPLATE: &str = r#"# flow
[[tasks]]
name = "setup"
command = ""
description = "Project setup (fill me)"
[[tasks]]
name = "dev"
command = ""
description = "Start dev server (fill me)"
"#;
/// Creates a starter flow.toml at the requested location.
///
/// Refuses to overwrite an existing file; creates parent directories as
/// needed and reports the created path on stdout.
pub fn run(opts: InitOpts) -> Result<()> {
    let target = resolve_path(opts.path);
    if target.exists() {
        bail!("{} already exists; refusing to overwrite", target.display());
    }
    // A bare filename has an empty parent path — nothing to create then.
    let parent_to_create = target.parent().filter(|p| !p.as_os_str().is_empty());
    if let Some(parent) = parent_to_create {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create directory {}", parent.display()))?;
    }
    fs::write(&target, TEMPLATE)
        .with_context(|| format!("failed to write {}", target.display()))?;
    println!("created {}", target.display());
    Ok(())
}
/// Resolves the optional CLI path to the flow.toml target.
///
/// `None` means `<cwd>/flow.toml`; relative paths are joined onto the
/// current directory; absolute paths pass through untouched. The cwd
/// fallback of "." (previously duplicated in two arms) is hoisted into a
/// single closure.
fn resolve_path(path: Option<PathBuf>) -> PathBuf {
    // If the cwd cannot be determined, fall back to "." so init still works.
    let cwd = || std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    match path {
        Some(p) if p.is_absolute() => p,
        Some(p) => cwd().join(p),
        None => cwd().join("flow.toml"),
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/fixup.rs | src/fixup.rs | //! Fix common TOML syntax errors in flow.toml files.
//!
//! Common issues that AI tools create:
//! - `\$` escape sequences (invalid in TOML basic strings)
//! - `\n` literal in basic strings instead of actual newlines
//! - Unclosed multi-line strings
use std::fs;
use anyhow::{Context, Result};
use regex::Regex;
use crate::cli::FixupOpts;
/// Result of a fixup operation.
#[derive(Debug)]
pub struct FixupResult {
    // Every fix detected, in file order.
    pub fixes_applied: Vec<FixupAction>,
    // NOTE(review): never set to true anywhere in this module — either wire
    // it up or drop the field.
    pub had_errors: bool,
}
/// A single line-level fix: what was wrong and the before/after text.
#[derive(Debug)]
pub struct FixupAction {
    // 1-based line number in the original content.
    pub line: usize,
    pub description: String,
    pub before: String,
    pub after: String,
}
/// CLI entrypoint for `fixup`: scans flow.toml for known TOML mistakes,
/// prints a diff-style report, and (unless --dry-run) writes the fixes.
pub fn run(opts: FixupOpts) -> Result<()> {
    let config_path = if opts.config.is_absolute() {
        opts.config.clone()
    } else {
        std::env::current_dir()?.join(&opts.config)
    };
    if !config_path.exists() {
        anyhow::bail!("flow.toml not found at {}", config_path.display());
    }
    let content = fs::read_to_string(&config_path)
        .with_context(|| format!("failed to read {}", config_path.display()))?;
    let result = fix_toml_content(&content);
    if result.fixes_applied.is_empty() {
        println!("✓ No issues found in {}", config_path.display());
        return Ok(());
    }
    println!(
        "Found {} issue(s) in {}:\n",
        result.fixes_applied.len(),
        config_path.display()
    );
    // Before/after preview, truncated so long lines don't wrap the report.
    for fix in &result.fixes_applied {
        println!("  Line {}: {}", fix.line, fix.description);
        println!("    - {}", truncate_for_display(&fix.before, 60));
        println!("    + {}", truncate_for_display(&fix.after, 60));
        println!();
    }
    if opts.dry_run {
        println!("Dry run - no changes written.");
        return Ok(());
    }
    // Apply fixes
    let fixed_content = apply_fixes(&content, &result.fixes_applied);
    // Validate the fixed content parses; still write on failure so the user
    // keeps the partial progress, but warn loudly.
    if let Err(e) = toml::from_str::<toml::Value>(&fixed_content) {
        println!("⚠ Warning: Fixed content still has TOML errors: {}", e);
        println!("Writing anyway - manual review recommended.");
    }
    fs::write(&config_path, &fixed_content)
        .with_context(|| format!("failed to write {}", config_path.display()))?;
    println!(
        "✓ Fixed {} issue(s) in {}",
        result.fixes_applied.len(),
        config_path.display()
    );
    Ok(())
}
/// Fix common TOML issues in the content.
///
/// Tracks whether each line falls inside a `"""` multi-line basic string
/// (escape rules only apply there and in single-line basic strings) and
/// collects an invalid-escape fix per offending line. Nothing is mutated
/// here; `apply_fixes` does the rewriting. `had_errors` is always false.
pub fn fix_toml_content(content: &str) -> FixupResult {
    let mut fixes = Vec::new();
    let lines: Vec<&str> = content.lines().collect();
    // Track if we're inside a multi-line basic string (""")
    let mut in_multiline_basic = false;
    let mut _multiline_start_line = 0;
    for (line_idx, line) in lines.iter().enumerate() {
        // Fixes report 1-based line numbers.
        let line_num = line_idx + 1;
        // Count triple quotes to track multi-line string state
        let triple_quote_count = line.matches(r#"""""#).count();
        if !in_multiline_basic {
            // Check for start of multi-line basic string
            if triple_quote_count == 1 {
                in_multiline_basic = true;
                _multiline_start_line = line_num;
            } else if triple_quote_count == 2 {
                // Single-line multi-line string (opens and closes on same line)
                // Check for issues in this line
                if let Some(fix) = check_invalid_escapes(line, line_num) {
                    fixes.push(fix);
                }
            }
        } else {
            // Inside multi-line basic string
            if triple_quote_count >= 1 {
                // End of multi-line string
                in_multiline_basic = false;
            }
            // Check for invalid escape sequences inside multi-line basic strings
            // NOTE(review): the closing line is still scanned, so text after
            // the closing quotes can also be flagged — confirm this is wanted.
            if let Some(fix) = check_invalid_escapes(line, line_num) {
                fixes.push(fix);
            }
        }
    }
    FixupResult {
        fixes_applied: fixes,
        had_errors: false,
    }
}
/// Apply fixes to TOML content and return the updated string.
///
/// Public wrapper over the private `apply_fixes`, presumably for callers
/// outside this module.
pub fn apply_fixes_to_content(content: &str, fixes: &[FixupAction]) -> String {
    apply_fixes(content, fixes)
}
/// Check a line for invalid escape sequences in TOML basic strings.
fn check_invalid_escapes(line: &str, line_num: usize) -> Option<FixupAction> {
// Invalid escapes in TOML basic strings: \$ \: \@ \! etc.
// Valid escapes: \\ \n \t \r \" \b \f \uXXXX \UXXXXXXXX and \ followed by newline
// We need to find backslash followed by characters that are NOT valid escape chars
let invalid_escape_re = Regex::new(r#"\\([^\\nrtbf"uU\s])"#).unwrap();
if let Some(capture) = invalid_escape_re.find(line) {
let escaped_char = &line[capture.start() + 1..capture.end()];
let fixed_line = invalid_escape_re
.replace_all(line, |caps: ®ex::Captures| {
// Just remove the backslash, keep the character
caps[1].to_string()
})
.to_string();
return Some(FixupAction {
line: line_num,
description: format!("Invalid escape sequence '\\{}'", escaped_char),
before: line.to_string(),
after: fixed_line,
});
}
None
}
/// Apply fixes to content, returning the fixed string.
///
/// Each fix replaces its (1-based) line with the `after` text; fixes whose
/// line number is out of range are ignored. The trailing newline of the
/// input, if any, is preserved.
fn apply_fixes(content: &str, fixes: &[FixupAction]) -> String {
    let mut result_lines: Vec<String> = content.lines().map(str::to_string).collect();
    for fix in fixes {
        // 1-based -> 0-based; checked_sub rejects line 0, get_mut rejects
        // anything past the end.
        if let Some(slot) = fix
            .line
            .checked_sub(1)
            .and_then(|idx| result_lines.get_mut(idx))
        {
            *slot = fix.after.clone();
        }
    }
    let mut result = result_lines.join("\n");
    // lines() drops the final newline; restore it when the input had one.
    if content.ends_with('\n') {
        result.push('\n');
    }
    result
}
/// Truncates `s` to at most `max_len` bytes for display, appending "..."
/// when anything was cut; never splits a UTF-8 character.
fn truncate_for_display(s: &str, max_len: usize) -> String {
    if s.len() <= max_len {
        return s.to_string();
    }
    // Walk back from the byte limit to the nearest char boundary
    // (index 0 is always a boundary, so this terminates).
    let mut cut = max_len.min(s.len());
    while !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &s[..cut])
}
#[cfg(test)]
mod tests {
    use super::*;
    // `\$` inside a multi-line basic string is invalid TOML and must be flagged.
    #[test]
    fn fixes_escaped_dollar() {
        let content = r##"
[[tasks]]
name = "test"
command = """
echo "Price: \$8"
"""
"##;
        let result = fix_toml_content(content);
        assert_eq!(result.fixes_applied.len(), 1);
        assert!(result.fixes_applied[0].description.contains(r"\$"));
    }
    // Lines without backslashes inside a multi-line string produce no fixes.
    #[test]
    fn preserves_valid_escapes() {
        let content = r##"
[[tasks]]
name = "test"
command = """
echo "Line1"
echo "Tab here"
"""
"##;
        let result = fix_toml_content(content);
        assert!(result.fixes_applied.is_empty());
    }
    // Plain single-line strings are never scanned for escapes.
    #[test]
    fn no_fixes_needed() {
        let content = r#"
[[tasks]]
name = "test"
command = "echo hello"
"#;
        let result = fix_toml_content(content);
        assert!(result.fixes_applied.is_empty());
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/env_setup.rs | src/env_setup.rs | use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use crossterm::{
event::{self, Event as CEvent, KeyCode, KeyEvent},
execute,
terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
};
use ignore::WalkBuilder;
use ratatui::{
Terminal,
backend::CrosstermBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Modifier, Style},
text::{Line, Span},
widgets::{Block, Borders, List, ListItem, Paragraph, Wrap},
};
use crate::env::parse_env_file;
/// Pre-selected answers carried into the wizard (e.g. from CLI flags).
#[derive(Debug, Clone, Default)]
pub struct EnvSetupDefaults {
    // Env file to preselect in the picker, if any.
    pub env_file: Option<PathBuf>,
    // Environment name to preselect (e.g. "production").
    pub environment: Option<String>,
}
/// Final choices made in the wizard, handed back to the caller.
#[derive(Debug, Clone)]
pub struct EnvSetupResult {
    // `None` means the user chose to skip setting secrets.
    pub env_file: Option<PathBuf>,
    pub environment: String,
    pub selected_keys: Vec<String>,
    // Whether the user asked to apply the changes immediately.
    pub apply: bool,
}
/// Runs the interactive env-setup TUI.
///
/// Returns `None` when there are no .env files to work with or the user
/// cancelled; otherwise the wizard's collected choices.
pub fn run_env_setup(
    project_root: &Path,
    defaults: EnvSetupDefaults,
) -> Result<Option<EnvSetupResult>> {
    let env_files = discover_env_files(project_root)?;
    if env_files.is_empty() {
        println!("No .env files found.");
        println!("Create one (for example .env) and try: f env setup");
        return Ok(None);
    }
    let mut app = EnvSetupApp::new(project_root, env_files, defaults);
    // Enter raw mode + alternate screen for the TUI.
    enable_raw_mode().context("failed to enable raw mode")?;
    let mut stdout = std::io::stdout();
    execute!(stdout, EnterAlternateScreen).context("failed to enter alternate screen")?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend).context("failed to create terminal backend")?;
    let app_result = run_app(&mut terminal, &mut app);
    // Teardown happens even when run_app errored, so the terminal is
    // restored before the error propagates; order matters — drop the
    // terminal (releasing stdout) before leaving the alternate screen.
    disable_raw_mode().ok();
    let _ = terminal.show_cursor();
    drop(terminal);
    let mut stdout = std::io::stdout();
    execute!(stdout, LeaveAlternateScreen).ok();
    app_result
}
/// Wizard pages, in forward order.
#[derive(Debug, Clone, Copy)]
enum SetupStep {
    EnvFile,
    EnvTarget,
    CustomEnv,
    Keys,
    Confirm,
}
/// One row in the env-file picker; `path: None` is the "skip" entry.
struct EnvFileChoice {
    label: String,
    path: Option<PathBuf>,
}
/// One row in the environment picker; `value: None` with `is_custom` set
/// marks the "custom..." entry that opens free-text input.
struct EnvTargetChoice {
    label: String,
    value: Option<String>,
    is_custom: bool,
}
/// One selectable key from the chosen .env file.
struct EnvKeyItem {
    key: String,
    selected: bool,
    // Flagged as a likely test/local/placeholder value.
    suspect: bool,
    suspect_reason: Option<String>,
    // Length only — values themselves are never displayed in the UI.
    value_len: usize,
}
/// All mutable TUI state for the wizard.
struct EnvSetupApp {
    project_root: PathBuf,
    step: SetupStep,
    env_files: Vec<EnvFileChoice>,
    selected_env_file: usize,
    env_targets: Vec<EnvTargetChoice>,
    selected_env_target: usize,
    custom_env: String,
    key_items: Vec<EnvKeyItem>,
    selected_key: usize,
    apply: bool,
    result: Option<EnvSetupResult>,
}
impl EnvSetupApp {
    // Builds initial state: env-file choices (with defaults preselected),
    // then environment targets seeded from defaults or inferred from the
    // chosen file's name.
    fn new(project_root: &Path, env_files: Vec<PathBuf>, defaults: EnvSetupDefaults) -> Self {
        let env_file_choices = build_env_file_choices(project_root, &env_files);
        let selected_env_file =
            pick_default_env_file(project_root, &env_file_choices, defaults.env_file.as_ref());
        let mut app = Self {
            project_root: project_root.to_path_buf(),
            step: SetupStep::EnvFile,
            env_files: env_file_choices,
            selected_env_file,
            env_targets: Vec::new(),
            selected_env_target: 0,
            custom_env: String::new(),
            key_items: Vec::new(),
            selected_key: 0,
            apply: true,
            result: None,
        };
        // Explicit default wins over a name inferred from the env file.
        let preferred = defaults
            .environment
            .as_deref()
            .map(|s| s.to_string())
            .or_else(|| app.infer_env_target());
        app.refresh_env_targets(preferred.as_deref());
        app
    }
    // Guesses an environment name from the selected env file, if any.
    fn infer_env_target(&self) -> Option<String> {
        let path = self.env_file_path()?;
        infer_env_target_from_file(&path)
    }
    // Rebuilds the target list: the three standard environments, plus the
    // preferred name (if novel), plus the trailing "custom..." entry.
    fn refresh_env_targets(&mut self, preferred: Option<&str>) {
        let mut targets = vec![
            EnvTargetChoice {
                label: "production (default)".to_string(),
                value: Some("production".to_string()),
                is_custom: false,
            },
            EnvTargetChoice {
                label: "staging".to_string(),
                value: Some("staging".to_string()),
                is_custom: false,
            },
            EnvTargetChoice {
                label: "dev".to_string(),
                value: Some("dev".to_string()),
                is_custom: false,
            },
        ];
        if let Some(env) = preferred {
            if !targets.iter().any(|choice| choice.value.as_deref() == Some(env)) {
                targets.push(EnvTargetChoice {
                    label: env.to_string(),
                    value: Some(env.to_string()),
                    is_custom: false,
                });
            }
        }
        // Keep "custom..." last so pickers can treat it specially.
        targets.push(EnvTargetChoice {
            label: "custom...".to_string(),
            value: None,
            is_custom: true,
        });
        self.env_targets = targets;
        self.selected_env_target = pick_default_env_target(&self.env_targets, preferred);
    }
    // Re-parses the selected env file into selectable key items;
    // read failures silently yield an empty list.
    fn refresh_keys(&mut self) {
        self.key_items.clear();
        self.selected_key = 0;
        if let Some(path) = self.env_file_path() {
            if let Ok(items) = build_key_items(&path) {
                self.key_items = items;
            }
        }
    }
    // Owned path of the currently selected env file (None for "skip").
    fn env_file_path(&self) -> Option<PathBuf> {
        self.env_files
            .get(self.selected_env_file)
            .and_then(|choice| choice.path.clone())
    }
    // Borrowed variant of env_file_path for rendering.
    fn env_file_path_ref(&self) -> Option<&Path> {
        self.env_files
            .get(self.selected_env_file)
            .and_then(|choice| choice.path.as_deref())
    }
    // Value of the selected environment target (None for "custom...").
    fn selected_env_target(&self) -> Option<String> {
        self.env_targets
            .get(self.selected_env_target)
            .and_then(|choice| choice.value.clone())
    }
    // Freezes current selections into `self.result` for the caller.
    fn finalize(&mut self) {
        let selected_keys = self
            .key_items
            .iter()
            .filter(|item| item.selected)
            .map(|item| item.key.clone())
            .collect();
        let environment = self
            .selected_env_target()
            .unwrap_or_else(|| "production".to_string());
        self.result = Some(EnvSetupResult {
            env_file: self.env_file_path(),
            environment,
            selected_keys,
            apply: self.apply,
        });
    }
}
/// Event loop: redraw, poll key events on a 200 ms tick, and dispatch to
/// `handle_key` until it signals completion; then hand back the result.
fn run_app<B: ratatui::backend::Backend>(
    terminal: &mut Terminal<B>,
    app: &mut EnvSetupApp,
) -> Result<Option<EnvSetupResult>> {
    loop {
        terminal.draw(|f| draw_ui(f, app))?;
        if event::poll(std::time::Duration::from_millis(200))? {
            if let CEvent::Key(key) = event::read()? {
                if handle_key(app, key)? {
                    // result is None on cancel, Some(..) after Confirm.
                    return Ok(app.result.take());
                }
            }
        }
    }
}
/// Dispatches one key event against the current wizard step.
///
/// Returns `Ok(true)` when the wizard should exit: cancelled via `q` (or
/// Esc on the first step), or finished from the Confirm step.
fn handle_key(app: &mut EnvSetupApp, key: KeyEvent) -> Result<bool> {
    // Global keys: `q` always cancels; Esc steps back (cancels on step 1).
    match key.code {
        KeyCode::Char('q') => return Ok(true),
        KeyCode::Esc => return Ok(step_back(app)),
        _ => {}
    }
    match app.step {
        SetupStep::EnvFile => match key.code {
            // Moving the file selection can change the inferred environment,
            // so the target list is rebuilt on every move.
            KeyCode::Up => {
                select_prev(&mut app.selected_env_file, app.env_files.len());
                let preferred = app.infer_env_target();
                app.refresh_env_targets(preferred.as_deref());
            }
            KeyCode::Down => {
                select_next(&mut app.selected_env_file, app.env_files.len());
                let preferred = app.infer_env_target();
                app.refresh_env_targets(preferred.as_deref());
            }
            KeyCode::Enter => {
                let preferred = app.infer_env_target();
                app.refresh_env_targets(preferred.as_deref());
                app.step = SetupStep::EnvTarget;
            }
            _ => {}
        },
        SetupStep::EnvTarget => match key.code {
            KeyCode::Up => select_prev(&mut app.selected_env_target, app.env_targets.len()),
            KeyCode::Down => select_next(&mut app.selected_env_target, app.env_targets.len()),
            KeyCode::Enter => {
                if app
                    .env_targets
                    .get(app.selected_env_target)
                    .is_some_and(|choice| choice.is_custom)
                {
                    app.custom_env.clear();
                    app.step = SetupStep::CustomEnv;
                } else if app.env_file_path().is_some() {
                    // Skip the key page entirely when the file has no keys.
                    app.refresh_keys();
                    if app.key_items.is_empty() {
                        app.step = SetupStep::Confirm;
                    } else {
                        app.step = SetupStep::Keys;
                    }
                } else {
                    app.step = SetupStep::Confirm;
                }
            }
            _ => {}
        },
        SetupStep::CustomEnv => match key.code {
            KeyCode::Enter => {
                if !app.custom_env.trim().is_empty() {
                    app.env_targets.push(EnvTargetChoice {
                        label: app.custom_env.trim().to_string(),
                        value: Some(app.custom_env.trim().to_string()),
                        is_custom: false,
                    });
                    // BUG FIX: the entry just pushed lives at index len - 1.
                    // The previous `saturating_sub(2)` left the "custom..."
                    // placeholder (value None) selected, so the typed name
                    // was dropped and finalize() fell back to "production".
                    app.selected_env_target = app.env_targets.len().saturating_sub(1);
                    if app.env_file_path().is_some() {
                        app.refresh_keys();
                        app.step = if app.key_items.is_empty() {
                            SetupStep::Confirm
                        } else {
                            SetupStep::Keys
                        };
                    } else {
                        app.step = SetupStep::Confirm;
                    }
                }
            }
            KeyCode::Backspace => {
                app.custom_env.pop();
            }
            KeyCode::Char(ch) => {
                if !ch.is_control() {
                    app.custom_env.push(ch);
                }
            }
            _ => {}
        },
        SetupStep::Keys => match key.code {
            KeyCode::Up => select_prev(&mut app.selected_key, app.key_items.len()),
            KeyCode::Down => select_next(&mut app.selected_key, app.key_items.len()),
            // Space toggles inclusion of the highlighted key.
            KeyCode::Char(' ') => {
                if let Some(item) = app.key_items.get_mut(app.selected_key) {
                    item.selected = !item.selected;
                }
            }
            KeyCode::Enter => app.step = SetupStep::Confirm,
            _ => {}
        },
        SetupStep::Confirm => match key.code {
            KeyCode::Char(' ') => app.apply = !app.apply,
            KeyCode::Enter => {
                app.finalize();
                return Ok(true);
            }
            _ => {}
        },
    }
    Ok(false)
}
/// Renders the whole wizard frame: header (step title), step-specific body,
/// and footer (key hints). Pure view code — reads `app`, mutates nothing.
fn draw_ui(f: &mut ratatui::Frame<'_>, app: &EnvSetupApp) {
    // Three rows: 3-line header, flexible body, 3-line footer.
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(3), Constraint::Min(1), Constraint::Length(3)].as_ref())
        .split(f.area());
    let title = match app.step {
        SetupStep::EnvFile => "Env Setup: Select .env file",
        SetupStep::EnvTarget => "Select 1focus environment",
        SetupStep::CustomEnv => "Enter custom environment",
        SetupStep::Keys => "Select keys to push",
        SetupStep::Confirm => "Confirm env setup",
    };
    let header = Paragraph::new(Line::from(title))
        .block(Block::default().borders(Borders::ALL).title("flow"))
        .alignment(ratatui::layout::Alignment::Center);
    f.render_widget(header, chunks[0]);
    match app.step {
        // File list on the left, masked preview of its keys on the right.
        SetupStep::EnvFile => {
            let body = Layout::default()
                .direction(Direction::Horizontal)
                .constraints([Constraint::Percentage(55), Constraint::Percentage(45)].as_ref())
                .split(chunks[1]);
            let items = app
                .env_files
                .iter()
                .map(|choice| ListItem::new(Line::from(choice.label.clone())))
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(Block::default().borders(Borders::ALL).title("Secrets source"))
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_env_file));
            f.render_stateful_widget(list, body[0], &mut state);
            let preview_lines = build_env_preview_lines(&app.project_root, app.env_file_path_ref());
            let preview = Paragraph::new(preview_lines)
                .block(Block::default().borders(Borders::ALL).title("Preview"))
                .wrap(Wrap { trim: true });
            f.render_widget(preview, body[1]);
        }
        // Simple single list of environment names.
        SetupStep::EnvTarget => {
            let items = app
                .env_targets
                .iter()
                .map(|choice| ListItem::new(Line::from(choice.label.clone())))
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(Block::default().borders(Borders::ALL).title("Environment"))
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_env_target));
            f.render_stateful_widget(list, chunks[1], &mut state);
        }
        // Free-text input echoing what the user has typed so far.
        SetupStep::CustomEnv => {
            let prompt = format!("> {}", app.custom_env);
            let input = Paragraph::new(prompt)
                .block(Block::default().borders(Borders::ALL).title("Environment name"))
                .wrap(Wrap { trim: true });
            f.render_widget(input, chunks[1]);
        }
        // Checkbox list of keys on the left, per-key details on the right.
        SetupStep::Keys => {
            let body = Layout::default()
                .direction(Direction::Horizontal)
                .constraints([Constraint::Percentage(60), Constraint::Percentage(40)].as_ref())
                .split(chunks[1]);
            let selected_count = app.key_items.iter().filter(|item| item.selected).count();
            let items = app
                .key_items
                .iter()
                .map(|item| {
                    let indicator = if item.selected { "[x]" } else { "[ ]" };
                    let flag = if item.suspect { " suspect" } else { "" };
                    let label = format!("{indicator} {}{flag}", item.key);
                    ListItem::new(Line::from(label))
                })
                .collect::<Vec<_>>();
            let list = List::new(items)
                .block(
                    Block::default()
                        .borders(Borders::ALL)
                        .title(format!(
                            "Keys ({}/{})",
                            selected_count,
                            app.key_items.len()
                        )),
                )
                .highlight_style(
                    Style::default()
                        .fg(Color::Black)
                        .bg(Color::Cyan)
                        .add_modifier(Modifier::BOLD),
                );
            let mut state = ratatui::widgets::ListState::default();
            state.select(Some(app.selected_key));
            f.render_stateful_widget(list, body[0], &mut state);
            let detail_lines = build_key_detail_lines(
                &app.project_root,
                app.env_file_path_ref(),
                app.key_items.get(app.selected_key),
            );
            let details = Paragraph::new(detail_lines)
                .block(Block::default().borders(Borders::ALL).title("Details"))
                .wrap(Wrap { trim: true });
            f.render_widget(details, body[1]);
        }
        // Read-only summary of every choice made so far.
        SetupStep::Confirm => {
            let env_file = app
                .env_file_path()
                .map(|p| relative_display(&app.project_root, &p))
                .unwrap_or_else(|| "none".to_string());
            let env_target = app
                .selected_env_target()
                .unwrap_or_else(|| "production".to_string());
            let selected_count = app.key_items.iter().filter(|item| item.selected).count();
            let apply = if app.apply { "yes" } else { "no" };
            let summary = vec![
                Line::from(vec![
                    Span::styled("Env file: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(env_file),
                ]),
                Line::from(vec![
                    Span::styled("Environment: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(env_target),
                ]),
                Line::from(vec![
                    Span::styled("Keys selected: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(format!("{}", selected_count)),
                ]),
                Line::from(vec![
                    Span::styled("Apply now: ", Style::default().add_modifier(Modifier::BOLD)),
                    Span::raw(apply),
                ]),
            ];
            let paragraph = Paragraph::new(summary)
                .block(Block::default().borders(Borders::ALL).title("Review"))
                .wrap(Wrap { trim: true });
            f.render_widget(paragraph, chunks[1]);
        }
    }
    // Footer: key hints for the current step.
    let help = match app.step {
        SetupStep::EnvFile => "Up/Down to move, Enter to select, Esc to cancel, q to cancel",
        SetupStep::EnvTarget => "Up/Down to move, Enter to select, Esc to back, q to cancel",
        SetupStep::CustomEnv => "Type name, Enter to confirm, Esc to back, q to cancel",
        SetupStep::Keys => "Up/Down to move, Space to toggle, Enter to continue, Esc to back, q to cancel",
        SetupStep::Confirm => "Space to toggle apply, Enter to finish, Esc to back, q to cancel",
    };
    let footer = Paragraph::new(help)
        .block(Block::default().borders(Borders::ALL))
        .alignment(ratatui::layout::Alignment::Center);
    f.render_widget(footer, chunks[2]);
}
/// Builds the masked preview panel for the env-file picker: key names,
/// counts, and suspect markers — never the values themselves.
fn build_env_preview_lines(project_root: &Path, env_file: Option<&Path>) -> Vec<Line<'static>> {
    let mut lines = Vec::new();
    let Some(path) = env_file else {
        lines.push(Line::from("No env file selected."));
        lines.push(Line::from("Secrets will not be set."));
        return lines;
    };
    lines.push(Line::from(vec![
        Span::styled("File: ", Style::default().add_modifier(Modifier::BOLD)),
        Span::raw(relative_display(project_root, path)),
    ]));
    lines.push(Line::from("Values are hidden."));
    // Read failures degrade to an explanatory line, not an error.
    let content = match fs::read_to_string(path) {
        Ok(content) => content,
        Err(_) => {
            lines.push(Line::from("Unable to read file."));
            return lines;
        }
    };
    let vars = parse_env_file(&content);
    if vars.is_empty() {
        lines.push(Line::from("No env vars found."));
        return lines;
    }
    // Sort by key for a stable, scannable preview.
    let mut entries: Vec<_> = vars.into_iter().collect();
    entries.sort_by(|a, b| a.0.cmp(&b.0));
    let suspect_count = entries
        .iter()
        .filter(|(_, value)| suspect_reason(value).is_some())
        .count();
    let total = entries.len();
    lines.push(Line::from(format!(
        "Keys: {} (suspect: {})",
        total, suspect_count
    )));
    lines.push(Line::from("! = likely test/local value"));
    // Cap the list so the preview panel never scrolls.
    let max_keys = 12usize;
    for (key, value) in entries.iter().take(max_keys) {
        let flag = if suspect_reason(value).is_some() { " !" } else { "" };
        lines.push(Line::from(format!("  - {}{}", key, flag)));
    }
    if total > max_keys {
        lines.push(Line::from(format!("... +{} more", total - max_keys)));
    }
    lines
}
/// Builds the details panel for the highlighted key: selection state,
/// suspect status/reason, and value length — never the value itself.
fn build_key_detail_lines(
    project_root: &Path,
    env_file: Option<&Path>,
    item: Option<&EnvKeyItem>,
) -> Vec<Line<'static>> {
    let mut lines = Vec::new();
    let env_label = env_file
        .map(|path| relative_display(project_root, path))
        .unwrap_or_else(|| "none".to_string());
    lines.push(Line::from(format!("Env file: {}", env_label)));
    let Some(item) = item else {
        lines.push(Line::from("No key selected."));
        return lines;
    };
    lines.push(Line::from(format!("Key: {}", item.key)));
    lines.push(Line::from(format!(
        "Selected: {}",
        if item.selected { "yes" } else { "no" }
    )));
    lines.push(Line::from(format!(
        "Status: {}",
        if item.suspect { "suspect" } else { "ok" }
    )));
    if let Some(reason) = &item.suspect_reason {
        lines.push(Line::from(format!("Reason: {}", reason)));
    }
    lines.push(Line::from(format!("Value length: {}", item.value_len)));
    lines.push(Line::from("Values are hidden."));
    if item.suspect {
        lines.push(Line::from("Tip: suspect values default to unchecked."));
    }
    lines
}
/// Moves a list selection up one row, wrapping to the bottom from the top;
/// a no-op for empty lists.
fn select_prev(selected: &mut usize, len: usize) {
    if len == 0 {
        return;
    }
    // checked_sub fails only at 0, where we wrap to the last index.
    *selected = selected.checked_sub(1).unwrap_or(len - 1);
}
/// Moves a list selection down one row, wrapping to the top past the end;
/// a no-op for empty lists.
fn select_next(selected: &mut usize, len: usize) {
    if len == 0 {
        return;
    }
    let next = *selected + 1;
    *selected = if next >= len { 0 } else { next };
}
/// Esc handler: moves one step backwards through the wizard.
///
/// Returns true (exit the wizard) only from the first step. Stepping back
/// from Confirm returns to Keys only when a file with keys was chosen;
/// otherwise it jumps straight to the environment picker.
fn step_back(app: &mut EnvSetupApp) -> bool {
    match app.step {
        SetupStep::EnvFile => true,
        SetupStep::EnvTarget => {
            app.step = SetupStep::EnvFile;
            false
        }
        SetupStep::CustomEnv => {
            app.step = SetupStep::EnvTarget;
            false
        }
        SetupStep::Keys => {
            app.step = SetupStep::EnvTarget;
            false
        }
        SetupStep::Confirm => {
            if app.env_file_path().is_some() && !app.key_items.is_empty() {
                app.step = SetupStep::Keys;
            } else {
                app.step = SetupStep::EnvTarget;
            }
            false
        }
    }
}
/// Renders `path` relative to `root` when possible ("." for the root
/// itself); paths outside `root` are shown in full.
fn relative_display(root: &Path, path: &Path) -> String {
    match path.strip_prefix(root) {
        Ok(rel) if rel.as_os_str().is_empty() => ".".to_string(),
        Ok(rel) => rel.to_string_lossy().into_owned(),
        Err(_) => path.to_string_lossy().into_owned(),
    }
}
/// Builds the env-file picker rows: a leading "skip" entry (path None)
/// followed by one row per discovered file, labelled relative to the root.
fn build_env_file_choices(project_root: &Path, env_files: &[PathBuf]) -> Vec<EnvFileChoice> {
    let mut choices = Vec::new();
    choices.push(EnvFileChoice {
        label: "Skip (do not set secrets)".to_string(),
        path: None,
    });
    for path in env_files {
        choices.push(EnvFileChoice {
            label: relative_display(project_root, path),
            path: Some(path.clone()),
        });
    }
    choices
}
/// Pick the initially-highlighted env-file choice: honor `preferred` when it
/// is still present, then well-known file names in priority order, then the
/// first real file (index 0 is always the "skip" entry).
fn pick_default_env_file(
    project_root: &Path,
    choices: &[EnvFileChoice],
    preferred: Option<&PathBuf>,
) -> usize {
    // Index of the choice whose path equals `target`, if any.
    let index_of = |target: &PathBuf| {
        choices
            .iter()
            .position(|c| c.path.as_ref() == Some(target))
    };
    if let Some(path) = preferred {
        if let Some(idx) = index_of(path) {
            return idx;
        }
    }
    const CANDIDATES: [&str; 5] = [
        ".env",
        ".env.production",
        ".env.staging",
        ".env.dev",
        ".env.local",
    ];
    for candidate in CANDIDATES {
        if let Some(idx) = index_of(&project_root.join(candidate)) {
            return idx;
        }
    }
    // Fall back to the first actual file when one exists, else "skip".
    usize::from(choices.len() > 1)
}
/// Pick the initially-highlighted deploy target, preferring an explicit
/// previous selection; defaults to the first entry when none matches.
fn pick_default_env_target(targets: &[EnvTargetChoice], preferred: Option<&str>) -> usize {
    preferred
        .and_then(|env| {
            targets
                .iter()
                .position(|choice| choice.value.as_deref() == Some(env))
        })
        .unwrap_or(0)
}
/// Read an env file and turn each KEY=VALUE pair into a selectable item,
/// sorted by key. Values flagged by `suspect_reason` default to unchecked.
///
/// # Errors
/// Fails when the env file cannot be read.
fn build_key_items(path: &Path) -> Result<Vec<EnvKeyItem>> {
    let content = fs::read_to_string(path)
        .with_context(|| format!("failed to read env file {}", path.display()))?;
    let mut keys: Vec<_> = parse_env_file(&content).into_iter().collect();
    keys.sort_by(|a, b| a.0.cmp(&b.0));
    Ok(keys
        .into_iter()
        .map(|(key, value)| {
            let reason = suspect_reason(&value);
            // `suspect_reason` already reports whitespace-only values as
            // Some("empty"), so `reason.is_some()` fully covers the blank
            // case — the old extra `|| value.trim().is_empty()` was dead.
            let suspect = reason.is_some();
            EnvKeyItem {
                key,
                selected: !suspect,
                suspect,
                suspect_reason: reason.map(str::to_string),
                value_len: value.len(),
            }
        })
        .collect())
}
/// Classify why a value looks like a placeholder or non-production secret.
/// Returns a short tag, or `None` when nothing looks off.
///
/// Checks run from most to least specific: Stripe test keys must be caught
/// before the generic "test" substring match.
fn suspect_reason(value: &str) -> Option<&'static str> {
    let trimmed = value.trim();
    if trimmed.is_empty() {
        return Some("empty");
    }
    let lowered = trimmed.to_lowercase();
    if lowered.contains("sk_test") || lowered.contains("pk_test") {
        return Some("stripe_test");
    }
    if lowered.contains("localhost") || lowered.contains("127.0.0.1") {
        return Some("localhost");
    }
    // "example" also matches "example.com", so a single check suffices
    // (the old `contains("example.com") ||` branch was redundant).
    if lowered.contains("example") {
        return Some("example");
    }
    if lowered.contains("dummy") {
        return Some("dummy");
    }
    if lowered.contains("test") {
        return Some("test");
    }
    None
}
/// Guess a deploy target from an env file's name (e.g. ".env.staging" ->
/// "staging"). Returns `None` when no known environment token is present.
///
/// "dev" is checked before "prod", preserving the original precedence.
fn infer_env_target_from_file(path: &Path) -> Option<String> {
    let name = path.file_name()?.to_string_lossy().to_lowercase();
    if name.contains("staging") {
        return Some("staging".to_string());
    }
    // "dev" already matches "development" (the extra check was dead code).
    if name.contains("dev") {
        return Some("dev".to_string());
    }
    // Likewise "prod" already matches "production".
    if name.contains("prod") {
        return Some("production".to_string());
    }
    None
}
/// Recursively find `.env*` files under `root`, skipping common vendor/build
/// directories. `.envrc` (direnv's script) is excluded. Depth is capped at 10
/// and ignore files (.gitignore etc.) are deliberately not honored so that
/// git-ignored env files are still discovered.
fn discover_env_files(root: &Path) -> Result<Vec<PathBuf>> {
    const SKIP_DIRS: &[&str] = &[
        "node_modules",
        "target",
        "dist",
        "build",
        ".git",
        ".hg",
        ".svn",
        "__pycache__",
        ".pytest_cache",
        ".mypy_cache",
        "venv",
        ".venv",
        "vendor",
        "Pods",
        ".cargo",
        ".rustup",
    ];
    let walker = WalkBuilder::new(root)
        .hidden(false)
        .git_ignore(false)
        .git_global(false)
        .git_exclude(false)
        .max_depth(Some(10))
        .filter_entry(|entry| {
            let is_dir = entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false);
            if !is_dir {
                return true;
            }
            // Prune well-known dependency/build directories.
            let name = entry.file_name().to_string_lossy();
            !SKIP_DIRS.contains(&name.as_ref())
        })
        .build();
    let mut found = Vec::new();
    for entry in walker.flatten() {
        let is_file = entry.file_type().map(|ft| ft.is_file()).unwrap_or(false);
        if !is_file {
            continue;
        }
        if let Some(name) = entry.path().file_name().and_then(|s| s.to_str()) {
            if name.starts_with(".env") && name != ".envrc" {
                found.push(entry.path().to_path_buf());
            }
        }
    }
    found.sort();
    found.dedup();
    Ok(found)
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/doctor.rs | src/doctor.rs | use std::{
env,
fs::{self, OpenOptions},
io::Write,
path::{Path, PathBuf},
};
use anyhow::{Context, Result, bail};
use crate::{cli::DoctorOpts, lin_runtime};
/// Ensure the lin watcher daemon is available, prompting to install a bundled
/// copy if it is missing from PATH. Returns the resolved binary path.
///
/// Resolution order: registered runtime entry, then PATH lookup, then a
/// bundled copy next to the current executable (installed only after the user
/// confirms). Errors when none of these yields a usable binary.
pub fn ensure_lin_available_interactive() -> Result<PathBuf> {
    // 1) Prefer the runtime registration written by a previous install.
    if let Ok(Some(runtime)) = lin_runtime::load_runtime() {
        if runtime.binary.exists() {
            println!(
                "✅ lin watcher daemon registered at {}",
                runtime.binary.display()
            );
            return Ok(runtime.binary);
        } else {
            // Stale registration: the file was moved or deleted.
            println!(
                "⚠️ Registered lin binary at {} is missing; falling back to PATH lookup.",
                runtime.binary.display()
            );
        }
    }
    // 2) Standard PATH lookup.
    if let Ok(path) = which::which("lin") {
        println!("✅ lin watcher daemon found at {}", path.display());
        return Ok(path);
    }
    // 3) Offer to install a copy shipped next to this executable.
    if let Some(bundled) = find_bundled_lin() {
        if prompt_install_lin(&bundled)? {
            let installed = install_lin(&bundled)?;
            println!("✅ Installed lin to {}", installed.display());
            return Ok(installed);
        }
    }
    bail!(
        "lin is not on PATH. Build/install from this repo (scripts/deploy.sh) so flow can delegate watchers to it."
    );
}
/// Run all doctor checks: flox, lin, direnv, and the shell hook.
///
/// # Errors
/// Fails when flox or direnv are missing, or when the shell hook cannot be
/// installed. lin availability is advisory only and never aborts the run.
pub fn run(_opts: DoctorOpts) -> Result<()> {
    println!("Running flow doctor checks...\n");
    ensure_flox_available()?;
    // Best effort: a missing lin should not fail the whole doctor run.
    let _ = ensure_lin_available_interactive();
    ensure_direnv_on_path()?;
    if let Some(shell) = detect_shell()? {
        ensure_shell_hook(shell)?;
    } else {
        println!(
            "⚠️ Unable to detect your shell from $SHELL. Add the direnv hook manually (see https://direnv.net)."
        );
    }
    println!("\n✅ flow doctor is done. Re-run it any time after changing shells or machines.");
    Ok(())
}
/// Verify flox is installed: either on PATH or, heuristically, via a
/// `~/.flox` directory left behind by a flox-managed environment.
fn ensure_flox_available() -> Result<()> {
    if which::which("flox").is_ok() {
        println!("✅ flox found on PATH");
        return Ok(());
    }
    // Heuristic: flox-managed env leaves a .flox directory or ~/.flox directory.
    let flox_dir = home_dir().join(".flox");
    if flox_dir.exists() {
        println!(
            "✅ flox environment directory detected at {}",
            flox_dir.display()
        );
        return Ok(());
    }
    bail!(
        "flox is not installed. Install it from https://flox.dev/docs/install-flox/install/ and re-run `f doctor`."
    );
}
fn ensure_direnv_on_path() -> Result<()> {
match which::which("direnv") {
Ok(path) => {
println!("✅ direnv found at {}", path.display());
Ok(())
}
Err(_) => bail!(
"direnv is not on PATH. Install it from https://direnv.net/#installation and rerun `flow doctor`."
),
}
}
/// Look for a `lin` binary sitting next to the current executable.
/// Returns `None` when the executable path is unavailable or no copy exists.
fn find_bundled_lin() -> Option<PathBuf> {
    let exe = std::env::current_exe().ok()?;
    let candidate = exe.parent()?.join("lin");
    candidate.exists().then_some(candidate)
}
/// Ask the user whether to install the bundled lin binary.
/// Pressing Enter (empty input) counts as "yes"; a failed stdin read is
/// treated as an empty answer rather than an error.
fn prompt_install_lin(bundled: &Path) -> Result<bool> {
    println!(
        "lin was not found on PATH. A bundled copy was found at {}.",
        bundled.display()
    );
    print!(
        "Install lin to {}? [Y/n]: ",
        default_install_dir().display()
    );
    // print! does not flush; make sure the prompt is visible before reading.
    let _ = std::io::stdout().flush();
    let mut input = String::new();
    std::io::stdin().read_line(&mut input).ok();
    let answer = input.trim().to_ascii_lowercase();
    Ok(matches!(answer.as_str(), "" | "y" | "yes"))
}
/// Copy the bundled lin binary into the default install directory
/// (`$HOME/bin`), creating the directory if needed, and mark it executable
/// (0o755) on unix. Returns the installed path.
///
/// # Errors
/// Fails when the directory cannot be created, the copy fails, or (unix) the
/// permissions cannot be read or set.
fn install_lin(bundled: &Path) -> Result<PathBuf> {
    let dest_dir = default_install_dir();
    std::fs::create_dir_all(&dest_dir).with_context(|| {
        format!(
            "failed to create lin install directory {}",
            dest_dir.display()
        )
    })?;
    let dest = dest_dir.join("lin");
    std::fs::copy(bundled, &dest).with_context(|| {
        format!(
            "failed to copy bundled lin from {} to {}",
            bundled.display(),
            dest.display()
        )
    })?;
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        // rwxr-xr-x: the copy does not preserve the execute bit reliably.
        let mut perms = std::fs::metadata(&dest)
            .context("failed to stat installed lin")?
            .permissions();
        perms.set_mode(0o755);
        std::fs::set_permissions(&dest, perms).context("failed to mark lin executable")?;
    }
    Ok(dest)
}
/// Directory lin gets installed into: `$HOME/bin`, or "." when HOME is unset.
fn default_install_dir() -> PathBuf {
    match std::env::var_os("HOME") {
        Some(home) => PathBuf::from(home).join("bin"),
        None => PathBuf::from("."),
    }
}
/// Detect the user's shell from `$SHELL`, printing what was found.
/// Returns `Ok(None)` when the variable is missing or the shell is unknown.
fn detect_shell() -> Result<Option<ShellKind>> {
    let detected = env::var("SHELL")
        .ok()
        .and_then(|shell_path| ShellKind::from_path(shell_path));
    if let Some(kind) = detected {
        println!("✅ Detected shell: {}", kind.display());
    }
    Ok(detected)
}
/// Append the direnv hook snippet to the shell's rc file, unless an existing
/// hook line is already present. Creates the config directory when needed.
///
/// # Errors
/// Fails when the rc file's directory cannot be created or the file cannot be
/// opened or written.
fn ensure_shell_hook(shell: ShellKind) -> Result<()> {
    let config_path = shell.config_path();
    let indicator = shell.hook_indicator();
    let snippet = shell.hook_snippet();
    // A missing rc file reads as empty; append mode below will create it.
    let existing = fs::read_to_string(&config_path).unwrap_or_default();
    if existing.contains(indicator) {
        println!(
            "✅ {} already sources direnv ({}).",
            shell.display(),
            config_path.display()
        );
        return Ok(());
    }
    println!(
        "ℹ️ Adding direnv hook to {} ({}).",
        shell.display(),
        config_path.display()
    );
    if let Some(parent) = config_path.parent() {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create directory {}", parent.to_string_lossy()))?;
    }
    let mut file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&config_path)
        .with_context(|| format!("failed to open {}", config_path.display()))?;
    // Keep the appended block on its own lines even when the existing file
    // lacks a trailing newline.
    if !existing.is_empty() && !existing.ends_with('\n') {
        writeln!(file)?;
    }
    writeln!(file, "\n# Added by flow doctor")?;
    writeln!(file, "{snippet}")?;
    println!(
        "✅ Added direnv hook for {}. Restart your shell or source {}.",
        shell.display(),
        config_path.display()
    );
    Ok(())
}
/// Login shells flow knows how to install the direnv hook for.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ShellKind {
    Bash,
    Zsh,
    Fish,
}
impl ShellKind {
    /// Parse a shell kind from a `$SHELL`-style path; only the file name is
    /// considered, case-insensitively. Unknown shells yield `None`.
    fn from_path<P: AsRef<Path>>(path: P) -> Option<Self> {
        let name = path
            .as_ref()
            .file_name()
            .map(|os| os.to_string_lossy().to_ascii_lowercase())?;
        match name.as_str() {
            "bash" => Some(Self::Bash),
            "zsh" => Some(Self::Zsh),
            "fish" => Some(Self::Fish),
            _ => None,
        }
    }
    /// Human-readable shell name for messages.
    fn display(&self) -> &'static str {
        match self {
            ShellKind::Bash => "bash",
            ShellKind::Zsh => "zsh",
            ShellKind::Fish => "fish",
        }
    }
    /// The shell's rc file under the user's home directory.
    fn config_path(&self) -> PathBuf {
        let home = home_dir();
        self.config_path_with_base(&home)
    }
    /// The shell's rc file under an explicit base directory (testable form).
    fn config_path_with_base(&self, home: &Path) -> PathBuf {
        match self {
            ShellKind::Bash => home.join(".bashrc"),
            ShellKind::Zsh => home.join(".zshrc"),
            ShellKind::Fish => home.join(".config/fish/config.fish"),
        }
    }
    /// Substring whose presence in the rc file means the hook is installed.
    fn hook_indicator(&self) -> &'static str {
        match self {
            ShellKind::Bash => "direnv hook bash",
            ShellKind::Zsh => "direnv hook zsh",
            ShellKind::Fish => "direnv hook fish",
        }
    }
    /// The snippet appended to the rc file; guarded so a missing direnv does
    /// not break shell startup.
    fn hook_snippet(&self) -> &'static str {
        match self {
            ShellKind::Bash => {
                r#"if command -v direnv >/dev/null 2>&1; then
    eval "$(direnv hook bash)"
fi"#
            }
            ShellKind::Zsh => {
                r#"if command -v direnv >/dev/null 2>&1; then
    eval "$(direnv hook zsh)"
fi"#
            }
            ShellKind::Fish => {
                r#"if type -q direnv
    direnv hook fish | source
end"#
            }
        }
    }
}
/// The user's home directory from `$HOME`, falling back to "." when unset.
fn home_dir() -> PathBuf {
    match env::var_os("HOME") {
        Some(home) => PathBuf::from(home),
        None => PathBuf::from("."),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Paths and bare names map to the right shell; unknown shells (sh)
    // are rejected.
    #[test]
    fn shell_detection_from_path() {
        assert_eq!(ShellKind::from_path("/bin/bash"), Some(ShellKind::Bash));
        assert_eq!(ShellKind::from_path("zsh"), Some(ShellKind::Zsh));
        assert_eq!(
            ShellKind::from_path("/usr/local/bin/fish"),
            Some(ShellKind::Fish)
        );
        assert_eq!(ShellKind::from_path("/bin/sh"), None);
    }
    // Config paths are derived from the provided base (home) directory.
    #[test]
    fn config_paths_follow_home_env() {
        let base = Path::new("/tmp/drflow");
        assert_eq!(
            ShellKind::Zsh.config_path_with_base(base),
            PathBuf::from("/tmp/drflow/.zshrc")
        );
        assert_eq!(
            ShellKind::Bash.config_path_with_base(base),
            PathBuf::from("/tmp/drflow/.bashrc")
        );
        assert_eq!(
            ShellKind::Fish.config_path_with_base(base),
            PathBuf::from("/tmp/drflow/.config/fish/config.fish")
        );
    }
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/server.rs | src/server.rs | use std::{
collections::HashMap,
convert::Infallible,
net::SocketAddr,
path::Path,
pin::Pin,
sync::{Arc, mpsc as std_mpsc},
time::Duration,
};
use anyhow::{Context, Result};
use axum::{
Router,
extract::{Path as AxumPath, Query, State},
http::StatusCode,
response::{
IntoResponse, Json,
sse::{Event, KeepAlive, Sse},
},
routing::{get, post},
};
use futures::{Stream, StreamExt};
use notify::RecursiveMode;
use notify_debouncer_mini::new_debouncer;
use serde::Deserialize;
use serde_json::json;
use tokio::sync::{RwLock, mpsc};
use tokio_stream::wrappers::BroadcastStream;
use crate::{
cli::DaemonOpts,
config::{self, Config, ServerConfig},
log_store::{self, LogEntry, LogQuery},
screen::ScreenBroadcaster,
servers::{LogLine, ManagedServer, ServerSnapshot},
terminal,
};
// Maximum number of in-memory log lines retained per managed server.
const LOG_BUFFER_CAPACITY: usize = 2048;
// Shared, async-guarded registry of managed servers keyed by name.
type ServerStore = Arc<RwLock<HashMap<String, Arc<ManagedServer>>>>;
/// Shared state handed to every axum handler.
#[derive(Clone)]
struct AppState {
    // Broadcast source of screen frames.
    screen: ScreenBroadcaster,
    // Server registry, shared with the config-reload task.
    servers: ServerStore,
}
// Type-erased SSE stream used by handlers that return either a live
// broadcast stream or a one-shot error stream.
type DynSseStream = dyn Stream<Item = std::result::Result<Event, Infallible>> + Send;
/// Run the flowd HTTP daemon: load config, reconcile managed servers, watch
/// the config file for live reloads, and serve the REST/SSE API until a
/// shutdown signal arrives.
///
/// # Errors
/// Fails when the listen address cannot be bound or the server errors.
pub async fn run(opts: DaemonOpts) -> Result<()> {
    let screen = ScreenBroadcaster::with_mock_stream(opts.frame_buffer, opts.fps);
    // Load configuration for managed servers.
    let config_path = opts
        .config
        .clone()
        .unwrap_or_else(config::default_config_path);
    let mut cfg: Config = config::load_or_default(&config_path);
    tracing::info!(
        path = %config_path.display(),
        server_count = cfg.servers.len(),
        "loaded flow config"
    );
    if let Some(version) = cfg.version {
        tracing::debug!(version, "config version detected");
    }
    terminal::maybe_enable_terminal_tracing(&cfg.options);
    let servers_store: ServerStore = Arc::new(RwLock::new(HashMap::new()));
    // Take the configured servers out of cfg and populate the registry.
    sync_servers(&servers_store, std::mem::take(&mut cfg.servers)).await;
    if let Some(stream) = cfg.stream.as_ref() {
        tracing::info!(
            provider = %stream.provider,
            hotkey = %stream.hotkey.as_deref().unwrap_or(""),
            toggle_url = %stream.toggle_url.as_deref().unwrap_or(""),
            "stream config detected"
        );
    }
    let state = AppState {
        screen,
        servers: Arc::clone(&servers_store),
    };
    // Hot reload: the filesystem watcher pushes into this channel; a
    // background task re-reads the config on every event.
    let (reload_tx, mut reload_rx) = mpsc::channel(4);
    if let Err(err) = spawn_config_watcher(&config_path, reload_tx.clone()) {
        tracing::warn!(?err, "failed to watch config for changes");
    }
    let servers_for_reload = Arc::clone(&servers_store);
    let config_path_for_reload = config_path.clone();
    tokio::spawn(async move {
        while reload_rx.recv().await.is_some() {
            if let Err(err) = reload_config(&config_path_for_reload, &servers_for_reload).await {
                tracing::warn!(?err, "config reload failed");
            }
        }
    });
    let router = Router::new()
        .route("/health", get(health))
        .route("/screen/latest", get(screen_latest))
        .route("/screen/stream", get(screen_stream))
        .route("/servers", get(servers_list))
        .route("/logs", get(all_logs))
        .route("/servers/:name/logs", get(server_logs))
        .route("/servers/:name/logs/stream", get(server_logs_stream))
        // Log ingestion endpoints
        .route("/logs/ingest", post(logs_ingest))
        .route("/logs/query", get(logs_query))
        .with_state(state);
    let addr = SocketAddr::from((opts.host, opts.port));
    tracing::info!(
        "flowd listening on http://{addr} (mock fps: {}, buffer: {}, config: {})",
        opts.fps,
        opts.frame_buffer,
        config_path.display(),
    );
    let listener = tokio::net::TcpListener::bind(addr).await?;
    axum::serve(listener, router)
        .with_graceful_shutdown(shutdown_signal())
        .await?;
    Ok(())
}
/// GET /health — liveness probe; always reports ok.
async fn health() -> impl IntoResponse {
    let body = json!({
        "status": "ok",
        "message": "flow daemon ready"
    });
    Json(body)
}
/// GET /screen/latest — most recent screen frame, or 204 when none exists yet.
async fn screen_latest(State(state): State<AppState>) -> impl IntoResponse {
    if let Some(frame) = state.screen.latest().await {
        (StatusCode::OK, Json(frame)).into_response()
    } else {
        StatusCode::NO_CONTENT.into_response()
    }
}
/// GET /screen/stream — SSE stream of screen frames. Frames that fail to
/// serialize and events dropped by the broadcast channel are logged and
/// skipped rather than terminating the stream.
async fn screen_stream(
    State(state): State<AppState>,
) -> Sse<impl Stream<Item = std::result::Result<Event, Infallible>>> {
    let frames = BroadcastStream::new(state.screen.subscribe());
    let events = frames.filter_map(|result| async move {
        let frame = match result {
            Ok(frame) => frame,
            Err(err) => {
                tracing::warn!(?err, "screen broadcast channel dropped event");
                return None;
            }
        };
        match serde_json::to_string(&frame) {
            Ok(payload) => Some(Ok(Event::default().data(payload))),
            Err(err) => {
                tracing::error!(?err, "failed to serialize screen frame");
                None
            }
        }
    });
    let keep_alive = KeepAlive::new()
        .interval(Duration::from_secs(5))
        .text(":flowd keep-alive");
    Sse::new(events).keep_alive(keep_alive)
}
/// GET /servers — snapshot of every managed server, gathered concurrently.
async fn servers_list(State(state): State<AppState>) -> impl IntoResponse {
    let guard = state.servers.read().await;
    let pending: Vec<_> = guard
        .values()
        .cloned()
        .map(|server| async move { server.snapshot().await })
        .collect();
    let snapshots: Vec<ServerSnapshot> = futures::future::join_all(pending).await;
    (StatusCode::OK, Json(snapshots)).into_response()
}
/// Query parameters accepted by the log-listing endpoints.
#[derive(Debug, Deserialize)]
struct LogsQuery {
    // Maximum number of lines to return; defaults to 512 when omitted.
    #[serde(default = "default_logs_limit")]
    limit: usize,
}
// serde's `default` attribute requires a function path.
fn default_logs_limit() -> usize {
    512
}
/// GET /servers/:name/logs — recent buffered log lines for one server,
/// capped at `limit`. Responds 404 when the server name is unknown.
async fn server_logs(
    State(state): State<AppState>,
    AxumPath(name): AxumPath<String>,
    Query(query): Query<LogsQuery>,
) -> impl IntoResponse {
    // Clone the handle out so the read lock is released before awaiting.
    let found = {
        let guard = state.servers.read().await;
        guard.get(&name).cloned()
    };
    let Some(server) = found else {
        return (
            StatusCode::NOT_FOUND,
            Json(json!({ "error": format!("unknown server {name}") })),
        )
            .into_response();
    };
    let lines: Vec<LogLine> = server.recent_logs(query.limit).await;
    (StatusCode::OK, Json(lines)).into_response()
}
/// GET /logs — merged recent logs across all servers, sorted by timestamp and
/// truncated to the newest `limit` entries.
async fn all_logs(
    State(state): State<AppState>,
    Query(query): Query<LogsQuery>,
) -> impl IntoResponse {
    let servers: Vec<_> = {
        let guard = state.servers.read().await;
        guard.values().cloned().collect()
    };
    let mut merged: Vec<LogLine> = Vec::new();
    for server in servers {
        merged.extend(server.recent_logs(query.limit).await);
    }
    merged.sort_by_key(|line| line.timestamp_ms);
    // Keep only the newest `limit` entries — the tail after ascending sort.
    if merged.len() > query.limit {
        merged = merged.split_off(merged.len() - query.limit);
    }
    (StatusCode::OK, Json(merged)).into_response()
}
/// GET /servers/:name/logs/stream — SSE stream of live log lines for one
/// server. Unknown names yield a single error event (without keep-alive)
/// instead of an HTTP error, keeping the return type uniform.
async fn server_logs_stream(
    State(state): State<AppState>,
    AxumPath(name): AxumPath<String>,
) -> Sse<Pin<Box<DynSseStream>>> {
    // Clone the handle out so the read lock is not held while streaming.
    let server = {
        let guard = state.servers.read().await;
        guard.get(&name).cloned()
    };
    let (stream, enable_keep_alive) = match server {
        Some(server) => {
            let receiver = server.subscribe();
            // Lines that fail to serialize or were dropped by the broadcast
            // channel (lagging subscriber) are logged and skipped.
            let stream = BroadcastStream::new(receiver).filter_map(|result| async move {
                match result {
                    Ok(line) => match serde_json::to_string(&line) {
                        Ok(payload) => Some(Ok(Event::default().data(payload))),
                        Err(err) => {
                            tracing::error!(?err, "failed to serialize log line");
                            None
                        }
                    },
                    Err(err) => {
                        tracing::warn!(?err, "server log broadcast channel dropped event");
                        None
                    }
                }
            });
            (Box::pin(stream) as Pin<Box<DynSseStream>>, true)
        }
        None => {
            // One-shot stream carrying an error payload for unknown servers.
            let stream = futures::stream::once(async move {
                Ok(Event::default().data(
                    serde_json::to_string(&json!({
                        "error": format!("unknown server {name}")
                    }))
                    .unwrap_or_else(|_| "{\"error\":\"unknown server\"}".to_string()),
                ))
            });
            (Box::pin(stream) as Pin<Box<DynSseStream>>, false)
        }
    };
    let sse = Sse::new(stream);
    if enable_keep_alive {
        sse.keep_alive(
            KeepAlive::new()
                .interval(Duration::from_secs(5))
                .text(":flowd log keep-alive"),
        )
    } else {
        sse
    }
}
/// Re-read the config file and reconcile the managed-server set with it.
///
/// # Errors
/// Fails when the config cannot be loaded or parsed.
async fn reload_config(path: &Path, servers: &ServerStore) -> Result<()> {
    let mut cfg = config::load(path)
        .with_context(|| format!("failed to reload config at {}", path.display()))?;
    tracing::info!(path = %path.display(), "config changed; reloading");
    let desired = std::mem::take(&mut cfg.servers);
    sync_servers(servers, desired).await;
    if let Some(stream) = cfg.stream {
        tracing::info!(
            provider = %stream.provider,
            hotkey = %stream.hotkey.as_deref().unwrap_or(""),
            toggle_url = %stream.toggle_url.as_deref().unwrap_or(""),
            "stream config updated"
        );
    }
    Ok(())
}
/// Reconcile the running server registry against `configs`: stop servers that
/// were removed or whose config changed, register new/updated ones, and
/// autostart those marked for it. Stops and starts are deferred until after
/// the write lock is released.
async fn sync_servers(store: &ServerStore, configs: Vec<ServerConfig>) {
    // Index the desired configs by name for O(1) lookups.
    let mut desired: HashMap<String, ServerConfig> = HashMap::new();
    for cfg in configs.into_iter() {
        desired.insert(cfg.name.clone(), cfg);
    }
    let mut to_stop: Vec<Arc<ManagedServer>> = Vec::new();
    let mut to_start: Vec<Arc<ManagedServer>> = Vec::new();
    {
        let mut guard = store.write().await;
        // Drop servers that no longer appear in the config.
        guard.retain(|name, server| {
            if !desired.contains_key(name) {
                to_stop.push(server.clone());
                false
            } else {
                true
            }
        });
        for (name, cfg) in desired.into_iter() {
            if let Some(existing) = guard.get(&name) {
                // Unchanged config: keep the existing instance as-is.
                if existing.config() == &cfg {
                    continue;
                }
                // Changed config: replace; the old instance is stopped below.
                to_stop.push(existing.clone());
                guard.remove(&name);
            }
            let managed = ManagedServer::new(cfg.clone(), LOG_BUFFER_CAPACITY);
            if cfg.autostart {
                to_start.push(managed.clone());
            }
            guard.insert(name, managed);
        }
    }
    for server in to_stop {
        if let Err(err) = server.stop().await {
            tracing::warn!(
                ?err,
                name = server.config().name,
                "failed to stop managed server during reload"
            );
        }
    }
    // Starts run on detached tasks so a slow server does not block reload.
    for server in to_start {
        tokio::spawn(async move {
            if let Err(err) = server.start().await {
                tracing::error!(
                    ?err,
                    server = server.config().name,
                    "failed to start managed server"
                );
            }
        });
    }
}
/// Watch the config file's directory (non-recursively, debounced at 250ms) on
/// a dedicated OS thread, sending `()` on `tx` whenever the config file
/// itself changes. The thread exits when the receiver side of `tx` is gone.
fn spawn_config_watcher(path: &Path, tx: mpsc::Sender<()>) -> notify::Result<()> {
    // Resolve symlinks up front so later event-path comparisons are stable.
    let target = path.canonicalize().unwrap_or_else(|_| path.to_path_buf());
    // Watch the parent directory: editors often replace files (rename +
    // unlink) rather than writing in place, which a file-level watch misses.
    let watch_root = target
        .parent()
        .map(Path::to_path_buf)
        .unwrap_or_else(|| target.clone());
    std::thread::spawn(move || {
        let (event_tx, event_rx) = std_mpsc::channel();
        let mut debouncer = match new_debouncer(Duration::from_millis(250), event_tx) {
            Ok(debouncer) => debouncer,
            Err(err) => {
                tracing::error!(?err, "failed to initialize config watcher");
                return;
            }
        };
        if let Err(err) = debouncer
            .watcher()
            .watch(&watch_root, RecursiveMode::NonRecursive)
        {
            tracing::error!(?err, path = %watch_root.display(), "failed to watch config directory");
            return;
        }
        while let Ok(result) = event_rx.recv() {
            match result {
                Ok(events) => {
                    // Only events touching the watched config file matter.
                    let should_reload = events.iter().any(|event| same_file(&target, &event.path));
                    // blocking_send fails only when the receiver was dropped.
                    if should_reload && tx.blocking_send(()).is_err() {
                        break;
                    }
                }
                Err(err) => tracing::warn!(?err, "config watcher error"),
            }
        }
    });
    Ok(())
}
/// Best-effort identity check between the (already canonicalized) target `a`
/// and an event path `b`: exact match, canonicalized match, or — as a last
/// resort — matching file names.
fn same_file(a: &Path, b: &Path) -> bool {
    if a == b {
        return true;
    }
    if b.canonicalize().is_ok_and(|canon| canon == a) {
        return true;
    }
    match (a.file_name(), b.file_name()) {
        (Some(lhs), Some(rhs)) => lhs == rhs,
        _ => false,
    }
}
/// Resolve when Ctrl-C is received, logging the shutdown request. If the
/// signal handler cannot be installed, the future resolves silently.
async fn shutdown_signal() {
    let received = tokio::signal::ctrl_c().await;
    if received.is_ok() {
        tracing::info!("shutdown signal received");
    }
}
// ============================================================================
// Log Ingestion Endpoints
// ============================================================================
/// Request body for log ingestion - single entry or batch.
///
/// `untagged` lets clients POST either a bare JSON object or a JSON array;
/// serde tries the `Single` shape first, then `Batch`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum IngestRequest {
    Single(LogEntry),
    Batch(Vec<LogEntry>),
}
/// POST /logs/ingest - Ingest log entries into the database.
/// The sqlite work runs on the blocking thread pool; both ingest failures and
/// task panics map to 500 responses.
async fn logs_ingest(Json(payload): Json<IngestRequest>) -> impl IntoResponse {
    let outcome = tokio::task::spawn_blocking(move || {
        let mut conn = log_store::open_log_db()?;
        match payload {
            IngestRequest::Single(entry) => {
                let id = log_store::insert_log(&conn, &entry)?;
                Ok(json!({ "inserted": 1, "ids": [id] }))
            }
            IngestRequest::Batch(entries) => {
                let ids = log_store::insert_logs(&mut conn, &entries)?;
                Ok(json!({ "inserted": ids.len(), "ids": ids }))
            }
        }
    })
    .await;
    match outcome {
        Ok(Ok(body)) => (StatusCode::OK, Json(body)).into_response(),
        Ok(Err(err)) => {
            tracing::error!(?err, "log ingest failed");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({ "error": err.to_string() })),
            )
                .into_response()
        }
        Err(err) => {
            tracing::error!(?err, "log ingest task panicked");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({ "error": "internal error" })),
            )
                .into_response()
        }
    }
}
/// GET /logs/query - Query stored logs with filters.
async fn logs_query(Query(query): Query<LogQuery>) -> impl IntoResponse {
let result = tokio::task::spawn_blocking(move || {
let conn = log_store::open_log_db()?;
log_store::query_logs(&conn, &query)
})
.await;
match result {
Ok(Ok(entries)) => (StatusCode::OK, Json(entries)).into_response(),
Ok(Err(err)) => {
tracing::error!(?err, "log query failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": err.to_string() })),
)
.into_response()
}
Err(err) => {
tracing::error!(?err, "log query task panicked");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "internal error" })),
)
.into_response()
}
}
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
nikivdev/flow | https://github.com/nikivdev/flow/blob/85db313c274056cf0dbb36cc0aee35e037a66cfd/src/processes.rs | src/processes.rs | use std::collections::hash_map::DefaultHasher;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
use std::io::{BufRead, BufReader, Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::thread;
use std::time::Duration;
use anyhow::{Context, Result, bail};
use crate::cli::{KillOpts, ProcessOpts, TaskLogsOpts};
use crate::projects;
use crate::running;
use crate::tasks;
/// Show running processes for a project (or all projects)
pub fn show_project_processes(opts: ProcessOpts) -> Result<()> {
    if opts.all {
        return show_all_processes();
    }
    let (config_path, cfg) = tasks::load_project_config(opts.config)?;
    let canonical = config_path.canonicalize()?;
    show_processes_for_project(&canonical, cfg.project_name.as_deref())
}
/// Print the running flow processes registered for one project.
fn show_processes_for_project(config_path: &Path, project_name: Option<&str>) -> Result<()> {
    let processes = running::get_project_processes(config_path)?;
    let project_root = config_path.parent().unwrap_or(Path::new("."));
    if let Some(name) = project_name {
        println!("Project: {} ({})", name, project_root.display());
    } else {
        println!("Project: {}", project_root.display());
    }
    if processes.is_empty() {
        println!("No running flow processes.");
        return Ok(());
    }
    println!("Running processes:");
    for proc in &processes {
        let runtime = format_runtime(proc.started_at);
        println!(
            " {} [pid: {}, pgid: {}] - {}",
            proc.task_name, proc.pid, proc.pgid, runtime
        );
        println!(" {}", proc.command);
        if proc.used_flox {
            println!(" (flox environment)");
        }
    }
    Ok(())
}
/// Print running flow processes grouped by project, across all projects.
fn show_all_processes() -> Result<()> {
    let all = running::load_running_processes()?;
    if all.projects.is_empty() {
        println!("No running flow processes.");
        return Ok(());
    }
    for (config_path, processes) in &all.projects {
        // Every entry in a group carries the same project name; take the first.
        let project_name = processes
            .first()
            .and_then(|p| p.project_name.as_deref())
            .unwrap_or("unknown");
        let project_root = Path::new(config_path)
            .parent()
            .map(|p| p.display().to_string())
            .unwrap_or_else(|| config_path.clone());
        println!("\n{} ({}):", project_name, project_root);
        for proc in processes {
            println!(
                " {} [pid: {}] - {}",
                proc.task_name,
                proc.pid,
                format_runtime(proc.started_at)
            );
        }
    }
    Ok(())
}
/// Format elapsed time since `started_at` (unix epoch milliseconds) as a
/// short human-readable string: "Ns", "Nm Ss", or "Nh Mm".
fn format_runtime(started_at: u128) -> String {
    let now_ms = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_millis())
        .unwrap_or(0);
    // Clock skew (start time in the future) clamps to zero.
    let secs = ((now_ms.saturating_sub(started_at)) / 1000) as u64;
    match secs {
        s if s < 60 => format!("{}s", s),
        s if s < 3600 => format!("{}m {}s", s / 60, s % 60),
        s => format!("{}h {}m", s / 3600, (s % 3600) / 60),
    }
}
/// Kill processes based on options
/// Precedence: explicit --pid, then a task name, then --all.
pub fn kill_processes(opts: KillOpts) -> Result<()> {
    let (config_path, _cfg) = tasks::load_project_config(opts.config)?;
    let canonical = config_path.canonicalize()?;
    match (opts.pid, opts.task.as_deref(), opts.all) {
        (Some(pid), _, _) => kill_by_pid(pid, opts.force, opts.timeout),
        (None, Some(task), _) => kill_by_task(&canonical, task, opts.force, opts.timeout),
        (None, None, true) => kill_all_for_project(&canonical, opts.force, opts.timeout),
        (None, None, false) => bail!("Specify a task name, --pid <pid>, or --all"),
    }
}
/// Kill a single process by pid, using its registered process group when the
/// pid is known to the registry (otherwise the pid doubles as the pgid).
fn kill_by_pid(pid: u32, force: bool, timeout: u64) -> Result<()> {
    let processes = running::load_running_processes()?;
    // Find the process entry to learn its PGID and task name.
    let entry = processes.projects.values().flatten().find(|p| p.pid == pid);
    let (pgid, task_name) = match entry {
        Some(e) => (e.pgid, e.task_name.as_str()),
        None => (pid, "unknown"),
    };
    terminate_process_group(pgid, force, timeout)?;
    running::unregister_process(pid)?;
    println!("Killed {} (pid: {}, pgid: {})", task_name, pid, pgid);
    Ok(())
}
/// Kill every running process registered for `task` in this project.
/// Errors when no process matches the task name.
fn kill_by_task(config_path: &Path, task: &str, force: bool, timeout: u64) -> Result<()> {
    let processes = running::get_project_processes(config_path)?;
    let mut found = false;
    for proc in processes.iter().filter(|p| p.task_name == task) {
        found = true;
        terminate_process_group(proc.pgid, force, timeout)?;
        running::unregister_process(proc.pid)?;
        println!("Killed {} (pid: {})", proc.task_name, proc.pid);
    }
    if !found {
        bail!("No running process found for task '{}'", task);
    }
    Ok(())
}
/// Kill every running process registered for this project.
fn kill_all_for_project(config_path: &Path, force: bool, timeout: u64) -> Result<()> {
    let processes = running::get_project_processes(config_path)?;
    if processes.is_empty() {
        println!("No running processes to kill.");
        return Ok(());
    }
    for proc in processes.iter() {
        terminate_process_group(proc.pgid, force, timeout)?;
        running::unregister_process(proc.pid)?;
        println!("Killed {} (pid: {})", proc.task_name, proc.pid);
    }
    Ok(())
}
/// Terminate a process group: graceful SIGTERM with a bounded wait (seconds),
/// escalating to SIGKILL, or an immediate SIGKILL when `force` is set. On
/// Windows the process tree is killed via `taskkill` instead.
fn terminate_process_group(pgid: u32, force: bool, timeout: u64) -> Result<()> {
    #[cfg(unix)]
    {
        if force {
            // Immediate SIGKILL to process group
            Command::new("kill")
                .arg("-KILL")
                .arg(format!("-{}", pgid))
                .status()
                .context("failed to send SIGKILL")?;
        } else {
            // Graceful SIGTERM to process group
            let _ = Command::new("kill")
                .arg("-TERM")
                .arg(format!("-{}", pgid))
                .status();
            // Wait for process to exit
            for _ in 0..timeout {
                thread::sleep(Duration::from_secs(1));
                if !running::process_alive(pgid) {
                    return Ok(());
                }
            }
            // Force kill if still alive
            if running::process_alive(pgid) {
                Command::new("kill")
                    .arg("-KILL")
                    .arg(format!("-{}", pgid))
                    .status()
                    .context("failed to send SIGKILL after timeout")?;
            }
        }
    }
    #[cfg(windows)]
    {
        // /T kills the whole tree, /F forces termination.
        Command::new("taskkill")
            .args(["/PID", &pgid.to_string(), "/T", "/F"])
            .status()
            .context("failed to kill process tree")?;
    }
    Ok(())
}
// ============================================================================
// Task Logs
// ============================================================================
/// Root directory for task log files: `$HOME/.config/flow/logs`
/// (relative to "." when HOME is unset).
fn log_dir() -> PathBuf {
    let home = match std::env::var_os("HOME") {
        Some(home) => PathBuf::from(home),
        None => PathBuf::from("."),
    };
    home.join(".config/flow/logs")
}
/// Make `raw` safe as a file-name component: every character that is not
/// ASCII alphanumeric, '-' or '_' becomes '-', leading/trailing '-' are
/// stripped, and the result is lowercased.
fn sanitize_component(raw: &str) -> String {
    let mapped: String = raw
        .chars()
        .map(|ch| {
            if ch.is_ascii_alphanumeric() || ch == '-' || ch == '_' {
                ch
            } else {
                '-'
            }
        })
        .collect();
    mapped.trim_matches('-').to_lowercase()
}
/// Short hex digest of `input` (stable within a build), used to disambiguate
/// projects that share a sanitized name.
fn short_hash(input: &str) -> String {
    let digest = {
        let mut hasher = DefaultHasher::new();
        input.hash(&mut hasher);
        hasher.finish()
    };
    format!("{digest:x}")
}
/// Get the log path for a project/task
fn get_log_path(project_root: &Path, project_name: Option<&str>, task_name: &str) -> PathBuf {
let base = log_dir();
let slug = if let Some(name) = project_name {
let clean = sanitize_component(name);
if clean.is_empty() {
format!("proj-{}", short_hash(&project_root.display().to_string()))
} else {
format!(
"{clean}-{}",
short_hash(&project_root.display().to_string())
)
}
} else {
format!("proj-{}", short_hash(&project_root.display().to_string()))
};
let task = {
let clean = sanitize_component(task_name);
if clean.is_empty() {
"task".to_string()
} else {
clean
}
};
base.join(slug).join(format!("{task}.log"))
}
/// Show task logs
///
/// Source precedence: hub task id, then --list/--all shortcuts, then an
/// explicit --project, then flow.toml in the cwd, then the active project.
/// When no task is named, a single running task (or single log file) is
/// auto-selected; otherwise the candidates are listed.
pub fn show_task_logs(opts: TaskLogsOpts) -> Result<()> {
    // If task_id is provided, fetch from hub
    if let Some(ref task_id) = opts.task_id {
        return show_hub_task_logs(task_id, opts.follow);
    }
    if opts.list {
        return list_available_logs(opts.all);
    }
    if opts.all {
        return show_all_logs(opts.lines);
    }
    // Resolve project: --project flag > flow.toml in cwd > active project
    let (project_root, config_path, project_name) = if let Some(ref name) = opts.project {
        // Explicit project name
        match projects::resolve_project(name)? {
            Some(entry) => (entry.project_root, entry.config_path, Some(entry.name)),
            None => {
                bail!(
                    "Project '{}' not found. Use `f projects` to see registered projects.",
                    name
                );
            }
        }
    } else if opts.config.exists() {
        // flow.toml in current directory
        let (cfg_path, cfg) = tasks::load_project_config(opts.config.clone())?;
        // Canonicalization is best-effort; fall back to the raw paths.
        let canonical = cfg_path.canonicalize().unwrap_or_else(|_| cfg_path.clone());
        let root = cfg_path
            .parent()
            .unwrap_or(Path::new("."))
            .canonicalize()
            .unwrap_or_else(|_| cfg_path.parent().unwrap_or(Path::new(".")).to_path_buf());
        (root, canonical, cfg.project_name)
    } else if let Some(active) = projects::get_active_project() {
        // Fall back to active project
        match projects::resolve_project(&active)? {
            Some(entry) => (entry.project_root, entry.config_path, Some(entry.name)),
            None => {
                bail!(
                    "Active project '{}' not found. Use `f projects` to see registered projects.",
                    active
                );
            }
        }
    } else {
        bail!(
            "No flow.toml in current directory and no active project set.\nRun a task in a project first, or use: f logs -p <project>"
        );
    };
    // If no task specified, try to find available logs - prefer running tasks
    let task_name = match opts.task {
        Some(name) => name,
        None => {
            let logs = get_project_log_files(&project_root, project_name.as_deref());
            if logs.is_empty() {
                println!("No logs found for this project.");
                return Ok(());
            }
            // Check for running tasks
            let running = running::get_project_processes(&config_path).unwrap_or_default();
            let running_tasks: Vec<_> = running.iter().map(|p| p.task_name.clone()).collect();
            // Keep only log files that belong to a currently-running task.
            let running_logs: Vec<_> = logs
                .iter()
                .filter(|log| running_tasks.contains(log))
                .cloned()
                .collect();
            if running_logs.len() == 1 {
                // Single running task - use it
                running_logs[0].clone()
            } else if running_logs.len() > 1 {
                // Multiple running tasks
                println!("Multiple running tasks. Specify which to view:");
                for log in &running_logs {
                    println!(" f logs {}", log);
                }
                return Ok(());
            } else if logs.len() == 1 {
                // No running tasks, but only one log file
                logs[0].clone()
            } else {
                // No running tasks, multiple log files
                println!("No running tasks. Available logs:");
                for log in &logs {
                    println!(" f logs {}", log);
                }
                return Ok(());
            }
        }
    };
    let log_path = get_log_path(&project_root, project_name.as_deref(), &task_name);
    if !log_path.exists() {
        bail!(
            "No log file found for task '{}' at {}",
            task_name,
            log_path.display()
        );
    }
    if opts.follow {
        tail_follow(&log_path, opts.lines, opts.quiet)?;
    } else {
        tail_lines(&log_path, opts.lines)?;
    }
    Ok(())
}
fn show_all_logs(lines: usize) -> Result<()> {
let base = log_dir();
if !base.exists() {
println!("No logs found at {}", base.display());
return Ok(());
}
// Find the most recently modified log file
let mut newest: Option<(PathBuf, u64)> = None;
for entry in fs::read_dir(&base)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
for log_entry in fs::read_dir(&path)? {
let log_entry = log_entry?;
let log_path = log_entry.path();
if log_path.extension().map(|e| e == "log").unwrap_or(false) {
if let Ok(meta) = fs::metadata(&log_path) {
let modified = meta
.modified()
.ok()
.and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
.map(|d| d.as_secs())
.unwrap_or(0);
if newest.as_ref().map(|(_, t)| modified > *t).unwrap_or(true) {
newest = Some((log_path, modified));
}
}
}
}
}
}
match newest {
Some((path, _)) => {
println!("Showing most recent log: {}\n", path.display());
tail_lines(&path, lines)
}
None => {
println!("No log files found.");
Ok(())
}
}
}
/// Print every project's log files with size and relative modification age.
///
/// The `_all` flag is currently unused; listing always covers everything.
fn list_available_logs(_all: bool) -> Result<()> {
    let base = log_dir();
    if !base.exists() {
        println!("No logs found at {}", base.display());
        return Ok(());
    }
    println!("Available logs in {}:", base.display());
    for project_dir in fs::read_dir(&base)? {
        let dir_path = project_dir?.path();
        if !dir_path.is_dir() {
            continue;
        }
        let project = dir_path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("unknown");
        println!("\n{}:", project);
        for file in fs::read_dir(&dir_path)? {
            let file_path = file?.path();
            if !file_path.extension().map_or(false, |e| e == "log") {
                continue;
            }
            // The file stem is the task name (see log file layout).
            let task = file_path
                .file_stem()
                .and_then(|n| n.to_str())
                .unwrap_or("unknown");
            let meta = fs::metadata(&file_path)?;
            let modified = meta
                .modified()
                .ok()
                .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
                .map(|d| d.as_secs())
                .unwrap_or(0);
            let now = std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|d| d.as_secs())
                .unwrap_or(0);
            let age = format_relative_time(now.saturating_sub(modified));
            println!("  {} ({} bytes, modified {})", task, meta.len(), age);
        }
    }
    Ok(())
}
/// Render an age in seconds as a coarse human-readable "N<unit> ago" string
/// (seconds, minutes, hours, or days — whichever is the largest whole unit).
fn format_relative_time(seconds: u64) -> String {
    const MINUTE: u64 = 60;
    const HOUR: u64 = 60 * MINUTE;
    const DAY: u64 = 24 * HOUR;
    let (value, unit) = match seconds {
        s if s < MINUTE => (s, "s"),
        s if s < HOUR => (s / MINUTE, "m"),
        s if s < DAY => (s / HOUR, "h"),
        s => (s / DAY, "d"),
    };
    format!("{value}{unit} ago")
}
/// Get list of task names that have log files for a project
fn get_project_log_files(project_root: &Path, project_name: Option<&str>) -> Vec<String> {
let base = log_dir();
let slug = if let Some(name) = project_name {
let clean = sanitize_component(name);
if clean.is_empty() {
format!("proj-{}", short_hash(&project_root.display().to_string()))
} else {
format!(
"{clean}-{}",
short_hash(&project_root.display().to_string())
)
}
} else {
format!("proj-{}", short_hash(&project_root.display().to_string()))
};
let project_log_dir = base.join(&slug);
if !project_log_dir.exists() {
return Vec::new();
}
let mut tasks = Vec::new();
if let Ok(entries) = fs::read_dir(&project_log_dir) {
for entry in entries.flatten() {
let path = entry.path();
if path.extension().map(|e| e == "log").unwrap_or(false) {
if let Some(task_name) = path.file_stem().and_then(|n| n.to_str()) {
tasks.push(task_name.to_string());
}
}
}
}
tasks
}
/// Print the last `n` lines of the file at `path`.
///
/// Streams the file line-by-line and keeps only the trailing `n` lines in a
/// ring buffer, so a large log file is no longer buffered in memory in full
/// (the previous implementation collected every line into a `Vec` first).
/// Lines that fail to decode are skipped, preserving the earlier
/// best-effort behavior.
fn tail_lines(path: &Path, n: usize) -> Result<()> {
    use std::collections::VecDeque;
    let file = File::open(path).context("failed to open log file")?;
    if n == 0 {
        // Nothing to print; still surfaces open errors above, as before.
        return Ok(());
    }
    let reader = BufReader::new(file);
    // Ring buffer holding at most the final `n` lines seen so far.
    // Capacity is capped so a huge `n` doesn't preallocate excessively.
    let mut tail: VecDeque<String> = VecDeque::with_capacity(n.min(1024));
    for line in reader.lines().filter_map(|l| l.ok()) {
        if tail.len() == n {
            tail.pop_front();
        }
        tail.push_back(line);
    }
    for line in &tail {
        println!("{}", line);
    }
    Ok(())
}
/// Print the last `initial_lines` lines of `path`, then follow it forever
/// (`tail -f` style), polling for appended data every 100ms.
///
/// Fix: the followed chunks are written with `print!` (no trailing newline),
/// and stdout is line-buffered on a terminal — without an explicit flush,
/// partial lines (progress output, prompts) would not appear until the next
/// newline arrived. We now flush after every chunk, best-effort.
///
/// NOTE(review): log rotation/truncation is not handled — if the file is
/// replaced, this keeps reading the old inode. Confirm whether rotation can
/// occur before hardening.
fn tail_follow(path: &Path, initial_lines: usize, quiet: bool) -> Result<()> {
    use std::io::Write;
    // First show the last N lines
    tail_lines(path, initial_lines)?;
    // Then follow from the current end of file.
    let mut file = File::open(path).context("failed to open log file")?;
    file.seek(SeekFrom::End(0))?;
    if !quiet {
        println!("\n--- Following {} (Ctrl+C to stop) ---", path.display());
    }
    let mut buf = vec![0u8; 4096];
    loop {
        match file.read(&mut buf) {
            Ok(0) => {
                // No new data, sleep and retry
                thread::sleep(Duration::from_millis(100));
            }
            Ok(n) => {
                // Lossy conversion: invalid UTF-8 becomes U+FFFD, as before.
                print!("{}", String::from_utf8_lossy(&buf[..n]));
                // Flush so partial lines show up immediately; errors here are
                // non-fatal (e.g. a closed pipe ends with Ctrl+C anyway).
                std::io::stdout().flush().ok();
            }
            Err(e) => {
                bail!("Error reading log file: {}", e);
            }
        }
    }
}
/// Fetch and display logs for a hub task by ID.
///
/// Talks to the local hub's HTTP API (`GET /tasks/logs/<id>`). With
/// `follow = true` the endpoint is polled until the task reports a
/// `finished_at` timestamp; otherwise a single snapshot is printed (falling
/// back to follow mode if the task is not known to the hub yet). Lines from
/// stderr are prefixed with `!`, other lines with a space.
fn show_hub_task_logs(task_id: &str, follow: bool) -> Result<()> {
    use reqwest::blocking::Client;
    use serde::Deserialize;
    // Hub address is fixed here; NOTE(review): presumably mirrors the hub's
    // own bind address — confirm if that ever becomes configurable.
    const HUB_HOST: &str = "127.0.0.1";
    const HUB_PORT: u16 = 9050;
    // Wire shape of the hub's task-log response.
    #[derive(Debug, Deserialize)]
    struct TaskLog {
        id: String,
        name: String,
        command: String,
        cwd: Option<String>,
        #[allow(dead_code)]
        started_at: u64,
        // Present once the task has finished; used to stop the follow loop.
        finished_at: Option<u64>,
        exit_code: Option<i32>,
        output: Vec<OutputLine>,
    }
    #[derive(Debug, Deserialize)]
    struct OutputLine {
        #[allow(dead_code)]
        timestamp_ms: u64,
        // Stream tag; "stderr" lines get a '!' prefix below.
        stream: String,
        line: String,
    }
    let url = format!("http://{}:{}/tasks/logs/{}", HUB_HOST, HUB_PORT, task_id);
    let client = Client::builder()
        .timeout(Duration::from_secs(5))
        .build()
        .context("failed to create HTTP client")?;
    if follow {
        // Poll for updates
        // Count of lines already printed, so each poll prints only the delta.
        let mut last_output_count = 0;
        loop {
            let resp = client.get(&url).send();
            match resp {
                Ok(r) if r.status().is_success() => {
                    let log: TaskLog = r.json().context("failed to parse task log")?;
                    // Print new output lines
                    for line in log.output.iter().skip(last_output_count) {
                        let prefix = if line.stream == "stderr" { "!" } else { " " };
                        println!("{} {}", prefix, line.line);
                    }
                    last_output_count = log.output.len();
                    // Check if task is done
                    if log.finished_at.is_some() {
                        if let Some(code) = log.exit_code {
                            if code == 0 {
                                println!("\n✓ Task completed successfully");
                            } else {
                                println!("\n✗ Task failed with exit code {}", code);
                            }
                        }
                        break;
                    }
                }
                Ok(r) if r.status().as_u16() == 404 => {
                    // Task not found yet, wait
                    // (a queued task may have no log entry until it starts).
                    thread::sleep(Duration::from_millis(200));
                    continue;
                }
                Ok(r) => {
                    bail!("Hub returned error: {}", r.status());
                }
                Err(e) => {
                    bail!("Failed to fetch task logs: {}", e);
                }
            }
            // Poll interval between successful fetches while the task runs.
            thread::sleep(Duration::from_millis(500));
        }
    } else {
        // One-shot fetch
        let resp = client
            .get(&url)
            .send()
            .context("failed to fetch task logs")?;
        if resp.status().as_u16() == 404 {
            println!(
                "Task '{}' not found yet (queued). Streaming logs...",
                task_id
            );
            // Recurse into follow mode to pick the task up once it starts.
            return show_hub_task_logs(task_id, true);
        }
        if !resp.status().is_success() {
            bail!("Hub returned error: {}", resp.status());
        }
        let log: TaskLog = resp.json().context("failed to parse task log")?;
        println!("Task: {} ({})", log.name, log.id);
        println!("Command: {}", log.command);
        if let Some(cwd) = &log.cwd {
            println!("Working dir: {}", cwd);
        }
        println!();
        for line in &log.output {
            let prefix = if line.stream == "stderr" { "!" } else { " " };
            println!("{} {}", prefix, line.line);
        }
        if let Some(code) = log.exit_code {
            println!();
            if code == 0 {
                println!("✓ Exit code: {}", code);
            } else {
                println!("✗ Exit code: {}", code);
            }
        } else {
            println!("\n⋯ Task still running...");
        }
    }
    Ok(())
}
| rust | MIT | 85db313c274056cf0dbb36cc0aee35e037a66cfd | 2026-01-04T15:40:10.857433Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.