repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
cambrian/accumulator | https://github.com/cambrian/accumulator/blob/8f460270c274a4a65d777378a587be6dbc986c18/benches/group/class.rs | benches/group/class.rs | /// See https://bheisler.github.io/criterion.rs/book/getting_started.html to add more benchmarks.
#[macro_use]
extern crate criterion;
use accumulator::group::{ClassGroup, ElemFrom, Group, UnknownOrderGroup};
use criterion::Criterion;
use rug::Integer;
use std::str::FromStr;
fn criterion_benchmark(c: &mut Criterion) {
let left = ClassGroup::elem((
Integer::from_str("16").unwrap(),
Integer::from_str("9").unwrap(),
Integer::from_str(
"47837607866886756167333839869251273774207619337757918597995294777816250058331116325341018110\
672047217112377476473502060121352842575308793237621563947157630098485131517401073775191194319\
531549483898334742144138601661120476425524333273122132151927833887323969998955713328783526854\
198871332313399489386997681827578317938792170918711794684859311697439726596656501594138449739\
494228617068329664776714484742276158090583495714649193839084110987149118615158361352488488402\
038894799695420483272708933239751363849397287571692736881031223140446926522431859701738994562\
9057462766047140854869124473221137588347335081555186814036",
)
.unwrap(),
));
let right = left.clone();
// Generator element.
let base = ClassGroup::elem((
Integer::from(2),
Integer::from(1),
Integer::from_str(
"38270086293509404933867071895401019019366095470206334878396235822253000046664893060272814488\
537637773689901981178801648097082274060247034590097251157726104078788105213920859020152955455\
625239587118667793715310881328896381140419466618497705721542267109859175999164570663026821483\
359097065850719591509598145462062654351033736734969435747887449357951781277325201275310759791\
595382893654663731821371587793820926472466796571719355071267288789719294892126689081990790721\
631115839756336386618167146591801091079517830057354189504824978512357541217945487761391195650\
32459702128377126838952995785769100706778680652441494512278",
)
.unwrap(),
));
let exp = Integer::from_str("65315").unwrap();
let g_inv = base.clone();
let g_sq = ClassGroup::unknown_order_elem();
let aa = Integer::from_str("16").unwrap();
let bb = Integer::from_str("105").unwrap();
let cc = Integer::from_str(
"4783760786688675616733383986925127377420761933775791859799529477781625005833111632534101811067\
20472171123774764735020601213528425753087932376215639471576300984851315174010737751911943195315\
49483898334742144138601661120476425524333273122132151927833887323969998955713328783526854198871\
33231339948938699768182757831793879217091871179468485931169743972659665650159413844973949422861\
70683296647767144847422761580905834957146491938390841109871491186151583613524884884020388947996\
95420483272708933239751363849397287571692736881031223140446926522431859701738994562905746276604\
7140854869124473221137588347335081555186814207",
)
.unwrap();
// Element which requires one iteration to reduce, represented as a tuple here, since only
// reduced representations of ClassElem are allowed.
let g_red = (cc.clone(), bb.clone(), aa.clone());
let g_norm = (aa.clone(), bb.clone(), cc.clone());
c.bench_function("group_class_op", move |b| {
b.iter(|| ClassGroup::op(&left, &right))
});
c.bench_function("group_class_exp", move |b| {
b.iter(|| ClassGroup::exp(&base, &exp))
});
c.bench_function("group_class_inv", move |b| {
b.iter(|| ClassGroup::inv(&g_inv))
});
c.bench_function("group_class_normalize", move |b| {
b.iter_with_setup(
|| g_norm.clone(),
|g| ClassGroup::normalize(g.0, g.1, g.2),
)
});
c.bench_function("group_class_reduce", move |b| {
b.iter_with_setup(
|| g_red.clone(),
|g| ClassGroup::reduce(g.0, g.1, g.2),
)
});
c.bench_function("group_class_square", move |b| {
b.iter_with_setup(|| g_sq.clone(), |g| ClassGroup::square(&g))
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | MIT | 8f460270c274a4a65d777378a587be6dbc986c18 | 2026-01-04T20:15:46.214781Z | false |
cambrian/accumulator | https://github.com/cambrian/accumulator/blob/8f460270c274a4a65d777378a587be6dbc986c18/benches/group/rsa.rs | benches/group/rsa.rs | /// See https://bheisler.github.io/criterion.rs/book/getting_started.html to add more benchmarks.
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use accumulator::group::{ElemFrom, Group, Rsa2048};
use rug::Integer;
use std::str::FromStr;
fn bench_op<G: Group + ElemFrom<Integer>>() {
G::op(
&G::elem(
Integer::from_str(
"111066521363124532171649626395987136074128970245601106158251038731392583290069",
)
.unwrap(),
),
&G::elem(
Integer::from_str(
"106610920435831899020588753249099054915951032185883121197718271189872278955399",
)
.unwrap(),
),
);
}
fn bench_op_large<G: Group + ElemFrom<Integer>>() {
G::op(
&G::elem(Integer::from_str(
"21720738995539542858936915878186921869751915989840152165899303861582487240810878492659751749\
672737203717627738047648700009977053044057502917091973287111671693426065546612150833232954361\
536709981055037121764270784874720971933716065574032615073613728454497477072129686538873330572\
773963696018637078230885896090312654536801520372853122471254294946328305929844982319416384204\
134056551840145916685870951507887895129356414704422748714217113880489703934147612551938082501\
753055296801829703017260731439871110215618988509545129088484396848644805730347466581515692959\
313583208325725034506693916571047785061884094866050395109710",
)
.unwrap()),
&G::elem(Integer::from_str(
"31720738995539542858936915878186921869751915989840152165899303861582487240810878492659751749\
672737203717627738047648700009977053044057502917091973287111671693426065546612150833232954361\
536709981055037121764270784874720971933716065574032615073613728454497477072129686538873330572\
773963696018637078230885896090312654536801520372853122471254294946328305929844982319416384204\
134056551840145916685870951507887895129356414704422748714217113880489703934147612551938082501\
753055296801829703017260731439871110215618988509545129088484396848644805730347466581515692959\
313583208325725034506693916571047785061884094866050395109710",
)
.unwrap()),
);
}
fn bench_exp<G: Group + ElemFrom<u8>>() {
G::exp(
&G::elem(2),
&Integer::from_str(
"65315136833896061809557254466951240071191890612435768575001173256020447546800029221544380288\
474666886816442984548106882909827295319824031764930714696522619672276938781971873901815262421\
654562691730669161126673833543570922556193096897121287444423696122691826661878849856991509472\
508677693535083051665283493383",
)
.unwrap(),
);
}
fn bench_inv<G: Group + ElemFrom<u8>>() {
G::inv(&G::elem(2));
}
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("group_rsa_op", |b| b.iter(bench_op::<Rsa2048>));
c.bench_function("group_rsa_op_large", |b| b.iter(bench_op_large::<Rsa2048>));
c.bench_function("group_rsa_exp", |b| b.iter(bench_exp::<Rsa2048>));
c.bench_function("group_rsa_inv", |b| b.iter(bench_inv::<Rsa2048>));
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | MIT | 8f460270c274a4a65d777378a587be6dbc986c18 | 2026-01-04T20:15:46.214781Z | false |
cambrian/accumulator | https://github.com/cambrian/accumulator/blob/8f460270c274a4a65d777378a587be6dbc986c18/benches/proof/poke2.rs | benches/proof/poke2.rs | /// See https://bheisler.github.io/criterion.rs/book/getting_started.html to add more benchmarks.
#[macro_use]
extern crate criterion;
use accumulator::group::Rsa2048;
use accumulator::group::{ElemFrom, UnknownOrderGroup};
use accumulator::proof::Poke2;
use accumulator::util::int;
use criterion::Criterion;
fn bench_poke2_rsa() {
let base = Rsa2048::unknown_order_elem();
let exp = int(20);
let result = Rsa2048::elem(1_048_576);
let proof = Poke2::<Rsa2048>::prove(&base, &exp, &result);
Poke2::verify(&base, &result, &proof);
}
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("poke2_rsa", |b| b.iter(bench_poke2_rsa));
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | MIT | 8f460270c274a4a65d777378a587be6dbc986c18 | 2026-01-04T20:15:46.214781Z | false |
cambrian/accumulator | https://github.com/cambrian/accumulator/blob/8f460270c274a4a65d777378a587be6dbc986c18/benches/proof/poe.rs | benches/proof/poe.rs | /// See https://bheisler.github.io/criterion.rs/book/getting_started.html to add more benchmarks.
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use accumulator::group::{ElemFrom, Rsa2048, UnknownOrderGroup};
use accumulator::proof::Poe;
use accumulator::util::int;
fn bench_poe_rsa() {
let base = Rsa2048::unknown_order_elem();
let exp = int(20);
let result = Rsa2048::elem(1_048_576);
let proof = Poe::<Rsa2048>::prove(&base, &exp, &result);
Poe::<Rsa2048>::verify(&base, &exp, &result, &proof);
}
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("poe_rsa", |b| b.iter(bench_poe_rsa));
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | MIT | 8f460270c274a4a65d777378a587be6dbc986c18 | 2026-01-04T20:15:46.214781Z | false |
cambrian/accumulator | https://github.com/cambrian/accumulator/blob/8f460270c274a4a65d777378a587be6dbc986c18/benches/proof/pokcr.rs | benches/proof/pokcr.rs | /// See https://bheisler.github.io/criterion.rs/book/getting_started.html to add more benchmarks.
#[macro_use]
extern crate criterion;
use accumulator::group::{ElemFrom, Rsa2048};
use accumulator::proof::Pokcr;
use accumulator::util::int;
use criterion::Criterion;
fn bench_pokcr_rsa() {
let witnesses = [Rsa2048::elem(2), Rsa2048::elem(3)];
let x = [int(2), int(2)];
let alphas = [Rsa2048::elem(4), Rsa2048::elem(9)];
let proof = Pokcr::<Rsa2048>::prove(&witnesses);
Pokcr::verify(&alphas, &x, &proof);
}
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("pokcr_rsa", |b| b.iter(bench_pokcr_rsa));
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
| rust | MIT | 8f460270c274a4a65d777378a587be6dbc986c18 | 2026-01-04T20:15:46.214781Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/panel_generator.rs | src/panel_generator.rs | use eframe::egui::{self, CursorIcon, Id, LayerId, Order, Sense};
use epaint::Color32;
use serde::{Deserialize, Serialize};
use std::{
fs::File,
io::{Read, Write},
};
use crate::{
generators::{
render_fbm, render_hills, render_island, render_landmass, render_mid_point,
render_mudslide, render_water_erosion, FbmConf, HillsConf, IslandConf, LandMassConf,
MidPointConf, MudSlideConf, NormalizeConf, WaterErosionConf,
},
worldgen::{Step, StepType},
VERSION,
};
/// actions to do by the main program
pub enum GeneratorAction {
/// recompute heightmap from a specific step (deleteStep, stepIndex)
Regen(bool, usize),
/// disable a step and recompute the heightmap
Disable(usize),
/// enable a step and recompute the heightmap
Enable(usize),
/// display a specific step heightmap in the 2D preview
DisplayLayer(usize),
/// display a specific step mask in the 2D preview
DisplayMask(usize),
/// change the RNG seed
SetSeed(u64),
/// remove all steps
Clear,
}
#[derive(Serialize, Deserialize)]
pub struct PanelGenerator {
/// to avoid loading a save from another version
version: String,
#[serde(skip)]
/// is the world generator currently computing the heightmap?
pub is_running: bool,
#[serde(skip)]
/// are we currently displaying a mask or a heightmap ?
pub mask_selected: bool,
/// generator steps with their configuration and masks
pub steps: Vec<Step>,
/// current selected step. used for combo box. must be outside of steps in case steps is empty
cur_step: Step,
/// current selected step index
pub selected_step: usize,
/// current drag n drop destination
move_to_pos: usize,
/// is the drag n drop zone currently hovered by the mouse cursor?
hovered: bool,
/// random number generator's seed
pub seed: u64,
}
impl Default for PanelGenerator {
fn default() -> Self {
Self {
version: VERSION.to_owned(),
is_running: false,
mask_selected: false,
steps: Vec::new(),
cur_step: Step {
typ: StepType::Hills(HillsConf::default()),
..Default::default()
},
selected_step: 0,
move_to_pos: 0,
hovered: false,
seed: 0xdeadbeef,
}
}
}
fn render_step_gui(ui: &mut egui::Ui, id: Id, body: impl FnOnce(&mut egui::Ui)) -> Option<f32> {
let is_being_dragged = ui.ctx().is_being_dragged(id);
if !is_being_dragged {
ui.scope(body);
} else {
let layer_id = LayerId::new(Order::Tooltip, id);
let response = ui.with_layer_id(layer_id, body).response;
ui.output_mut(|i| i.cursor_icon = CursorIcon::Grabbing);
if let Some(pointer_pos) = ui.ctx().pointer_interact_pos() {
let mut delta = pointer_pos - response.rect.center();
delta.x += 60.0;
ui.ctx().translate_layer(layer_id, delta);
return Some(delta.y);
}
}
None
}
impl PanelGenerator {
pub fn enabled_steps(&self) -> usize {
self.steps.iter().filter(|s| !s.disabled).count()
}
fn render_header(&mut self, ui: &mut egui::Ui, progress: f32) -> Option<GeneratorAction> {
let mut action = None;
ui.horizontal(|ui| {
ui.heading("Generators");
if self.is_running {
ui.spinner();
}
});
ui.add(egui::ProgressBar::new(progress).show_percentage());
ui.horizontal(|ui| {
if ui.button("Clear").clicked() {
self.steps.clear();
action = Some(GeneratorAction::Clear)
}
ui.label("Seed");
let old_seed = self.seed;
let old_size = ui.spacing().interact_size.x;
ui.spacing_mut().interact_size.x = 100.0;
ui.add(egui::DragValue::new(&mut self.seed).speed(1.0));
ui.spacing_mut().interact_size.x = old_size;
if self.seed != old_seed {
action = Some(GeneratorAction::SetSeed(self.seed));
}
});
action
}
/// render UI to add a new step
fn render_new_step(&mut self, ui: &mut egui::Ui) -> Option<GeneratorAction> {
let mut action = None;
ui.horizontal(|ui| {
if ui.button("New step").clicked() {
self.steps.push(self.cur_step.clone());
self.selected_step = self.steps.len() - 1;
action = Some(GeneratorAction::Regen(false, self.selected_step))
}
egui::ComboBox::from_label("")
.selected_text(format!("{}", self.cur_step))
.show_ui(ui, |ui| {
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::Hills(HillsConf::default()),
..Default::default()
},
"Hills",
)
.on_hover_text("Add round hills to generate a smooth land");
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::Fbm(FbmConf::default()),
..Default::default()
},
"Fbm",
)
.on_hover_text("Add fractional brownian motion to generate a mountainous land");
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::MidPoint(MidPointConf::default()),
..Default::default()
},
"MidPoint",
)
.on_hover_text("Use mid point deplacement to generate a mountainous land");
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::Normalize(NormalizeConf::default()),
..Default::default()
},
"Normalize",
)
.on_hover_text("Scale the terrain back to the 0.0-1.0 range");
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::LandMass(LandMassConf::default()),
..Default::default()
},
"LandMass",
)
.on_hover_text(
"Scale the terrain so that only a proportion of land is above water level",
);
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::MudSlide(MudSlideConf::default()),
..Default::default()
},
"MudSlide",
)
.on_hover_text("Simulate mud sliding and smoothing the terrain");
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::WaterErosion(WaterErosionConf::default()),
..Default::default()
},
"WaterErosion",
)
.on_hover_text("Simulate rain falling and carving rivers");
ui.selectable_value(
&mut self.cur_step,
Step {
typ: StepType::Island(IslandConf::default()),
..Default::default()
},
"Island",
)
.on_hover_text("Lower height on the map borders");
});
});
action
}
/// render the list of steps of current project
fn render_step_list(
&mut self,
ui: &mut egui::Ui,
to_remove: &mut Option<usize>,
to_move: &mut Option<usize>,
) -> Option<GeneratorAction> {
let mut action = None;
let len = self.steps.len();
// let dragging = ui.ctx().dragged_id.is_some()
let dragging = ui.memory(|m| m.is_anything_being_dragged()) && self.hovered;
let response = ui
.scope(|ui| {
for (i, step) in self.steps.iter_mut().enumerate() {
if dragging && self.move_to_pos == i {
ui.separator();
}
let item_id = Id::new("wgen").with(i);
if let Some(dy) = render_step_gui(ui, item_id, |ui| {
ui.horizontal(|ui| {
let response = ui
.button("▓")
.on_hover_text("Drag this to change step order");
let response = ui.interact(response.rect, item_id, Sense::drag());
if response.hovered() {
ui.output_mut(|o| o.cursor_icon = CursorIcon::Grab);
}
if ui.button("⊗").on_hover_text("Delete this step").clicked() {
*to_remove = Some(i);
}
if ui
.button(egui::RichText::new("👁").color(if step.disabled {
Color32::from_rgb(0, 0, 0)
} else {
Color32::from_rgb(200, 200, 200)
}))
.on_hover_text(if step.disabled {
"Enable this step"
} else {
"Disable this step"
})
.clicked()
{
step.disabled = !step.disabled;
if step.disabled {
action = Some(GeneratorAction::Disable(i));
} else {
action = Some(GeneratorAction::Enable(i));
}
}
if ui
.button(if step.mask.is_none() { "⬜" } else { "⬛" })
.on_hover_text("Add a mask to this step")
.clicked()
{
self.mask_selected = true;
self.selected_step = i;
}
if ui
.selectable_label(
self.selected_step == i && !self.mask_selected,
step.to_string(),
)
.clicked()
{
self.selected_step = i;
self.mask_selected = false;
}
});
}) {
*to_move = Some(i);
let dest = i as i32 + (dy / 20.0) as i32;
self.move_to_pos = dest.clamp(0, len as i32) as usize;
}
}
})
.response;
self.hovered = response.hovered();
action
}
/// render the configuration UI for currently selected step
fn render_curstep_conf(&mut self, ui: &mut egui::Ui) -> Option<GeneratorAction> {
let mut action = None;
match &mut self.steps[self.selected_step] {
Step {
typ: StepType::Hills(conf),
..
} => render_hills(ui, conf),
Step {
typ: StepType::LandMass(conf),
..
} => render_landmass(ui, conf),
Step {
typ: StepType::MudSlide(conf),
..
} => render_mudslide(ui, conf),
Step {
typ: StepType::Fbm(conf),
..
} => render_fbm(ui, conf),
Step {
typ: StepType::WaterErosion(conf),
..
} => render_water_erosion(ui, conf),
Step {
typ: StepType::Island(conf),
..
} => render_island(ui, conf),
Step {
typ: StepType::MidPoint(conf),
..
} => render_mid_point(ui, conf),
Step {
typ: StepType::Normalize(_),
..
} => (),
}
if ui.button("Refresh").clicked() {
action = Some(GeneratorAction::Regen(false, self.selected_step));
self.mask_selected = false;
}
action
}
pub fn render(&mut self, ui: &mut egui::Ui, progress: f32) -> Option<GeneratorAction> {
let previous_selected_step = self.selected_step;
let previous_mask_selected = self.mask_selected;
let mut action = self.render_header(ui, progress);
action = action.or(self.render_new_step(ui));
ui.end_row();
let mut to_remove = None;
let mut to_move = None;
action = action.or(self.render_step_list(ui, &mut to_remove, &mut to_move));
ui.separator();
if !self.steps.is_empty() {
action = action.or(self.render_curstep_conf(ui));
}
if action.is_none()
&& (previous_selected_step != self.selected_step
|| previous_mask_selected != self.mask_selected)
{
if self.mask_selected {
action = Some(GeneratorAction::DisplayMask(self.selected_step));
} else {
action = Some(GeneratorAction::DisplayLayer(self.selected_step));
}
}
if let Some(i) = to_remove {
self.steps.remove(i);
if self.selected_step >= self.steps.len() {
self.selected_step = if self.steps.is_empty() {
0
} else {
self.steps.len() - 1
};
}
action = Some(GeneratorAction::Regen(true, i));
self.mask_selected = false;
}
if ui.input(|i| i.pointer.any_released()) {
if let Some(i) = to_move {
if i != self.move_to_pos {
let step = self.steps.remove(i);
let dest = if self.move_to_pos > i {
self.move_to_pos - 1
} else {
self.move_to_pos
};
self.steps.insert(dest, step);
action = Some(GeneratorAction::Regen(false, i));
self.mask_selected = false;
}
}
}
action
}
pub fn load(&mut self, file_path: &str) -> Result<(), String> {
let mut file = File::open(file_path).map_err(|_| "Unable to open the file")?;
let mut contents = String::new();
file.read_to_string(&mut contents)
.map_err(|_| "Unable to read the file")?;
let gen_data: PanelGenerator =
ron::from_str(&contents).map_err(|e| format!("Cannot parse the file : {}", e))?;
if gen_data.version != VERSION {
return Err(format!(
"Bad file version. Expected {}, found {}",
VERSION, gen_data.version
));
}
*self = gen_data;
Ok(())
}
pub fn save(&self, file_path: &str) -> Result<(), String> {
let data = ron::to_string(self).unwrap();
let mut buffer = File::create(file_path).map_err(|_| "Unable to create the file")?;
write!(buffer, "{}", data).map_err(|_| "Unable to write to the file")?;
Ok(())
}
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/exporter.rs | src/exporter.rs | use std::{path::Path, sync::mpsc::Sender};
use crate::{
panel_export::{ExportFileType, PanelExport},
worldgen::{Step, WorldGenerator},
ThreadMessage,
};
pub fn export_heightmap(
// random number generator's seed to use
seed: u64,
// list of generator steps with their configuration and optional masks
steps: &[Step],
// size and number of files to export, file name pattern
export_data: &PanelExport,
// channel to send feedback messages to the main thread
tx: Sender<ThreadMessage>,
// minimum amount of progress to report (below this value, the global %age won't change)
min_progress_step: f32,
) -> Result<(), String> {
let file_width = export_data.export_width as usize;
let file_height = export_data.export_height as usize;
let mut wgen = WorldGenerator::new(
seed,
(
(export_data.export_width * export_data.tiles_h) as usize,
(export_data.export_height * export_data.tiles_v) as usize,
),
);
wgen.generate(steps, tx, min_progress_step);
let (min, max) = wgen.get_min_max();
let coef = if max - min > std::f32::EPSILON {
1.0 / (max - min)
} else {
1.0
};
for ty in 0..export_data.tiles_v as usize {
for tx in 0..export_data.tiles_h as usize {
let offset_x = if export_data.seamless {
tx * (file_width - 1)
} else {
tx * file_width
};
let offset_y = if export_data.seamless {
ty * (file_height - 1)
} else {
ty * file_height
};
let path = format!(
"{}_x{}_y{}.{}",
export_data.file_path,
tx,
ty,
export_data.file_type.to_string()
);
match export_data.file_type {
ExportFileType::PNG => write_png(
file_width,
file_height,
offset_x,
offset_y,
&wgen,
min,
coef,
&path,
)?,
ExportFileType::EXR => write_exr(
file_width,
file_height,
offset_x,
offset_y,
&wgen,
min,
coef,
&path,
)?,
}
}
}
Ok(())
}
fn write_png(
file_width: usize,
file_height: usize,
offset_x: usize,
offset_y: usize,
wgen: &WorldGenerator,
min: f32,
coef: f32,
path: &str,
) -> Result<(), String> {
let mut buf = vec![0u8; file_width * file_height * 2];
for py in 0..file_height {
for px in 0..file_width {
let mut h = wgen.combined_height(px + offset_x, py + offset_y);
h = (h - min) * coef;
let offset = (px + py * file_width) * 2;
let pixel = (h * 65535.0) as u16;
let upixel = pixel.to_ne_bytes();
buf[offset] = upixel[0];
buf[offset + 1] = upixel[1];
}
}
image::save_buffer(
&Path::new(&path),
&buf,
file_width as u32,
file_height as u32,
image::ColorType::L16,
)
.map_err(|e| format!("Error while saving {}: {}", &path, e))
}
fn write_exr(
file_width: usize,
file_height: usize,
offset_x: usize,
offset_y: usize,
wgen: &WorldGenerator,
min: f32,
coef: f32,
path: &str,
) -> Result<(), String> {
use exr::prelude::*;
let channel = SpecificChannels::new(
(ChannelDescription::named("Y", SampleType::F16),),
|Vec2(px, py)| {
let h = wgen.combined_height(px + offset_x, py + offset_y);
let h = f16::from_f32((h - min) * coef);
(h,)
},
);
Image::from_encoded_channels(
(file_width, file_height),
Encoding {
compression: Compression::ZIP1,
blocks: Blocks::ScanLines,
line_order: LineOrder::Increasing,
},
channel,
)
.write()
.to_file(path)
.map_err(|e| format!("Error while saving {}: {}", &path, e))
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/panel_maskedit.rs | src/panel_maskedit.rs | use eframe::{
egui::{self, PointerButton},
emath,
};
use epaint::{Color32, ColorImage, Pos2, Rect};
use three_d::{
vec3, Blend, Camera, ColorMaterial, CpuMaterial, CpuMesh, CpuTexture, Cull, DepthTest, Gm,
Indices, Mat4, Mesh, Object, Positions, Srgba, TextureData, Viewport,
};
use crate::{panel_2dview::Panel2dAction, MASK_SIZE};
/// maximum size of the brush relative to the canvas
const MAX_BRUSH_SIZE: f32 = 0.25;
#[derive(Clone, Copy)]
pub struct BrushConfig {
/// value painted with middle mouse button
pub value: f32,
/// brush radius from a single 'pixel' in the heightmap (0.0) to 25% of heightmap's size (1.0)
pub size: f32,
/// brush radius where the opacity starts to falloff from no falloff(0.0) to center of the brush (1.0)
pub falloff: f32,
/// how fast the brush updates the mask 0.0: slow, 1.0: fast
pub opacity: f32,
}
pub struct PanelMaskEdit {
/// preview canvas size in pixels
image_size: usize,
/// the mask as a MASK_SIZE x MASK_SIZE f32 matrix
mask: Option<Vec<f32>>,
/// the brush parameters
conf: BrushConfig,
/// should the mesh used to render the mask be updated to reflect changes in mask ?
mesh_updated: bool,
/// are we rendering a new mask for the first time ?
new_mask: bool,
/// should the mesh used to render the brush be updated to reflect a change in brush falloff ?
brush_updated: bool,
/// are we currently modifying the mask (cursor is in canvas and one mouse button is pressed)
is_painting: bool,
/// used to compute the brush impact on the mask depending on elapsed time
prev_frame_time: f64,
/// how transparent we want the heightmap to appear on top of the mask
pub heightmap_transparency: f32,
}
impl PanelMaskEdit {
pub fn new(image_size: usize) -> Self {
PanelMaskEdit {
image_size,
mask: None,
conf: BrushConfig {
value: 0.5,
size: 0.5,
falloff: 0.5,
opacity: 0.5,
},
mesh_updated: false,
new_mask: true,
is_painting: false,
brush_updated: false,
prev_frame_time: -1.0,
heightmap_transparency: 0.5,
}
}
pub fn get_mask(&self) -> Option<Vec<f32>> {
self.mask.clone()
}
pub fn display_mask(&mut self, image_size: usize, mask: Option<Vec<f32>>) {
self.image_size = image_size;
self.mesh_updated = true;
self.new_mask = true;
self.mask = mask.or_else(|| Some(vec![1.0; MASK_SIZE * MASK_SIZE]));
}
pub fn render(
&mut self,
ui: &mut egui::Ui,
heightmap_img: &ColorImage,
) -> Option<Panel2dAction> {
let mut action = None;
ui.vertical(|ui| {
egui::Frame::dark_canvas(ui.style()).show(ui, |ui| {
self.render_3dview(ui, heightmap_img, self.image_size as u32);
});
if self.is_painting {
action = Some(Panel2dAction::MaskUpdated);
ui.ctx().request_repaint();
} else {
self.prev_frame_time = -1.0;
}
ui.label("mouse buttons : left increase, right decrease, middle set brush value");
ui.horizontal(|ui| {
ui.label("brush size");
ui.add(
egui::DragValue::new(&mut self.conf.size)
.speed(0.01)
.range(1.0 / (MASK_SIZE as f32)..=1.0),
);
ui.label("falloff");
let old_falloff = self.conf.falloff;
ui.add(
egui::DragValue::new(&mut self.conf.falloff)
.speed(0.01)
.range(0.0..=1.0),
);
ui.label("value");
ui.add(
egui::DragValue::new(&mut self.conf.value)
.speed(0.01)
.range(0.0..=1.0),
);
ui.label("opacity");
ui.add(
egui::DragValue::new(&mut self.conf.opacity)
.speed(0.01)
.range(0.0..=1.0),
);
// need to update the brush mesh ?
self.brush_updated = old_falloff != self.conf.falloff;
});
ui.horizontal(|ui| {
ui.label("heightmap opacity");
ui.add(
egui::DragValue::new(&mut self.heightmap_transparency)
.speed(0.01)
.range(0.0..=1.0),
);
});
if ui
.button("Clear mask")
.on_hover_text("Delete this mask")
.clicked()
{
action = Some(Panel2dAction::MaskDelete);
if let Some(ref mut mask) = self.mask {
mask.fill(1.0);
self.mesh_updated = true;
}
}
});
action
}
fn render_3dview(&mut self, ui: &mut egui::Ui, heightmap_img: &ColorImage, image_size: u32) {
let (rect, response) = ui.allocate_exact_size(
egui::Vec2::splat(self.image_size as f32),
egui::Sense::drag(),
);
let lbutton = ui.input(|i| i.pointer.button_down(PointerButton::Primary));
let rbutton = ui.input(|i| i.pointer.button_down(PointerButton::Secondary));
let mbutton = ui.input(|i| i.pointer.button_down(PointerButton::Middle));
let mut mouse_pos = ui.input(|i| i.pointer.hover_pos());
let to_screen = emath::RectTransform::from_to(
Rect::from_min_size(Pos2::ZERO, response.rect.square_proportions()),
response.rect,
);
let from_screen = to_screen.inverse();
let mut mesh_updated = self.mesh_updated;
let new_mask = self.new_mask;
let hmap_transp = self.heightmap_transparency;
let heightmap_img = if new_mask {
Some(heightmap_img.clone())
} else {
None
};
let brush_updated = self.brush_updated;
let brush_config = self.conf;
let time = if self.prev_frame_time == -1.0 {
self.prev_frame_time = ui.input(|i| i.time);
0.0
} else {
let t = ui.input(|i| i.time);
let elapsed = t - self.prev_frame_time;
self.prev_frame_time = t;
elapsed
};
if let Some(pos) = mouse_pos {
// mouse position in canvas from 0.0,0.0 (bottom left) to 1.0,1.0 (top right)
let canvas_pos = from_screen * pos;
mouse_pos = Some(canvas_pos);
self.is_painting = (lbutton || rbutton || mbutton) && in_canvas(canvas_pos);
if self.is_painting && time > 0.0 {
self.update_mask(canvas_pos, lbutton, rbutton, brush_config, time as f32);
mesh_updated = true;
}
}
let mask = if mesh_updated {
self.mask.clone()
} else {
None
};
let callback = egui::PaintCallback {
rect,
callback: std::sync::Arc::new(egui_glow::CallbackFn::new(move |info, painter| {
with_three_d_context(painter.gl(), |three_d, renderer| {
if new_mask {
if let Some(ref heightmap_img) = heightmap_img {
renderer.set_heightmap(three_d, heightmap_img, image_size);
}
}
if brush_updated {
renderer.update_brush(three_d, brush_config);
}
if mesh_updated {
renderer.update_model(three_d, &mask);
}
renderer.render(three_d, &info, mouse_pos, brush_config, hmap_transp);
});
})),
};
ui.painter().add(callback);
self.mesh_updated = false;
self.new_mask = false;
}
fn update_mask(
&mut self,
canvas_pos: Pos2,
lbutton: bool,
rbutton: bool,
brush_config: BrushConfig,
time: f32,
) {
if let Some(ref mut mask) = self.mask {
let mx = canvas_pos.x * MASK_SIZE as f32;
let my = canvas_pos.y * MASK_SIZE as f32;
let brush_radius = brush_config.size * MASK_SIZE as f32 * MAX_BRUSH_SIZE;
let falloff_dist = (1.0 - brush_config.falloff) * brush_radius;
let minx = (mx - brush_radius).max(0.0) as usize;
let maxx = ((mx + brush_radius) as usize).min(MASK_SIZE);
let miny = (my - brush_radius).max(0.0) as usize;
let maxy = ((my + brush_radius) as usize).min(MASK_SIZE);
let opacity_factor = 0.5 + brush_config.opacity;
let (target_value, time_coef) = if lbutton {
(0.0, 10.0)
} else if rbutton {
// for some unknown reason, white color is faster than black!
(1.0, 3.0)
} else {
// mbutton
(brush_config.value, 5.0)
};
let brush_coef = 1.0 / (brush_radius - falloff_dist);
let coef = time * time_coef * opacity_factor;
for y in miny..maxy {
let dy = y as f32 - my;
let yoff = y * MASK_SIZE;
for x in minx..maxx {
let dx = x as f32 - mx;
// distance from brush center
let dist = (dx * dx + dy * dy).sqrt();
if dist >= brush_radius {
// out of the brush
continue;
}
let alpha = if dist < falloff_dist {
1.0
} else {
1.0 - (dist - falloff_dist) * brush_coef
};
let current_value = mask[x + yoff];
mask[x + yoff] = current_value + coef * alpha * (target_value - current_value);
}
}
}
}
}
fn in_canvas(canvas_pos: Pos2) -> bool {
canvas_pos.x >= 0.0 && canvas_pos.x <= 1.0 && canvas_pos.y >= 0.0 && canvas_pos.y <= 1.0
}
fn with_three_d_context<R>(
gl: &std::sync::Arc<egui_glow::glow::Context>,
f: impl FnOnce(&three_d::Context, &mut Renderer) -> R,
) -> R {
use std::cell::RefCell;
thread_local! {
pub static THREE_D: RefCell<Option<(three_d::Context,Renderer)>> = RefCell::new(None);
}
#[allow(unsafe_code)]
unsafe {
use egui_glow::glow::HasContext as _;
gl.disable(egui_glow::glow::DEPTH_TEST);
gl.enable(egui_glow::glow::BLEND);
if !cfg!(target_arch = "wasm32") {
gl.disable(egui_glow::glow::FRAMEBUFFER_SRGB);
}
}
THREE_D.with(|context| {
let mut context = context.borrow_mut();
let (three_d, renderer) = context.get_or_insert_with(|| {
let three_d = three_d::Context::from_gl_context(gl.clone()).unwrap();
let renderer = Renderer::new(&three_d);
(three_d, renderer)
});
f(three_d, renderer)
})
}
pub struct Renderer {
mask_model: Gm<Mesh, ColorMaterial>,
brush_mesh: CpuMesh,
brush_model: Gm<Mesh, ColorMaterial>,
heightmap_model: Gm<Mesh, ColorMaterial>,
mask_mesh: CpuMesh,
material: ColorMaterial,
}
impl Renderer {
pub fn new(three_d: &three_d::Context) -> Self {
let mut material = ColorMaterial::new(
three_d,
&CpuMaterial {
roughness: 1.0,
metallic: 0.0,
albedo: Srgba::WHITE,
..Default::default()
},
);
material.render_states.cull = Cull::None;
material.render_states.depth_test = DepthTest::Always;
material.render_states.blend = Blend::TRANSPARENCY;
let mask_mesh = build_mask();
let mask_model = Gm::new(Mesh::new(three_d, &mask_mesh), material.clone());
let brush_mesh = build_brush(0.5);
let brush_model = Gm::new(Mesh::new(three_d, &brush_mesh), material.clone());
let heightmap_model = Gm::new(Mesh::new(three_d, &CpuMesh::square()), material.clone());
Self {
mask_model,
brush_mesh,
brush_model,
mask_mesh,
heightmap_model,
material,
}
}
pub fn update_brush(&mut self, three_d: &three_d::Context, brush_conf: BrushConfig) {
if let Positions::F32(ref mut vertices) = self.brush_mesh.positions {
let inv_fall = 1.0 - brush_conf.falloff;
// update position of inner opaque ring
for i in 0..32 {
let angle = std::f32::consts::PI * 2.0 * (i as f32) / 32.0;
vertices[i + 1] = vec3(angle.cos() * inv_fall, angle.sin() * inv_fall, 0.0);
}
}
self.brush_model = Gm::new(Mesh::new(three_d, &self.brush_mesh), self.material.clone());
}
pub fn update_model(&mut self, three_d: &three_d::Context, mask: &Option<Vec<f32>>) {
if let Some(mask) = mask {
if let Some(ref mut colors) = self.mask_mesh.colors {
let mut idx = 0;
for y in 0..MASK_SIZE {
let yoff = (MASK_SIZE - 1 - y) * MASK_SIZE;
for x in 0..MASK_SIZE {
let rgb_val = (mask[yoff + x] * 255.0).clamp(0.0, 255.0) as u8;
colors[idx].r = rgb_val;
colors[idx].g = rgb_val;
colors[idx].b = rgb_val;
idx += 1;
}
}
}
self.mask_model = Gm::new(Mesh::new(three_d, &self.mask_mesh), self.material.clone());
}
}
pub fn render(
&mut self,
_three_d: &three_d::Context,
info: &egui::PaintCallbackInfo,
mouse_pos: Option<Pos2>,
brush_conf: BrushConfig,
hmap_transp: f32,
) {
// Set where to paint
let viewport = info.viewport_in_pixels();
let viewport = Viewport {
x: viewport.left_px,
y: viewport.from_bottom_px,
width: viewport.width_px as _,
height: viewport.height_px as _,
};
let target = vec3(0.0, 0.0, 0.0);
let campos = vec3(0.0, 0.0, 1.0);
let camera = Camera::new_orthographic(
viewport,
campos,
target,
vec3(0.0, 1.0, 0.0),
10.0,
0.0,
1000.0,
);
self.mask_model.render(&camera, &[]);
if let Some(mouse_pos) = mouse_pos {
let transfo = Mat4::from_translation(vec3(
mouse_pos.x * 10.0 - 5.0,
5.0 - mouse_pos.y * 10.0,
0.1,
));
let scale = Mat4::from_scale(brush_conf.size * 10.0 * MAX_BRUSH_SIZE);
self.brush_model.set_transformation(transfo * scale);
self.brush_model.render(&camera, &[]);
}
let transfo = Mat4::from_scale(5.0);
self.heightmap_model.set_transformation(transfo);
self.heightmap_model.material.color.a = (hmap_transp * 255.0) as u8;
self.heightmap_model.render(&camera, &[]);
}
fn set_heightmap(
&mut self,
three_d: &three_d::Context,
heightmap_img: &ColorImage,
image_size: u32,
) {
self.heightmap_model = build_heightmap(three_d, heightmap_img, image_size);
}
}
/// build a circular mesh with a double ring : one opaque 32 vertices inner ring and one transparent 64 vertices outer ring
fn build_brush(falloff: f32) -> CpuMesh {
const VERTICES_COUNT: usize = 1 + 32 + 64;
let mut colors = Vec::with_capacity(VERTICES_COUNT);
let mut vertices = Vec::with_capacity(VERTICES_COUNT);
let mut indices = Vec::with_capacity(3 * 32 + 9 * 32);
vertices.push(vec3(0.0, 0.0, 0.0));
let inv_fall = 1.0 - falloff;
// inner opaque ring
for i in 0..32 {
let angle = std::f32::consts::PI * 2.0 * (i as f32) / 32.0;
vertices.push(vec3(angle.cos() * inv_fall, angle.sin() * inv_fall, 0.0));
}
// outer transparent ring
for i in 0..64 {
let angle = std::f32::consts::PI * 2.0 * (i as f32) / 64.0;
vertices.push(vec3(angle.cos(), angle.sin(), 0.0));
}
for _ in 0..33 {
colors.push(Srgba::RED);
}
for _ in 0..64 {
colors.push(Srgba::new(255, 0, 0, 0));
}
// inner ring
for i in 0..32 {
indices.push(0);
indices.push(1 + i);
indices.push(1 + (1 + i) % 32);
}
// outer ring, 32 vertices inside, 64 vertices outside
for i in 0..32 {
indices.push(1 + i);
indices.push(33 + 2 * i);
indices.push(33 + (2 * i + 1) % 64);
indices.push(1 + i);
indices.push(1 + (i + 1) % 32);
indices.push(33 + (2 * i + 1) % 64);
indices.push(1 + (i + 1) % 32);
indices.push(33 + (2 * i + 1) % 64);
indices.push(33 + (2 * i + 2) % 64);
}
CpuMesh {
// name: "brush".to_string(),
positions: Positions::F32(vertices),
indices: Indices::U16(indices),
colors: Some(colors),
..Default::default()
}
}
fn build_mask() -> CpuMesh {
let mut vertices = Vec::with_capacity(MASK_SIZE * MASK_SIZE);
let mut indices = Vec::with_capacity(6 * (MASK_SIZE - 1) * (MASK_SIZE - 1));
let mut colors = Vec::with_capacity(MASK_SIZE * MASK_SIZE);
for y in 0..MASK_SIZE {
let vy = y as f32 / (MASK_SIZE - 1) as f32 * 10.0 - 5.0;
for x in 0..MASK_SIZE {
let vx = x as f32 / (MASK_SIZE - 1) as f32 * 10.0 - 5.0;
vertices.push(three_d::vec3(vx, vy, 0.0));
colors.push(Srgba::WHITE);
}
}
for y in 0..MASK_SIZE - 1 {
let y_offset = y * MASK_SIZE;
for x in 0..MASK_SIZE - 1 {
let off = x + y_offset;
indices.push((off) as u32);
indices.push((off + MASK_SIZE) as u32);
indices.push((off + 1) as u32);
indices.push((off + MASK_SIZE) as u32);
indices.push((off + MASK_SIZE + 1) as u32);
indices.push((off + 1) as u32);
}
}
CpuMesh {
positions: Positions::F32(vertices),
indices: Indices::U32(indices),
colors: Some(colors),
..Default::default()
}
}
/// build a simple textured square to display the heightmap
fn build_heightmap(
three_d: &three_d::Context,
heightmap_img: &ColorImage,
image_size: u32,
) -> Gm<Mesh, ColorMaterial> {
let mesh = CpuMesh::square();
let mut material = ColorMaterial::new(
three_d,
&CpuMaterial {
roughness: 1.0,
metallic: 0.0,
albedo: Srgba::new(255, 255, 255, 128),
albedo_texture: Some(CpuTexture {
width: image_size,
height: image_size,
data: TextureData::RgbaU8(
heightmap_img.pixels.iter().map(Color32::to_array).collect(),
),
..Default::default()
}),
..Default::default()
},
);
material.render_states.cull = Cull::None;
material.render_states.depth_test = DepthTest::Always;
material.render_states.blend = Blend::TRANSPARENCY;
Gm::new(Mesh::new(three_d, &mesh), material)
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/fps.rs | src/fps.rs | use std::time::{Duration, Instant};
pub struct FpsCounter {
last: Instant,
fps_counter: usize,
fps: usize,
}
impl Default for FpsCounter {
fn default() -> Self {
Self {
last: Instant::now(),
fps_counter: 0,
fps: 0,
}
}
}
impl FpsCounter {
pub fn new_frame(&mut self) {
self.fps_counter += 1;
if self.last.elapsed() >= Duration::from_secs(1) {
self.fps = self.fps_counter;
self.fps_counter = 0;
self.last = Instant::now();
}
}
pub fn fps(&self) -> usize {
self.fps
}
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/panel_3dview.rs | src/panel_3dview.rs | use eframe::egui::{self, PointerButton};
use image::EncodableLayout;
use three_d::{
core::prelude::Srgba, degrees, radians, vec2, vec3, AmbientLight, Camera, ClearState,
CpuMaterial, CpuMesh, CpuTexture, Cull, DirectionalLight, Gm, Indices, InnerSpace, Mat3, Mat4,
Mesh, PhysicalMaterial, Positions, TextureData, Vec3,
};
use crate::worldgen::ExportMap;
const ZSCALE: f32 = 200.0;
const XY_SCALE: f32 = 500.0;
const PANEL3D_SIZE: f32 = 256.0;
const WATER_LEVEL_DELTA: f32 = 3.0;
#[derive(Default, Clone)]
pub struct MeshData {
size: (usize, usize),
vertices: Vec<three_d::Vec3>,
indices: Vec<u32>,
normals: Vec<three_d::Vec3>,
uv: Vec<three_d::Vec2>,
}
#[derive(Clone, Copy)]
pub struct Panel3dViewConf {
/// camera x and y orbit angles
pub orbit: three_d::Vec2,
/// camera x and y pan distances
pub pan: three_d::Vec2,
/// camera zoom in degrees (y field of view is 90 - zoom)
pub zoom: f32,
/// vertical scale to apply to the heightmap
pub hscale: f32,
/// water plane z position
pub water_level: f32,
/// do we display the water plane ?
pub show_water: bool,
/// do we display the skybox ?
pub show_skybox: bool,
}
pub struct Panel3dView {
size: f32,
conf: Panel3dViewConf,
mesh_data: MeshData,
mesh_updated: bool,
}
impl Default for Panel3dView {
fn default() -> Self {
Self {
size: PANEL3D_SIZE,
conf: Panel3dViewConf {
pan: three_d::Vec2::new(0.0, 0.0),
orbit: three_d::Vec2::new(std::f32::consts::FRAC_PI_2, std::f32::consts::FRAC_PI_4),
zoom: 60.0,
hscale: 100.0,
water_level: 40.0,
show_water: true,
show_skybox: true,
},
mesh_data: Default::default(),
mesh_updated: false,
}
}
}
impl Panel3dView {
pub fn new(size: f32) -> Self {
Self {
size,
..Default::default()
}
}
pub fn render(&mut self, ui: &mut egui::Ui) {
ui.vertical(|ui| {
egui::Frame::dark_canvas(ui.style()).show(ui, |ui| {
self.render_3dview(ui);
});
ui.horizontal(|ui| {
ui.label("Height scale %");
ui.add(
egui::DragValue::new(&mut self.conf.hscale)
.speed(1.0)
.range(std::ops::RangeInclusive::new(10.0, 200.0)),
);
});
ui.horizontal(|ui| {
ui.label("Show water plane");
let old_show_water = self.conf.show_water;
ui.checkbox(&mut self.conf.show_water, "");
if old_show_water != self.conf.show_water {
self.update_water_level(self.conf.show_water, self.conf.water_level);
}
ui.label("Water height");
let old_water_level = self.conf.water_level;
ui.add_enabled(
self.conf.show_water,
egui::DragValue::new(&mut self.conf.water_level)
.speed(0.1)
.range(std::ops::RangeInclusive::new(0.0, 100.0)),
);
if old_water_level != self.conf.water_level {
self.update_water_level(false, old_water_level);
self.update_water_level(true, self.conf.water_level);
}
ui.label("Show skybox");
ui.checkbox(&mut self.conf.show_skybox, "");
});
});
}
pub fn update_water_level(&mut self, show: bool, level: f32) {
let sign = if show { 1.0 } else { -1.0 };
for v in self.mesh_data.vertices.iter_mut() {
let delta = v.z - level;
if delta > 0.0 {
v.z += sign * WATER_LEVEL_DELTA;
} else {
v.z -= sign * WATER_LEVEL_DELTA;
}
}
self.mesh_updated = true;
}
pub fn update_mesh(&mut self, hmap: &ExportMap) {
let size = hmap.get_size();
self.mesh_data.size = size;
self.mesh_data.vertices = Vec::with_capacity(size.0 * size.1);
let grid_size = (XY_SCALE / size.0 as f32, XY_SCALE / size.1 as f32);
let off_x = -0.5 * grid_size.0 * size.0 as f32;
let off_y = -0.5 * grid_size.1 * size.1 as f32;
let (min, max) = hmap.get_min_max();
let coef = ZSCALE
* if max - min > std::f32::EPSILON {
1.0 / (max - min)
} else {
1.0
};
let ucoef = 1.0 / size.0 as f32;
let vcoef = 1.0 / size.1 as f32;
for y in 0..size.1 {
let vy = y as f32 * grid_size.1 + off_y;
for x in 0..size.0 {
let vx = x as f32 * grid_size.0 + off_x;
let mut vz = hmap.height(x, y);
vz = (vz - min) * coef;
self.mesh_data.vertices.push(three_d::vec3(vx, -vy, vz));
self.mesh_data
.uv
.push(three_d::vec2(x as f32 * ucoef, y as f32 * vcoef));
}
}
if self.conf.show_water {
self.update_water_level(true, self.conf.water_level);
}
self.mesh_data.indices = Vec::with_capacity(6 * (size.1 - 1) * (size.0 - 1));
for y in 0..size.1 - 1 {
let y_offset = y * size.0;
for x in 0..size.0 - 1 {
let off = x + y_offset;
self.mesh_data.indices.push((off) as u32);
self.mesh_data.indices.push((off + size.0) as u32);
self.mesh_data.indices.push((off + 1) as u32);
self.mesh_data.indices.push((off + size.0) as u32);
self.mesh_data.indices.push((off + size.0 + 1) as u32);
self.mesh_data.indices.push((off + 1) as u32);
}
}
let mut cpu_mesh = three_d::CpuMesh {
positions: three_d::Positions::F32(self.mesh_data.vertices.clone()),
indices: three_d::Indices::U32(self.mesh_data.indices.clone()),
..Default::default()
};
cpu_mesh.compute_normals();
self.mesh_data.normals = cpu_mesh.normals.take().unwrap();
self.mesh_updated = true;
}
fn render_3dview(&mut self, ui: &mut egui::Ui) {
let (rect, response) =
ui.allocate_exact_size(egui::Vec2::splat(self.size), egui::Sense::drag());
let lbutton = ui.input(|i| i.pointer.button_down(PointerButton::Primary));
let rbutton = ui.input(|i| i.pointer.button_down(PointerButton::Secondary));
let mbutton = ui.input(|i| i.pointer.button_down(PointerButton::Middle));
if lbutton {
self.conf.orbit[0] += response.drag_delta().x * 0.01;
self.conf.orbit[1] += response.drag_delta().y * 0.01;
self.conf.orbit[1] = self.conf.orbit[1].clamp(0.15, std::f32::consts::FRAC_PI_2 - 0.05);
} else if rbutton {
self.conf.pan[0] += response.drag_delta().x * 0.5;
self.conf.pan[1] += response.drag_delta().y * 0.5;
self.conf.pan[1] = self.conf.pan[1].clamp(0.0, 140.0);
} else if mbutton {
self.conf.zoom += response.drag_delta().y * 0.15;
}
// Clone locals so we can move them into the paint callback:
let conf = self.conf;
let mesh_updated = self.mesh_updated;
let mesh_data: Option<MeshData> = if mesh_updated {
Some(self.mesh_data.clone())
} else {
None
};
let callback = egui::PaintCallback {
rect,
callback: std::sync::Arc::new(egui_glow::CallbackFn::new(move |info, painter| {
with_three_d_context(painter.gl(), |three_d, renderer| {
if mesh_updated {
renderer.update_model(three_d, &mesh_data);
}
renderer.render(
three_d,
&info,
conf,
FrameInput::new(&three_d, &info, painter),
);
});
})),
};
ui.painter().add(callback);
self.mesh_updated = false;
}
}
///
/// Translates from egui input to three-d input
///
pub struct FrameInput<'a> {
screen: three_d::RenderTarget<'a>,
viewport: three_d::Viewport,
scissor_box: three_d::ScissorBox,
}
impl FrameInput<'_> {
pub fn new(
context: &three_d::Context,
info: &egui::PaintCallbackInfo,
painter: &egui_glow::Painter,
) -> Self {
use three_d::*;
// Disable sRGB textures for three-d
#[cfg(not(target_arch = "wasm32"))]
#[allow(unsafe_code)]
unsafe {
use egui_glow::glow::HasContext as _;
context.disable(egui_glow::glow::FRAMEBUFFER_SRGB);
}
// Constructs a screen render target to render the final image to
let screen = painter.intermediate_fbo().map_or_else(
|| {
RenderTarget::screen(
context,
info.viewport.width() as u32,
info.viewport.height() as u32,
)
},
|fbo| {
RenderTarget::from_framebuffer(
context,
info.viewport.width() as u32,
info.viewport.height() as u32,
fbo,
)
},
);
// Set where to paint
let viewport = info.viewport_in_pixels();
let viewport = Viewport {
x: viewport.left_px,
y: viewport.from_bottom_px,
width: viewport.width_px as u32,
height: viewport.height_px as u32,
};
// Respect the egui clip region (e.g. if we are inside an `egui::ScrollArea`).
let clip_rect = info.clip_rect_in_pixels();
let scissor_box = ScissorBox {
x: clip_rect.left_px,
y: clip_rect.from_bottom_px,
width: clip_rect.width_px as u32,
height: clip_rect.height_px as u32,
};
Self {
screen,
scissor_box,
viewport,
}
}
}
fn with_three_d_context<R>(
gl: &std::sync::Arc<egui_glow::glow::Context>,
f: impl FnOnce(&three_d::Context, &mut Renderer) -> R,
) -> R {
use std::cell::RefCell;
thread_local! {
pub static THREE_D: RefCell<Option<(three_d::Context,Renderer)>> = RefCell::new(None);
}
#[allow(unsafe_code)]
unsafe {
use egui_glow::glow::HasContext as _;
gl.enable(egui_glow::glow::DEPTH_TEST);
if !cfg!(target_arch = "wasm32") {
gl.disable(egui_glow::glow::FRAMEBUFFER_SRGB);
}
gl.clear(egui_glow::glow::DEPTH_BUFFER_BIT);
gl.clear_depth_f32(1.0);
gl.depth_func(egui_glow::glow::LESS);
}
THREE_D.with(|context| {
let mut context = context.borrow_mut();
let (three_d, renderer) = context.get_or_insert_with(|| {
let three_d = three_d::Context::from_gl_context(gl.clone()).unwrap();
let renderer = Renderer::new(&three_d);
(three_d, renderer)
});
f(three_d, renderer)
})
}
pub struct Renderer {
terrain_mesh: CpuMesh,
terrain_model: Gm<Mesh, PhysicalMaterial>,
terrain_material: PhysicalMaterial,
water_model: Gm<Mesh, PhysicalMaterial>,
directional: DirectionalLight,
ambient: AmbientLight,
sky: Gm<Mesh, PhysicalMaterial>,
}
impl Renderer {
pub fn new(three_d: &three_d::Context) -> Self {
let terrain_mesh = CpuMesh::square();
let mut terrain_material = PhysicalMaterial::new_opaque(
three_d,
&CpuMaterial {
roughness: 1.0,
metallic: 0.0,
albedo: Srgba::new_opaque(45, 30, 25),
..Default::default()
},
);
terrain_material.render_states.cull = Cull::Back;
let terrain_model = Gm::new(Mesh::new(three_d, &terrain_mesh), terrain_material.clone());
let water_model = build_water_plane(three_d);
Self {
terrain_mesh,
terrain_model,
terrain_material,
water_model,
sky: build_sky(three_d),
directional: DirectionalLight::new(
three_d,
1.5,
Srgba::new_opaque(255, 222, 180),
vec3(-0.5, 0.5, -0.5).normalize(),
),
ambient: AmbientLight::new(&three_d, 0.5, Srgba::WHITE),
}
}
pub fn update_model(&mut self, three_d: &three_d::Context, mesh_data: &Option<MeshData>) {
if let Some(mesh_data) = mesh_data {
let mut rebuild = false;
if let Positions::F32(ref mut vertices) = self.terrain_mesh.positions {
rebuild = vertices.len() != mesh_data.vertices.len();
*vertices = mesh_data.vertices.clone();
}
if rebuild {
self.terrain_mesh.indices = Indices::U32(mesh_data.indices.clone());
self.terrain_mesh.normals = Some(mesh_data.normals.clone());
self.terrain_mesh.uvs = Some(mesh_data.uv.clone());
self.terrain_mesh.tangents = None;
}
self.terrain_model = Gm::new(
Mesh::new(three_d, &self.terrain_mesh),
self.terrain_material.clone(),
);
}
}
pub fn render(
&mut self,
_three_d: &three_d::Context,
_info: &egui::PaintCallbackInfo,
conf: Panel3dViewConf,
frame_input: FrameInput<'_>,
) {
// Set where to paint
let viewport = frame_input.viewport;
let target = vec3(0.0, 0.0, 0.0);
let campos = vec3(XY_SCALE * 2.0, 0.0, 0.0);
let mut camera = Camera::new_perspective(
viewport,
campos,
target,
vec3(0.0, 0.0, 1.0),
degrees((90.0 - conf.zoom * 0.8).clamp(1.0, 90.0)),
0.1,
XY_SCALE * 10.0,
);
camera.rotate_around_with_fixed_up(target, 0.0, conf.orbit[1]);
let up = camera.up();
let right_direction = camera.right_direction();
camera.translate(conf.pan[1] * up - conf.pan[0] * right_direction);
let camz = camera.position().z;
if camz < conf.water_level + 10.0 {
camera.translate(vec3(0.0, 0.0, conf.water_level + 10.0 - camz));
}
let mut transfo = Mat4::from_angle_z(radians(conf.orbit[0] * 2.0));
transfo.z[2] = conf.hscale / 100.0;
self.terrain_model.set_transformation(transfo);
let light_transfo = Mat3::from_angle_z(radians(conf.orbit[0] * 2.0));
self.directional.direction = light_transfo * vec3(-0.5, 0.5, -0.5);
self.directional
.generate_shadow_map(1024, &[&self.terrain_model]);
// Get the screen render target to be able to render something on the screen
frame_input
.screen
// Clear the color and depth of the screen render target
.clear_partially(frame_input.scissor_box, ClearState::depth(1.0));
frame_input.screen.render_partially(
frame_input.scissor_box,
&camera,
&[&self.terrain_model],
&[&self.ambient, &self.directional],
);
if conf.show_water {
let mut water_transfo =
Mat4::from_translation(Vec3::new(0.0, 0.0, conf.water_level * conf.hscale * 0.01));
water_transfo.x[0] = XY_SCALE * 10.0;
water_transfo.y[1] = XY_SCALE * 10.0;
self.water_model.set_transformation(water_transfo);
frame_input.screen.render_partially(
frame_input.scissor_box,
&camera,
&[&self.water_model],
&[&self.ambient, &self.directional],
);
}
if conf.show_skybox {
let transfo = Mat4::from_angle_z(radians(conf.orbit[0] * 2.0));
self.sky.set_transformation(transfo);
frame_input.screen.render_partially(
frame_input.scissor_box,
&camera,
&[&self.sky],
&[],
);
}
frame_input.screen.into_framebuffer(); // Take back the screen fbo, we will continue to use it.
}
}
const SKY_BYTES: &[u8] = include_bytes!("../sky.jpg");
fn build_sky(three_d: &three_d::Context) -> Gm<Mesh, PhysicalMaterial> {
let img = image::load_from_memory(SKY_BYTES).unwrap();
let buffer = img.as_rgb8().unwrap().as_bytes();
let mut data = Vec::new();
let mut i = 0;
while i < (img.height() * img.width() * 3) as usize {
let r = buffer[i];
let g = buffer[i + 1];
let b = buffer[i + 2];
i += 3;
data.push([r, g, b]);
}
const SUBDIV: u32 = 32;
let mut sky2 = uv_wrapping_cylinder(SUBDIV);
sky2.transform(Mat4::from_nonuniform_scale(
ZSCALE * 5.0,
XY_SCALE * 2.0,
XY_SCALE * 2.0,
))
.unwrap();
sky2.transform(Mat4::from_angle_y(degrees(-90.0))).unwrap();
sky2.transform(Mat4::from_angle_z(degrees(90.0))).unwrap();
let mut sky_material = PhysicalMaterial::new_opaque(
three_d,
&CpuMaterial {
roughness: 1.0,
metallic: 0.0,
emissive: Srgba::WHITE,
emissive_texture: Some(CpuTexture {
width: img.width(),
height: img.height(),
data: TextureData::RgbU8(data),
..Default::default()
}),
..Default::default()
},
);
// water_material.render_states.depth_test = DepthTest::Greater;
sky_material.render_states.cull = Cull::Front;
Gm::new(Mesh::new(three_d, &sky2), sky_material)
}
fn build_water_plane(three_d: &three_d::Context) -> Gm<Mesh, PhysicalMaterial> {
let water_mesh = CpuMesh::square();
let mut water_material = PhysicalMaterial::new_opaque(
three_d,
&CpuMaterial {
roughness: 0.1,
metallic: 0.2,
albedo: Srgba::new_opaque(50, 60, 150),
..Default::default()
},
);
// water_material.render_states.depth_test = DepthTest::Greater;
water_material.render_states.cull = Cull::Back;
Gm::new(Mesh::new(three_d, &water_mesh), water_material)
}
fn uv_wrapping_cylinder(angle_subdivisions: u32) -> CpuMesh {
let length_subdivisions = 1;
let mut positions = Vec::new();
let mut indices = Vec::new();
for i in 0..length_subdivisions + 1 {
let x = i as f32 / length_subdivisions as f32;
for j in 0..angle_subdivisions + 1 {
let angle = 2.0 * std::f32::consts::PI * j as f32 / angle_subdivisions as f32;
positions.push(vec3(x, angle.cos(), angle.sin()));
}
}
for i in 0..length_subdivisions {
for j in 0..angle_subdivisions {
indices.push((i * (angle_subdivisions + 1) + j) as u16);
indices.push((i * (angle_subdivisions + 1) + (j + 1)) as u16);
indices.push(((i + 1) * (angle_subdivisions + 1) + (j + 1)) as u16);
indices.push((i * (angle_subdivisions + 1) + j) as u16);
indices.push(((i + 1) * (angle_subdivisions + 1) + (j + 1)) as u16);
indices.push(((i + 1) * (angle_subdivisions + 1) + j) as u16);
}
}
let mut uvs = Vec::new();
for i in 0..angle_subdivisions + 1 {
let u = i as f32 / angle_subdivisions as f32;
uvs.push(vec2(u, 1.0));
}
for i in 0..angle_subdivisions + 1 {
let u = i as f32 / angle_subdivisions as f32;
uvs.push(vec2(u, 0.0));
}
let mut mesh = CpuMesh {
// name: "cylinder".to_string(),
positions: Positions::F32(positions),
indices: Indices::U16(indices),
uvs: Some(uvs),
..Default::default()
};
mesh.compute_normals();
mesh
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/panel_2dview.rs | src/panel_2dview.rs | use eframe::egui;
use egui_extras::RetainedImage;
use epaint::{Color32, ColorImage};
use crate::{fps::FpsCounter, panel_maskedit::PanelMaskEdit, worldgen::ExportMap};
pub enum Panel2dAction {
/// inform the main program that the preview size has changed. terrain/3d view must be recomputed
ResizePreview(usize),
/// inform the main program that mask must be copied to the generator panel
MaskUpdated,
/// inform the main program that mask must be deleted in the generator panel
MaskDelete,
}
pub struct Panel2dView {
/// preview image of the heightmap
img: ColorImage,
/// minimum value in the heightmap
min: f32,
/// maximum value in the heightmap
max: f32,
/// are we displaying the mask editor ?
mask_mode: bool,
/// size of the preview canvas in pixels
image_size: usize,
/// size of the heightmap
preview_size: usize,
/// should we update the preview every time a step is computed ?
pub live_preview: bool,
/// utility to display FPS
fps_counter: FpsCounter,
/// egui renderable image
ui_img: Option<RetainedImage>,
/// mask editor subpanel
mask_editor: PanelMaskEdit,
}
impl Panel2dView {
pub fn new(image_size: usize, preview_size: u32, hmap: &ExportMap) -> Self {
let mut panel = Panel2dView {
img: ColorImage::new([image_size, image_size], Color32::BLACK),
min: 0.0,
max: 0.0,
image_size,
mask_mode: false,
live_preview: true,
preview_size: preview_size as usize,
fps_counter: FpsCounter::default(),
ui_img: None,
mask_editor: PanelMaskEdit::new(image_size),
};
panel.refresh(image_size, preview_size, Some(hmap));
panel
}
pub fn get_current_mask(&self) -> Option<Vec<f32>> {
self.mask_editor.get_mask()
}
pub fn display_mask(&mut self, image_size: usize, preview_size: u32, mask: Option<Vec<f32>>) {
self.image_size = image_size;
self.preview_size = preview_size as usize;
self.mask_editor.display_mask(image_size, mask);
self.mask_mode = true;
}
pub fn refresh(&mut self, image_size: usize, preview_size: u32, hmap: Option<&ExportMap>) {
self.image_size = image_size;
self.mask_mode = false;
self.preview_size = preview_size as usize;
if self.img.width() != image_size {
self.img = ColorImage::new([self.image_size, self.image_size], Color32::BLACK);
}
if let Some(hmap) = hmap {
let (min, max) = hmap.get_min_max();
let coef = if max - min > std::f32::EPSILON {
1.0 / (max - min)
} else {
1.0
};
self.min = min;
self.max = max;
let mut idx = 0;
for y in 0..image_size {
let py = ((y * preview_size as usize) as f32 / image_size as f32) as usize;
for x in 0..image_size {
let px = ((x * preview_size as usize) as f32 / image_size as f32) as usize;
let mut h = hmap.height(px as usize, py as usize);
h = (h - min) * coef;
self.img.pixels[idx] = Color32::from_gray((h * 255.0).clamp(0.0, 255.0) as u8);
idx += 1;
}
}
};
self.ui_img = Some(RetainedImage::from_color_image("hmap", self.img.clone()));
}
pub fn render(&mut self, ui: &mut egui::Ui) -> Option<Panel2dAction> {
let mut action = None;
let old_size = self.preview_size;
self.fps_counter.new_frame();
if self.mask_mode {
action = self.mask_editor.render(ui, &self.img);
} else {
ui.vertical(|ui| {
if let Some(ref img) = self.ui_img {
img.show(ui);
}
ui.horizontal(|ui| {
ui.label(format!("Height range : {} - {}", self.min, self.max));
});
});
}
ui.label(format!("FPS : {}", self.fps_counter.fps()));
ui.horizontal(|ui| {
ui.label("Preview size");
egui::ComboBox::from_label("")
.selected_text(format!("{}x{}", self.preview_size, self.preview_size))
.show_ui(ui, |ui| {
ui.selectable_value(&mut self.preview_size, 64, "64x64");
ui.selectable_value(&mut self.preview_size, 128, "128x128");
ui.selectable_value(&mut self.preview_size, 256, "256x256");
ui.selectable_value(&mut self.preview_size, 512, "512x512");
});
ui.label("Live preview");
ui.checkbox(&mut self.live_preview, "");
});
if self.preview_size != old_size {
action = Some(Panel2dAction::ResizePreview(self.preview_size));
}
action
}
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/main.rs | src/main.rs | extern crate exr;
extern crate image;
extern crate noise;
extern crate rand;
mod exporter;
mod fps;
mod generators;
mod panel_2dview;
mod panel_3dview;
mod panel_export;
mod panel_generator;
mod panel_maskedit;
mod panel_save;
mod worldgen;
use eframe::egui::{self, Visuals};
use epaint::emath;
use exporter::export_heightmap;
use std::sync::mpsc::{self, Receiver, Sender};
use std::thread;
use std::time::Instant;
use panel_2dview::{Panel2dAction, Panel2dView};
use panel_3dview::Panel3dView;
use panel_export::PanelExport;
use panel_generator::{GeneratorAction, PanelGenerator};
use panel_save::{PanelSaveLoad, SaveLoadAction};
use worldgen::{generator_thread, ExportMap, WorldGenCommand, WorldGenerator};
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
pub const MASK_SIZE: usize = 64;
/// messages sent to the main thread by either world generator or exporter threads
pub enum ThreadMessage {
/// from world generator : all steps have been computed => update 2D/3D previews
GeneratorDone(ExportMap),
/// from world generator : update progress bar
GeneratorStepProgress(f32),
/// from world generator : one step has been computed => update 2D preview if live preview enabled
GeneratorStepDone(usize, Option<ExportMap>),
/// from world generator : return the heightmap for a specific step
GeneratorStepMap(usize, ExportMap),
/// from exporter : one step has been computed
ExporterStepDone(usize),
/// from exporter : export is finished
ExporterDone(Result<(), String>),
/// from exporter : update progress bar
ExporterStepProgress(f32),
}
fn main() {
let options = eframe::NativeOptions {
multisampling: 8,
depth_buffer: 24,
renderer: eframe::Renderer::Glow,
vsync: true,
viewport: egui::ViewportBuilder::default().with_maximized(true),
..Default::default()
};
println!(
"wgen v{} - {} cpus {} cores",
VERSION,
num_cpus::get(),
num_cpus::get_physical()
);
eframe::run_native(
"wgen",
options,
Box::new(|_cc| Ok(Box::new(MyApp::default()))),
)
.or_else(|e| {
eprintln!("Error: {}", e);
Ok::<(), ()>(())
})
.ok();
}
struct MyApp {
/// size in pixels of the 2D preview canvas
image_size: usize,
/// size of the preview heightmap (from 64x64 to 512x512)
preview_size: usize,
/// current world generator progress
progress: f32,
/// exporter progress
exporter_progress: f32,
/// exporter progress bar text
exporter_text: String,
/// exporter current step
exporter_cur_step: usize,
/// random number generator's seed
seed: u64,
// ui widgets
gen_panel: PanelGenerator,
export_panel: PanelExport,
panel_3d: Panel3dView,
panel_2d: Panel2dView,
load_save_panel: PanelSaveLoad,
// thread communication
/// channel to receive messages from either world generator or exporter
thread2main_rx: Receiver<ThreadMessage>,
/// channel to send messages to the world generator thread
main2wgen_tx: Sender<WorldGenCommand>,
/// channel to send messages to the main thread from the exporter thread
exp2main_tx: Sender<ThreadMessage>,
/// an error to display in a popup
err_msg: Option<String>,
/// are we editing a mask ?
mask_step: Option<usize>,
/// last time the mask was updated
last_mask_updated: f64,
}
impl Default for MyApp {
fn default() -> Self {
let preview_size = 128;
let image_size = 790; //368;
let seed = 0xdeadbeef;
let wgen = WorldGenerator::new(seed, (preview_size, preview_size));
let panel_2d = Panel2dView::new(image_size, preview_size as u32, &wgen.get_export_map());
// generator -> main channel
let (exp2main_tx, thread2main_rx) = mpsc::channel();
// main -> generator channel
let (main2gen_tx, gen_rx) = mpsc::channel();
let gen_tx = exp2main_tx.clone();
thread::spawn(move || {
generator_thread(seed, preview_size, gen_rx, gen_tx);
});
Self {
image_size,
preview_size,
seed,
panel_2d,
panel_3d: Panel3dView::new(image_size as f32),
progress: 1.0,
exporter_progress: 1.0,
exporter_text: String::new(),
exporter_cur_step: 0,
mask_step: None,
gen_panel: PanelGenerator::default(),
export_panel: PanelExport::default(),
load_save_panel: PanelSaveLoad::default(),
thread2main_rx,
main2wgen_tx: main2gen_tx,
exp2main_tx,
err_msg: None,
last_mask_updated: 0.0,
}
}
}
impl MyApp {
fn export(&mut self) {
let steps = self.gen_panel.steps.clone();
let export_panel = self.export_panel.clone();
let seed = self.seed;
let tx = self.exp2main_tx.clone();
let min_progress_step = 0.01 * self.gen_panel.enabled_steps() as f32;
thread::spawn(move || {
let res = export_heightmap(seed, &steps, &export_panel, tx.clone(), min_progress_step);
tx.send(ThreadMessage::ExporterDone(res)).unwrap();
});
}
    /// Recompute the terrain from step `from_idx` onward.
    ///
    /// Any pending work for those steps is aborted first; when `must_delete`
    /// is set, step `from_idx` is also removed from the generator thread's
    /// state (presumably the UI panel already removed it on its side —
    /// see PanelGenerator).
    fn regen(&mut self, must_delete: bool, from_idx: usize) {
        // progress restarts at the fraction of steps that remain valid
        self.progress = from_idx as f32 / self.gen_panel.enabled_steps() as f32;
        self.main2wgen_tx
            .send(WorldGenCommand::Abort(from_idx))
            .unwrap();
        let len = self.gen_panel.steps.len();
        if must_delete {
            self.main2wgen_tx
                .send(WorldGenCommand::DeleteStep(from_idx))
                .unwrap();
        }
        if len == 0 {
            return;
        }
        // re-queue an ExecuteStep command for every step from from_idx on
        for i in from_idx.min(len - 1)..len {
            self.main2wgen_tx
                .send(WorldGenCommand::ExecuteStep(
                    i,
                    self.gen_panel.steps[i].clone(),
                    self.panel_2d.live_preview,
                    // report progress in 1% increments of the whole pipeline
                    0.01 * self.gen_panel.enabled_steps() as f32,
                ))
                .unwrap();
        }
        self.gen_panel.is_running = true;
    }
fn set_seed(&mut self, new_seed: u64) {
self.seed = new_seed;
self.main2wgen_tx
.send(WorldGenCommand::SetSeed(new_seed))
.unwrap();
self.regen(false, 0);
}
fn resize(&mut self, new_size: usize) {
if self.preview_size == new_size {
return;
}
self.preview_size = new_size;
self.main2wgen_tx
.send(WorldGenCommand::SetSize(new_size))
.unwrap();
self.regen(false, 0);
}
    /// Render the left side panel: version label, exporter, project
    /// save/load and the scrollable list of generation steps.
    fn render_left_panel(&mut self, ctx: &egui::Context) {
        egui::SidePanel::left("Generation").show(ctx, |ui| {
            ui.label(format!("wgen {}", VERSION));
            ui.separator();
            // exporter panel; render() returns true when "Export!" was clicked
            if self
                .export_panel
                .render(ui, self.exporter_progress, &self.exporter_text)
            {
                // lock the exporter UI until ExporterDone comes back
                self.export_panel.enabled = false;
                self.exporter_progress = 0.0;
                self.exporter_cur_step = 0;
                self.export();
            }
            ui.separator();
            match self.load_save_panel.render(ui) {
                Some(SaveLoadAction::Load) => {
                    if let Err(msg) = self.gen_panel.load(self.load_save_panel.get_file_path()) {
                        let err_msg = format!(
                            "Error while reading project {} : {}",
                            self.load_save_panel.get_file_path(),
                            msg
                        );
                        println!("{}", err_msg);
                        self.err_msg = Some(err_msg);
                    } else {
                        // project loaded: drop the old pipeline on the worker
                        // side and rebuild everything with the project's seed
                        self.main2wgen_tx.send(WorldGenCommand::Clear).unwrap();
                        self.set_seed(self.gen_panel.seed);
                    }
                }
                Some(SaveLoadAction::Save) => {
                    if let Err(msg) = self.gen_panel.save(self.load_save_panel.get_file_path()) {
                        let err_msg = format!(
                            "Error while writing project {} : {}",
                            self.load_save_panel.get_file_path(),
                            msg
                        );
                        println!("{}", err_msg);
                        self.err_msg = Some(err_msg);
                    }
                }
                None => (),
            }
            ui.separator();
            // generation steps list; every action maps to one or more
            // commands sent to the world generator thread
            egui::ScrollArea::vertical().show(ui, |ui| {
                match self.gen_panel.render(ui, self.progress) {
                    Some(GeneratorAction::Clear) => {
                        self.main2wgen_tx.send(WorldGenCommand::Clear).unwrap();
                    }
                    Some(GeneratorAction::SetSeed(new_seed)) => {
                        self.set_seed(new_seed);
                    }
                    Some(GeneratorAction::Regen(must_delete, from_idx)) => {
                        self.regen(must_delete, from_idx);
                    }
                    Some(GeneratorAction::Disable(idx)) => {
                        // toggling a step invalidates every map from there on
                        self.main2wgen_tx
                            .send(WorldGenCommand::DisableStep(idx))
                            .unwrap();
                        self.regen(false, idx);
                    }
                    Some(GeneratorAction::Enable(idx)) => {
                        self.main2wgen_tx
                            .send(WorldGenCommand::EnableStep(idx))
                            .unwrap();
                        self.regen(false, idx);
                    }
                    Some(GeneratorAction::DisplayLayer(step)) => {
                        // ask the generator thread for this step's heightmap;
                        // the reply arrives as a GeneratorStepMap message
                        self.main2wgen_tx
                            .send(WorldGenCommand::GetStepMap(step))
                            .unwrap();
                    }
                    Some(GeneratorAction::DisplayMask(step)) => {
                        // switch the 2d view to mask editing, creating an
                        // all-ones (fully applied) mask if none exists yet
                        self.mask_step = Some(step);
                        let mask = if let Some(ref mask) = self.gen_panel.steps[step].mask {
                            Some(mask.clone())
                        } else {
                            Some(vec![1.0; MASK_SIZE * MASK_SIZE])
                        };
                        self.panel_2d
                            .display_mask(self.image_size, self.preview_size as u32, mask);
                    }
                    None => (),
                }
            });
        });
    }
    /// Render the central panel holding the collapsible 2d and 3d previews.
    fn render_central_panel(&mut self, ctx: &egui::Context) {
        egui::CentralPanel::default().show(ctx, |ui| {
            ui.heading("Terrain preview");
            ui.horizontal(|ui| {
                egui::CollapsingHeader::new("2d preview")
                    .default_open(true)
                    .show(ui, |ui| match self.panel_2d.render(ui) {
                        Some(Panel2dAction::ResizePreview(new_size)) => {
                            // resizing drops any mask-editing session
                            self.resize(new_size);
                            self.mask_step = None;
                            self.gen_panel.mask_selected = false;
                        }
                        Some(Panel2dAction::MaskUpdated) => {
                            // remember when the mask last changed; `update`
                            // commits it after a short debounce delay
                            self.last_mask_updated = ui.input(|r| r.time);
                        }
                        Some(Panel2dAction::MaskDelete) => {
                            if let Some(step) = self.mask_step {
                                self.gen_panel.steps[step].mask = None;
                            }
                            self.last_mask_updated = 0.0;
                        }
                        None => (),
                    });
                egui::CollapsingHeader::new("3d preview")
                    .default_open(true)
                    .show(ui, |ui| {
                        self.panel_3d.render(ui);
                    });
            });
        });
    }
    /// Process at most one pending message from the generator or exporter
    /// thread (non-blocking; called once per frame from `update`).
    fn handle_threads_messages(&mut self) {
        match self.thread2main_rx.try_recv() {
            Ok(ThreadMessage::GeneratorStepProgress(progress)) => {
                // `progress` is the fraction of the CURRENT step; snap the
                // global progress to the start of that step then add it
                let progstep = 1.0 / self.gen_panel.enabled_steps() as f32;
                self.progress = (self.progress / progstep).floor() * progstep;
                self.progress += progress * progstep;
            }
            Ok(ThreadMessage::GeneratorDone(hmap)) => {
                log("main<=Done");
                // final map: refresh both previews and mark generation idle
                self.panel_2d
                    .refresh(self.image_size, self.preview_size as u32, Some(&hmap));
                self.gen_panel.selected_step = self.gen_panel.steps.len() - 1;
                self.panel_3d.update_mesh(&hmap);
                self.gen_panel.is_running = false;
                self.progress = 1.0;
            }
            Ok(ThreadMessage::GeneratorStepDone(step, hmap)) => {
                log(&format!("main<=GeneratorStepDone({})", step));
                // hmap is Some only when live preview is enabled
                if let Some(ref hmap) = hmap {
                    self.panel_2d
                        .refresh(self.image_size, self.preview_size as u32, Some(hmap));
                }
                self.gen_panel.selected_step = step;
                self.progress = (step + 1) as f32 / self.gen_panel.enabled_steps() as f32
            }
            Ok(ThreadMessage::GeneratorStepMap(_idx, hmap)) => {
                // display heightmap from a specific step in the 2d preview
                if let Some(step) = self.mask_step {
                    // mask was updated, recompute terrain
                    self.regen(false, step);
                    self.mask_step = None;
                }
                self.panel_2d
                    .refresh(self.image_size, self.preview_size as u32, Some(&hmap));
            }
            Ok(ThreadMessage::ExporterStepProgress(progress)) => {
                // same snapping scheme as GeneratorStepProgress above
                let progstep = 1.0 / self.gen_panel.enabled_steps() as f32;
                self.exporter_progress = (self.exporter_progress / progstep).floor() * progstep;
                self.exporter_progress += progress * progstep;
                self.exporter_text = format!(
                    "{}% {}/{} {}",
                    (self.exporter_progress * 100.0) as usize,
                    self.exporter_cur_step + 1,
                    self.gen_panel.steps.len(),
                    self.gen_panel.steps[self.exporter_cur_step]
                );
            }
            Ok(ThreadMessage::ExporterStepDone(step)) => {
                log(&format!("main<=ExporterStepDone({})", step));
                self.exporter_progress = (step + 1) as f32 / self.gen_panel.enabled_steps() as f32;
                self.exporter_cur_step = step + 1;
                if step + 1 == self.gen_panel.steps.len() {
                    // all steps computed; the exporter is now writing the file
                    self.exporter_text =
                        format!("Saving {}...", self.export_panel.file_type.to_string());
                } else {
                    self.exporter_text = format!(
                        "{}% {}/{} {}",
                        (self.exporter_progress * 100.0) as usize,
                        step + 1,
                        self.gen_panel.steps.len(),
                        self.gen_panel.steps[self.exporter_cur_step]
                    );
                }
            }
            Ok(ThreadMessage::ExporterDone(res)) => {
                if let Err(msg) = res {
                    let err_msg = format!("Error while exporting heightmap : {}", msg);
                    println!("{}", err_msg);
                    self.err_msg = Some(err_msg);
                }
                log("main<=ExporterDone");
                // unlock the exporter UI whatever the outcome
                self.exporter_progress = 1.0;
                self.export_panel.enabled = true;
                self.exporter_cur_step = 0;
                self.exporter_text = String::new();
            }
            Err(_) => {}
        }
    }
}
impl eframe::App for MyApp {
    /// Per-frame entry point: handle window resizing, pump worker-thread
    /// messages, draw the panels, commit debounced mask edits and show the
    /// error popup if any.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        let wsize = ctx.input(|i| {
            if let Some(rect) = i.viewport().inner_rect {
                rect.size()
            } else {
                // viewport size not available (e.g. first frame)
                emath::Vec2::new(0.0, 0.0)
            }
        });
        // previews share the width left over after the 340px side panel
        let new_size = ((wsize.x - 340.0) * 0.5) as usize;
        if new_size != self.image_size && new_size != 0 {
            // handle window resizing
            self.image_size = new_size;
            self.panel_2d
                .refresh(self.image_size, self.preview_size as u32, None);
            self.panel_3d = Panel3dView::new(self.image_size as f32);
            self.regen(false, 0);
        }
        ctx.set_visuals(Visuals::dark());
        self.handle_threads_messages();
        self.render_left_panel(ctx);
        self.render_central_panel(ctx);
        // debounce: commit the edited mask 0.5s after the last brush stroke
        if self.last_mask_updated > 0.0 && ctx.input(|i| i.time) - self.last_mask_updated >= 0.5 {
            if let Some(step) = self.mask_step {
                // mask was updated, copy mask to generator step
                if let Some(mask) = self.panel_2d.get_current_mask() {
                    self.gen_panel.steps[step].mask = Some(mask);
                }
            }
            self.last_mask_updated = 0.0;
        }
        if let Some(ref err_msg) = self.err_msg {
            // display error popup until the user closes it
            let mut open = true;
            egui::Window::new("Error")
                .resizable(false)
                .collapsible(false)
                .open(&mut open)
                .show(ctx, |ui| {
                    ui.scope(|ui| {
                        ui.visuals_mut().override_text_color = Some(egui::Color32::RED);
                        ui.label(err_msg);
                    });
                });
            if !open {
                self.err_msg = None;
            }
        }
    }
}
/// Print `msg` prefixed with the number of seconds elapsed since the
/// calling thread first invoked `log` (three decimal places).
///
/// The reference instant is thread-local, so each thread's log starts at
/// 0.000 on its first call.
pub fn log(msg: &str) {
    thread_local! {
        pub static LOGTIME: Instant = Instant::now();
    }
    LOGTIME.with(|start| {
        let secs = start.elapsed().as_millis() as f32 / 1000.0;
        println!("{:03.3} {}", secs, msg);
    });
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/panel_export.rs | src/panel_export.rs | use std::path::PathBuf;
use eframe::egui;
pub const TEXTEDIT_WIDTH: f32 = 240.0;
#[derive(Clone)]
/// Image format used when exporting heightmaps.
pub enum ExportFileType {
    /// PNG image (".png" extension)
    PNG,
    /// OpenEXR image (".exr" extension)
    EXR,
}
impl std::fmt::Display for ExportFileType {
    /// Write the lowercase file extension for this format ("png" or "exr").
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let ext = match self {
            Self::PNG => "png",
            Self::EXR => "exr",
        };
        f.write_str(ext)
    }
}
#[derive(Clone)]
pub struct PanelExport {
/// width of each image in pixels
pub export_width: f32,
/// height of each image in pixels
pub export_height: f32,
/// number of horizontal tiles
pub tiles_h: f32,
/// number of vertical tiles
pub tiles_v: f32,
/// image filename prefix
pub file_path: String,
/// should we repeat the same pixel row on two adjacent tiles ?
/// not needed for unreal engine which handles multi-textures heightmaps
/// might be needed for other engines (for example godot heightmap terrain plugin)
pub seamless: bool,
/// format to export, either png or exr
pub file_type: ExportFileType,
/// to disable the exporter ui during export
pub enabled: bool,
/// program's current directory
cur_dir: PathBuf,
}
impl Default for PanelExport {
fn default() -> Self {
let cur_dir = std::env::current_dir().unwrap();
let file_path = format!("{}/wgen", cur_dir.display());
Self {
export_width: 1024.0,
export_height: 1024.0,
tiles_h: 1.0,
tiles_v: 1.0,
file_path,
seamless: false,
file_type: ExportFileType::PNG,
enabled: true,
cur_dir,
}
}
}
impl PanelExport {
    /// Render the export panel (tile size/count, destination path, format
    /// toggle, seamless option).
    ///
    /// Returns true when the "Export!" button was clicked. While
    /// `self.enabled` is false (an export is running) the controls are
    /// grayed out and a spinner is shown next to the heading.
    pub fn render(&mut self, ui: &mut egui::Ui, progress: f32, progress_text: &str) -> bool {
        let mut export = false;
        ui.horizontal(|ui| {
            ui.heading("Export heightmaps");
            if !self.enabled {
                ui.spinner();
            }
        });
        ui.add(egui::ProgressBar::new(progress).text(progress_text));
        ui.add_enabled_ui(self.enabled, |ui| {
            ui.horizontal(|ui| {
                ui.label("Tile size");
                ui.add(egui::DragValue::new(&mut self.export_width).speed(1.0));
                ui.label(" x ");
                ui.add(egui::DragValue::new(&mut self.export_height).speed(1.0));
            });
            ui.horizontal(|ui| {
                ui.label("Tiles");
                ui.add(egui::DragValue::new(&mut self.tiles_h).speed(1.0));
                ui.label(" x ");
                ui.add(egui::DragValue::new(&mut self.tiles_v).speed(1.0));
            });
            ui.horizontal(|ui| {
                ui.label("Export file path");
                if ui.button("Pick...").clicked() {
                    if let Some(path) = rfd::FileDialog::new()
                        .set_directory(&self.cur_dir)
                        .pick_file()
                    {
                        self.file_path = path.display().to_string();
                        // the extension is appended at export time from
                        // `file_type`; strip a picked one to avoid doubling it
                        if self.file_path.ends_with(".png") {
                            self.file_path =
                                self.file_path.strip_suffix(".png").unwrap().to_owned();
                        } else if self.file_path.ends_with(".exr") {
                            self.file_path =
                                self.file_path.strip_suffix(".exr").unwrap().to_owned();
                        }
                        // remember the directory for the next file dialog
                        self.cur_dir = if path.is_file() {
                            path.parent().unwrap().to_path_buf()
                        } else {
                            path
                        };
                    }
                }
            });
            ui.horizontal(|ui| {
                ui.add(
                    egui::TextEdit::singleline(&mut self.file_path)
                        .desired_width(TEXTEDIT_WIDTH - 80.0),
                );
                // exported files are named <prefix>_x<i>_y<j>.<ext>
                ui.label("_x*_y*.");
                if ui
                    .button(&self.file_type.to_string())
                    .on_hover_text("change the exported file format")
                    .clicked()
                {
                    // toggle between the two supported formats
                    match self.file_type {
                        ExportFileType::PNG => self.file_type = ExportFileType::EXR,
                        ExportFileType::EXR => self.file_type = ExportFileType::PNG,
                    }
                }
            });
            ui.horizontal(|ui| {
                ui.checkbox(&mut self.seamless, "seamless")
                    .on_hover_text("whether pixel values are repeated on two adjacent tiles");
                export = ui.button("Export!").clicked();
            });
        });
        export
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/worldgen.rs | src/worldgen.rs | use std::sync::mpsc::Sender;
use std::time::Instant;
use std::{fmt::Display, sync::mpsc::Receiver};
use serde::{Deserialize, Serialize};
use crate::generators::{
gen_fbm, gen_hills, gen_island, gen_landmass, gen_mid_point, gen_mudslide, gen_normalize,
gen_water_erosion, get_min_max, FbmConf, HillsConf, IslandConf, LandMassConf, MidPointConf,
MudSlideConf, NormalizeConf, WaterErosionConf,
};
use crate::{log, ThreadMessage, MASK_SIZE};
#[derive(Debug)]
/// commands sent by the main thread to the world generator thread
pub enum WorldGenCommand {
/// recompute a specific step : step index, step conf, live preview, min progress step to report
ExecuteStep(usize, Step, bool, f32),
/// remove a step
DeleteStep(usize),
/// enable a step
EnableStep(usize),
/// disable a step
DisableStep(usize),
/// change the heightmap size
SetSize(usize),
/// return the heightmap for a given step
GetStepMap(usize),
/// change the random number generator seed
SetSeed(u64),
/// remove all steps
Clear,
/// cancel previous undone ExecuteStep commands from a specific step
Abort(usize),
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
/// Each value contains its own configuration
pub enum StepType {
Hills(HillsConf),
Fbm(FbmConf),
Normalize(NormalizeConf),
LandMass(LandMassConf),
MudSlide(MudSlideConf),
WaterErosion(WaterErosionConf),
Island(IslandConf),
MidPoint(MidPointConf),
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct Step {
/// should we skip this step when computing the heightmap ?
pub disabled: bool,
/// this step mask
pub mask: Option<Vec<f32>>,
/// step type with its configuration
pub typ: StepType,
}
impl Default for Step {
    /// An enabled, unmasked `Normalize` step.
    fn default() -> Self {
        let typ = StepType::Normalize(NormalizeConf::default());
        Self {
            typ,
            mask: None,
            disabled: false,
        }
    }
}
impl Display for Step {
    /// Display a step as its variant name only, e.g. "Fbm" for
    /// `StepType::Fbm(conf)` — the Debug output is truncated at the first
    /// opening parenthesis to drop the configuration.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let debug_val = format!("{:?}", self.typ);
        // `split` always yields at least one item, so `next()` cannot be None
        let name = debug_val.split('(').next().unwrap_or(&debug_val);
        write!(f, "{}", name)
    }
}
pub struct ExportMap {
size: (usize, usize),
h: Vec<f32>,
}
impl ExportMap {
    /// Lowest and highest height values of the map.
    pub fn get_min_max(&self) -> (f32, f32) {
        get_min_max(&self.h)
    }
    /// Map dimensions as (width, height).
    pub fn get_size(&self) -> (usize, usize) {
        self.size
    }
    /// Height at cell (x, y); returns 0.0 when the offset falls outside
    /// the map.
    pub fn height(&self, x: usize, y: usize) -> f32 {
        let off = x + y * self.size.0;
        if off < self.size.0 * self.size.1 {
            return self.h[off];
        }
        0.0
    }
    /// Borrow the raw height buffer (row-major).
    pub fn borrow(&self) -> &Vec<f32> {
        &self.h
    }
}
#[derive(Clone)]
/// Heightmap state attached to one generation step.
struct HMap {
    // height of each cell, row-major
    h: Vec<f32>,
    // set/cleared by the Enable/DisableStep commands
    // NOTE(review): not read anywhere in this chunk — the per-execution
    // disabled flag comes from `Step.disabled`; verify whether this field
    // is still needed
    disabled: bool,
}
#[derive(Clone)]
pub struct WorldGenerator {
seed: u64,
world_size: (usize, usize),
hmap: Vec<HMap>,
}
/// A queued ExecuteStep command inside the generator thread.
struct InnerStep {
    /// index of the step in the pipeline
    index: usize,
    /// step configuration to execute
    step: Step,
    /// live preview: send the intermediate map back when the step is done
    live: bool,
    /// minimum progress delta before a progress message is emitted
    min_progress_step: f32,
}
/// Apply one command from the main thread to the generator state.
///
/// ExecuteStep commands are only queued in `steps` — the actual computation
/// happens in `generator_thread`'s main loop, so that later Abort commands
/// can still cancel them.
fn do_command(
    msg: WorldGenCommand,
    wgen: &mut WorldGenerator,
    steps: &mut Vec<InnerStep>,
    tx: Sender<ThreadMessage>,
) {
    log(&format!("wgen<={:?}", msg));
    match msg {
        WorldGenCommand::Clear => {
            wgen.clear();
        }
        WorldGenCommand::SetSeed(new_seed) => {
            wgen.seed = new_seed;
        }
        WorldGenCommand::ExecuteStep(index, step, live, min_progress_step) => {
            // queue only; executed later by the thread loop
            steps.push(InnerStep {
                index,
                step,
                live,
                min_progress_step,
            });
        }
        WorldGenCommand::DeleteStep(index) => {
            wgen.hmap.remove(index);
        }
        WorldGenCommand::DisableStep(index) => {
            wgen.hmap[index].disabled = true;
        }
        WorldGenCommand::EnableStep(index) => {
            wgen.hmap[index].disabled = false;
        }
        WorldGenCommand::GetStepMap(index) => tx
            .send(ThreadMessage::GeneratorStepMap(
                index,
                wgen.get_step_export_map(index),
            ))
            .unwrap(),
        WorldGenCommand::Abort(from_idx) => {
            // drop every queued step at or after from_idx
            let mut i = 0;
            while i < steps.len() {
                if steps[i].index >= from_idx {
                    steps.remove(i);
                } else {
                    i += 1;
                }
            }
        }
        WorldGenCommand::SetSize(size) => {
            // resizing throws away all computed maps
            *wgen = WorldGenerator::new(wgen.seed, (size, size));
        }
    }
}
/// Entry point of the world generator thread.
///
/// Loop forever: block for a command when idle, then drain any pending
/// commands (so Abort can cancel queued work), then execute one queued step
/// and report either `GeneratorStepDone` or, after the last step,
/// `GeneratorDone` with the final map.
pub fn generator_thread(
    seed: u64,
    size: usize,
    rx: Receiver<WorldGenCommand>,
    tx: Sender<ThreadMessage>,
) {
    let mut wgen = WorldGenerator::new(seed, (size, size));
    let mut steps = Vec::new();
    loop {
        if steps.is_empty() {
            // blocking wait
            if let Ok(msg) = rx.recv() {
                let tx = tx.clone();
                do_command(msg, &mut wgen, &mut steps, tx);
            }
        }
        // drain whatever else arrived without blocking
        while let Ok(msg) = rx.try_recv() {
            let tx = tx.clone();
            do_command(msg, &mut wgen, &mut steps, tx);
        }
        if !steps.is_empty() {
            // execute the oldest queued step
            let InnerStep {
                index,
                step,
                live,
                min_progress_step,
            } = steps.remove(0);
            let tx2 = tx.clone();
            wgen.execute_step(index, &step, false, tx2, min_progress_step);
            if steps.is_empty() {
                // that was the last one: publish the final map
                log("wgen=>Done");
                tx.send(ThreadMessage::GeneratorDone(wgen.get_export_map()))
                    .unwrap();
            } else {
                log(&format!("wgen=>GeneratorStepDone({})", index));
                // the intermediate map is only shipped for live preview
                tx.send(ThreadMessage::GeneratorStepDone(
                    index,
                    if live {
                        Some(wgen.get_step_export_map(index))
                    } else {
                        None
                    },
                ))
                .unwrap();
            }
        }
    }
}
impl WorldGenerator {
pub fn new(seed: u64, world_size: (usize, usize)) -> Self {
Self {
seed,
world_size,
hmap: Vec::new(),
}
}
pub fn get_export_map(&self) -> ExportMap {
self.get_step_export_map(if self.hmap.is_empty() {
0
} else {
self.hmap.len() - 1
})
}
pub fn get_step_export_map(&self, step: usize) -> ExportMap {
ExportMap {
size: self.world_size,
h: if step >= self.hmap.len() {
vec![0.0; self.world_size.0 * self.world_size.1]
} else {
self.hmap[step].h.clone()
},
}
}
pub fn combined_height(&self, x: usize, y: usize) -> f32 {
let off = x + y * self.world_size.0;
if !self.hmap.is_empty() && off < self.world_size.0 * self.world_size.1 {
return self.hmap[self.hmap.len() - 1].h[off];
}
0.0
}
pub fn clear(&mut self) {
*self = WorldGenerator::new(self.seed, self.world_size);
}
fn execute_step(
&mut self,
index: usize,
step: &Step,
export: bool,
tx: Sender<ThreadMessage>,
min_progress_step: f32,
) {
let now = Instant::now();
let len = self.hmap.len();
if index >= len {
let vecsize = self.world_size.0 * self.world_size.1;
self.hmap.push(if len == 0 {
HMap {
h: vec![0.0; vecsize],
disabled: false,
}
} else {
HMap {
h: self.hmap[len - 1].h.clone(),
disabled: false,
}
});
} else if index > 0 {
self.hmap[index].h = self.hmap[index - 1].h.clone();
} else {
self.hmap[index].h.fill(0.0);
}
{
let hmap = &mut self.hmap[index];
match step {
Step {
typ: StepType::Hills(conf),
disabled,
..
} => {
if !*disabled {
gen_hills(
self.seed,
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
Step {
typ: StepType::Fbm(conf),
disabled,
..
} => {
if !*disabled {
gen_fbm(
self.seed,
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
Step {
typ: StepType::MidPoint(conf),
disabled,
..
} => {
if !*disabled {
gen_mid_point(
self.seed,
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
Step {
typ: StepType::Normalize(conf),
disabled,
..
} => {
if !*disabled {
gen_normalize(&mut hmap.h, conf);
}
}
Step {
typ: StepType::LandMass(conf),
disabled,
..
} => {
if !*disabled {
gen_landmass(
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
Step {
typ: StepType::MudSlide(conf),
disabled,
..
} => {
if !*disabled {
gen_mudslide(
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
Step {
typ: StepType::WaterErosion(conf),
disabled,
..
} => {
if !*disabled {
gen_water_erosion(
self.seed,
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
Step {
typ: StepType::Island(conf),
disabled,
..
} => {
if !*disabled {
gen_island(
self.world_size,
&mut hmap.h,
conf,
export,
tx,
min_progress_step,
);
}
}
}
}
if let Some(ref mask) = step.mask {
if index > 0 {
let prev = self.hmap[index - 1].h.clone();
apply_mask(self.world_size, mask, Some(&prev), &mut self.hmap[index].h);
} else {
apply_mask(self.world_size, mask, None, &mut self.hmap[index].h);
}
}
log(&format!(
"Executed {} in {:.2}s",
step,
now.elapsed().as_secs_f32()
));
}
pub fn generate(&mut self, steps: &[Step], tx: Sender<ThreadMessage>, min_progress_step: f32) {
self.clear();
for (i, step) in steps.iter().enumerate() {
let tx2 = tx.clone();
self.execute_step(i, step, true, tx2, min_progress_step);
tx.send(ThreadMessage::ExporterStepDone(i)).unwrap();
}
}
pub fn get_min_max(&self) -> (f32, f32) {
if self.hmap.is_empty() {
(0.0, 0.0)
} else {
get_min_max(&self.hmap[self.hmap.len() - 1].h)
}
}
}
/// Blend `h` with a low-resolution (`MASK_SIZE` x `MASK_SIZE`) mask,
/// bilinearly interpolating the mask over the full-size map.
///
/// Where the mask is 0.0 a cell reverts to the previous step's value
/// (`prev`), or to the map minimum when there is no previous step; where it
/// is 1.0 the freshly computed value is kept.
fn apply_mask(world_size: (usize, usize), mask: &[f32], prev: Option<&[f32]>, h: &mut [f32]) {
    let mut off = 0;
    // without a previous map, the blend target is the map minimum
    let (min, _) = if prev.is_none() {
        get_min_max(h)
    } else {
        (0.0, 0.0)
    };
    for y in 0..world_size.1 {
        // BUGFIX: the vertical mask coordinate must be scaled by the map
        // height (world_size.1); the previous code divided by world_size.0,
        // which skewed the mask on non-square maps (square maps unaffected)
        let myf = (y * MASK_SIZE) as f32 / world_size.1 as f32;
        let my = myf as usize;
        let yalpha = myf.fract();
        for x in 0..world_size.0 {
            let mxf = (x * MASK_SIZE) as f32 / world_size.0 as f32;
            let mx = mxf as usize;
            let xalpha = mxf.fract();
            // bilinear interpolation of the 4 surrounding mask cells,
            // clamped at the mask's right/bottom edges
            let mut mask_value = mask[mx + my * MASK_SIZE];
            if mx + 1 < MASK_SIZE {
                mask_value = (1.0 - xalpha) * mask_value + xalpha * mask[mx + 1 + my * MASK_SIZE];
                if my + 1 < MASK_SIZE {
                    let bottom_left_mask = mask[mx + (my + 1) * MASK_SIZE];
                    let bottom_right_mask = mask[mx + 1 + (my + 1) * MASK_SIZE];
                    let bottom_mask =
                        (1.0 - xalpha) * bottom_left_mask + xalpha * bottom_right_mask;
                    mask_value = (1.0 - yalpha) * mask_value + yalpha * bottom_mask;
                }
            }
            if let Some(prev) = prev {
                h[off] = (1.0 - mask_value) * prev[off] + mask_value * h[off];
            } else {
                h[off] = (1.0 - mask_value) * min + mask_value * (h[off] - min);
            }
            off += 1;
        }
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/panel_save.rs | src/panel_save.rs | use std::path::PathBuf;
use eframe::egui;
use crate::panel_export::TEXTEDIT_WIDTH;
pub struct PanelSaveLoad {
/// the name of the file to load or save
pub file_path: String,
/// the program's current directory
cur_dir: PathBuf,
}
/// Action requested by the save/load panel.
pub enum SaveLoadAction {
    /// save the project to the panel's file path
    Save,
    /// load the project from the panel's file path
    Load,
}
impl Default for PanelSaveLoad {
fn default() -> Self {
let cur_dir = std::env::current_dir().unwrap();
let file_path = format!("{}/my_terrain.wgen", cur_dir.display());
Self { file_path, cur_dir }
}
}
impl PanelSaveLoad {
pub fn get_file_path(&self) -> &str {
&self.file_path
}
    /// Render the save/load panel (file picker, path edit box, Load/Save
    /// buttons) and return the action the user requested, if any.
    pub fn render(&mut self, ui: &mut egui::Ui) -> Option<SaveLoadAction> {
        let mut action = None;
        ui.heading("Save/load project");
        ui.horizontal(|ui| {
            ui.label("File path");
            if ui.button("Pick...").clicked() {
                if let Some(path) = rfd::FileDialog::new()
                    .set_directory(&self.cur_dir)
                    .pick_file()
                {
                    self.file_path = path.display().to_string();
                    // remember the directory for the next file dialog
                    self.cur_dir = if path.is_file() {
                        path.parent().unwrap().to_path_buf()
                    } else {
                        path
                    };
                }
            }
        });
        ui.add(egui::TextEdit::singleline(&mut self.file_path).desired_width(TEXTEDIT_WIDTH));
        ui.horizontal(|ui| {
            if ui.button("Load!").clicked() {
                action = Some(SaveLoadAction::Load);
            }
            if ui.button("Save!").clicked() {
                action = Some(SaveLoadAction::Save);
            }
        });
        action
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/mid_point.rs | src/generators/mid_point.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use rand::{rngs::StdRng, Rng, SeedableRng};
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::report_progress;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct MidPointConf {
pub roughness: f32,
}
impl Default for MidPointConf {
    /// Medium roughness by default.
    fn default() -> Self {
        let roughness = 0.7;
        Self { roughness }
    }
}
/// Book-keeping used to throttle progress reports during midpoint generation.
pub struct ProgressTracking {
    // number of cell updates still to process (counts down to 0)
    count: usize,
    // last reported progress, in [0, 1]
    progress: f32,
    // minimum progress delta before a new report is sent
    min_progress_step: f32,
    // whether this run is an export (forwarded to `report_progress`)
    export: bool,
}
/// Draw the midpoint-displacement settings UI (a single roughness value).
pub fn render_mid_point(ui: &mut egui::Ui, conf: &mut MidPointConf) {
    ui.horizontal(|ui| {
        ui.label("roughness");
        let drag = egui::DragValue::new(&mut conf.roughness)
            .speed(0.01)
            .range(0.01..=1.0);
        ui.add(drag);
    });
}
/// Fill `hmap` using the diamond-square (midpoint displacement) algorithm.
///
/// The four corners are seeded with random values in [0, 1), then
/// `diamond_square` recursively subdivides, halving the roughness at each
/// level. Progress is reported on `tx` via `check_progress`.
pub fn gen_mid_point(
    seed: u64,
    size: (usize, usize),
    hmap: &mut Vec<f32>,
    conf: &MidPointConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    let mut rng = StdRng::seed_from_u64(seed);
    // seed the four corner cells
    hmap[0] = rng.random_range(0.0..1.0);
    hmap[size.0 - 1] = rng.random_range(0.0..1.0);
    hmap[size.0 * (size.1 - 1)] = rng.random_range(0.0..1.0);
    hmap[size.0 * size.1 - 1] = rng.random_range(0.0..1.0);
    let mut track = ProgressTracking {
        // total number of square + diamond updates, roughly 2 per cell
        count: size.0 * size.1 * 2,
        progress: 0.0,
        min_progress_step,
        export,
    };
    diamond_square(
        hmap,
        &mut rng,
        size,
        size.0 / 2,
        conf.roughness,
        &mut track,
        tx,
    );
}
/// Report progress on `tx` when it advanced by at least
/// `track.min_progress_step` since the last report.
fn check_progress(track: &mut ProgressTracking, size: (usize, usize), tx: Sender<ThreadMessage>) {
    let total = (size.0 * size.1 * 2) as f32;
    let new_progress = 1.0 - track.count as f32 / total;
    if new_progress - track.progress >= track.min_progress_step {
        track.progress = new_progress;
        report_progress(track.progress, track.export, tx);
    }
}
/// One recursion level of the diamond-square algorithm at cell spacing
/// `cur_size`, then recurse at half the spacing with half the roughness.
///
/// First runs the square step on every cell at (half, half) offsets, then
/// the diamond step on the staggered grid (odd columns start at y = half,
/// even columns at y = 0). Recursion stops when the spacing reaches 1.
pub fn diamond_square(
    hmap: &mut Vec<f32>,
    rng: &mut StdRng,
    size: (usize, usize),
    cur_size: usize,
    roughness: f32,
    track: &mut ProgressTracking,
    tx: Sender<ThreadMessage>,
) {
    let half = cur_size / 2;
    if half < 1 {
        return;
    }
    // square step: centers of the current squares
    for y in (half..size.1).step_by(cur_size) {
        for x in (half..size.0).step_by(cur_size) {
            square_step(hmap, rng, x, y, size, half, roughness);
            track.count -= 1;
            check_progress(track, size, tx.clone());
        }
    }
    // diamond step: edge midpoints, staggered every other column
    let mut col = 0;
    for x in (0..size.0).step_by(half) {
        col += 1;
        if col % 2 == 1 {
            for y in (half..size.1).step_by(cur_size) {
                diamond_step(hmap, rng, x, y, size, half, roughness);
                track.count -= 1;
                check_progress(track, size, tx.clone());
            }
        } else {
            for y in (0..size.1).step_by(cur_size) {
                diamond_step(hmap, rng, x, y, size, half, roughness);
                track.count -= 1;
                check_progress(track, size, tx.clone());
            }
        }
    }
    // halve the spacing and the noise amplitude for the next level
    diamond_square(hmap, rng, size, cur_size / 2, roughness * 0.5, track, tx);
}
/// Diamond-square "square" step: set cell (x, y) to the average of its four
/// diagonal neighbours at distance `reach`, plus random noise in
/// [-roughness, roughness). Neighbours outside the map are skipped.
fn square_step(
    hmap: &mut [f32],
    rng: &mut StdRng,
    x: usize,
    y: usize,
    size: (usize, usize),
    reach: usize,
    roughness: f32,
) {
    let has_left = x >= reach;
    let has_top = y >= reach;
    let has_right = x + reach < size.0;
    let has_bottom = y + reach < size.1;
    let mut sum = 0.0;
    let mut count = 0;
    // accumulate corners in the same order as before: TL, BL, TR, BR
    if has_left && has_top {
        sum += hmap[x - reach + (y - reach) * size.0];
        count += 1;
    }
    if has_left && has_bottom {
        sum += hmap[x - reach + (y + reach) * size.0];
        count += 1;
    }
    if has_right && has_top {
        sum += hmap[x + reach + (y - reach) * size.0];
        count += 1;
    }
    if has_right && has_bottom {
        sum += hmap[x + reach + (y + reach) * size.0];
        count += 1;
    }
    let mut value = sum / count as f32;
    value += rng.random_range(-roughness..roughness);
    hmap[x + y * size.0] = value;
}
/// Diamond-square "diamond" step: set cell (x, y) to the average of its four
/// axis-aligned neighbours at distance `reach`, plus random noise in
/// [-roughness, roughness). Neighbours outside the map are skipped.
fn diamond_step(
    hmap: &mut [f32],
    rng: &mut StdRng,
    x: usize,
    y: usize,
    size: (usize, usize),
    reach: usize,
    roughness: f32,
) {
    let mut sum = 0.0;
    let mut count = 0;
    // accumulate neighbours in the same order as before: left, right, up, down
    if x >= reach {
        sum += hmap[x - reach + y * size.0];
        count += 1;
    }
    if x + reach < size.0 {
        sum += hmap[x + reach + y * size.0];
        count += 1;
    }
    if y >= reach {
        sum += hmap[x + (y - reach) * size.0];
        count += 1;
    }
    if y + reach < size.1 {
        sum += hmap[x + (y + reach) * size.0];
        count += 1;
    }
    let noisy = sum / count as f32 + rng.random_range(-roughness..roughness);
    hmap[x + y * size.0] = noisy;
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/water_erosion.rs | src/generators/water_erosion.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use rand::{rngs::StdRng, Rng, SeedableRng};
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::report_progress;
// water erosion algorithm adapted from https://www.firespark.de/resources/downloads/implementation%20of%20a%20methode%20for%20hydraulic%20erosion.pdf
const MAX_PATH_LENGTH: usize = 40;
const DEFAULT_EVAPORATION: f32 = 0.05;
const DEFAULT_CAPACITY: f32 = 8.0;
const DEFAULT_MIN_SLOPE: f32 = 0.05;
const DEFAULT_DEPOSITION: f32 = 0.1;
const DEFAULT_INERTIA: f32 = 0.4;
const DEFAULT_DROP_AMOUNT: f32 = 0.5;
const DEFAULT_EROSION_STRENGTH: f32 = 0.1;
const DEFAULT_RADIUS: f32 = 4.0;
/// a drop of water
struct Drop {
/// position on the grid
pub pos: (f32, f32),
/// water amount
pub water: f32,
/// movement direction
pub dir: (f32, f32),
/// maximum sediment capacity of the drop
pub capacity: f32,
/// amount of accumulated sediment
pub sediment: f32,
/// velocity
pub speed: f32,
}
impl Drop {
pub fn grid_offset(&self, grid_width: usize) -> usize {
self.pos.0.round() as usize + self.pos.1.round() as usize * grid_width
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
/// Parameters of the hydraulic-erosion simulation.
pub struct WaterErosionConf {
    /// amount of drops simulated (scales the per-row drop count)
    drop_amount: f32,
    /// how much soil is eroded by a drop
    erosion_strength: f32,
    /// how fast a drop evaporates; higher is smoother
    evaporation: f32,
    /// how much sediment a drop can contain
    capacity: f32,
    /// minimum height used in the drop capacity calculation
    min_slope: f32,
    /// amount of sediment deposited when over capacity
    deposition: f32,
    /// inertia of the drop; higher is smoother
    inertia: f32,
    /// erosion radius around the drop
    radius: f32,
}
impl Default for WaterErosionConf {
    /// Defaults taken from the DEFAULT_* constants above.
    fn default() -> Self {
        Self {
            drop_amount: DEFAULT_DROP_AMOUNT,
            erosion_strength: DEFAULT_EROSION_STRENGTH,
            evaporation: DEFAULT_EVAPORATION,
            capacity: DEFAULT_CAPACITY,
            min_slope: DEFAULT_MIN_SLOPE,
            deposition: DEFAULT_DEPOSITION,
            inertia: DEFAULT_INERTIA,
            radius: DEFAULT_RADIUS,
        }
    }
}
/// Draw the water-erosion settings UI: four rows of paired labelled drag
/// values, one per configuration field, each with an explanatory tooltip.
pub fn render_water_erosion(ui: &mut egui::Ui, conf: &mut WaterErosionConf) {
    ui.horizontal(|ui| {
        ui.label("drop amount")
            .on_hover_text("Amount of drops simulated");
        ui.add(
            egui::DragValue::new(&mut conf.drop_amount)
                .speed(0.01)
                .range(0.1..=2.0),
        );
        ui.label("erosion strength")
            .on_hover_text("How much soil is eroded by the drop");
        ui.add(
            egui::DragValue::new(&mut conf.erosion_strength)
                .speed(0.01)
                .range(0.01..=1.0),
        );
    });
    ui.horizontal(|ui| {
        ui.label("drop capacity")
            .on_hover_text("How much sediment a drop can contain");
        ui.add(
            egui::DragValue::new(&mut conf.capacity)
                .speed(0.5)
                .range(2.0..=32.0),
        );
        ui.label("inertia")
            .on_hover_text("Inertia of the drop. Increase for smoother result");
        ui.add(
            egui::DragValue::new(&mut conf.inertia)
                .speed(0.01)
                .range(0.01..=0.5),
        );
    });
    ui.horizontal(|ui| {
        ui.label("deposition")
            .on_hover_text("Amount of sediment deposited");
        ui.add(
            egui::DragValue::new(&mut conf.deposition)
                .speed(0.01)
                .range(0.01..=1.0),
        );
        ui.label("evaporation")
            .on_hover_text("How fast the drop evaporate. Increase for smoother results");
        ui.add(
            egui::DragValue::new(&mut conf.evaporation)
                .speed(0.01)
                .range(0.01..=0.5),
        );
    });
    ui.horizontal(|ui| {
        ui.label("radius").on_hover_text("Erosion radius");
        ui.add(
            egui::DragValue::new(&mut conf.radius)
                .speed(0.1)
                .range(1.0..=10.0),
        );
        ui.label("minimum slope")
            .on_hover_text("Minimum height for the drop capacity calculation");
        ui.add(
            egui::DragValue::new(&mut conf.min_slope)
                .speed(0.001)
                .range(0.001..=0.1),
        );
    });
}
pub fn gen_water_erosion(
seed: u64,
size: (usize, usize),
hmap: &mut [f32],
conf: &WaterErosionConf,
export: bool,
tx: Sender<ThreadMessage>,
min_progress_step: f32,
) {
let mut progress = 0.0;
let mut rng = StdRng::seed_from_u64(seed);
// maximum drop count is 2 per cell
let drop_count = ((size.1 * 2) as f32 * conf.drop_amount) as usize;
// compute erosion weight depending on radius
let mut erosion_weight = 0.0;
for y in (-conf.radius).round() as i32..conf.radius.round() as i32 {
for x in (-conf.radius).round() as i32..conf.radius.round() as i32 {
let dist = ((x * x + y * y) as f32).sqrt();
if dist < conf.radius {
erosion_weight += conf.radius - dist;
}
}
}
// use a double loop to check progress every size.0 drops
for y in 0..drop_count {
for _ in 0..size.0 {
let mut drop = Drop {
pos: (
rng.random_range(0..size.0 - 1) as f32,
rng.random_range(0..size.1 - 1) as f32,
),
dir: (0.0, 0.0),
sediment: 0.0,
water: 1.0,
capacity: conf.capacity,
speed: 0.0,
};
let mut off = drop.grid_offset(size.0);
let mut count = 0;
while count < MAX_PATH_LENGTH {
let oldh = hmap[off];
let old_off = off;
// interpolate slope at old position
let h00 = oldh;
let h10 = hmap[off + 1];
let h01 = hmap[off + size.0];
let h11 = hmap[off + 1 + size.0];
let old_u = drop.pos.0.fract();
let old_v = drop.pos.1.fract();
// weight for each cell surrounding the drop position
let w00 = (1.0 - old_u) * (1.0 - old_v);
let w10 = old_u * (1.0 - old_v);
let w01 = (1.0 - old_u) * old_v;
let w11 = old_u * old_v;
// get slope direction
let mut gx = (h00 - h10) * (1.0 - old_v) + (h01 - h11) * old_v;
let mut gy = (h00 - h01) * (1.0 - old_u) + (h10 - h11) * old_u;
(gx, gy) = normalize_dir(gx, gy, &mut rng);
// interpolate between old direction and new one to account for inertia
gx = (drop.dir.0 - gx) * conf.inertia + gx;
gy = (drop.dir.1 - gy) * conf.inertia + gy;
(drop.dir.0, drop.dir.1) = normalize_dir(gx, gy, &mut rng);
let old_x = drop.pos.0;
let old_y = drop.pos.1;
// compute the droplet new position
drop.pos.0 += drop.dir.0;
drop.pos.1 += drop.dir.1;
let ix = drop.pos.0.round() as usize;
let iy = drop.pos.1.round() as usize;
if ix >= size.0 - 1 || iy >= size.1 - 1 {
// out of the map
break;
}
off = drop.grid_offset(size.0);
// interpolate height at new drop position
let u = drop.pos.0.fract();
let v = drop.pos.1.fract();
let new_h00 = hmap[off];
let new_h10 = hmap[off + 1];
let new_h01 = hmap[off + size.0];
let new_h11 = hmap[off + 1 + size.0];
let newh = (new_h00 * (1.0 - u) + new_h10 * u) * (1.0 - v)
+ (new_h01 * (1.0 - u) + new_h11 * u) * v;
let hdif = newh - oldh;
if hdif >= 0.0 {
// going uphill : deposit sediment at old position
let deposit = drop.sediment.min(hdif);
hmap[old_off] += deposit * w00;
hmap[old_off + 1] += deposit * w10;
hmap[old_off + size.0] += deposit * w01;
hmap[old_off + 1 + size.0] += deposit * w11;
drop.sediment -= deposit;
drop.speed = 0.0;
if drop.sediment <= 0.0 {
// no more sediment. stop the path
break;
}
} else {
drop.capacity =
conf.min_slope.max(-hdif) * drop.water * conf.capacity * drop.speed;
if drop.sediment > drop.capacity {
// too much sediment in the drop. deposit
let deposit = (drop.sediment - drop.capacity) * conf.deposition;
hmap[old_off] += deposit * w00;
hmap[old_off + 1] += deposit * w10;
hmap[old_off + size.0] += deposit * w01;
hmap[old_off + 1 + size.0] += deposit * w11;
drop.sediment -= deposit;
} else {
// erode
let amount =
((drop.capacity - drop.sediment) * conf.erosion_strength).min(-hdif);
for y in (old_y - conf.radius).round() as i32
..(old_y + conf.radius).round() as i32
{
if y < 0 || y >= size.1 as i32 {
continue;
}
let dy = old_y - y as f32;
for x in (old_x - conf.radius).round() as i32
..(old_x + conf.radius).round() as i32
{
if x < 0 || x >= size.0 as i32 {
continue;
}
let dx = old_x - x as f32;
let dist = (dx * dx + dy * dy).sqrt();
if dist < conf.radius {
let off = x as usize + y as usize * size.0;
hmap[off] -= amount * (conf.radius - dist) / erosion_weight;
}
}
}
drop.sediment += amount;
}
}
drop.speed = (drop.speed * drop.speed + hdif.abs()).sqrt();
drop.water *= 1.0 - conf.evaporation;
count += 1;
}
}
let new_progress = y as f32 / drop_count as f32;
if new_progress - progress >= min_progress_step {
progress = new_progress;
report_progress(progress, export, tx.clone());
}
}
}
/// Normalizes the direction vector `(dx, dy)` to unit length.
///
/// When the vector is (near) zero-length there is no meaningful direction,
/// so a uniformly random unit vector is returned instead.
fn normalize_dir(dx: f32, dy: f32, rng: &mut StdRng) -> (f32, f32) {
    let len = (dx * dx + dy * dy).sqrt();
    // use the associated const (the module-level std::f32::EPSILON is deprecated)
    if len < f32::EPSILON {
        // degenerate vector: pick a random direction on the unit circle
        let angle = rng.random_range(0.0..std::f32::consts::PI * 2.0);
        (angle.cos(), angle.sin())
    } else {
        (dx / len, dy / len)
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/landmass.rs | src/generators/landmass.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::{normalize, report_progress};
/// Parameters for the landmass pass: rescales a heightmap so a chosen
/// proportion of the terrain sits above a fixed water level.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct LandMassConf {
    /// what proportion of the map should be above water 0.0-1.0
    pub land_proportion: f32,
    /// height of the water plane
    pub water_level: f32,
    /// apply h^plain_factor above sea level for sharper mountains and flatter plains
    pub plain_factor: f32,
    /// lower everything under water level by this value to avoid z fighting between land and water plane near shores
    pub shore_height: f32,
}
impl Default for LandMassConf {
    fn default() -> Self {
        // defaults tuned for a mostly-land map with a shallow water plane
        Self {
            land_proportion: 0.6,
            water_level: 0.12,
            plain_factor: 2.5,
            shore_height: 0.05,
        }
    }
}
/// Draws the egui widgets that edit a [`LandMassConf`]: two rows of labeled
/// drag values, each with a hover tooltip explaining the field.
pub fn render_landmass(ui: &mut egui::Ui, conf: &mut LandMassConf) {
    ui.horizontal(|ui| {
        ui.label("land proportion")
            .on_hover_text("what proportion of the map should be above water");
        ui.add(
            egui::DragValue::new(&mut conf.land_proportion)
                .speed(0.01)
                .range(0.0..=1.0),
        );
        ui.label("water level")
            .on_hover_text("height of the water plane");
        ui.add(
            egui::DragValue::new(&mut conf.water_level)
                .speed(0.01)
                .range(0.0..=1.0),
        );
    });
    ui.horizontal(|ui| {
        ui.label("plain factor")
            .on_hover_text("increase for sharper mountains and flatter plains");
        ui.add(
            egui::DragValue::new(&mut conf.plain_factor)
                .speed(0.01)
                .range(1.0..=4.0),
        );
        ui.label("shore height")
            .on_hover_text("lower underwater land by this value");
        ui.add(
            egui::DragValue::new(&mut conf.shore_height)
                .speed(0.01)
                .range(0.0..=0.1),
        );
    });
}
/// Adjusts a heightmap so `conf.land_proportion` of cells lie above
/// `conf.water_level`, then sharpens relief above sea level.
///
/// Three passes, each reporting roughly a third of the progress range:
/// 1. build a 256-bucket histogram of heights (the map is normalized to 0..1
///    first)
/// 2. find the height reached by the requested water proportion and rescale
///    land/water around `conf.water_level`; shores are lowered by
///    `conf.shore_height` to avoid z-fighting with the water plane
/// 3. apply an `h^plain_factor` curve above sea level for flatter plains and
///    sharper mountains
pub fn gen_landmass(
    size: (usize, usize),
    hmap: &mut [f32],
    conf: &LandMassConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    let mut height_count: [f32; 256] = [0.0; 256];
    let mut progress = 0.0;
    normalize(hmap, 0.0, 1.0);
    for y in 0..size.1 {
        let yoff = y * size.0;
        for x in 0..size.0 {
            let h = hmap[x + yoff];
            // clamp defensively so any rounding overshoot past 1.0 stays in bounds
            let ih = ((h * 255.0) as usize).min(255);
            height_count[ih] += 1.0;
        }
        let new_progress = 0.33 * y as f32 / size.1 as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
    // find the histogram bucket below which the requested water proportion lies
    let mut water_level = 0;
    let mut water_cells = 0.0;
    let target_water_cells = (size.0 * size.1) as f32 * (1.0 - conf.land_proportion);
    while water_level < 256 && water_cells < target_water_cells {
        water_cells += height_count[water_level];
        water_level += 1;
    }
    let new_water_level = water_level as f32 / 255.0;
    let land_coef = (1.0 - conf.water_level) / (1.0 - new_water_level);
    // guard the degenerate land_proportion == 1.0 case: new_water_level is 0
    // there and the original division produced inf/NaN heights
    let water_coef = if new_water_level > 0.0 {
        conf.water_level / new_water_level
    } else {
        0.0
    };
    // water level should be raised/lowered to new_water_level
    for y in 0..size.1 {
        let yoff = y * size.0;
        for x in 0..size.0 {
            let mut h = hmap[x + yoff];
            if h > new_water_level {
                h = conf.water_level + (h - new_water_level) * land_coef;
            } else {
                h = h * water_coef - conf.shore_height;
            }
            hmap[x + yoff] = h;
        }
        let new_progress = 0.33 + 0.33 * y as f32 / size.1 as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
    // fix land/mountain ratio using h^plain_factor curve above sea level
    for y in 0..size.1 {
        let yoff = y * size.0;
        for x in 0..size.0 {
            let mut h = hmap[x + yoff];
            if h >= conf.water_level {
                let coef = (h - conf.water_level) / (1.0 - conf.water_level);
                let coef = coef.powf(conf.plain_factor);
                h = conf.water_level + coef * (1.0 - conf.water_level);
                // reuse the precomputed row offset (original recomputed y * size.0)
                hmap[x + yoff] = h;
            }
        }
        let new_progress = 0.66 + 0.33 * y as f32 / size.1 as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/island.rs | src/generators/island.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::{get_min_max, report_progress};
/// Parameters for the island pass: fades the map borders down so the terrain
/// forms an island.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct IslandConf {
    /// width of the coastal fade band, in percent of the map size
    pub coast_range: f32,
}
impl Default for IslandConf {
    fn default() -> Self {
        Self { coast_range: 50.0 }
    }
}
/// Draws the egui widget that edits an [`IslandConf`].
pub fn render_island(ui: &mut egui::Ui, conf: &mut IslandConf) {
    ui.horizontal(|ui| {
        ui.label("coast range %");
        ui.add(
            egui::DragValue::new(&mut conf.coast_range)
                .speed(0.1)
                .range(0.1..=50.0),
        );
    });
}
/// Fades heights down towards the map's minimum near all four borders so the
/// terrain becomes an island surrounded by low ground.
///
/// `conf.coast_range` is the width of the fade band as a percentage of the
/// map dimensions. Progress is reported in two halves: vertical fade
/// (top/bottom borders), then horizontal fade (left/right borders).
pub fn gen_island(
    size: (usize, usize),
    hmap: &mut [f32],
    conf: &IslandConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    let coast_h_dist = size.0 as f32 * conf.coast_range / 100.0;
    let coast_v_dist = size.1 as f32 * conf.coast_range / 100.0;
    let (min, _) = get_min_max(hmap);
    let mut progress = 0.0;
    // fade the top and bottom borders
    for x in 0..size.0 {
        for y in 0..coast_v_dist as usize {
            let h_coef = y as f32 / coast_v_dist as f32;
            let h = hmap[x + y * size.0];
            hmap[x + y * size.0] = (h - min) * h_coef + min;
            let h = hmap[x + (size.1 - 1 - y) * size.0];
            hmap[x + (size.1 - 1 - y) * size.0] = (h - min) * h_coef + min;
        }
        let new_progress = 0.5 * x as f32 / size.0 as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
    // fade the left and right borders
    for y in 0..size.1 {
        for x in 0..coast_h_dist as usize {
            let h_coef = x as f32 / coast_h_dist as f32;
            let h = hmap[x + y * size.0];
            hmap[x + y * size.0] = (h - min) * h_coef + min;
            let h = hmap[(size.0 - 1 - x) + y * size.0];
            hmap[(size.0 - 1 - x) + y * size.0] = (h - min) * h_coef + min;
        }
        // BUG FIX: this loop walks size.1 rows, so progress must be divided
        // by size.1 (the original divided by size.0, mis-reporting progress
        // on non-square maps)
        let new_progress = 0.5 + 0.5 * y as f32 / size.1 as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/mod.rs | src/generators/mod.rs | mod fbm;
mod hills;
mod island;
mod landmass;
mod mid_point;
mod mudslide;
mod normalize;
mod water_erosion;
use std::sync::mpsc::Sender;
pub use fbm::{gen_fbm, render_fbm, FbmConf};
pub use hills::{gen_hills, render_hills, HillsConf};
pub use island::{gen_island, render_island, IslandConf};
pub use landmass::{gen_landmass, render_landmass, LandMassConf};
pub use mid_point::{gen_mid_point, render_mid_point, MidPointConf};
pub use mudslide::{gen_mudslide, render_mudslide, MudSlideConf};
pub use normalize::{gen_normalize, NormalizeConf};
pub use water_erosion::{gen_water_erosion, render_water_erosion, WaterErosionConf};
use crate::ThreadMessage;
// 3x3 neighbourhood offsets, indexed together: (DIRX[i], DIRY[i]).
// Index 0 is the centre cell; e.g. index 1 is the NW neighbour (-1, -1).
const DIRX: [i32; 9] = [0, -1, 0, 1, -1, 1, -1, 0, 1];
const DIRY: [i32; 9] = [0, -1, -1, -1, 0, 0, 1, 1, 1];
/// Returns `v[off]`, or `T::default()` when `off` is out of bounds.
///
/// Takes a slice instead of `&Vec<T>` (clippy `ptr_arg`); existing callers
/// passing `&Vec<T>`/`&mut Vec<T>` keep working through deref coercion.
pub fn vec_get_safe<T>(v: &[T], off: usize) -> T
where
    T: Default + Copy,
{
    v.get(off).copied().unwrap_or_default()
}
/// Returns the smallest and largest values in `v` as `(min, max)`.
///
/// Panics when `v` is empty (same contract as the original scan).
pub fn get_min_max(v: &[f32]) -> (f32, f32) {
    v.iter().skip(1).fold((v[0], v[0]), |(lo, hi), &val| {
        if val > hi {
            (lo, val)
        } else if val < lo {
            (val, hi)
        } else {
            (lo, hi)
        }
    })
}
/// Linearly rescales `v` in place into `[target_min, target_max]`.
///
/// A constant input (min == max) collapses every value to `target_min`.
pub fn normalize(v: &mut [f32], target_min: f32, target_max: f32) {
    let (lo, hi) = get_min_max(v);
    let scale = if lo == hi {
        0.0
    } else {
        (target_max - target_min) / (hi - lo)
    };
    v.iter_mut()
        .for_each(|val| *val = target_min + (*val - lo) * scale);
}
/// Cheap blur: accumulates the map into a FACTOR-times smaller grid, then
/// bilinearly upsamples it back to full resolution. Currently unused.
///
/// NOTE(review): each low-res cell receives the *sum* of up to FACTOR*FACTOR
/// samples and is never divided by the sample count, so the output is scaled
/// up by roughly FACTOR^2 — confirm whether callers are meant to re-normalize.
pub fn _blur(v: &mut [f32], size: (usize, usize)) {
    const FACTOR: usize = 8;
    // low-res grid dimensions, rounded up
    let small_size: (usize, usize) = (
        (size.0 + FACTOR - 1) / FACTOR,
        (size.1 + FACTOR - 1) / FACTOR,
    );
    let mut low_res = vec![0.0; small_size.0 * small_size.1];
    for x in 0..size.0 {
        for y in 0..size.1 {
            let value = v[x + y * size.0];
            let ix = x / FACTOR;
            let iy = y / FACTOR;
            low_res[ix + iy * small_size.0] += value;
        }
    }
    // maps full-res coordinates into low-res space for the upsample
    let coef = 1.0 / FACTOR as f32;
    for x in 0..size.0 {
        for y in 0..size.1 {
            v[x + y * size.0] =
                _interpolate(&low_res, x as f32 * coef, y as f32 * coef, small_size);
        }
    }
}
/// Bilinearly interpolates `v` (a `size.0` x `size.1` row-major grid) at the
/// fractional position `(x, y)`.
///
/// Neighbours that would fall outside the grid reuse the north-west sample,
/// matching the original edge handling exactly.
pub fn _interpolate(v: &[f32], x: f32, y: f32, size: (usize, usize)) -> f32 {
    let (ix, iy) = (x as usize, y as usize);
    let (dx, dy) = (x.fract(), y.fract());
    let nw = v[ix + iy * size.0];
    let ne = if ix < size.0 - 1 { v[ix + 1 + iy * size.0] } else { nw };
    let sw = if iy < size.1 - 1 { v[ix + (iy + 1) * size.0] } else { nw };
    let se = if ix < size.0 - 1 && iy < size.1 - 1 {
        v[ix + 1 + (iy + 1) * size.0]
    } else {
        nw
    };
    // blend horizontally on both rows, then vertically between the rows
    let north = (1.0 - dx) * nw + dx * ne;
    let south = (1.0 - dx) * sw + dx * se;
    (1.0 - dy) * north + dy * south
}
/// Forwards a progress value to the UI thread, tagged as exporter or
/// generator progress depending on the current mode.
fn report_progress(progress: f32, export: bool, tx: Sender<ThreadMessage>) {
    let msg = if export {
        ThreadMessage::ExporterStepProgress(progress)
    } else {
        ThreadMessage::GeneratorStepProgress(progress)
    };
    tx.send(msg).unwrap();
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/hills.rs | src/generators/hills.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use rand::{prelude::*, rngs::StdRng};
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::report_progress;
/// Parameters for the hills generator: stamps `nb_hill` paraboloid bumps of
/// randomized radius onto the heightmap.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct HillsConf {
    /// number of hills to stamp
    pub nb_hill: usize,
    /// base hill radius (scaled by map width / 200 when applied)
    pub base_radius: f32,
    /// random radius variation, 0.0 (none) to 1.0
    pub radius_var: f32,
    /// height added at each hill's centre
    pub height: f32,
}
impl Default for HillsConf {
    fn default() -> Self {
        Self {
            nb_hill: 600,
            base_radius: 16.0,
            radius_var: 0.7,
            height: 0.3,
        }
    }
}
/// Draws the egui widgets that edit a [`HillsConf`].
///
/// NOTE(review): the `height` field has no widget here — confirm whether
/// leaving it out of the UI is intentional.
pub fn render_hills(ui: &mut egui::Ui, conf: &mut HillsConf) {
    ui.horizontal(|ui| {
        ui.label("count");
        ui.add(
            egui::DragValue::new(&mut conf.nb_hill)
                .speed(1.0)
                .range(1.0..=5000.0),
        );
        ui.label("radius");
        ui.add(
            egui::DragValue::new(&mut conf.base_radius)
                .speed(1.0)
                .range(1.0..=255.0),
        );
    });
    ui.horizontal(|ui| {
        ui.label("radius variation");
        ui.add(
            egui::DragValue::new(&mut conf.radius_var)
                .speed(0.01)
                .range(0.0..=1.0),
        );
    });
}
/// Stamps `conf.nb_hill` paraboloid hills at random positions on the map.
///
/// Deterministic for a given `seed`. Each hill raises the terrain by
/// `conf.height * (r^2 - d^2) / r^2` inside its radius `r`, where `d` is the
/// distance to the hill centre.
pub fn gen_hills(
    seed: u64,
    size: (usize, usize),
    hmap: &mut [f32],
    conf: &HillsConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    let mut rng = StdRng::seed_from_u64(seed);
    // base_radius is expressed relative to a 200-cell-wide map
    let real_radius = conf.base_radius * size.0 as f32 / 200.0;
    let hill_min_radius = real_radius * (1.0 - conf.radius_var);
    let hill_max_radius = real_radius * (1.0 + conf.radius_var);
    let mut progress = 0.0;
    for i in 0..conf.nb_hill {
        // random_range panics on an empty range, hence the special case
        let radius: f32 = if conf.radius_var == 0.0 {
            hill_min_radius
        } else {
            rng.random_range(hill_min_radius..hill_max_radius)
        };
        let xh: f32 = rng.random_range(0.0..size.0 as f32);
        let yh: f32 = rng.random_range(0.0..size.1 as f32);
        let radius2 = radius * radius;
        let coef = conf.height / radius2;
        // bounding box of the hill, clipped to the map
        let minx = (xh - radius).max(0.0) as usize;
        let maxx = (xh + radius).min(size.0 as f32) as usize;
        let miny = (yh - radius).max(0.0) as usize;
        let maxy = (yh + radius).min(size.1 as f32) as usize;
        for px in minx..maxx {
            let xdist = (px as f32 - xh).powi(2);
            for py in miny..maxy {
                // paraboloid profile: positive only inside the radius
                let z = radius2 - xdist - (py as f32 - yh).powi(2);
                if z > 0.0 {
                    hmap[px + py * size.0] += z * coef;
                }
            }
        }
        let new_progress = i as f32 / conf.nb_hill as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/mudslide.rs | src/generators/mudslide.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::{report_progress, vec_get_safe, DIRX, DIRY};
/// Parameters for the mudslide pass, a smoothing filter that moves material
/// towards lower neighbours.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct MudSlideConf {
    /// number of smoothing passes (truncated to usize when used)
    iterations: f32,
    /// cells at or above this altitude are left untouched
    max_erosion_alt: f32,
    /// smoothing strength, 0.0-1.0
    strength: f32,
    /// cells noticeably below this level are left untouched
    water_level: f32,
}
impl Default for MudSlideConf {
    fn default() -> Self {
        Self {
            iterations: 5.0,
            max_erosion_alt: 0.9,
            strength: 0.4,
            water_level: 0.12,
        }
    }
}
/// Draws the egui widgets that edit a [`MudSlideConf`].
pub fn render_mudslide(ui: &mut egui::Ui, conf: &mut MudSlideConf) {
    ui.horizontal(|ui| {
        ui.label("iterations");
        ui.add(
            egui::DragValue::new(&mut conf.iterations)
                .speed(0.5)
                .range(1.0..=10.0),
        );
        ui.label("max altitude");
        ui.add(
            egui::DragValue::new(&mut conf.max_erosion_alt)
                .speed(0.01)
                .range(0.0..=1.0),
        );
    });
    ui.horizontal(|ui| {
        ui.label("strength");
        ui.add(
            egui::DragValue::new(&mut conf.strength)
                .speed(0.01)
                .range(0.0..=1.0),
        );
        ui.label("water level");
        ui.add(
            egui::DragValue::new(&mut conf.water_level)
                .speed(0.01)
                .range(0.0..=1.0),
        );
    });
}
/// Runs `conf.iterations` mudslide smoothing passes over the heightmap,
/// forwarding progress to the UI thread after each row.
pub fn gen_mudslide(
    size: (usize, usize),
    hmap: &mut Vec<f32>,
    conf: &MudSlideConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    for i in 0..conf.iterations as usize {
        mudslide(size, hmap, i, conf, export, tx.clone(), min_progress_step);
    }
}
/// One smoothing pass: every eligible cell moves towards the weighted average
/// of its lower neighbours. Results go into a fresh buffer so the whole pass
/// reads a consistent snapshot of the input map.
fn mudslide(
    size: (usize, usize),
    hmap: &mut Vec<f32>,
    iteration: usize,
    conf: &MudSlideConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    let sand_coef = 1.0 / (1.0 - conf.water_level);
    let mut new_hmap = vec![0.0; size.0 * size.1];
    let mut progress = 0.0;
    for y in 0..size.1 {
        let yoff = y * size.0;
        for x in 0..size.0 {
            let h = vec_get_safe(hmap, x + yoff);
            // skip deep water and high mountains unchanged
            if h < conf.water_level - 0.01 || h >= conf.max_erosion_alt {
                new_hmap[x + y * size.0] = h;
                continue;
            }
            let mut sum_delta1 = 0.0;
            let mut sum_delta2 = 0.0;
            let mut nb1 = 1.0;
            let mut nb2 = 1.0;
            for i in 1..9 {
                // negative coordinates wrap to huge usize values and are
                // rejected by the bounds check just below
                let ix = (x as i32 + DIRX[i]) as usize;
                let iy = (y as i32 + DIRY[i]) as usize;
                if ix < size.0 && iy < size.1 {
                    let ih = vec_get_safe(hmap, ix + iy * size.0);
                    if ih < h {
                        if i == 1 || i == 3 || i == 6 || i == 8 {
                            // diagonal neighbour
                            sum_delta1 += (ih - h) * 0.4;
                            nb1 += 1.0;
                        } else {
                            // adjacent neighbour
                            sum_delta2 += (ih - h) * 1.6;
                            nb2 += 1.0;
                        }
                    }
                }
            }
            // average height difference with lower neighbours
            let mut dh = sum_delta1 / nb1 + sum_delta2 / nb2;
            dh *= conf.strength;
            let hcoef = (h - conf.water_level) * sand_coef;
            dh *= 1.0 - hcoef * hcoef * hcoef; // less smoothing at high altitudes
            new_hmap[x + y * size.0] = h + dh;
        }
        let new_progress = iteration as f32 / conf.iterations as f32
            + (y as f32 / size.1 as f32) / conf.iterations as f32;
        if new_progress - progress >= min_progress_step {
            progress = new_progress;
            report_progress(progress, export, tx.clone());
        }
    }
    *hmap = new_hmap;
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/normalize.rs | src/generators/normalize.rs | use serde::{Deserialize, Serialize};
use super::normalize;
/// Parameters for the normalize pass: the target height range.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct NormalizeConf {
    pub min: f32,
    pub max: f32,
}
impl Default for NormalizeConf {
    fn default() -> Self {
        Self { min: 0.0, max: 1.0 }
    }
}
/// Linearly rescales `hmap` into the `[conf.min, conf.max]` range.
pub fn gen_normalize(hmap: &mut [f32], conf: &NormalizeConf) {
    normalize(hmap, conf.min, conf.max);
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
jice-nospam/wgen | https://github.com/jice-nospam/wgen/blob/1c28d7c19c374683f31c91010c03549bc33b0852/src/generators/fbm.rs | src/generators/fbm.rs | use std::sync::mpsc::Sender;
use eframe::egui;
use noise::{Fbm, MultiFractal, NoiseFn, Perlin};
use serde::{Deserialize, Serialize};
use crate::ThreadMessage;
use super::report_progress;
/// Parameters for the fractal-brownian-motion noise generator.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct FbmConf {
    /// noise frequency along x
    pub mulx: f32,
    /// noise frequency along y
    pub muly: f32,
    /// noise offset along x
    pub addx: f32,
    /// noise offset along y
    pub addy: f32,
    /// number of fbm octaves (truncated to usize when used)
    pub octaves: f32,
    /// constant added to every noise sample
    pub delta: f32,
    /// amplitude applied to every noise sample
    pub scale: f32,
}
impl Default for FbmConf {
    fn default() -> Self {
        Self {
            mulx: 2.20,
            muly: 2.20,
            addx: 0.0,
            addy: 0.0,
            octaves: 6.0,
            delta: 0.0,
            scale: 2.05,
        }
    }
}
/// Draws the egui widgets that edit an [`FbmConf`].
pub fn render_fbm(ui: &mut egui::Ui, conf: &mut FbmConf) {
    ui.horizontal(|ui| {
        ui.label("scale x");
        ui.add(
            egui::DragValue::new(&mut conf.mulx)
                .speed(0.1)
                .range(0.0..=100.0),
        );
        ui.label("y");
        ui.add(
            egui::DragValue::new(&mut conf.muly)
                .speed(0.1)
                .range(0.0..=100.0),
        );
        ui.label("octaves");
        ui.add(
            egui::DragValue::new(&mut conf.octaves)
                .speed(0.5)
                .range(1.0..=Fbm::<Perlin>::MAX_OCTAVES as f32),
        );
    });
    ui.horizontal(|ui| {
        ui.label("offset x");
        ui.add(
            egui::DragValue::new(&mut conf.addx)
                .speed(0.1)
                .range(0.0..=200.0),
        );
        ui.label("y");
        ui.add(
            egui::DragValue::new(&mut conf.addy)
                .speed(0.1)
                .range(0.0..=200.0),
        );
        ui.label("scale");
        ui.add(
            egui::DragValue::new(&mut conf.scale)
                .speed(0.01)
                .range(0.01..=10.0),
        );
    });
}
/// Adds fractal-brownian-motion noise to `hmap`, spreading rows across all
/// available CPU cores via scoped threads.
///
/// Each worker receives a horizontal band of `size_per_job` rows; only the
/// first worker reports progress (all bands take roughly the same time).
pub fn gen_fbm(
    seed: u64,
    size: (usize, usize),
    hmap: &mut [f32],
    conf: &FbmConf,
    export: bool,
    tx: Sender<ThreadMessage>,
    min_progress_step: f32,
) {
    let xcoef = conf.mulx / 400.0;
    let ycoef = conf.muly / 400.0;
    let mut progress = 0.0;
    let num_threads = num_cpus::get();
    std::thread::scope(|s| {
        // guard against maps with fewer rows than CPU cores: chunks_mut(0)
        // panics, so hand each worker at least one row
        let size_per_job = (size.1 / num_threads).max(1);
        for (i, chunk) in hmap.chunks_mut(size_per_job * size.0).enumerate() {
            let fbm = Fbm::<Perlin>::new(seed as u32).set_octaves(conf.octaves as usize);
            let tx = tx.clone();
            s.spawn(move || {
                let yoffset = i * size_per_job;
                // the last band may be shorter than size_per_job
                let lasty = size_per_job.min(size.1 - yoffset);
                for y in 0..lasty {
                    let f1 = ((y + yoffset) as f32 * 512.0 / size.1 as f32 + conf.addy) * ycoef;
                    let mut offset = y * size.0;
                    for x in 0..size.0 {
                        let f0 = (x as f32 * 512.0 / size.0 as f32 + conf.addx) * xcoef;
                        let value =
                            conf.delta + fbm.get([f0 as f64, f1 as f64]) as f32 * conf.scale;
                        chunk[offset] += value;
                        offset += 1;
                    }
                    if i == 0 {
                        let new_progress = (y + 1) as f32 / size_per_job as f32;
                        if new_progress - progress >= min_progress_step {
                            progress = new_progress;
                            report_progress(progress, export, tx.clone())
                        }
                    }
                }
            });
        }
    });
}
| rust | MIT | 1c28d7c19c374683f31c91010c03549bc33b0852 | 2026-01-04T20:17:25.799607Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/src/lib.rs | src/lib.rs | use std::ops::Add;
mod args;
pub mod error;
pub use args::*;
/// Dispatches the parsed CLI arguments to the matching `criner` entry point
/// and blocks until that sub-command completes.
///
/// With no sub-command given, `SubCommands::default()` (a `Mine` run) is used.
pub fn run_blocking(args: Args) -> criner::error::Result<()> {
    use SubCommands::*;
    let cmd = args.sub.unwrap_or_default();
    match cmd {
        #[cfg(feature = "migration")]
        Migrate => criner::migration::migrate("./criner.db"),
        Export {
            input_db_path,
            export_db_path,
        } => criner::export::run_blocking(input_db_path, export_db_path),
        Mine {
            repository,
            db_path,
            fps,
            time_limit,
            io_bound_processors,
            cpu_bound_processors,
            cpu_o_bound_processors,
            no_gui,
            no_db_download,
            progress_message_scrollback_buffer_size,
            fetch_every,
            fetch_at_most,
            process_at_most,
            process_every,
            download_crates_io_database_every_24_hours_starting_at,
            report_every,
            report_at_most,
            glob,
        } => criner::run::blocking(
            db_path,
            // fall back to a temp-dir clone of the crates.io index
            repository.unwrap_or_else(|| std::env::temp_dir().join("criner-crates-io-bare-index.git")),
            // turn the relative time budget into an absolute deadline
            time_limit.map(|d| std::time::SystemTime::now().add(*d)),
            io_bound_processors,
            cpu_bound_processors,
            cpu_o_bound_processors,
            !no_db_download,
            criner::run::StageRunSettings {
                every: fetch_every.into(),
                at_most: fetch_at_most,
            },
            criner::run::StageRunSettings {
                every: process_every.into(),
                at_most: process_at_most,
            },
            criner::run::GlobStageRunSettings {
                run: criner::run::StageRunSettings {
                    every: report_every.into(),
                    at_most: report_at_most,
                },
                glob,
            },
            download_crates_io_database_every_24_hours_starting_at,
            criner::prodash::tree::root::Options {
                message_buffer_capacity: progress_message_scrollback_buffer_size,
                ..criner::prodash::tree::root::Options::default()
            }
            .create()
            .into(),
            // the TUI is rendered unless --no-gui was given
            if no_gui {
                None
            } else {
                Some(criner::prodash::render::tui::Options {
                    title: "Criner".into(),
                    frames_per_second: fps,
                    recompute_column_width_every_nth_frame: Option::from(fps as usize),
                    ..criner::prodash::render::tui::Options::default()
                })
            },
        ),
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/src/args.rs | src/args.rs | use std::path::PathBuf;
/// Parses a wall-clock time of day in 24h `H:MM`/`HH:MM` form,
/// e.g. "3:00" or "14:30".
fn parse_local_time(src: &str) -> Result<time::Time, time::error::Parse> {
    time::Time::parse(
        src,
        &time::macros::format_description!("[hour repr:24 padding:none]:[minute padding:zero]"),
    )
}
/// Top-level CLI arguments: an optional sub-command (defaults to `Mine`).
#[derive(Debug, clap::Parser)]
#[clap(about = "Interact with crates.io from the command-line")]
pub struct Args {
    #[clap(subcommand)]
    pub sub: Option<SubCommands>,
}
/// All criner-cli sub-commands; `Mine` is used when none is given on the
/// command line (see `impl Default` below).
#[derive(Debug, clap::Parser)]
pub enum SubCommands {
    /// Mine crates.io in an incorruptible and resumable fashion
    #[clap(display_order = 0)]
    #[clap(disable_version_flag(true))]
    Mine {
        /// If set, no gui will be presented. Best with RUST_LOG=info to see basic information.
        #[clap(long)]
        no_gui: bool,
        /// The amount of frames to show per second
        #[clap(long, name = "frames-per-second", default_value = "6.0")]
        fps: f32,
        /// The amount of progress messages to keep in a ring buffer.
        #[clap(short = 's', long, default_value = "100")]
        progress_message_scrollback_buffer_size: usize,
        /// If set, the crates-index database for additional metadata will not be downloaded.
        ///
        /// It costs a lot of initial processing time and IO when writing changes back to the database,
        /// which isn't helpful while on a slow disk - right now it does so unconditionally and doesn't track
        /// that the work was already done.
        #[clap(long, short = 'D')]
        no_db_download: bool,
        /// The amount of IO-bound processors to run concurrently.
        ///
        /// A way to choose a value is to see which part of the I/O is actually the bottle neck.
        /// Depending on that number, one should experiment with an amount of processors that saturate
        /// either input or output.
        /// Most commonly, these are bound to the input, as it is the network.
        #[clap(long, alias = "io", value_name = "io", default_value = "10")]
        io_bound_processors: u32,
        /// The amount of CPU- and Output-bound processors to run concurrently.
        ///
        /// These will perform a computation followed by flushing its result to disk in the form
        /// of multiple small files.
        /// It's recommended to adjust that number to whatever can saturate the speed of writing to disk,
        /// as these processors will yield when writing, allowing other processors to compute.
        /// Computes are relatively inexpensive compared to the writes.
        #[clap(long, alias = "cpu-o", value_name = "cpu-o", default_value = "20")]
        cpu_o_bound_processors: u32,
        /// The amount of CPU-bound processors to run concurrently.
        ///
        /// One can assume that one of these can occupy one core of a CPU.
        /// However, they will not use a lot of IO, nor will they use much memory.
        #[clap(long, alias = "cpu", value_name = "cpu", default_value = "4")]
        cpu_bound_processors: u32,
        /// Path to the possibly existing crates.io repository clone. If unset, it will be cloned to a temporary spot.
        #[clap(short = 'c', long, name = "REPO")]
        repository: Option<PathBuf>,
        /// The amount of time we can take for the computation. Specified in humantime, like 10s, 5min, or 2h, or '3h 2min 2s'
        #[clap(long, short = 't')]
        time_limit: Option<humantime::Duration>,
        /// The time between each fetch operation, specified in humantime, like 10s, 5min, or 2h, or '3h 2min 2s'
        #[clap(long, short = 'f', default_value = "5min")]
        fetch_every: humantime::Duration,
        /// If set, the amount of times the fetch stage will run. If set to 0, it will never run.
        #[clap(long, short = 'F')]
        fetch_at_most: Option<usize>,
        /// The time between each processing run, specified in humantime, like 10s, 5min, or 2h, or '3h 2min 2s'
        #[clap(long, short = 'p', default_value = "5min")]
        process_every: humantime::Duration,
        /// If set, the amount of times the process stage will run. If set to 0, they will never run.
        #[clap(long, short = 'P')]
        process_at_most: Option<usize>,
        /// The time between each reporting and processing run, specified in humantime, like 10s, 5min, or 2h, or '3h 2min 2s'
        #[clap(long, short = 'r', default_value = "5min")]
        report_every: humantime::Duration,
        /// If set, the amount of times the reporting stage will run. If set to 0, they will never run.
        #[clap(long, short = 'R')]
        report_at_most: Option<usize>,
        /// If set, declare at which local time to download the crates.io database and digest it.
        ///
        /// This job runs every 24h, as the database is updated that often.
        /// If unset, the job starts right away.
        /// Format is HH:MM, e.g. '14:30' for 2:30 pm or 03:15 for quarter past 3 in the morning.
        #[clap(long, short = 'd', value_parser = parse_local_time)]
        download_crates_io_database_every_24_hours_starting_at: Option<time::Time>,
        /// If set, the reporting stage will only iterate over crates that match the given standard unix glob.
        ///
        /// moz* would match only crates starting with 'moz' for example.
        #[clap(long, short = 'g')]
        glob: Option<String>,
        /// Path to the possibly existing database. It's used to persist all mining results.
        #[clap(default_value = "criner.db")]
        db_path: PathBuf,
    },
    /// Export all Criner data into a format friendly for exploration via SQL, best viewed with https://sqlitebrowser.org
    ///
    /// Criner stores binary blobs internally and migrates them on the fly, which is optimized for raw performance.
    /// It's also impractical for exploring the data by hand, so the exported data will explode all types into
    /// tables with each column being a field. Foreign key relations are set accordingly to allow joins.
    /// Use this to get an overview of what's available, and possibly contribute a report generator which implements
    /// a query using raw data and writes it into reports.
    #[clap(display_order = 1)]
    #[clap(disable_version_flag(true))]
    Export {
        /// The path to the source database in sqlite format
        input_db_path: PathBuf,
        /// Path to which to write the exported data. If it exists the operation will fail.
        export_db_path: PathBuf,
    },
    #[cfg(feature = "migration")]
    /// A special purpose command only to be executed in special circumstances
    #[clap(display_order = 9)]
    Migrate,
}
impl Default for SubCommands {
    /// Programmatic defaults for a `Mine` run (used when no sub-command is
    /// given on the command line).
    ///
    /// NOTE(review): several values differ from the clap defaults declared on
    /// the `Mine` variant (5/2/10 processors vs 10/4/20, 60s intervals vs
    /// 5min) — confirm whether the divergence is intentional.
    fn default() -> Self {
        SubCommands::Mine {
            no_gui: false,
            fps: 6.0,
            progress_message_scrollback_buffer_size: 100,
            io_bound_processors: 5,
            cpu_bound_processors: 2,
            cpu_o_bound_processors: 10,
            repository: None,
            time_limit: None,
            fetch_every: std::time::Duration::from_secs(60).into(),
            fetch_at_most: None,
            no_db_download: false,
            process_every: std::time::Duration::from_secs(60).into(),
            process_at_most: None,
            download_crates_io_database_every_24_hours_starting_at: Some(
                parse_local_time("3:00").expect("valid statically known time"),
            ),
            report_every: std::time::Duration::from_secs(60).into(),
            report_at_most: None,
            db_path: PathBuf::from("criner.db"),
            glob: None,
        }
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/src/error.rs | src/error.rs | use std::{error::Error, fmt, process};
/// Formats an error as `ERROR: <err>` followed by its full `source()` chain,
/// one cause per line.
struct WithCauses<'a>(&'a dyn Error);
impl fmt::Display for WithCauses<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "ERROR: {}", self.0)?;
        let mut current = self.0.source();
        while let Some(cause) = current {
            write!(f, "\ncaused by: \n{}", cause)?;
            current = cause.source();
        }
        writeln!(f)
    }
}
/// Unwraps `result`, or prints the error with its cause chain to stdout and
/// exits the process with status 2.
pub fn ok_or_exit<T, E>(result: Result<T, E>) -> T
where
    E: Error,
{
    result.unwrap_or_else(|err| {
        println!("{}", WithCauses(&err));
        // `process::exit` returns `!`, which coerces to T
        process::exit(2);
    })
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/src/main.rs | src/main.rs | use clap::Parser;
/// CLI entry point: parse arguments, configure logging, then dispatch.
///
/// For a gui-less `mine` run, logging defaults to info level; for non-mine
/// commands it is initialized from RUST_LOG. A `mine` run with the TUI leaves
/// the logger uninitialized — presumably so log output cannot corrupt the
/// terminal UI (NOTE(review): confirm this is intentional).
fn main() -> criner::error::Result<()> {
    let args = criner_cli::Args::parse();
    if let Some(criner_cli::SubCommands::Mine { no_gui, .. }) = args.sub {
        if no_gui {
            env_logger::init_from_env(env_logger::Env::default().default_filter_or("info"));
        }
    } else {
        env_logger::init();
    }
    criner_cli::run_blocking(args)
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/lib.rs | criner/src/lib.rs | #![allow(clippy::unneeded_field_pattern)]
#![deny(unsafe_code)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate quick_error;
#[cfg(feature = "migration")]
pub mod migration;
pub mod error;
pub use error::{Error, Result};
pub mod export;
pub(crate) mod model;
pub(crate) mod persistence;
pub(crate) mod utils;
mod spawn;
pub(crate) use spawn::spawn;
mod engine;
pub use engine::run;
pub use prodash;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/spawn.rs | criner/src/spawn.rs | // Copied and adapted from https://github.com/smol-rs/smol/blob/15447d6859df65fd1992f761ee46067bed62f8a5/src/spawn.rs
use std::future::Future;
use std::panic::catch_unwind;
use std::thread;
use async_executor::Executor;
pub use async_executor::Task;
use futures_lite::future;
use once_cell::sync::Lazy;
/// Spawns `future` onto a lazily-started global executor backed by two
/// dedicated "smol-N" worker threads (adapted from the upstream smol example
/// referenced at the top of this file).
///
/// The future is wrapped in `async_compat::Compat` so tokio-based code can
/// run on this executor. The returned [`Task`] must be awaited or detached.
pub fn spawn<T: Send + 'static>(future: impl Future<Output = T> + Send + 'static) -> Task<T> {
    static GLOBAL: Lazy<Executor<'_>> = Lazy::new(|| {
        // start the worker threads exactly once, on first use
        for i in 1..=2 {
            thread::Builder::new()
                .name(format!("smol-{}", i))
                .spawn(|| loop {
                    // restart the executor loop if a task panics
                    catch_unwind(|| async_io::block_on(GLOBAL.run(future::pending::<()>()))).ok();
                })
                .expect("cannot spawn executor thread");
        }
        Executor::new()
    });
    GLOBAL.spawn(async_compat::Compat::new(future))
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/error.rs | criner/src/error.rs | use std::{fmt, time};
/// Wrapper that displays a deadline as "<elapsed> ago at <now, rfc3339>".
#[derive(Debug)]
pub struct FormatDeadline(pub time::SystemTime);
impl fmt::Display for FormatDeadline {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::result::Result<(), fmt::Error> {
        let now = time::SystemTime::now();
        // a deadline still in the future yields a zero duration rather than an error
        write!(
            f,
            "{} ago at {}",
            humantime::format_duration(now.duration_since(self.0).unwrap_or_default()),
            humantime::format_rfc3339(now)
        )
    }
}
pub type Result<T> = std::result::Result<T, Error>;
// All error conditions criner can surface. Hand-raised variants carry a
// message; the rest wrap an underlying crate's error via `from()` so that
// `?` converts automatically.
quick_error! {
    #[derive(Debug)]
    pub enum Error {
        // -- hand-raised conditions --
        Bug(d: &'static str) {
            display("{}", d)
        }
        Message(d: String) {
            display("{}", d)
        }
        InvalidHeader(d: &'static str) {
            display("{}", d)
        }
        HttpStatus(status: http::StatusCode) {
            display("{}", status)
        }
        DeadlineExceeded(d: FormatDeadline) {
            display("Stopped computation as deadline was reached {}.", d)
        }
        Interrupted {
            display("Interrupt or termination signal received")
        }
        Timeout(d: std::time::Duration, msg: String) {
            display("{} - timeout after {:?}.", msg, d)
        }
        // -- wrapped foreign errors, convertible via `?` --
        RmpSerdeEncode(err: rmp_serde::encode::Error) {
            from()
            source(err)
        }
        Git2(err: git2::Error) {
            from()
            source(err)
        }
        IndexDiffInit(err: crates_index_diff::index::init::Error) {
            from()
            source(err)
        }
        IndexDiffChanges(err: crates_index_diff::index::diff::Error) {
            from()
            source(err)
        }
        Io(err: std::io::Error) {
            from()
            source(err)
        }
        FromUtf8(err: std::string::FromUtf8Error) {
            from()
            source(err)
        }
        Reqwest(err: reqwest::Error) {
            from()
            source(err)
        }
        ParseInt(err: std::num::ParseIntError) {
            from()
            source(err)
        }
        Rusqlite(err: rusqlite::Error) {
            from()
            source(err)
        }
        GlobSet(err: globset::Error) {
            from()
            source(err)
        }
        Horrorshow(err: horrorshow::Error) {
            from()
            source(err)
        }
        SystemTime(err: std::time::SystemTimeError) {
            from()
            source(err)
        }
        StripPrefixError(err: std::path::StripPrefixError) {
            from()
            source(err)
        }
        Csv(err: csv::Error) {
            from()
            source(err)
        }
        GlobPattern(err: glob::PatternError) {
            from()
            source(err)
        }
        Glob(err: glob::GlobError) {
            from()
            source(err)
        }
        // Channel send failures lose the payload; only a static context string remains.
        ChannelSendMessage(msg: &'static str) {
            display("{}: Sending into a closed channel", msg)
        }
    }
}
impl Error {
    /// Produce a closure mapping any `async_channel` send failure to
    /// `Error::ChannelSendMessage` carrying `msg`; intended for `map_err`.
    pub fn send_msg<T>(msg: &'static str) -> impl FnOnce(async_channel::SendError<T>) -> Error {
        move |_err| Error::ChannelSendMessage(msg)
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/utils.rs | criner/src/utils.rs | use crate::error::{Error, FormatDeadline, Result};
use async_io::Timer;
use dia_semver::Semver;
use futures_util::{
future::{self, Either},
FutureExt,
};
use std::{
convert::TryInto,
future::Future,
time::{Duration, SystemTime},
};
/// Parse `version` as a semantic version, tolerating the malformed
/// pre-release/build suffixes occasionally found on crates.io.
///
/// On a parse failure, retries with everything after the first '-' or '+'
/// stripped. Panics if an invalid version contains neither marker, or if
/// the stripped prefix still fails to parse.
pub fn parse_semver(version: &str) -> Semver {
    use std::str::FromStr;
    Semver::from_str(version)
        .or_else(|_| {
            Semver::from_str(
                // keep only the part before the pre-release ('-') or build ('+') marker
                &version[..version
                    .find('-')
                    .or_else(|| version.find('+'))
                    .expect("some prerelease version")],
            )
        })
        .expect("semver parsing to work if violating prerelease versions are stripped")
}
/// Sleep for `duration_s` seconds, ticking `progress` once per second, and
/// abort early with `Error::DeadlineExceeded` once `deadline` passes.
///
/// When `time` is given, the progress name is suffixed with the scheduled
/// HH:MM launch time for display purposes only.
pub async fn wait_with_progress(
    duration_s: usize,
    progress: prodash::tree::Item,
    deadline: Option<SystemTime>,
    time: Option<time::Time>,
) -> Result<()> {
    progress.init(Some(duration_s), Some("s".into()));
    if let Some(time) = time {
        progress.set_name(format!(
            "{} scheduled at {}",
            progress.name().unwrap_or_else(|| "un-named".into()),
            time.format(&time::macros::format_description!("[hour]:[minute]"))
                .expect("always formattable")
        ));
    }
    for s in 1..=duration_s {
        // one-second resolution: sleep, check the deadline, then advance the bar
        Timer::after(Duration::from_secs(1)).await;
        check(deadline)?;
        progress.set(s);
    }
    Ok(())
}
/// The next wall-clock instant at which a daily run should start.
///
/// Uses local time when obtainable, falling back to UTC. `time` defaults to
/// the current time of day. If today's occurrence of `time` already passed,
/// the launch moves to the same time tomorrow.
fn desired_launch_at(time: Option<time::Time>) -> time::OffsetDateTime {
    let time = time.unwrap_or_else(|| {
        time::OffsetDateTime::now_local()
            .unwrap_or_else(|_| time::OffsetDateTime::now_utc())
            .time()
    });
    let now = time::OffsetDateTime::now_local().unwrap_or_else(|_| time::OffsetDateTime::now_utc());
    let mut desired = now.date().with_time(time).assume_offset(now.offset());
    if desired < now {
        // already past for today — schedule for tomorrow at the same time
        desired = desired
            .date()
            .next_day()
            .expect("not running in year 9999")
            .with_time(time)
            .assume_offset(now.offset());
    }
    desired
}
/// Time remaining until the next desired launch at `time`.
///
/// If the interval comes out negative (and thus cannot be represented as a
/// std `Duration`), a one-second fallback is returned.
fn duration_until(time: Option<time::Time>) -> Duration {
    let desired = desired_launch_at(time);
    let now = time::OffsetDateTime::now_local().unwrap_or_else(|_| time::OffsetDateTime::now_utc());
    let remaining = desired - now;
    match remaining.try_into() {
        Ok(positive) => positive,
        Err(_) => Duration::from_secs(1),
    }
}
/// Run `make_future`'s future once immediately, then once per day at `time`
/// (defaulting to the current time of day), forever.
///
/// Errors from the future itself are reported via `make_progress` and then
/// ignored; only a deadline hit while waiting ends the loop.
pub async fn repeat_daily_at<MakeFut, MakeProgress, Fut, T>(
    time: Option<time::Time>,
    mut make_progress: MakeProgress,
    deadline: Option<SystemTime>,
    mut make_future: MakeFut,
) -> Result<()>
where
    Fut: Future<Output = Result<T>>,
    MakeFut: FnMut() -> Fut,
    MakeProgress: FnMut() -> prodash::tree::Item,
{
    let mut iteration = 0;
    // resolve the launch time once so every wait targets the same time of day
    let time = desired_launch_at(time).time();
    loop {
        iteration += 1;
        if let Err(err) = make_future().await {
            make_progress().fail(format!(
                "{} : ignored by repeat_daily_at('{:?}',…) iteration {}",
                err, time, iteration
            ))
        }
        wait_with_progress(
            duration_until(Some(time)).as_secs() as usize,
            make_progress(),
            deadline,
            Some(time),
        )
        .await?;
    }
}
/// Run `make_future`'s future, then wait `interval_s` seconds, repeating up
/// to `at_most` times (unbounded when `None`).
///
/// Errors from the future are reported via `make_progress` and ignored;
/// hitting `deadline` during a wait aborts with `DeadlineExceeded`.
pub async fn repeat_every_s<MakeFut, MakeProgress, Fut, T>(
    interval_s: usize,
    mut make_progress: MakeProgress,
    deadline: Option<SystemTime>,
    at_most: Option<usize>,
    mut make_future: MakeFut,
) -> Result<()>
where
    Fut: Future<Output = Result<T>>,
    MakeFut: FnMut() -> Fut,
    MakeProgress: FnMut() -> prodash::tree::Item,
{
    let max_iterations = at_most.unwrap_or(std::usize::MAX);
    let mut iteration = 0;
    loop {
        // guard for `at_most == Some(0)`: never run the future at all
        if iteration == max_iterations {
            return Ok(());
        }
        iteration += 1;
        if let Err(err) = make_future().await {
            make_progress().fail(format!(
                "{} : ignored by repeat_every({}s,…) iteration {}",
                err, interval_s, iteration
            ))
        }
        // second check so the final iteration returns without a pointless wait
        if iteration == max_iterations {
            return Ok(());
        }
        wait_with_progress(interval_s, make_progress(), deadline, None).await?;
    }
}
pub fn check(deadline: Option<SystemTime>) -> Result<()> {
deadline
.map(|d| {
if SystemTime::now() >= d {
Err(Error::DeadlineExceeded(FormatDeadline(d)))
} else {
Ok(())
}
})
.unwrap_or(Ok(()))
}
/// Resolve `f`, or fail with `Error::Interrupted` if Ctrl+C (or, per the
/// `ctrlc` crate's configuration, a termination signal) arrives first.
pub async fn handle_ctrl_c_and_sigterm<F, T>(f: F) -> Result<T>
where
    F: Future<Output = T> + Unpin,
{
    let (s, r) = async_channel::bounded(100);
    // The handler runs on the signal thread: attempt a non-blocking send and
    // ignore the outcome (channel full or closed is fine).
    ctrlc::set_handler(move || {
        s.send(()).now_or_never();
    })
    .ok();
    let selector = future::select(async move { r.recv().await }.boxed_local(), f);
    match selector.await {
        Either::Left((_, _f)) => Err(Error::Interrupted),
        Either::Right((r, _interrupt)) => Ok(r),
    }
}
/// Resolve `f`, or fail with `Error::Timeout` carrying `msg` if it takes
/// longer than `duration`.
pub async fn timeout_after<F, T>(duration: Duration, msg: impl Into<String>, f: F) -> Result<T>
where
    F: Future<Output = T> + Unpin,
{
    let selector = future::select(Timer::after(duration), f);
    match selector.await {
        Either::Left((_, _f)) => Err(Error::Timeout(duration, msg.into())),
        Either::Right((r, _delay)) => Ok(r),
    }
}
/// Use this if `f()` might block forever, due to code that doesn't implement timeouts, like libgit2 fetch does as it has no timeout
/// on 'recv' bytes.
///
/// This approach eventually fails as we would accumulate more and more threads, but this will also give us additional
/// days of runtime for little effort. On a Chinese network, outside of data centers, one can probably restart criner on
/// a weekly basis or so, which can easily be automated.
pub async fn enforce_threaded<F, T>(deadline: SystemTime, f: F) -> Result<T>
where
    T: Send + 'static,
    F: FnOnce() -> T + Send + 'static,
{
    // run `f` on blocking's thread pool so we can stop *waiting* for it,
    // even though the thread itself may keep running (see note above)
    let unblocked = blocking::unblock(f);
    let selector = future::select(
        // an already-passed deadline yields a zero-duration timer that fires immediately
        Timer::after(deadline.duration_since(SystemTime::now()).unwrap_or_default()),
        unblocked.boxed(),
    );
    match selector.await {
        Either::Left((_, _f_as_future)) => Err(Error::DeadlineExceeded(FormatDeadline(deadline))),
        Either::Right((res, _delay)) => Ok(res),
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/model.rs | criner/src/model.rs | pub use crate::engine::report::waste::TarHeader;
use serde_derive::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::{collections::HashMap, ops::Add, time::Duration, time::SystemTime};
/// Represents a top-level crate and associated information
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct Crate {
/// All versions published to crates.io, guaranteed to be sorted so that the most recent version is last.
/// The format is as specified in Cargo.toml:version
pub versions: Vec<String>,
}
impl From<CrateVersion> for Crate {
fn from(v: CrateVersion) -> Self {
Crate {
versions: vec![v.version],
}
}
}
/// Stores element counts of various kinds
#[derive(Default, Debug, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Clone)]
pub struct Counts {
/// The amount of crate versions stored in the database
pub crate_versions: u64,
/// The amount of crates in the database
pub crates: u32,
}
/// Stores wall clock time that elapsed for various kinds of computation
#[derive(Default, Debug, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Clone)]
pub struct Durations {
pub fetch_crate_versions: Duration,
}
/// Stores information about the work we have performed thus far
#[derive(Default, Debug, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Clone)]
pub struct Context {
/// Various elements counts
pub counts: Counts,
/// Various kinds of time we took for computation
pub durations: Durations,
}
impl Add<&Context> for Context {
type Output = Context;
fn add(self, rhs: &Context) -> Self::Output {
Context {
counts: Counts {
crate_versions: self.counts.crate_versions + rhs.counts.crate_versions,
crates: self.counts.crates + rhs.counts.crates,
},
durations: Durations {
fetch_crate_versions: self.durations.fetch_crate_versions + rhs.durations.fetch_crate_versions,
},
}
}
}
/// A single dependency of a specific crate version
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Dependency {
/// The crate name
pub name: String,
/// The version the parent crate requires of this dependency
#[serde(rename = "req")]
pub required_version: String,
/// All cargo features configured by the parent crate
pub features: Vec<String>,
/// True if this is an optional dependency
pub optional: bool,
/// True if default features are enabled
pub default_features: bool,
/// The name of the build target
pub target: Option<String>,
/// The kind of dependency, usually 'normal' or 'dev'
pub kind: Option<String>,
/// The package this crate is contained in
pub package: Option<String>,
}
impl From<crates_index_diff::Dependency> for Dependency {
    /// Convert the index-diff dependency into our owned model, stringifying
    /// the interned fields and mapping the kind to its canonical label.
    fn from(v: crates_index_diff::Dependency) -> Self {
        // exhaustive destructuring: a new upstream field becomes a compile error here
        let crates_index_diff::Dependency {
            name,
            required_version,
            features,
            optional,
            default_features,
            target,
            kind,
            package,
        } = v;
        Dependency {
            name: name.to_string(),
            required_version: required_version.to_string(),
            features,
            optional,
            default_features,
            target: target.map(|t| t.to_string()),
            kind: kind.map(|k| {
                match k {
                    crates_index_diff::DependencyKind::Normal => "normal",
                    crates_index_diff::DependencyKind::Dev => "dev",
                    crates_index_diff::DependencyKind::Build => "build",
                }
                .to_owned()
            }),
            package: package.map(|p| p.to_string()),
        }
    }
}
/// Identify a kind of change that occurred to a crate
#[derive(Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub enum ChangeKind {
/// A crate version was added
Added,
/// A crate version was added or it was unyanked.
Yanked,
}
impl Default for ChangeKind {
fn default() -> Self {
ChangeKind::Added
}
}
impl<'de> serde::Deserialize<'de> for ChangeKind {
    /// Deserialized from the index's `yanked` boolean:
    /// `true` → `Yanked`, `false` → `Added`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct Visitor;
        impl<'de> ::serde::de::Visitor<'de> for Visitor {
            type Value = ChangeKind;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("boolean")
            }
            fn visit_bool<E>(self, value: bool) -> Result<ChangeKind, E>
            where
                E: ::serde::de::Error,
            {
                if value {
                    Ok(ChangeKind::Yanked)
                } else {
                    Ok(ChangeKind::Added)
                }
            }
        }
        deserializer.deserialize_bool(Visitor)
    }
}
impl serde::Serialize for ChangeKind {
    /// Serialized as a plain boolean: `true` iff the change is `Yanked`,
    /// mirroring the `Deserialize` implementation above.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let yanked = matches!(self, ChangeKind::Yanked);
        serializer.serialize_bool(yanked)
    }
}
/// Pack all information we know about a change made to a version of a crate.
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
pub struct CrateVersion {
/// The crate name, i.e. `clap`.
pub name: String,
/// The kind of change.
#[serde(rename = "yanked")]
pub kind: ChangeKind,
/// The semantic version of the crate.
#[serde(rename = "vers")]
pub version: String,
/// The checksum over the crate archive
#[serde(rename = "cksum")]
pub checksum: String,
/// All cargo features
pub features: HashMap<String, Vec<String>>,
/// All crate dependencies
#[serde(rename = "deps")]
pub dependencies: Vec<Dependency>,
}
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
pub enum ReportResult {
Done,
NotStarted,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum TaskState {
/// The task was never started
NotStarted,
/// The task tried to run, but failed N time with errors
AttemptsWithFailure(Vec<String>),
/// The task completed successfully
Complete,
/// Indicates a task is currently running
/// Please note that this would be unsafe as we don't update tasks in case the user requests
/// a shutdown or the program is killed.
/// Thus we cleanup in-progress tasks by checking if their stored_at time is before the process startup time.
InProgress(Option<Vec<String>>),
}
impl TaskState {
    pub fn is_complete(&self) -> bool {
        matches!(self, TaskState::Complete)
    }
    /// Fold `other` into `self`, concatenating failure lists where both sides
    /// carry them and transferring failures into a fresh `InProgress`.
    ///
    /// NOTE(review): duplicates `persistence::merge::Merge<TaskState>::merge`
    /// — keep both in sync.
    pub fn merge_with(&mut self, other: &TaskState) {
        // append `new` onto `existing`, returning the combined list
        fn merge_vec(mut existing: Vec<String>, new: &[String]) -> Vec<String> {
            existing.extend(new.iter().cloned());
            existing
        }
        use TaskState::*;
        *self = match (&self, other) {
            (AttemptsWithFailure(existing), AttemptsWithFailure(new)) => {
                AttemptsWithFailure(merge_vec(existing.clone(), new))
            }
            (AttemptsWithFailure(existing), InProgress(None)) => InProgress(Some(existing.clone())),
            (AttemptsWithFailure(_), InProgress(Some(_))) => {
                panic!("One must not create inProgress preloaded with failed attempts, I think :D")
            }
            (InProgress(Some(existing)), AttemptsWithFailure(other)) => {
                AttemptsWithFailure(merge_vec(existing.clone(), other))
            }
            // any other combination: the incoming state simply wins
            (_, other) => other.clone(),
        };
    }
}
impl Default for TaskState {
fn default() -> Self {
TaskState::NotStarted
}
}
/// Information about a task
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Task {
/// This is set automatically, and can be roughly equivalent to the time a task was finished running (no matter if successfully or failed,
/// but is generally equivalent to the last time the task was saved
pub stored_at: SystemTime,
/// Information about the process that we used to run
pub process: String,
/// Information about the process version
pub version: String,
pub state: TaskState,
}
impl Default for Task {
fn default() -> Self {
Task {
stored_at: SystemTime::now(),
process: Default::default(),
version: Default::default(),
state: Default::default(),
}
}
}
impl Task {
    // NOTE: Racy if task should be spawned based on the outcome, only for tasks with no contention!
    /// Whether a scheduler may (re)start this task.
    ///
    /// `InProgress` entries count as startable when stored before this
    /// process launched, i.e. a previous run died without cleaning them up.
    pub fn can_be_started(&self, startup_time: std::time::SystemTime) -> bool {
        match self.state {
            TaskState::NotStarted | TaskState::AttemptsWithFailure(_) => true,
            TaskState::InProgress(_) => startup_time > self.stored_at,
            _ => false,
        }
    }
}
/// Append-variant-only data structure, otherwise migrations are needed
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum TaskResult {
/// A dummy value just so that we can have a default value
None,
/// Most interesting information about an unpacked crate
ExplodedCrate {
/// Meta data of all entries in the crate
entries_meta_data: Vec<TarHeader>,
/// The actual content of selected files, Cargo.*, build.rs and lib/main
/// IMPORTANT: This file may be partial and limited in size unless it is Cargo.toml, which
/// is always complete.
/// Note that these are also present in entries_meta_data.
selected_entries: Vec<(TarHeader, Vec<u8>)>,
},
/// A download with meta data and the downloaded blob itself
Download {
kind: String,
url: String,
content_length: u32,
/// The content type, it's optional because it might not be set (even though it should)
content_type: Option<String>,
},
}
impl Default for TaskResult {
fn default() -> Self {
TaskResult::None
}
}
impl TryFrom<crates_index_diff::Change> for CrateVersion {
    type Error = ();
    /// Extract the crate-version payload from an index change.
    ///
    /// Deletions carry no version data and are rejected with `Err(())`.
    fn try_from(v: crates_index_diff::Change) -> Result<Self, Self::Error> {
        let v = match v {
            crates_index_diff::Change::CrateDeleted { .. } | crates_index_diff::Change::VersionDeleted(_) => {
                // ignore for now
                return Err(());
            }
            crates_index_diff::Change::Unyanked(v)
            | crates_index_diff::Change::Added(v)
            | crates_index_diff::Change::AddedAndYanked(v)
            | crates_index_diff::Change::Yanked(v) => v,
        };
        // exhaustive destructuring keeps us honest about upstream field changes
        let crates_index_diff::CrateVersion {
            name,
            yanked,
            version,
            checksum,
            features,
            dependencies,
        } = v;
        Ok(CrateVersion {
            name: name.to_string(),
            kind: yanked.then(|| ChangeKind::Yanked).unwrap_or(ChangeKind::Added),
            version: version.to_string(),
            checksum: hex::encode(checksum),
            features,
            dependencies: dependencies.into_iter().map(Into::into).collect(),
        })
    }
}
pub mod db_dump {
use serde_derive::{Deserialize, Serialize};
use std::time::SystemTime;
pub type Id = u32;
pub type GitHubId = i32;
/// Identifies a kind of actor
#[derive(Clone, Copy, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub enum ActorKind {
User,
Team,
}
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Actor {
/// The id used by crates.io
pub crates_io_id: Id,
/// Whether actor is a user or a team
pub kind: ActorKind,
/// The URL to the GitHub avatar
pub github_avatar_url: String,
/// The ID identifying a user on GitHub
pub github_id: GitHubId,
/// The GitHUb login name
pub github_login: String,
/// The users given name
pub name: Option<String>,
}
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Feature {
/// The name of the feature
pub name: String,
/// The crates the feature depends on
pub crates: Vec<String>,
}
#[derive(Clone, Default, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Person {
pub name: String,
pub email: Option<String>,
}
/// A crate version from the crates-io db dump, containing additional meta data
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct CrateVersion {
/// The size of the crate in bytes, compressed
pub crate_size: Option<u32>,
/// The time when the first crate version was published
pub created_at: SystemTime,
/// The time at which the most recent create version was published
pub updated_at: SystemTime,
/// The amount of downloads of all create version in all time
pub downloads: u32,
/// Features specified in Cargo.toml
pub features: Vec<Feature>,
/// The license type
pub license: String,
/// The semantic version associated with this version
pub semver: String,
/// The actor that published the version
pub published_by: Option<Actor>,
/// If true, the version was yanked
pub is_yanked: bool,
}
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Keyword {
pub name: String,
/// The amount of crates using this keyword
pub crates_count: u32,
}
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Category {
pub name: String,
/// The amount of crates using this keyword
pub crates_count: u32,
pub description: String,
pub path: String,
pub slug: String,
}
/// Everything crates.io knows about a crate in one neat package
#[derive(Clone, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub struct Crate {
pub name: String,
/// The time at which this record was updated, i.e. how recent it is.
pub stored_at: SystemTime,
pub created_at: SystemTime,
pub updated_at: SystemTime,
pub description: Option<String>,
pub documentation: Option<String>,
pub downloads: u64,
pub homepage: Option<String>,
pub readme: Option<String>,
pub repository: Option<String>,
/// Versions, sorted by semantic version
pub versions: Vec<CrateVersion>,
pub keywords: Vec<Keyword>,
pub categories: Vec<Category>,
pub created_by: Option<Actor>,
pub owners: Vec<Actor>,
}
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/migration.rs | criner/src/migration.rs | use crate::persistence::{TableAccess, TaskResultTable};
use rusqlite::params;
use std::path::Path;
/// One-off schema migration: strip a single trailing ':' from every key in
/// the task-result table.
///
/// Reads all keys first, then rewrites them inside one transaction so a
/// failure leaves the table untouched.
pub fn migrate(db_path: impl AsRef<Path>) -> crate::Result<()> {
    log::info!("open db");
    let db = crate::persistence::Db::open(&db_path)?;
    let mut connection = db.open_connection_no_async_with_busy_wait()?;
    let mut keys = Vec::<String>::new();
    let table_name = TaskResultTable::table_name();
    {
        log::info!("begin iteration");
        let mut statement = connection.prepare(&format!("SELECT key FROM {}", table_name))?;
        let mut rows = statement.query([])?;
        while let Some(r) = rows.next()? {
            keys.push(r.get(0)?);
        }
        log::info!("got {} keys", keys.len());
    }
    {
        log::info!("begin change");
        let transaction = connection.transaction()?;
        let mut statement = transaction.prepare(&format!("UPDATE {} SET key=?1 WHERE key=?2;", table_name))?;
        for key in keys.into_iter() {
            // drop one trailing ':' if present, otherwise keep the key as-is
            // (strip_suffix replaces the previous format!/manual-slicing dance)
            statement.execute(params![key.strip_suffix(':').unwrap_or(&key), key])?;
        }
        drop(statement);
        transaction.commit()?;
    }
    Ok(())
}
#[allow(dead_code)]
/// One-off migration: walk the on-disk `assets` directory, normalize each
/// downloaded crate file's name, and (re)insert a matching `TaskResult`
/// download record into the database.
fn migrate_iterate_assets_and_update_db(db_path: impl AsRef<Path>) -> crate::Result<()> {
    let assets_dir = db_path.as_ref().join("assets");
    let db = crate::persistence::Db::open(&db_path)?;
    let results = db.open_results()?;
    let task = crate::engine::work::iobound::default_persisted_download_task();
    let mut key = String::new();
    let root = prodash::Tree::new();
    let mut progress = root.add_child("does not matter");
    for entry in jwalk::WalkDir::new(assets_dir)
        .preload_metadata(true)
        .into_iter()
        .filter_map(Result::ok)
    {
        let entry: jwalk::DirEntry = entry;
        if entry.file_type.as_ref().ok().map_or(true, |d| d.is_dir()) {
            continue;
        }
        // normalize legacy file names to the canonical download name
        if entry.file_name != std::ffi::OsString::from("download:1.0.0.crate") {
            let new_name = entry.path().parent().unwrap().join("download:1.0.0.crate");
            std::fs::rename(entry.path(), &new_name)?;
            log::warn!("Renamed '{}' to '{}'", entry.path().display(), new_name.display());
        }
        let file_size = entry.metadata.as_ref().unwrap().as_ref().unwrap().len();
        // path layout assumption: …/<3 skipped components>/<crate-name>/<version>/<file>
        // — TODO confirm against the assets directory structure
        let mut iter = entry.parent_path().iter().skip(3);
        let name = iter.next().and_then(|p| p.to_str()).unwrap();
        let version = iter.next().and_then(|p| p.to_str()).unwrap();
        log::info!("{} {}", name, version);
        key.clear();
        let task_result = crate::model::TaskResult::Download {
            kind: "crate".into(),
            url: format!(
                "https://crates.io/api/v1/crates/{name}/{version}/download",
                name = name,
                version = version,
            )
            .into(),
            content_length: file_size as u32,
            content_type: Some("application/x-tar".into()),
        };
        task_result.fq_key(name, version, &task, &mut key);
        results.insert(&mut progress, &key, &task_result)?;
    }
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/persistence/merge.rs | criner/src/persistence/merge.rs | use crate::model::{self, Context, CrateVersion, Task};
use crate::utils::parse_semver;
pub trait Merge<T> {
fn merge(self, other: &T) -> Self;
}
impl Merge<model::Task> for model::Task {
    /// Adopt all of `other`'s fields, but combine both sides' task states.
    fn merge(mut self, other: &Task) -> Self {
        let my_state = self.state;
        self = other.clone();
        self.state = my_state.merge(&other.state);
        self
    }
}
impl Merge<model::TaskState> for model::TaskState {
    /// Combine two task states, concatenating failure lists where both sides
    /// carry them; in all other combinations the incoming state wins.
    fn merge(self, other: &model::TaskState) -> Self {
        // append `rhs` onto `lhs`, returning the combined list
        fn concat(mut lhs: Vec<String>, rhs: &[String]) -> Vec<String> {
            lhs.extend(rhs.iter().cloned());
            lhs
        }
        use model::TaskState::*;
        match (&self, other) {
            (AttemptsWithFailure(existing), AttemptsWithFailure(new)) => {
                AttemptsWithFailure(concat(existing.clone(), new))
            }
            (AttemptsWithFailure(existing), InProgress(None)) => InProgress(Some(existing.clone())),
            (AttemptsWithFailure(_), InProgress(Some(_))) => {
                panic!("One must not create inProgress preloaded with failed attempts, I think :D")
            }
            (InProgress(Some(existing)), AttemptsWithFailure(other)) => {
                AttemptsWithFailure(concat(existing.clone(), other))
            }
            (_, other) => other.clone(),
        }
    }
}
impl Merge<model::Context> for model::Context {
fn merge(self, other: &Context) -> Self {
self + other
}
}
/// Sort version strings ascending by their parsed semantic version.
fn sort_semver(versions: &mut [String]) {
    // parse_semver is not cheap (string parsing with a fallback path) —
    // cache each key once instead of recomputing it O(n log n) times.
    versions.sort_by_cached_key(|v| parse_semver(v));
}
impl Merge<model::CrateVersion> for model::Crate {
fn merge(mut self, other: &CrateVersion) -> Self {
if !self.versions.contains(&other.version) {
self.versions.push(other.version.to_owned());
}
sort_semver(&mut self.versions);
self
}
}
impl model::Crate {
pub fn merge_mut(&mut self, other: &CrateVersion) -> &mut model::Crate {
if !self.versions.contains(&other.version) {
self.versions.push(other.version.to_owned());
}
sort_semver(&mut self.versions);
self
}
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/persistence/serde.rs | criner/src/persistence/serde.rs | use crate::model::{db_dump, Context, Crate, CrateVersion, ReportResult, Task, TaskResult};
/// Unwrap `r`, panicking via `panic_any` with a lazily-built message on `Err`.
///
/// Unlike `Result::expect`, the message closure only runs when the error
/// path is actually taken.
fn expect<T, E: std::fmt::Display>(r: std::result::Result<T, E>, panic_message: impl FnOnce(E) -> String) -> T {
    r.unwrap_or_else(|e| std::panic::panic_any(panic_message(e)))
}
// Generates `From<&[u8]>` for a storage type by MessagePack-decoding the
// bytes; on failure it panics, including the raw msgpack value for diagnosis.
macro_rules! impl_deserialize {
    ($ty:ty) => {
        impl From<&[u8]> for $ty {
            fn from(b: &[u8]) -> Self {
                expect(rmp_serde::from_slice(b), |e| {
                    format!(
                        concat!("&[u8]: migration should succeed: ", stringify!($ty), "{:#?}: {}"),
                        rmpv::decode::value::read_value(&mut std::io::Cursor::new(b)).unwrap(),
                        e
                    )
                })
            }
        }
    };
}
// every persisted model type decodes straight from its stored byte blob
impl_deserialize!(Crate);
impl_deserialize!(Task);
impl_deserialize!(TaskResult);
impl_deserialize!(CrateVersion);
impl_deserialize!(Context);
impl_deserialize!(ReportResult);
impl_deserialize!(db_dump::Crate);
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/persistence/table.rs | criner/src/persistence/table.rs | use crate::persistence::KEY_SEP_CHAR;
use crate::{
model::{Context, Crate, TaskResult},
model::{CrateVersion, Task},
persistence::{merge::Merge, Keyed},
Result,
};
use rusqlite::{params, OptionalExtension};
use std::time::{Duration, SystemTime};
/// Required as we send futures to threads. The type system can't statically prove that in fact
/// these connections will only ever be created while already in the thread they should execute on.
/// Also no one can prevent futures from being resumed in after having been send to a different thread.
pub type ThreadSafeConnection = std::sync::Arc<parking_lot::Mutex<rusqlite::Connection>>;
/// Prepared statement yielding `data` blobs newest-first, paged with
/// SQLite's `LIMIT offset, limit` form.
pub fn new_value_query_recent_first<'conn>(
    table_name: &str,
    connection: &'conn rusqlite::Connection,
    offset: usize,
    limit: usize,
) -> Result<rusqlite::Statement<'conn>> {
    Ok(connection.prepare(&format!(
        "SELECT data FROM {} ORDER BY _rowid_ DESC LIMIT {}, {}",
        table_name, offset, limit
    ))?)
}
/// Prepared statement yielding `(key, data)` rows oldest-first, optionally
/// filtered with a SQLite `glob` pattern and/or paged by `(offset, limit)`.
///
/// NOTE(review): `glob` is interpolated directly into the SQL text — callers
/// must pass trusted patterns without embedded double quotes.
pub fn new_key_value_query_old_to_new_filtered<'conn>(
    table_name: &str,
    glob: Option<&str>,
    connection: &'conn rusqlite::Connection,
    chunk: Option<(usize, usize)>,
) -> Result<rusqlite::Statement<'conn>> {
    Ok(connection.prepare(&format!(
        "SELECT key,data FROM {} {} ORDER BY _rowid_ ASC {}",
        table_name,
        match glob {
            Some(glob) => format!("where key glob \"{}\"", glob),
            None => "".into(),
        },
        match chunk {
            Some((offset, limit)) => format!("LIMIT {}, {}", offset, limit),
            None => "".into(),
        }
    ))?)
}
/// Unfiltered, unpaged variant of `new_key_value_query_old_to_new_filtered`.
pub fn new_key_value_query_old_to_new<'conn>(
    table_name: &str,
    connection: &'conn rusqlite::Connection,
) -> Result<rusqlite::Statement<'conn>> {
    new_key_value_query_old_to_new_filtered(table_name, None, connection, None)
}
/// Prepared upsert of `(key, data)` using SQLite `REPLACE`.
pub fn new_key_value_insertion<'conn>(
    table_name: &str,
    connection: &'conn rusqlite::Connection,
) -> Result<rusqlite::Statement<'conn>> {
    Ok(connection.prepare(&format!("REPLACE INTO '{}' (key, data) VALUES (?1, ?2)", table_name))?)
}
/// Prepared upsert of a bare `key` using SQLite `REPLACE`.
pub fn new_key_insertion<'conn>(
    table_name: &str,
    connection: &'conn rusqlite::Connection,
) -> Result<rusqlite::Statement<'conn>> {
    Ok(connection.prepare(&format!("REPLACE INTO {} (key) VALUES (?1)", table_name))?)
}
/// Adapt a `data`-only statement (e.g. from `new_value_query_recent_first`)
/// into an iterator of decoded storage items.
pub fn value_iter<'stm, 'conn, StorageItem>(
    statement: &'stm mut rusqlite::Statement<'conn>,
) -> Result<impl Iterator<Item = Result<StorageItem>> + 'stm>
where
    StorageItem: for<'a> From<&'a [u8]>,
{
    Ok(statement
        .query_map([], |r| r.get::<_, Vec<u8>>(0).map(|v| StorageItem::from(v.as_slice())))?
        .map(|r| r.map_err(Into::into)))
}
/// Adapt a `(key, data)` statement into an iterator of `(key, item)` pairs.
pub fn key_value_iter<'stm, 'conn, StorageItem>(
    statement: &'stm mut rusqlite::Statement<'conn>,
) -> Result<impl Iterator<Item = Result<(String, StorageItem)>> + 'stm>
where
    StorageItem: for<'a> From<&'a [u8]>,
{
    Ok(statement
        .query_map([], |r| {
            let key = r.get::<_, String>(0)?;
            let data = r.get::<_, Vec<u8>>(1)?;
            Ok((key, StorageItem::from(data.as_slice())))
        })?
        .map(|r| r.map_err(Into::into)))
}
/// Generic key→msgpack-blob table access shared by all persistence tables.
///
/// Fix: key lookups previously interpolated the key into the SQL text
/// (`key = '{}'`), which breaks on keys containing a quote and is
/// injection-prone; they now use bound parameters like the write paths
/// already do. Table names remain interpolated since identifiers cannot be
/// bound.
pub trait TableAccess {
    /// The decoded on-disk representation of a row.
    type StorageItem: serde::Serialize + for<'a> From<&'a [u8]> + Default + From<Self::InsertItem>;
    /// What callers hand in; converted/merged into `StorageItem` on write.
    type InsertItem: Clone;
    fn connection(&self) -> &ThreadSafeConnection;
    fn table_name() -> &'static str;
    /// How a new item combines with a pre-existing row; defaults to plain replacement.
    fn merge(new_item: &Self::InsertItem, _existing_item: Option<Self::StorageItem>) -> Self::StorageItem {
        Self::StorageItem::from(new_item.clone())
    }
    fn into_connection(self) -> ThreadSafeConnection;
    /// Total row count.
    fn count(&self) -> u64 {
        self.count_filtered(None)
    }
    /// Row count, optionally restricted to keys matching a SQLite glob.
    ///
    /// NOTE(review): `glob` is interpolated into the SQL text — pass trusted
    /// patterns only. Errors are swallowed and reported as 0.
    fn count_filtered(&self, glob: Option<&str>) -> u64 {
        self.connection()
            .lock()
            .query_row(
                &format!(
                    "SELECT COUNT(*) FROM {} {}",
                    Self::table_name(),
                    match glob {
                        Some(glob) => format!("where key glob \"{}\"", glob),
                        None => "".into(),
                    }
                ),
                [],
                |r| r.get::<_, i64>(0),
            )
            .unwrap_or(0) as u64
    }
    /// Fetch and decode the item stored under `key`, if any.
    fn get(&self, key: impl AsRef<str>) -> Result<Option<Self::StorageItem>> {
        Ok(self
            .connection()
            .lock()
            .query_row(
                // bound parameter instead of string interpolation (see trait docs)
                &format!("SELECT data FROM {} WHERE key = ?1", Self::table_name()),
                params![key.as_ref()],
                |r| r.get::<_, Vec<u8>>(0),
            )
            .optional()?
            .map(|d| Self::StorageItem::from(d.as_slice())))
    }
    /// Update an existing item, or create it as default, returning the stored item
    /// f(existing) should merge the items as desired
    fn update(
        &self,
        progress: Option<&mut prodash::tree::Item>,
        key: impl AsRef<str>,
        f: impl Fn(Self::StorageItem) -> Self::StorageItem,
    ) -> Result<Self::StorageItem> {
        retry_on_db_busy(progress, || {
            let mut guard = self.connection().lock();
            // IMMEDIATE: take the write lock up front so read-then-write is atomic
            let transaction = guard.transaction_with_behavior(rusqlite::TransactionBehavior::Immediate)?;
            let new_value = transaction
                .query_row(
                    &format!("SELECT data FROM {} WHERE key = ?1", Self::table_name()),
                    params![key.as_ref()],
                    |r| r.get::<_, Vec<u8>>(0),
                )
                .optional()?
                .map_or_else(|| f(Self::StorageItem::default()), |d| f(d.as_slice().into()));
            transaction.execute(
                &format!("REPLACE INTO {} (key, data) VALUES (?1, ?2)", Self::table_name()),
                params![key.as_ref(), rmp_serde::to_vec(&new_value)?],
            )?;
            transaction.commit()?;
            Ok(new_value)
        })
    }
    /// Similar to 'update', but provides full control over the default and allows deletion
    fn upsert(
        &self,
        progress: &mut prodash::tree::Item,
        key: impl AsRef<str>,
        item: &Self::InsertItem,
    ) -> Result<Self::StorageItem> {
        retry_on_db_busy(Some(progress), || {
            let mut guard = self.connection().lock();
            let transaction = guard.transaction_with_behavior(rusqlite::TransactionBehavior::Immediate)?;
            let new_value = {
                let maybe_vec = transaction
                    .query_row(
                        &format!("SELECT data FROM {} WHERE key = ?1", Self::table_name()),
                        params![key.as_ref()],
                        |r| r.get::<_, Vec<u8>>(0),
                    )
                    .optional()?;
                Self::merge(item, maybe_vec.map(|v| v.as_slice().into()))
            };
            transaction.execute(
                &format!("REPLACE INTO {} (key, data) VALUES (?1, ?2)", Self::table_name()),
                params![key.as_ref(), rmp_serde::to_vec(&new_value)?],
            )?;
            transaction.commit()?;
            Ok(new_value)
        })
    }
    /// Insert (or replace) `v` under `key` without reading any existing row first.
    fn insert(&self, progress: &mut prodash::tree::Item, key: impl AsRef<str>, v: &Self::InsertItem) -> Result<()> {
        retry_on_db_busy(Some(progress), || {
            self.connection().lock().execute(
                &format!("REPLACE INTO {} (key, data) VALUES (?1, ?2)", Self::table_name()),
                params![key.as_ref(), rmp_serde::to_vec(&Self::merge(v, None))?],
            )?;
            Ok(())
        })
    }
}
/// Run `f`, retrying with exponential backoff (1ms, 2ms, 4ms, ...) whenever it
/// fails with SQLITE_BUSY, for up to 100 seconds of accumulated waiting.
/// Any other error, or exceeding the budget, is returned to the caller.
/// `progress` (if provided) is marked blocked for the duration of each wait.
fn retry_on_db_busy<T>(mut progress: Option<&mut prodash::tree::Item>, mut f: impl FnMut() -> Result<T>) -> Result<T> {
    use crate::Error;
    use rusqlite::ffi::Error as SqliteFFIError;
    use rusqlite::ffi::ErrorCode as SqliteFFIErrorCode;
    use rusqlite::Error as SqliteError;
    use std::ops::Add;
    // Total wait budget; note this is seconds, not milliseconds.
    let max_wait = Duration::from_secs(100);
    let mut total_wait_time = Duration::default();
    let mut wait_for = Duration::from_millis(1);
    loop {
        match f() {
            Ok(v) => return Ok(v),
            Err(
                err @ Error::Rusqlite(SqliteError::SqliteFailure(
                    SqliteFFIError {
                        code: SqliteFFIErrorCode::DatabaseBusy,
                        ..
                    },
                    _,
                )),
            ) => {
                if total_wait_time >= max_wait {
                    log::warn!("Giving up waiting for {:?} after {:?}", err, total_wait_time);
                    return Err(err);
                }
                log::warn!(
                    "Waiting {:?} for {:?} (total wait time {:?})",
                    wait_for,
                    err,
                    total_wait_time
                );
                if let Some(p) = progress.as_mut() {
                    p.blocked("wait for write lock", Some(SystemTime::now().add(wait_for)));
                };
                std::thread::sleep(wait_for);
                total_wait_time += wait_for;
                // Exponential backoff.
                wait_for *= 2;
            }
            Err(err) => return Err(err),
        }
    }
}
/// Key-value table holding `Task` items.
pub struct TaskTable {
    pub(crate) inner: ThreadSafeConnection,
}

impl TableAccess for TaskTable {
    type StorageItem = Task;
    type InsertItem = Task;
    fn connection(&self) -> &ThreadSafeConnection {
        &self.inner
    }
    fn table_name() -> &'static str {
        "task"
    }
    /// Merge `new_task` into `existing_task` (if present), always refreshing
    /// `stored_at` so it records when the row was last written.
    fn merge(new_task: &Self::InsertItem, existing_task: Option<Self::StorageItem>) -> Self::StorageItem {
        Task {
            stored_at: SystemTime::now(),
            ..existing_task.map_or_else(|| new_task.clone(), |existing_task| existing_task.merge(new_task))
        }
    }
    fn into_connection(self) -> ThreadSafeConnection {
        self.inner
    }
}
pub struct ReportsTree {
pub(crate) inner: ThreadSafeConnection,
}
#[allow(dead_code)]
impl ReportsTree {
pub fn table_name() -> &'static str {
"report_done"
}
pub fn key_buf(crate_name: &str, crate_version: &str, report_name: &str, report_version: &str, buf: &mut String) {
buf.push_str(crate_name);
buf.push(KEY_SEP_CHAR);
buf.push_str(crate_version);
buf.push(KEY_SEP_CHAR);
buf.push_str(report_name);
buf.push(KEY_SEP_CHAR);
buf.push_str(report_version);
}
pub fn is_done(&self, key: impl AsRef<str>) -> bool {
self.inner
.lock()
.query_row(
&format!("SELECT key FROM {} where key = '{}'", Self::table_name(), key.as_ref()),
[],
|_r| Ok(()),
)
.optional()
.ok()
.unwrap_or_default()
.map_or(false, |_: ()| true)
}
}
/// Key-value table holding `TaskResult` items; uses the default `merge`
/// behavior from `TableAccess`.
pub struct TaskResultTable {
    pub(crate) inner: ThreadSafeConnection,
}

impl TableAccess for TaskResultTable {
    type StorageItem = TaskResult;
    type InsertItem = TaskResult;
    fn connection(&self) -> &ThreadSafeConnection {
        &self.inner
    }
    fn table_name() -> &'static str {
        "result"
    }
    fn into_connection(self) -> ThreadSafeConnection {
        self.inner
    }
}
/// Table holding one `Context` per day, keyed by `context/YYYY-MM-DD`
/// (see `impl Keyed for Context`).
pub struct MetaTable {
    pub(crate) inner: ThreadSafeConnection,
}

impl TableAccess for MetaTable {
    type StorageItem = Context;
    type InsertItem = Context;
    fn connection(&self) -> &ThreadSafeConnection {
        &self.inner
    }
    fn table_name() -> &'static str {
        "meta"
    }
    /// Merge `new` into the existing context, or store `new` verbatim if none exists.
    fn merge(new: &Context, existing_item: Option<Context>) -> Self::StorageItem {
        existing_item.map_or_else(|| new.to_owned(), |existing| existing.merge(new))
    }
    fn into_connection(self) -> ThreadSafeConnection {
        self.inner
    }
}

impl MetaTable {
    /// Apply `f` to today's context (created as default if missing) and persist it.
    pub fn update_today(&self, f: impl Fn(&mut Context)) -> Result<Context> {
        self.update(None, Context::default().key(), |mut c| {
            f(&mut c);
            c
        })
    }
    // NOTE: impl iterator is not allowed in traits unfortunately, but one could implement one manually
    /// Return the row with the lexicographically greatest key - the most recent
    /// day, since keys embed a YYYY-MM-DD date. The table name is hard-coded
    /// here and must match `table_name()`.
    pub fn most_recent(&self) -> Result<Option<(String, Context)>> {
        Ok(self
            .connection()
            .lock()
            .query_row("SELECT key, data FROM meta ORDER BY key DESC limit 1", [], |r| {
                Ok((r.get::<_, String>(0)?, r.get::<_, Vec<u8>>(1)?))
            })
            .optional()?
            .map(|(k, v)| (k, Context::from(v.as_slice()))))
    }
}
/// Key-value table holding `Crate` items; new `CrateVersion`s are merged into
/// the crate's stored version list.
#[derive(Clone)]
pub struct CrateTable {
    pub(crate) inner: ThreadSafeConnection,
}

impl TableAccess for CrateTable {
    type StorageItem = Crate;
    type InsertItem = CrateVersion;
    fn connection(&self) -> &ThreadSafeConnection {
        &self.inner
    }
    fn table_name() -> &'static str {
        "crate"
    }
    /// A new version either seeds a fresh `Crate` or is merged into the stored one.
    fn merge(new_item: &CrateVersion, existing_item: Option<Crate>) -> Crate {
        existing_item.map_or_else(|| Crate::from(new_item.to_owned()), |c| c.merge(new_item))
    }
    fn into_connection(self) -> ThreadSafeConnection {
        self.inner
    }
}
/// Key-value table holding `CrateVersion` items; uses the default `merge`
/// behavior from `TableAccess`.
#[derive(Clone)]
pub struct CrateVersionTable {
    pub(crate) inner: ThreadSafeConnection,
}

impl TableAccess for CrateVersionTable {
    type StorageItem = CrateVersion;
    type InsertItem = CrateVersion;
    fn connection(&self) -> &ThreadSafeConnection {
        &self.inner
    }
    fn table_name() -> &'static str {
        "crate_version"
    }
    fn into_connection(self) -> ThreadSafeConnection {
        self.inner
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/persistence/keyed.rs | criner/src/persistence/keyed.rs | use crate::model::{Context, Crate, CrateVersion, Task, TaskResult};
use std::time::SystemTime;
/// Separator placed between the components of a composite storage key.
pub const KEY_SEP_CHAR: char = ':';

/// Types that can render themselves into a stable string key for storage.
pub trait Keyed {
    /// Append this value's key to `buf` (without clearing it first).
    fn key_buf(&self, buf: &mut String);
    /// Convenience wrapper returning the key as a fresh `String`.
    fn key(&self) -> String {
        let mut buf = String::with_capacity(16);
        self.key_buf(&mut buf);
        buf
    }
}
impl Keyed for Task {
    // Key: "<process>:<process-version>".
    fn key_buf(&self, buf: &mut String) {
        buf.push_str(&self.process);
        buf.push(KEY_SEP_CHAR);
        buf.push_str(&self.version);
    }
}

impl Task {
    /// Fully-qualified key:
    /// "<crate-name>:<crate-version>:<process>:<process-version>".
    pub fn fq_key(&self, crate_name: &str, crate_version: &str, buf: &mut String) {
        CrateVersion::key_from(crate_name, crate_version, buf);
        buf.push(KEY_SEP_CHAR);
        self.key_buf(buf);
    }
}
impl Keyed for CrateVersion {
    // Key: "<name>:<version>".
    fn key_buf(&self, buf: &mut String) {
        CrateVersion::key_from(&self.name, &self.version, buf)
    }
}

impl Crate {
    /// A crate is keyed by its name alone, taken from one of its versions.
    pub fn key_from_version_buf(v: &CrateVersion, buf: &mut String) {
        buf.push_str(&v.name);
    }
}
impl Keyed for TaskResult {
    // Only the Download variant contributes a key suffix (":<kind>");
    // the other variants add nothing.
    fn key_buf(&self, buf: &mut String) {
        match self {
            TaskResult::Download { kind, .. } => {
                buf.push(KEY_SEP_CHAR);
                buf.push_str(kind)
            }
            TaskResult::None | TaskResult::ExplodedCrate { .. } => {}
        }
    }
}

impl TaskResult {
    /// Fully-qualified key: the owning task's fq key plus this result's suffix, if any.
    pub fn fq_key(&self, crate_name: &str, crate_version: &str, task: &Task, buf: &mut String) {
        task.fq_key(crate_name, crate_version, buf);
        self.key_buf(buf);
    }
}
impl Keyed for Context {
    /// Context keys look like "context/YYYY-MM-DD", one per calendar day.
    fn key_buf(&self, buf: &mut String) {
        use std::fmt::Write;
        // Render "now" as RFC3339 and keep only the leading date portion.
        let now = humantime::format_rfc3339(SystemTime::now()).to_string();
        let day = now.get(..10).expect("YYYY-MM-DD - 10 bytes");
        // Writing into a String cannot fail, so the formatter result is ignored.
        write!(buf, "context/{}", day).ok();
    }
}
impl CrateVersion {
    /// Write the canonical "<name>:<version>" key into `buf`.
    pub fn key_from(name: &str, version: &str, buf: &mut String) {
        buf.push_str(name);
        buf.push(KEY_SEP_CHAR);
        buf.push_str(version);
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/persistence/mod.rs | criner/src/persistence/mod.rs | use crate::Result;
use std::path::{Path, PathBuf};
mod keyed;
mod merge;
pub use keyed::*;
mod serde;
mod table;
pub use table::*;
/// Handle to the on-disk sqlite database; cheap to clone, each table accessor
/// opens its own connection to the stored path.
#[derive(Clone)]
pub struct Db {
    sqlite_path: PathBuf,
}

impl Db {
    /// Create (if necessary) and open the database under `path`, applying
    /// pragmas and creating all key-value tables and the report table.
    pub fn open(path: impl AsRef<Path>) -> Result<Db> {
        std::fs::create_dir_all(&path)?;
        let sqlite_path = path.as_ref().join("db.msgpack.sqlite");
        {
            let mut connection = rusqlite::Connection::open(&sqlite_path)?;
            connection.execute_batch("
            PRAGMA journal_mode = WAL; -- better write-concurrency
            PRAGMA synchronous = NORMAL; -- fsync only in critical moments
            PRAGMA wal_autocheckpoint = 1000; -- write WAL changes back every 1000 pages, for an in average 1MB WAL file. May affect readers if number is increased
            PRAGMA wal_checkpoint(TRUNCATE); -- free some space by truncating possibly massive WAL files from the last run.
        ")?;
            let transaction = connection.transaction()?;
            // All key-value tables share the same two-column (key, data) schema.
            for name in &["meta", "crate_version", "crate", "task", "result", "crates.io-crate"] {
                transaction.execute_batch(&format!(
                    "CREATE TABLE IF NOT EXISTS '{}' (
                    key TEXT PRIMARY KEY NOT NULL,
                    data BLOB NOT NULL
                )",
                    name
                ))?;
            }
            // The report table stores keys only - presence of a row means "done".
            transaction.execute_batch(
                "CREATE TABLE IF NOT EXISTS report_done (
                    key TEXT PRIMARY KEY NOT NULL
                )",
            )?;
            transaction.commit()?;
        }
        Ok(Db { sqlite_path })
    }
    /// Open a fresh, lockable connection without a busy handler.
    pub fn open_connection(&self) -> Result<ThreadSafeConnection> {
        Ok(std::sync::Arc::new(parking_lot::Mutex::new(
            rusqlite::Connection::open(&self.sqlite_path)?,
        )))
    }
    /// Open a lockable connection that sleeps-and-retries on SQLITE_BUSY.
    pub fn open_connection_with_busy_wait(&self) -> Result<ThreadSafeConnection> {
        let connection = rusqlite::Connection::open(&self.sqlite_path)?;
        connection.busy_handler(Some(sleeper))?;
        Ok(std::sync::Arc::new(parking_lot::Mutex::new(connection)))
    }
    /// Like `open_connection_with_busy_wait`, but returns the bare connection
    /// (not wrapped in Arc<Mutex>).
    pub fn open_connection_no_async_with_busy_wait(&self) -> Result<rusqlite::Connection> {
        let connection = rusqlite::Connection::open(&self.sqlite_path)?;
        connection.busy_handler(Some(sleeper))?;
        Ok(connection)
    }
    pub fn open_crate_versions(&self) -> Result<CrateVersionTable> {
        Ok(CrateVersionTable {
            inner: self.open_connection()?,
        })
    }
    pub fn open_crates(&self) -> Result<CrateTable> {
        Ok(CrateTable {
            inner: self.open_connection()?,
        })
    }
    pub fn open_tasks(&self) -> Result<TaskTable> {
        Ok(TaskTable {
            inner: self.open_connection()?,
        })
    }
    pub fn open_results(&self) -> Result<TaskResultTable> {
        Ok(TaskResultTable {
            inner: self.open_connection()?,
        })
    }
    pub fn open_context(&self) -> Result<MetaTable> {
        Ok(MetaTable {
            inner: self.open_connection()?,
        })
    }
    pub fn open_reports(&self) -> Result<ReportsTree> {
        Ok(ReportsTree {
            inner: self.open_connection()?,
        })
    }
}
/// sqlite busy handler: log the attempt, back off for a fixed 50ms,
/// and return `true` to tell sqlite to retry the operation.
fn sleeper(attempts: i32) -> bool {
    const RETRY_DELAY: std::time::Duration = std::time::Duration::from_millis(50);
    log::warn!("SQLITE_BUSY, retrying after 50ms (attempt {})", attempts);
    std::thread::sleep(RETRY_DELAY);
    true
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/mod.rs | criner/src/export/mod.rs | mod run;
mod to_sql;
pub use run::run_blocking;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/run.rs | criner/src/export/run.rs | use super::to_sql::SqlConvert;
use crate::model;
use rusqlite::Connection;
use std::path::Path;
/// Export the msgpack key-value `source_db` into a normalized, human-queryable
/// sqlite database at `destination_db`, which must not already exist.
pub fn run_blocking(source_db: impl AsRef<Path>, destination_db: impl AsRef<Path>) -> crate::Result<()> {
    if destination_db.as_ref().is_file() {
        return Err(crate::Error::Message(format!(
            "Destination database at '{}' does already exist - this is currently unsupported",
            destination_db.as_ref().display()
        )));
    }
    let mut input = Connection::open(source_db)?;
    let mut output = Connection::open(destination_db)?;
    // Turn off keychecks during insertion - we assume we can't get it wrong
    // However, we do embed foreign key relations as form of documentation.
    output.execute_batch(
        "
        PRAGMA foreign_keys = FALSE; -- assume we don't mess up relations, save validation time
        PRAGMA journal_mode = 'OFF' -- no journal, direct writes
    ",
    )?;
    // One transfer per source table; each creates its destination schema itself.
    transfer::<model::db_dump::Crate>(&mut input, &mut output)?;
    transfer::<model::Crate>(&mut input, &mut output)?;
    transfer::<model::Task>(&mut input, &mut output)?;
    transfer::<model::Context>(&mut input, &mut output)?;
    transfer::<model::CrateVersion>(&mut input, &mut output)?;
    transfer::<model::TaskResult>(&mut input, &mut output)?;
    Ok(())
}
/// Copy every row of `T`'s source table from `input` into freshly created
/// destination table(s) in `output`, logging the number of rows inserted.
fn transfer<T>(input: &mut Connection, output: &mut Connection) -> crate::Result<()>
where
    for<'a> T: SqlConvert + From<&'a [u8]>,
{
    output.execute_batch(T::init_table_statement())?;
    let mut istm = input.prepare(&format!("SELECT key, data FROM '{}'", T::source_table_name()))?;
    let transaction = output.transaction()?;
    let mut count = 0;
    let start = std::time::SystemTime::now();
    {
        // Implementors may take over the whole loop (used for multi-table conversions)...
        if let Some(res) = T::convert_to_sql(&mut istm, &transaction) {
            count = res?;
        } else {
            // ...otherwise run the generic per-row path via the prepared REPLACE statement(s).
            let mut ostm = transaction.prepare(T::replace_statement())?;
            let mut secondary_ostm = match T::secondary_replace_statement() {
                Some(s) => Some(transaction.prepare(s)?),
                None => None,
            };
            for (uid, res) in istm
                .query_map([], |r| {
                    let key: String = r.get(0)?;
                    let value: Vec<u8> = r.get(1)?;
                    Ok((key, value))
                })?
                .enumerate()
            {
                count += 1;
                let (key, value) = res?;
                // Rows are stored as msgpack blobs; decode via `From<&[u8]>`.
                let value = T::from(value.as_slice());
                value.insert(&key, uid as i32, &mut ostm, secondary_ostm.as_mut())?;
            }
        }
    }
    transaction.commit()?;
    log::info!(
        "Inserted {} {} in {:?}",
        count,
        T::source_table_name(),
        std::time::SystemTime::now().duration_since(start).unwrap()
    );
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/krate.rs | criner/src/export/to_sql/krate.rs | use crate::{export::to_sql::SqlConvert, model};
use rusqlite::{params, Statement};
/// Export a `Crate` (name plus its list of versions) as one row per version.
impl SqlConvert for model::Crate {
    fn replace_statement() -> &'static str {
        "REPLACE INTO crate
                 (name, version)
          VALUES (?1, ?2)"
    }
    fn source_table_name() -> &'static str {
        "crate"
    }
    fn init_table_statement() -> &'static str {
        "CREATE TABLE crate (
             name           TEXT NOT NULL,
             version        TEXT NOT NULL,
             PRIMARY KEY (name, version)
        )"
    }
    fn insert(
        &self,
        key: &str,
        _uid: i32,
        stm: &mut Statement<'_>,
        _sstm: Option<&mut rusqlite::Statement<'_>>,
    ) -> crate::Result<usize> {
        // Crate keys contain only the crate name (no separator-delimited suffix).
        let mut tokens = key.split(crate::persistence::KEY_SEP_CHAR);
        let name = tokens.next().unwrap();
        assert!(tokens.next().is_none());
        let Self { versions } = self;
        // One output row per stored version string.
        for version in versions.iter() {
            stm.execute(params![name, version])?;
        }
        Ok(versions.len())
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/dbdump_crate.rs | criner/src/export/to_sql/dbdump_crate.rs | use crate::{
export::to_sql::{to_seconds_since_epoch, SqlConvert},
model,
};
use rusqlite::{params, Statement};
/// Export of crates.io database-dump crates. This implementor drives the whole
/// transfer itself via `convert_to_sql` (see `do_it`), because one input row
/// fans out into three destination tables; `replace_statement`/`insert`
/// are therefore never called.
impl SqlConvert for model::db_dump::Crate {
    fn replace_statement() -> &'static str {
        "will not be called"
    }
    fn source_table_name() -> &'static str {
        "crates.io-crate"
    }
    fn init_table_statement() -> &'static str {
        "
        BEGIN;
        CREATE TABLE 'crates.io-crate_version' (
             parent_id                INTEGER NOT NULL,
             crate_name               TEXT NOT NULL,
             semver                   TEXT NOT NULL,
             created_at               TIMESTAMP NOT NULL,
             updated_at               TIMESTAMP NOT NULL,
             downloads                INTEGER NOT NULL,
             features                 JSON NOT NULL, -- Array of Feature objects
             license                  TEXT NOT NULL,
             crate_size               INTEGER,
             published_by             INTEGER, -- Github user id as index into crates.io-actor table
             is_yanked                INTEGER NOT NULL, -- is 1 if this version is yanked
             FOREIGN KEY (parent_id) REFERENCES 'crates.io-crate'(_row_id_)
        );
        CREATE TABLE 'crates.io-actor' (
             crates_io_id      INTEGER NOT NULL, -- these IDs are not unique, so we can't use it as unique id
             kind              TEXT NOT NULL,
             github_id         INTEGER NOT NULL, -- This is a unique id across teams and users
             github_avatar_url TEXT NOT NULL,
             github_login      TEXT NOT NULL,
             name              TEXT,
             PRIMARY KEY (github_id)
        );
        CREATE TABLE 'crates.io-crate' (
             name              TEXT NOT NULL,
             stored_at         TIMESTAMP NOT NULL,
             created_at        TIMESTAMP NOT NULL,
             updated_at        TIMESTAMP NOT NULL,
             description       TEXT,
             documentation     TEXT,
             downloads         INTEGER NOT NULL,
             homepage          TEXT,
             readme            TEXT,
             repository        TEXT,
             created_by        INTEGER, -- Github user id as index into crates.io-actor table
             owners            JSON NOT NULL, -- Array of github user ids for indexing into the crates.io-actor table
             keywords          JSON NOT NULL, -- Array of strings, each string being a keyword
             categories        JSON NOT NULL, -- Array of category objects, providing a wealth of information for each
             PRIMARY KEY (name),
             FOREIGN KEY (created_by) REFERENCES actor(github_id)
        );
        COMMIT;
        "
    }
    fn convert_to_sql(
        input_statement: &mut rusqlite::Statement,
        transaction: &rusqlite::Transaction,
    ) -> Option<crate::Result<usize>> {
        Some(do_it(input_statement, transaction))
    }
    fn insert(
        &self,
        _key: &str,
        _uid: i32,
        _stm: &mut Statement<'_>,
        _sstm: Option<&mut rusqlite::Statement<'_>>,
    ) -> crate::Result<usize> {
        unimplemented!("we implement convert_to_sql instead (having our own loop and unlimited prepared statements")
    }
}
/// Transfer all crates.io db-dump crates into the three destination tables:
/// one crate row, one row per version, and de-duplicated actor rows for
/// creators and owners. Returns the number of crates inserted.
fn do_it(input_statement: &mut rusqlite::Statement, transaction: &rusqlite::Transaction) -> crate::Result<usize> {
    let mut insert_crate = transaction
        .prepare("
            REPLACE INTO 'crates.io-crate'
                     (name, stored_at, created_at, updated_at, description, documentation, downloads, homepage, readme, repository, created_by, owners, keywords, categories)
              VALUES (?1  , ?2       , ?3        , ?4        , ?5         , ?6           , ?7       , ?8      , ?9    , ?10       , ?11       , ?12   , ?13     , ?14);
        ",)
        .unwrap();
    let mut insert_actor = transaction
        .prepare(
            "
            INSERT OR IGNORE INTO 'crates.io-actor'
                     (crates_io_id, kind, github_id, github_avatar_url, github_login, name)
              VALUES (?1          , ?2  , ?3       , ?4               , ?5          , ?6  );
        ",
        )
        .unwrap();
    let mut insert_crate_version = transaction
        .prepare(
            "
            INSERT OR IGNORE INTO 'crates.io-crate_version'
                     (parent_id, crate_name, semver, created_at, updated_at, downloads, features, license, crate_size, published_by, is_yanked)
              VALUES (?1       , ?2        , ?3    , ?4        , ?5        , ?6       , ?7      , ?8     , ?9        , ?10         , ?11);
        ",
            // Fixed: the original statement had a stray empty slot between ?10
            // and ?11 ("?10 , , ?11"), which is invalid SQL and made prepare() fail.
        )
        .unwrap();
    let mut count = 0;
    for res in input_statement.query_map([], |r| {
        let key: String = r.get(0)?;
        let value: Vec<u8> = r.get(1)?;
        Ok((key, value))
    })? {
        let (_crate_name, bytes) = res?;
        // Rows are msgpack blobs; decode and destructure completely so new
        // fields cause a compile error here.
        let model::db_dump::Crate {
            name,
            stored_at,
            created_at,
            updated_at,
            description,
            documentation,
            downloads,
            homepage,
            readme,
            repository,
            versions,
            keywords,
            categories,
            created_by,
            owners,
        } = bytes.as_slice().into();
        // Actors are de-duplicated by the INSERT OR IGNORE + github_id primary key.
        if let Some(actor) = created_by.as_ref() {
            insert_actor_to_db(&mut insert_actor, actor)?;
        }
        for owner in owners.iter() {
            insert_actor_to_db(&mut insert_actor, owner)?;
        }
        count += insert_crate.execute(params![
            name,
            to_seconds_since_epoch(stored_at),
            to_seconds_since_epoch(created_at),
            to_seconds_since_epoch(updated_at),
            description,
            documentation,
            downloads as i64,
            homepage,
            readme,
            repository,
            created_by.map(|actor| actor.github_id),
            serde_json::to_string_pretty(&owners.iter().map(|actor| actor.github_id).collect::<Vec<_>>()).unwrap(),
            serde_json::to_string_pretty(&keywords).unwrap(),
            serde_json::to_string_pretty(&categories).unwrap(),
        ])?;
        for version in versions {
            let model::db_dump::CrateVersion {
                crate_size,
                created_at,
                updated_at,
                downloads,
                features,
                license,
                semver,
                published_by,
                is_yanked,
            } = version;
            // `count` equals the rowid of the crate row just inserted (rowids start at 1).
            insert_crate_version.execute(params![
                count as i32,
                name,
                semver,
                to_seconds_since_epoch(created_at),
                to_seconds_since_epoch(updated_at),
                downloads as i64,
                serde_json::to_string_pretty(&features).unwrap(),
                license,
                crate_size,
                published_by.map(|a| a.github_id),
                is_yanked
            ])?;
        }
    }
    Ok(count)
}
/// Insert `actor` via the prepared 'crates.io-actor' statement; duplicates are
/// ignored by the statement itself (INSERT OR IGNORE keyed on github_id).
fn insert_actor_to_db(insert_actor: &mut Statement, actor: &model::db_dump::Actor) -> rusqlite::Result<usize> {
    // Map the enum to its stored text form first to keep the params list flat.
    let kind = match actor.kind {
        model::db_dump::ActorKind::User => "user",
        model::db_dump::ActorKind::Team => "team",
    };
    insert_actor.execute(params![
        actor.crates_io_id,
        kind,
        actor.github_id,
        actor.github_avatar_url,
        actor.github_login,
        actor.name
    ])
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/krate_version.rs | criner/src/export/to_sql/krate_version.rs | use crate::{export::to_sql::SqlConvert, model};
use rusqlite::{params, Statement};
/// Export a `CrateVersion` as one parent row plus one child row per dependency,
/// linked through the sequential `uid` assigned by the transfer loop.
impl SqlConvert for model::CrateVersion {
    fn replace_statement() -> &'static str {
        "REPLACE INTO crate_version
                 (id, name, version, kind, checksum, features)
          VALUES (?1, ?2  , ?3     , ?4  , ?5      , ?6);
        "
    }
    fn secondary_replace_statement() -> Option<&'static str> {
        Some(
            "REPLACE INTO crate_version_dependency
                (parent_id, name, required_version, features, optional, default_features, target, kind, package)
         VALUES (?1       , ?2  , ?3              , ?4      , ?5      , ?6              , ?7    , ?8  , ?9);",
        )
    }
    fn source_table_name() -> &'static str {
        "crate_version"
    }
    fn init_table_statement() -> &'static str {
        "CREATE TABLE crate_version (
             id               INTEGER UNIQUE NOT NULL,
             name             TEXT NOT NULL,
             version          TEXT NOT NULL,
             kind             TEXT NOT NULL,
             checksum         TEXT NOT NULL,
             features         JSON NOT NULL,
             PRIMARY KEY (name, version)
        );
        CREATE TABLE crate_version_dependency (
             parent_id              INTEGER NOT NULL,
             name                   TEXT NOT NULL,
             required_version       TEXT NOT NULL,
             features               JSON NOT NULL,
             optional               INTEGER NOT NULL, -- BOOL
             default_features       INTEGER NOT NULL, -- BOOL
             target                 TEXT,
             kind                   TEXT,
             package                TEXT,
             FOREIGN KEY (parent_id) REFERENCES crate_version(id)
        );
        "
    }
    fn insert(
        &self,
        _key: &str,
        uid: i32,
        stm: &mut Statement<'_>,
        sstm: Option<&mut Statement<'_>>,
    ) -> crate::Result<usize> {
        // Full destructuring so newly added model fields fail to compile here.
        let model::CrateVersion {
            name,
            kind,
            version,
            checksum,
            features,
            dependencies,
        } = self;
        use crate::model::ChangeKind::*;
        stm.execute(params![
            uid,
            name,
            version,
            match kind {
                Added => "added",
                Yanked => "yanked",
            },
            checksum,
            serde_json::to_string_pretty(features).unwrap()
        ])?;
        // Dependencies go to the secondary statement, keyed by the parent's uid.
        let sstm = sstm.expect("secondary statement to be set");
        for dep in dependencies {
            let model::Dependency {
                name,
                required_version,
                features,
                optional,
                default_features,
                target,
                kind,
                package,
            } = dep;
            sstm.execute(params![
                uid,
                name,
                required_version,
                serde_json::to_string_pretty(features).unwrap(),
                optional,
                default_features,
                target,
                kind,
                package
            ])?;
        }
        Ok(1)
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/result.rs | criner/src/export/to_sql/result.rs | use crate::export::to_sql::SqlConvert;
use crate::model;
use rusqlite::{params, Statement};
/// Export of `TaskResult` rows. This implementor drives the whole transfer via
/// `convert_to_sql` because one input row can fan out into several tables
/// (downloads, exploded crates, and their per-file entries).
impl SqlConvert for model::TaskResult {
    fn convert_to_sql(
        istm: &mut rusqlite::Statement,
        transaction: &rusqlite::Transaction,
    ) -> Option<crate::Result<usize>> {
        let res = (|| {
            let mut num_downloads = 0;
            let mut num_extract_crates = 0;
            let mut num_crate_entries = 0;
            let mut insert_download = transaction
                .prepare(
                    "
            REPLACE INTO result_download
                     (crate_name, crate_version, version, kind, url, content_length, content_type)
              VALUES (?1        , ?2           , ?3     , ?4  , ?5 , ?6            , ?7);
        ",
                )
                .unwrap();
            let mut insert_extract_crate = transaction
                .prepare(
                    "
            REPLACE INTO result_extract_crate
                     (id, crate_name, crate_version, version, num_crate_entries)
              VALUES (?1, ?2        , ?3           , ?4     , ?5);
        ",
                )
                .unwrap();
            let mut insert_crate_entry = transaction
                .prepare(
                    "
            REPLACE INTO crate_entry
                     (parent_id, path, size, entry_type, data)
              VALUES (?1       , ?2  , ?3  , ?4        , ?5);
        ",
                )
                .unwrap();
            for res in istm.query_map([], |r| {
                let key: String = r.get(0)?;
                let value: Vec<u8> = r.get(1)?;
                Ok((key, value))
            })? {
                let (key, value) = res?;
                // Keys look like "<crate>:<version>:<process>:<process-version>[:<kind>]".
                let mut tokens = key.split(crate::persistence::KEY_SEP_CHAR);
                let crate_name = tokens.next().unwrap();
                let crate_version = tokens.next().unwrap();
                let process = tokens.next().unwrap();
                let process_version = tokens.next().unwrap();
                let optional_last_key = tokens.next();
                assert!(tokens.next().is_none());
                let value = Self::from(value.as_slice());
                use model::TaskResult;
                match value {
                    TaskResult::Download {
                        kind,
                        url,
                        content_length,
                        content_type,
                    } => {
                        assert_eq!(process, "download");
                        // Download keys carry the kind as their final component.
                        assert_eq!(Some(kind.as_ref()), optional_last_key);
                        insert_download.execute(params![
                            crate_name,
                            crate_version,
                            process_version,
                            kind,
                            url,
                            content_length,
                            content_type
                        ])?;
                        num_downloads += 1;
                    }
                    TaskResult::ExplodedCrate {
                        entries_meta_data,
                        selected_entries,
                    } => {
                        assert_eq!(process, "extract_crate");
                        // Sequential id links the crate's file entries to this row.
                        let id = num_extract_crates as i32;
                        insert_extract_crate.execute(params![
                            id,
                            crate_name,
                            crate_version,
                            process_version,
                            entries_meta_data.len() as i64
                        ])?;
                        // Metadata-only entries are stored without content (data = NULL)...
                        for entry in entries_meta_data.iter() {
                            let model::TarHeader { path, size, entry_type } = entry;
                            insert_crate_entry.execute(params![
                                id,
                                std::str::from_utf8(path).expect("utf8 path in crate - lets see how long this is true"),
                                *size as i64,
                                entry_type,
                                rusqlite::types::Null
                            ])?;
                            num_crate_entries += 1;
                        }
                        // ...while selected entries carry their file content.
                        for (entry, data) in selected_entries.iter() {
                            let model::TarHeader { path, size, entry_type } = entry;
                            insert_crate_entry.execute(params![
                                id,
                                std::str::from_utf8(path).expect("utf8 path in crate - lets see how long this is true"),
                                *size as i64,
                                entry_type,
                                data
                            ])?;
                            num_crate_entries += 1;
                        }
                        num_extract_crates += 1;
                    }
                    TaskResult::None => {}
                };
            }
            Ok(num_downloads + num_extract_crates + num_crate_entries)
        })();
        Some(res)
    }
    fn replace_statement() -> &'static str {
        "will not be called"
    }
    fn source_table_name() -> &'static str {
        "result"
    }
    fn init_table_statement() -> &'static str {
        "
        BEGIN;
        CREATE TABLE result_download (
             crate_name       TEXT NOT NULL,
             crate_version    TEXT NOT NULL,
             version          TEXT NOT NULL, -- version of the process that created the result
             kind             TEXT NOT NULL,
             url              TEXT NOT NULL,
             content_length   INTEGER NOT NULL,
             content_type     TEXT,
             PRIMARY KEY (crate_name, crate_version, version, kind)
        );
        CREATE TABLE result_extract_crate (
             id                INTEGER UNIQUE NOT NULL,
             crate_name        TEXT NOT NULL,
             crate_version     TEXT NOT NULL,
             version           TEXT NOT NULL, -- version of the process that created the result
             num_crate_entries INTEGER NOT NULL,
             PRIMARY KEY (crate_name, crate_version, version)
        );
        CREATE TABLE crate_entry (
             parent_id         INTEGER NOT NULL,
             path              TEXT NOT NULL,
             size              INTEGER NOT NULL, -- size in bytes
             entry_type        INTEGER NOT NULL, -- tar::EntryType
             data              BLOB, -- optionally with entire content
             PRIMARY KEY (parent_id, path),
             FOREIGN KEY (parent_id) REFERENCES result_extract_crate(id)
        );
        COMMIT;
        "
    }
    fn insert(
        &self,
        _key: &str,
        _uid: i32,
        _stm: &mut Statement<'_>,
        _sstm: Option<&mut Statement<'_>>,
    ) -> crate::Result<usize> {
        unimplemented!("we implement convert_to_sql instead (having our own loop and unlimited prepared statements")
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/mod.rs | criner/src/export/to_sql/mod.rs | mod dbdump_crate;
mod krate;
mod krate_version;
mod meta;
mod result;
mod task;
/// Convert `time` into whole seconds since the Unix epoch (sub-second part truncated).
///
/// # Panics
/// Panics if `time` is earlier than the Unix epoch.
pub fn to_seconds_since_epoch(time: std::time::SystemTime) -> i64 {
    let since_epoch = time.duration_since(std::time::UNIX_EPOCH).unwrap();
    since_epoch.as_secs() as i64
}
/// Conversion of one msgpack-backed key-value source table into one or more
/// normalized destination SQL tables.
pub trait SqlConvert {
    /// Hook for implementors that need full control over the transfer loop;
    /// returning `Some(count)` bypasses the generic per-row `insert` path.
    fn convert_to_sql(
        _input_statement: &mut rusqlite::Statement,
        _transaction: &rusqlite::Transaction,
    ) -> Option<crate::Result<usize>> {
        None
    }
    /// REPLACE statement prepared once and reused by the generic transfer loop.
    fn replace_statement() -> &'static str;
    /// Optional second statement for dependent (child) rows.
    fn secondary_replace_statement() -> Option<&'static str> {
        None
    }
    /// Name of the key-value table to read rows from.
    fn source_table_name() -> &'static str;
    /// DDL creating the destination table(s); executed before the transfer.
    fn init_table_statement() -> &'static str;
    /// Insert this value (stored under `key`, with sequential id `uid`) using
    /// the prepared `stm`, and `sstm` for child rows when one is declared.
    fn insert(
        &self,
        key: &str,
        uid: i32,
        stm: &mut rusqlite::Statement,
        sstm: Option<&mut rusqlite::Statement>,
    ) -> crate::Result<usize>;
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/meta.rs | criner/src/export/to_sql/meta.rs | use crate::export::to_sql::SqlConvert;
use crate::model;
use rusqlite::{params, Statement};
/// Export daily `Context` statistics; the sample day is parsed from the
/// "context/YYYY-MM-DD" key rather than from the value.
impl SqlConvert for model::Context {
    fn replace_statement() -> &'static str {
        "INSERT INTO runtime_statistic
                 (sample_day, num_new_crate_versions, num_new_crates, dur_s_fetch_new_crate_versions)
          VALUES (?1        , ?2                    , ?3            , ?4);
        "
    }
    fn source_table_name() -> &'static str {
        "meta"
    }
    fn init_table_statement() -> &'static str {
        "CREATE TABLE runtime_statistic (
             sample_day                      TIMESTAMP NOT NULL,
             num_new_crate_versions          INTEGER NOT NULL,
             num_new_crates                  INTEGER NOT NULL,
             dur_s_fetch_new_crate_versions  INTEGER NOT NULL,
             PRIMARY KEY (sample_day)
        );
        "
    }
    fn insert(
        &self,
        key: &str,
        _uid: i32,
        stm: &mut Statement<'_>,
        _sstm: Option<&mut Statement<'_>>,
    ) -> crate::Result<usize> {
        // Keys look like "context/YYYY-MM-DD"; skip the "context" prefix.
        let mut tokens = key.split('/').skip(1);
        let day_date = tokens.next().unwrap();
        assert!(tokens.next().is_none());
        assert_eq!(day_date.len(), 10);
        // Extend the date to a full RFC3339 timestamp at midnight to parse it.
        let day_date = humantime::parse_rfc3339(&format!("{}T00:00:00Z", day_date)).unwrap();
        let date_stamp = day_date.duration_since(std::time::UNIX_EPOCH).unwrap();
        // Full destructuring so new counters fail to compile here.
        let model::Context {
            counts: model::Counts { crate_versions, crates },
            durations: model::Durations { fetch_crate_versions },
        } = self;
        stm.execute(params![
            date_stamp.as_secs() as i64,
            *crate_versions as i64,
            *crates as i64,
            fetch_crate_versions.as_secs() as i64
        ])
        .map_err(Into::into)
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/export/to_sql/task.rs | criner/src/export/to_sql/task.rs | use crate::{
export::to_sql::{to_seconds_since_epoch, SqlConvert},
model,
};
use rusqlite::{params, Statement};
/// Export a `Task` as one row, plus one `task_error` child row per recorded
/// failure, linked by the sequential `uid`.
impl SqlConvert for model::Task {
    fn replace_statement() -> &'static str {
        "REPLACE INTO task
                 (id, key, process, version, stored_at, state)
          VALUES (?1, ?2, ?3, ?4, ?5, ?6); "
    }
    fn secondary_replace_statement() -> Option<&'static str> {
        Some(
            "REPLACE INTO task_error
                 (parent_id, error)
          VALUES (?1       , ?2);",
        )
    }
    fn source_table_name() -> &'static str {
        "task"
    }
    fn init_table_statement() -> &'static str {
        "BEGIN;
        CREATE TABLE task (
             id               INTEGER UNIQUE NOT NULL,
             key              TEXT NOT NULL,
             process          TEXT NOT NULL,
             version          TEXT NOT NULL,
             stored_at        TIMESTAMP NOT NULL,
             state            TEXT NOT NULL,
             PRIMARY KEY (key)
        );
        CREATE TABLE task_error (
             parent_id        INTEGER NOT NULL,
             error            TEXT NOT NULL,
             FOREIGN KEY (parent_id) REFERENCES task(id)
        );
        COMMIT;"
    }
    fn insert(
        &self,
        key: &str,
        uid: i32,
        stm: &mut Statement<'_>,
        sstm: Option<&mut rusqlite::Statement<'_>>,
    ) -> crate::Result<usize> {
        use model::TaskState::*;
        // Full destructuring so new fields fail to compile here.
        let Self {
            stored_at,
            process,
            version,
            state,
        } = self;
        stm.execute(params![
            uid,
            key,
            process,
            version,
            to_seconds_since_epoch(*stored_at),
            // Only the variant name is stored; error details go to task_error below.
            match state {
                NotStarted => "NotStarted",
                Complete => "Complete",
                InProgress(_) => "InProgress",
                AttemptsWithFailure(_) => "AttemptsWithFailure",
            },
        ])?;
        match state {
            InProgress(Some(errors)) | AttemptsWithFailure(errors) => {
                let sstm = sstm.ok_or(crate::Error::Bug("need secondary statement"))?;
                for error in errors.iter() {
                    sstm.execute(params![uid, error])?;
                }
            }
            _ => {}
        }
        Ok(1)
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/mod.rs | criner/src/engine/mod.rs | pub mod report;
pub mod stage;
pub mod work;
pub mod run;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/run.rs | criner/src/engine/run.rs | use crate::{engine::stage, error::Result, model, persistence::Db, utils::*};
use futures_util::{
future::{Either, FutureExt},
stream::StreamExt,
};
use log::{info, warn};
use prodash::render::tui::{Event, Line};
use std::sync::Arc;
use std::{
path::{Path, PathBuf},
time::{Duration, SystemTime},
};
/// Scheduling parameters for a single engine stage.
pub struct StageRunSettings {
    /// Wait for the given duration after the stage ran
    pub every: Duration,
    /// If None, run the stage indefinitely. Otherwise run it the given amount of times. Some(0) disables the stage.
    pub at_most: Option<usize>,
}
/// Like `StageRunSettings`, but also provides a glob pattern
pub struct GlobStageRunSettings {
    /// Optional glob limiting which items the stage processes.
    pub glob: Option<String>,
    /// The shared scheduling parameters.
    pub run: StageRunSettings,
}
#[allow(clippy::too_many_arguments)]
/// Runs the statistics and mining engine.
/// May run for a long time unless a deadline is specified.
/// Even though timeouts can be achieved from outside of the future, knowing the deadline may be used
/// by the engine to manage its time even more efficiently.
///
/// Spawns up to four repeating stages on their own timers - crates.io DB download (optional),
/// change fetching, processing, and report generation - and waits for all of them to finish.
pub async fn non_blocking(
    db: Db,
    crates_io_path: PathBuf,
    deadline: Option<SystemTime>,
    progress: Arc<prodash::tree::Root>,
    io_bound_processors: u32,
    cpu_bound_processors: u32,
    cpu_o_bound_processors: u32,
    interrupt_control: InterruptControlEvents,
    db_download: bool,
    fetch_settings: StageRunSettings,
    process_settings: StageRunSettings,
    report_settings: GlobStageRunSettings,
    download_crates_io_database_every_24_hours_starting_at: Option<time::Time>,
    assets_dir: PathBuf,
) -> Result<()> {
    check(deadline)?;
    let startup_time = SystemTime::now();
    // Stage (optional): daily download of the crates.io database dump.
    let db_download_handle = db_download.then(|| {
        crate::spawn(repeat_daily_at(
            download_crates_io_database_every_24_hours_starting_at,
            {
                let p = progress.clone();
                move || p.add_child("Crates.io DB Digest")
            },
            deadline,
            {
                let db = db.clone();
                let assets_dir = assets_dir.clone();
                let progress = progress.clone();
                move || {
                    stage::db_download::schedule(
                        db.clone(),
                        assets_dir.clone(),
                        progress.add_child("fetching crates-io db"),
                        startup_time,
                    )
                }
            },
        ))
    });
    // Stage: periodically fetch changed crates from the crates.io index.
    let run = fetch_settings;
    let fetch_handle = crate::spawn(repeat_every_s(
        run.every.as_secs() as usize,
        {
            let p = progress.clone();
            move || p.add_child("Fetch Timer")
        },
        deadline,
        run.at_most,
        {
            let db = db.clone();
            let progress = progress.clone();
            move || {
                stage::changes::fetch(
                    crates_io_path.clone(),
                    db.clone(),
                    progress.add_child("crates.io refresh"),
                    deadline,
                )
            }
        },
    ));
    // Stage: periodically schedule downloads and extraction of crate versions.
    let stage = process_settings;
    let processing_handle = crate::spawn(repeat_every_s(
        stage.every.as_secs() as usize,
        {
            let p = progress.clone();
            move || p.add_child("Processing Timer")
        },
        deadline,
        stage.at_most,
        {
            let progress = progress.clone();
            let db = db.clone();
            let assets_dir = assets_dir.clone();
            move || {
                stage::processing::process(
                    db.clone(),
                    progress.add_child("Process Crate Versions"),
                    io_bound_processors,
                    cpu_bound_processors,
                    progress.add_child("Downloads"),
                    assets_dir.clone(),
                    startup_time,
                )
            }
        },
    ));
    // Stage: periodically generate reports. While a report is written, the TUI interrupt
    // mode is switched to 'Deferred' so an abort cannot interrupt the generation mid-way.
    let stage = report_settings;
    let report_handle = crate::spawn(repeat_every_s(
        stage.run.every.as_secs() as usize,
        {
            let p = progress.clone();
            move || p.add_child("Reporting Timer")
        },
        deadline,
        stage.run.at_most,
        {
            move || {
                let progress = progress.clone();
                let db = db.clone();
                let assets_dir = assets_dir.clone();
                let glob = stage.glob.clone();
                let interrupt_control = interrupt_control.clone();
                async move {
                    let ctrl = interrupt_control;
                    ctrl.send(Interruptible::Deferred).await.ok(); // there might be no TUI
                    let res = stage::report::generate(
                        db.clone(),
                        progress.add_child("Reports"),
                        assets_dir.clone(),
                        glob.clone(),
                        deadline,
                        cpu_o_bound_processors,
                    )
                    .await;
                    ctrl.send(Interruptible::Instantly).await.ok(); // there might be no TUI
                    res
                }
            }
        },
    ));
    // Await all stages; the processing stage's result is returned last.
    fetch_handle.await?;
    if let Some(handle) = db_download_handle {
        handle.await?
    };
    report_handle.await?;
    processing_handle.await
}
/// The interrupt behavior requested of the TUI, e.g. on Ctrl+C.
pub enum Interruptible {
    /// Abort immediately.
    Instantly,
    /// Defer the abort until the current critical section (e.g. report generation) finished.
    Deferred,
}
/// Channel used to inform the TUI about the desired interrupt mode.
pub type InterruptControlEvents = async_channel::Sender<Interruptible>;
impl From<Interruptible> for prodash::render::tui::Event {
    /// Translate our interrupt mode into the corresponding TUI event.
    fn from(v: Interruptible) -> Self {
        let mode = match v {
            Interruptible::Instantly => prodash::render::tui::Interrupt::Instantly,
            Interruptible::Deferred => prodash::render::tui::Interrupt::Deferred,
        };
        Event::SetInterruptMode(mode)
    }
}
#[allow(clippy::too_many_arguments)]
/// For convenience, run the engine and block until done.
///
/// Opens the database at `db`, creates the assets directory next to it, then drives
/// `non_blocking` on the current thread - optionally rendering a TUI (`gui`) alongside it.
/// Returns once the work finished, the deadline passed, or the user interrupted the program.
pub fn blocking(
    db: impl AsRef<Path>,
    crates_io_path: impl AsRef<Path>,
    deadline: Option<SystemTime>,
    io_bound_processors: u32,
    cpu_bound_processors: u32,
    cpu_o_bound_processors: u32,
    db_download: bool,
    fetch_settings: StageRunSettings,
    process_settings: StageRunSettings,
    report_settings: GlobStageRunSettings,
    download_crates_io_database_every_24_hours_starting_at: Option<time::Time>,
    root: Arc<prodash::tree::Root>,
    gui: Option<prodash::render::tui::Options>,
) -> Result<()> {
    let start_of_computation = SystemTime::now();
    let assets_dir = db.as_ref().join("assets");
    let db = Db::open(db)?;
    std::fs::create_dir_all(&assets_dir)?;
    // Capacity 1 is enough: interrupt mode changes are rare and consumed quickly.
    let (interrupt_control_sink, interrupt_control_stream) = async_channel::bounded::<Interruptible>(1);
    // dropping the work handle will stop (non-blocking) futures
    let work_handle = non_blocking(
        db.clone(),
        crates_io_path.as_ref().into(),
        deadline,
        root.clone(),
        io_bound_processors,
        cpu_bound_processors,
        cpu_o_bound_processors,
        interrupt_control_sink,
        db_download,
        fetch_settings,
        process_settings,
        report_settings,
        download_crates_io_database_every_24_hours_starting_at,
        assets_dir,
    );
    match gui {
        Some(gui_options) => {
            // Render the TUI next to the work future; whichever completes first wins the select.
            let gui = crate::spawn(prodash::render::tui::render_with_input(
                std::io::stdout(),
                Arc::downgrade(&root),
                gui_options,
                futures_util::stream::select(
                    context_stream(&db, start_of_computation),
                    interrupt_control_stream.map(Event::from),
                ),
            )?);
            let either = futures_lite::future::block_on(futures_util::future::select(
                handle_ctrl_c_and_sigterm(work_handle.boxed_local()).boxed_local(),
                gui,
            ));
            match either {
                Either::Left((work_result, gui)) => {
                    // Work finished first - shut the TUI down before reporting the outcome.
                    futures_lite::future::block_on(gui.cancel());
                    if let Err(e) = work_result? {
                        warn!("work processor failed: {}", e);
                    }
                }
                Either::Right((_, _work_handle)) => {}
            }
        }
        None => {
            // No TUI: drop the receiver so interrupt-mode sends fail fast (they are ignored
            // via `.ok()` at the send site), then just wait for the work itself.
            drop(interrupt_control_stream);
            let work_result = futures_lite::future::block_on(handle_ctrl_c_and_sigterm(work_handle.boxed_local()));
            if let Err(e) = work_result {
                warn!("work processor failed: {}", e);
            }
        }
    };
    // at this point, we forget all currently running computation, and since it's in the local thread, it's all
    // destroyed/dropped properly.
    info!("{}", wallclock(start_of_computation));
    Ok(())
}
/// Return a human-readable message stating how much wall-clock time passed since `since`.
fn wallclock(since: SystemTime) -> String {
    let elapsed = SystemTime::now().duration_since(since).unwrap_or_default();
    format!("Wallclock elapsed: {}", humantime::format_duration(elapsed))
}
/// Produce a stream of TUI events, one per second, each carrying freshly sampled
/// information (durations and counts) from the most recent crawl context in the database.
/// Yields `Event::Tick` when no context is available so the TUI keeps refreshing.
fn context_stream(db: &Db, start_of_computation: SystemTime) -> impl futures_util::stream::Stream<Item = Event> {
    prodash::render::tui::ticker(Duration::from_secs(1)).map({
        let db = db.clone();
        move |_| {
            db.open_context()
                .ok()
                .and_then(|c| c.most_recent().ok())
                .flatten()
                .map(|(_, c): (_, model::Context)| {
                    let lines = vec![
                        Line::Text(wallclock(start_of_computation)),
                        Line::Title("Durations".into()),
                        Line::Text(format!("fetch-crate-versions: {:?}", c.durations.fetch_crate_versions)),
                        Line::Title("Counts".into()),
                        Line::Text(format!("crate-versions: {}", c.counts.crate_versions)),
                        Line::Text(format!("        crates: {}", c.counts.crates)),
                    ];
                    Event::SetInformation(lines)
                })
                .unwrap_or(Event::Tick)
        }
    })
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/work/schedule.rs | criner/src/engine/work/schedule.rs | use crate::{
engine::{work::cpubound, work::iobound},
error::Result,
model, persistence,
persistence::{TableAccess, TaskTable},
};
use std::{
path::{Path, PathBuf},
time::SystemTime,
};
/// Tasks that failed this many times are considered permanently failed and are not retried.
const MAX_ATTEMPTS_BEFORE_WE_GIVE_UP: usize = 8;
#[derive(Clone, Copy)]
/// Strategy determining when task scheduling counts as done.
pub enum Scheduling {
    // /// Considers work done if everything was done. Will block to assure that
    // All,
    /// Considers the work done if at least one task was scheduled. Will block to wait otherwise.
    AtLeastOne,
    // /// Prefer to never wait for workers to perform a task and instead return without having scheduled anything
    // NeverBlock,
}
/// Outcome of an asynchronous scheduling attempt.
pub enum AsyncResult {
    // /// The required scheduling cannot be fulfilled without blocking
    // WouldBlock,
    /// The minimal scheduling requirement was met
    Done,
}
#[allow(clippy::too_many_arguments)]
/// Schedule the tasks needed to process `krate`: first the crate download (IO bound),
/// and - only if that already completed - the extraction of the archive (CPU bound).
/// Returns once at least one task was submitted or determined to be unnecessary.
pub async fn tasks(
    assets_dir: &Path,
    tasks: &persistence::TaskTable,
    krate: &model::CrateVersion,
    progress: &mut prodash::tree::Item,
    _mode: Scheduling,
    perform_io: &async_channel::Sender<iobound::DownloadRequest>,
    perform_cpu: &async_channel::Sender<cpubound::ExtractRequest>,
    startup_time: SystemTime,
) -> Result<AsyncResult> {
    use SubmitResult::*;
    let mut key_buf = String::with_capacity(32);
    // Use the persisted download task if one exists, otherwise a fresh default one.
    let io_task = task_or_default(tasks, &mut key_buf, krate, iobound::default_persisted_download_task)?;
    let kind = "crate";
    let submit_result = submit_single(startup_time, io_task, progress, perform_io, 1, 1, || {
        let dummy_task = iobound::default_persisted_download_task();
        let mut task_key = String::new();
        dummy_task.fq_key(&krate.name, &krate.version, &mut task_key);
        iobound::DownloadRequest {
            output_file_path: download_file_path(
                assets_dir,
                &krate.name,
                &krate.version,
                &dummy_task.process,
                &dummy_task.version,
                kind,
            ),
            progress_name: format!("{}:{}", krate.name, krate.version),
            task_key,
            crate_name_and_version: Some((krate.name.clone(), krate.version.clone())),
            kind,
            url: format!(
                "https://static.crates.io/crates/{name}/{name}-{version}.crate",
                name = krate.name,
                version = krate.version
            ),
        }
    })
    .await;
    Ok(match submit_result {
        PermanentFailure | Submitted => AsyncResult::Done,
        Done(download_crate_task) => {
            // The download already completed - schedule the CPU-bound extraction of its archive.
            let cpu_task = task_or_default(tasks, &mut key_buf, krate, cpubound::default_persisted_extraction_task)?;
            submit_single(startup_time, cpu_task, progress, perform_cpu, 2, 2, || {
                cpubound::ExtractRequest {
                    download_task: download_crate_task,
                    crate_name: krate.name.clone(),
                    crate_version: krate.version.clone(),
                }
            })
            .await;
            AsyncResult::Done
        }
    })
}
/// Look up the task for `crate_version` in `tasks` by its fully-qualified key,
/// falling back to the fresh default produced by `make_task` if nothing was stored yet.
/// `key_buf` is reused across calls to avoid repeated allocations.
fn task_or_default(
    tasks: &TaskTable,
    key_buf: &mut String,
    crate_version: &model::CrateVersion,
    make_task: impl FnOnce() -> model::Task,
) -> Result<model::Task> {
    let default_task = make_task();
    key_buf.clear();
    default_task.fq_key(&crate_version.name, &crate_version.version, key_buf);
    let stored = tasks.get(key_buf)?;
    Ok(stored.unwrap_or(default_task))
}
/// Outcome of submitting a single task to a worker channel.
enum SubmitResult {
    /// The task was handed to a worker, or is considered being worked on already.
    Submitted,
    /// The task is already complete; carries the completed task.
    Done(model::Task),
    /// The task failed too often and will not be retried.
    PermanentFailure,
}
/// Submit the request produced by `f()` to `channel` if `task`'s persisted state warrants it.
///
/// * `InProgress` tasks are resubmitted only when their marker predates this process'
///   startup, i.e. a previous run was interrupted mid-task.
/// * `NotStarted` tasks and tasks with fewer than `MAX_ATTEMPTS_BEFORE_WE_GIVE_UP`
///   failures are submitted (the latter as a retry).
/// * Otherwise the task is reported as permanently failed or already complete.
async fn submit_single<R>(
    startup_time: SystemTime,
    task: model::Task,
    progress: &mut prodash::tree::Item,
    channel: &async_channel::Sender<R>,
    step: usize,
    max_step: usize,
    f: impl FnOnce() -> R,
) -> SubmitResult {
    use model::TaskState::*;
    use SubmitResult::*;
    // Shared progress setup for every path that actually submits work.
    let configure = || {
        progress.init(Some(step), Some("task".into()));
        progress.set(max_step);
        progress.blocked("wait for consumer", None);
    };
    match task.state {
        InProgress(_) => {
            // Only resubmit if the in-progress marker is stale (stored before this process started).
            if startup_time > task.stored_at {
                configure();
                channel.send(f()).await.unwrap();
            };
            Submitted
        }
        NotStarted => {
            configure();
            channel.send(f()).await.unwrap();
            Submitted
        }
        AttemptsWithFailure(ref v) if v.len() < MAX_ATTEMPTS_BEFORE_WE_GIVE_UP => {
            configure();
            progress.info(format!("Retrying task, attempt {}", v.len() + 1));
            channel.send(f()).await.unwrap();
            Submitted
        }
        AttemptsWithFailure(_) => PermanentFailure,
        Complete => Done(task),
    }
}
/// Return the directory holding a crate's assets below `assets_dir`, mirroring the
/// crates.io index layout: `1/<name>`, `2/<name>`, `3/<first char>/<rest>`, or
/// `<chars 0-1>/<chars 2-3>/<name>` for longer names.
fn crate_dir(assets_dir: &Path, crate_name: &str) -> PathBuf {
    // we can safely assume ascii here - otherwise we panic
    let mut dir = assets_dir.to_path_buf();
    match crate_name.len() {
        1 => {
            dir.push("1");
            dir.push(crate_name);
        }
        2 => {
            dir.push("2");
            dir.push(crate_name);
        }
        3 => {
            dir.push("3");
            dir.push(&crate_name[..1]);
            dir.push(&crate_name[1..]);
        }
        _ => {
            dir.push(&crate_name[..2]);
            dir.push(&crate_name[2..4]);
            dir.push(crate_name);
        }
    }
    dir
}
/// Compute where a downloaded artifact lives within `assets_dir`:
/// `<crate dir>/<crate_version>-<process><KEY_SEP_CHAR><version>.<kind>`.
pub fn download_file_path(
    assets_dir: &Path,
    crate_name: &str,
    crate_version: &str,
    process: &str,
    version: &str,
    kind: &str,
) -> PathBuf {
    let file_name = format!(
        "{}-{}{}{}.{}",
        crate_version,
        process,
        crate::persistence::KEY_SEP_CHAR,
        version,
        kind
    );
    crate_dir(assets_dir, crate_name).join(file_name)
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/work/iobound.rs | criner/src/engine/work/iobound.rs | use crate::{
model,
persistence::{self, TableAccess},
Error, Result,
};
use bytesize::ByteSize;
use futures_lite::{io::AsyncWriteExt, FutureExt};
use crate::utils::timeout_after;
use async_trait::async_trait;
use std::{
path::{Path, PathBuf},
time::{Duration, SystemTime},
};
/// Maximum time allowed to connect and receive the response headers of a download.
const CONNECT_AND_FETCH_HEAD_TIMEOUT: Duration = Duration::from_secs(15);
/// Maximum time allowed for each individual body chunk to arrive while streaming.
const FETCH_CHUNK_TIMEOUT_SECONDS: Duration = Duration::from_secs(10);
/// Per-request state stored by `set()` and consumed by `process()`.
struct ProcessingState {
    // URL to download.
    url: String,
    // Kind of artifact being downloaded, e.g. "crate".
    kind: &'static str,
    // Destination path for the downloaded bytes.
    output_file_path: PathBuf,
    // Key under which a `TaskResult::Download` is persisted; `None` disables persistence.
    result_key: Option<String>,
}
/// IO-bound processor which downloads files over HTTP and persists the outcome.
/// `make_state` produces an optional follow-up request (e.g. crate extraction)
/// which is sent into `channel` after a successful download.
pub struct Agent<Fn, FnResult> {
    client: reqwest::Client,
    results: persistence::TaskResultTable,
    // Receives follow-up requests produced by `make_state`.
    channel: async_channel::Sender<FnResult>,
    // Set by `set()`, consumed by `process()`.
    state: Option<ProcessingState>,
    make_state: Fn,
    // Follow-up request to submit in `schedule_next()`, if any.
    next_action_state: Option<FnResult>,
}
impl<Fn, FnResult> Agent<Fn, FnResult>
where
    Fn: FnMut(Option<(String, String)>, &model::Task, &Path) -> Option<FnResult>,
{
    /// Create a new download agent which persists results into `db` and forwards the
    /// follow-up requests produced by `make_state` into `channel`.
    pub fn new(
        db: &persistence::Db,
        channel: async_channel::Sender<FnResult>,
        make_state: Fn,
    ) -> Result<Agent<Fn, FnResult>> {
        // gzip(true) makes the client transparently decompress gzip-encoded responses.
        let client = reqwest::ClientBuilder::new().gzip(true).build()?;
        let results = db.open_results()?;
        Ok(Agent {
            client,
            results,
            channel,
            state: None,
            next_action_state: None,
            make_state,
        })
    }
}
#[async_trait]
impl<Fn, FnResult> crate::engine::work::generic::Processor for Agent<Fn, FnResult>
where
    Fn: FnMut(Option<(String, String)>, &model::Task, &Path) -> Option<FnResult> + Send,
    FnResult: Send,
{
    type Item = DownloadRequest;
    /// Store the request as pending state and return the dummy task, its storage key,
    /// and a display name for the progress bar.
    fn set(
        &mut self,
        request: Self::Item,
        progress: &mut prodash::tree::Item,
    ) -> Result<(model::Task, String, String)> {
        progress.init(None, None);
        let DownloadRequest {
            output_file_path,
            progress_name,
            task_key,
            crate_name_and_version,
            kind,
            url,
        } = request;
        let dummy_task = default_persisted_download_task();
        let progress_name = format!("↓ {}", progress_name);
        // Placeholder result - only used to compute the fully qualified result key below.
        let task_result = model::TaskResult::Download {
            kind: kind.to_owned(),
            url: String::new(),
            content_length: 0,
            content_type: None,
        };
        // Pre-compute the follow-up request (e.g. extraction) to be scheduled after the download.
        self.next_action_state = (self.make_state)(crate_name_and_version.clone(), &dummy_task, &output_file_path);
        self.state = Some(ProcessingState {
            url,
            kind,
            output_file_path,
            result_key: crate_name_and_version.as_ref().map(|(crate_name, crate_version)| {
                let mut result_key = String::with_capacity(task_key.len() * 2);
                task_result.fq_key(crate_name, crate_version, &dummy_task, &mut result_key);
                result_key
            }),
        });
        Ok((dummy_task, task_key, progress_name))
    }
    fn idle_message(&self) -> String {
        "↓ IDLE".into()
    }
    /// Perform the download prepared by `set`, persisting the result on success.
    async fn process(&mut self, progress: &mut prodash::tree::Item) -> std::result::Result<(), (Error, String)> {
        let ProcessingState {
            url,
            kind,
            output_file_path,
            result_key,
        } = self.state.take().expect("initialized state");
        download_file_and_store_result(
            progress,
            result_key,
            &self.results,
            &self.client,
            kind,
            &url,
            output_file_path,
        )
        .await
        .map_err(|err| (err, format!("Failed to download '{}'", url)))
    }
    /// Forward the pre-computed follow-up request (if any) to the next processing stage.
    async fn schedule_next(&mut self, progress: &mut prodash::tree::Item) -> Result<()> {
        if let Some(request) = self.next_action_state.take() {
            progress.blocked("schedule crate extraction", None);
            // Here we risk doing this work twice, but most of the time, we don't. And since it's fast,
            // we take the risk of duplicate work for keeping more processors busy.
            // NOTE: We assume there is no risk of double-scheduling, also we assume the consumer is faster
            // then the producer (us), so we are ok with blocking until the task is scheduled.
            self.channel
                .send(request)
                .await
                .map_err(Error::send_msg("IO Bound: Schedule next task"))?;
        }
        Ok(())
    }
}
/// Everything needed to download a single file and schedule follow-up work for it.
#[derive(Clone)]
pub struct DownloadRequest {
    // Destination path for the downloaded bytes.
    pub output_file_path: PathBuf,
    // Human-readable name shown in the progress tree.
    pub progress_name: String,
    // Storage key of the task this request belongs to.
    pub task_key: String,
    // Crate identity; `None` for downloads not tied to a specific crate version.
    pub crate_name_and_version: Option<(String, String)>,
    // Kind of artifact, e.g. "crate".
    pub kind: &'static str,
    pub url: String,
}
/// The canonical `Task` value describing a crate download; also used as the
/// template when computing keys for previously persisted download attempts.
pub fn default_persisted_download_task() -> model::Task {
    model::Task {
        stored_at: SystemTime::now(),
        process: "download".into(),
        version: "1.0.0".into(),
        state: Default::default(),
    }
}
/// Download `url` into `out_file`, resuming a previous partial download when the file
/// already exists, and persist a `TaskResult::Download` under `result_key` (if set).
///
/// Resumption works via an HTTP `Range` request starting at the current file size;
/// a 416 response is interpreted as 'already fully downloaded'.
async fn download_file_and_store_result(
    progress: &mut prodash::tree::Item,
    result_key: Option<String>,
    results: &persistence::TaskResultTable,
    client: &reqwest::Client,
    kind: &str,
    url: &str,
    out_file: PathBuf,
) -> Result<()> {
    blocking::unblock({
        let out_file = out_file.clone();
        move || std::fs::create_dir_all(&out_file.parent().expect("parent directory"))
    })
    .await?;
    // NOTE: We assume that the files we download never change, and we assume the server supports resumption!
    // A pre-existing file means we resume from its size; otherwise we start fresh and truncate.
    let (start_byte, truncate) = blocking::unblock({
        let out_file = out_file.clone();
        move || std::fs::metadata(&out_file)
    })
    .await
    .map(|meta| (meta.len(), false))
    .unwrap_or((0, true));
    progress.blocked("fetch HEAD", None);
    let mut response = timeout_after(
        CONNECT_AND_FETCH_HEAD_TIMEOUT,
        "fetching HEAD",
        client
            .get(url)
            .header(http::header::RANGE, format!("bytes={}-", start_byte))
            .send(),
    )
    .await??;
    match response.status().as_u16() {
        200..=299 => {}
        416 => {
            // we assume that this means we have fully downloaded the item previously, and that the DB result was written already
            // but not checked
            progress.running();
            progress.done(format!(
                "GET{}:{}: body-size = {}",
                if start_byte != 0 {
                    "(resumed, already completed)"
                } else {
                    ""
                },
                url,
                ByteSize(start_byte as u64)
            ));
            return Ok(());
        }
        _ => return Err(Error::HttpStatus(response.status())),
    };
    let remaining_content_length = response
        .content_length()
        .ok_or(Error::InvalidHeader("expected content-length"))?;
    // Total size = what we already have on disk + what the server will still send.
    let content_length = (start_byte + remaining_content_length) as usize;
    progress.init(Some(content_length / 1024), Some("Kb".into()));
    progress.done(format!(
        "HEAD{}:{}: content-length = {}",
        if start_byte != 0 { "(resumable)" } else { "" },
        url,
        ByteSize(content_length as u64)
    ));
    if remaining_content_length != 0 {
        // Append when resuming, create-truncate-write when starting fresh.
        let mut out = blocking::Unblock::new(
            blocking::unblock({
                let out_file = out_file.clone();
                move || {
                    std::fs::OpenOptions::new()
                        .create(truncate)
                        .truncate(truncate)
                        .write(truncate)
                        .append(!truncate)
                        .open(out_file)
                }
            })
            .await
            .map_err(|err| crate::Error::Message(format!("Failed to open '{}': {}", out_file.display(), err)))?,
        );
        let mut bytes_received = start_byte as usize;
        // Stream the body chunk by chunk; each chunk must arrive within FETCH_CHUNK_TIMEOUT_SECONDS.
        while let Some(chunk) = timeout_after(
            FETCH_CHUNK_TIMEOUT_SECONDS,
            format!(
                "fetched {} of {}",
                ByteSize(bytes_received as u64),
                ByteSize(content_length as u64)
            ),
            response.chunk().boxed(),
        )
        .await??
        {
            out.write_all(&chunk).await?;
            bytes_received += chunk.len();
            progress.set(bytes_received / 1024);
        }
        progress.done(format!(
            "GET{}:{}: body-size = {}",
            if start_byte != 0 { "(resumed)" } else { "" },
            url,
            ByteSize(bytes_received as u64)
        ));
        out.flush().await?;
    } else {
        progress.done(format!("{} already on disk - skipping", url))
    }
    // Persist the download outcome so the scheduler can see this task completed.
    if let Some(result_key) = result_key {
        let task_result = model::TaskResult::Download {
            kind: kind.to_owned(),
            url: url.to_owned(),
            content_length: content_length as u32,
            content_type: response
                .headers()
                .get(http::header::CONTENT_TYPE)
                .and_then(|t| t.to_str().ok())
                .map(Into::into),
        };
        results.insert(progress, &result_key, &task_result)?;
    }
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/work/mod.rs | criner/src/engine/work/mod.rs | pub mod generic;
pub mod iobound;
pub mod schedule;
pub mod cpubound;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/work/cpubound.rs | criner/src/engine/work/cpubound.rs | use crate::engine::report::waste::{tar_path_to_utf8_str, CargoConfig};
use crate::{error::Result, model, persistence, Error};
use async_trait::async_trait;
use std::io::Seek;
use std::{fs::File, io::BufReader, io::Read, path::PathBuf, time::SystemTime};
/// Per-request state stored by `set()` and consumed by `process()`.
struct ProcessingState {
    // Path to the downloaded .crate archive to extract.
    downloaded_crate: PathBuf,
    // Fully qualified key under which the extraction result is persisted.
    key: String,
}
/// CPU-bound processor which extracts downloaded crate archives.
pub struct Agent {
    // Directory containing the downloaded crate files.
    asset_dir: PathBuf,
    // Table into which extraction results are persisted.
    results: persistence::TaskResultTable,
    // Set by `set()`, consumed by `process()`.
    state: Option<ProcessingState>,
    // Matches conventional binary targets, i.e. `src/bin/*.rs`.
    standard_bin_path: globset::GlobMatcher,
}
impl Agent {
    /// Create a new extraction agent reading archives from `asset_dir` and
    /// persisting results into `db`.
    pub fn new(asset_dir: PathBuf, db: &persistence::Db) -> Result<Agent> {
        let results = db.open_results()?;
        Ok(Agent {
            asset_dir,
            results,
            state: None,
            standard_bin_path: globset::Glob::new("src/bin/*.rs")
                .expect("valid statically known glob")
                .compile_matcher(),
        })
    }
}
#[async_trait]
impl crate::engine::work::generic::Processor for Agent {
    type Item = ExtractRequest;
    /// Derive the archive location and result key from the request and store them for `process`.
    fn set(
        &mut self,
        request: Self::Item,
        progress: &mut prodash::tree::Item,
    ) -> Result<(model::Task, String, String)> {
        progress.init(None, Some("files extracted".into()));
        let ExtractRequest {
            download_task,
            crate_name,
            crate_version,
        } = request;
        let progress_info = format!("CPU UNZIP+UNTAR {}:{}", crate_name, crate_version);
        let dummy_task = default_persisted_extraction_task();
        let mut task_key = String::new();
        dummy_task.fq_key(&crate_name, &crate_version, &mut task_key);
        // The archive was written by the download task - recompute the path it used.
        let downloaded_crate = super::schedule::download_file_path(
            &self.asset_dir,
            &crate_name,
            &crate_version,
            &download_task.process,
            &download_task.version,
            "crate",
        );
        // Placeholder result - only used to compute the fully qualified result key.
        let dummy_result = model::TaskResult::ExplodedCrate {
            entries_meta_data: vec![],
            selected_entries: vec![],
        };
        let mut key = String::with_capacity(task_key.len() * 2);
        dummy_result.fq_key(&crate_name, &crate_version, &dummy_task, &mut key);
        self.state = Some(ProcessingState { downloaded_crate, key });
        Ok((dummy_task, task_key, progress_info))
    }
    fn idle_message(&self) -> String {
        "CPU IDLE".into()
    }
    /// Extract the archive prepared by `set`, persisting the result.
    async fn process(&mut self, progress: &mut prodash::tree::Item) -> std::result::Result<(), (Error, String)> {
        let ProcessingState { downloaded_crate, key } = self.state.take().expect("state to be set");
        extract_crate(&self.results, &key, progress, downloaded_crate, &self.standard_bin_path)
            .map_err(|err| (err, "Failed to extract crate".into()))
    }
}
/// Request to extract a previously downloaded crate archive.
#[derive(Clone)]
pub struct ExtractRequest {
    // The completed download task whose output file should be extracted.
    pub download_task: model::Task,
    pub crate_name: String,
    pub crate_version: String,
}
/// The canonical `Task` value describing crate extraction; also used as the
/// template when computing keys for previously persisted extraction attempts.
pub fn default_persisted_extraction_task() -> model::Task {
    model::Task {
        stored_at: SystemTime::now(),
        process: "extract_crate".into(),
        version: "1.0.0".into(),
        state: Default::default(),
    }
}
/// Unpack the gzipped tar archive at `downloaded_crate` and persist a
/// `TaskResult::ExplodedCrate` under `key`: meta-data for every archive entry, plus the
/// contents of 'interesting' files (manifests, build script, lib and binary entry points),
/// with non-manifest contents capped at 128kb.
fn extract_crate(
    results: &persistence::TaskResultTable,
    key: &str,
    progress: &mut prodash::tree::Item,
    downloaded_crate: PathBuf,
    standard_bin_path: &globset::GlobMatcher,
) -> Result<()> {
    use persistence::TableAccess;
    let mut archive = tar::Archive::new(libflate::gzip::Decoder::new(BufReader::new(File::open(
        downloaded_crate,
    )?))?);
    let mut buf = Vec::new();
    let mut interesting_paths = vec!["Cargo.toml".to_string(), "Cargo.lock".into()];
    let mut files = Vec::new();
    // First pass: find Cargo.toml to learn the crate's build-script/lib/bin paths.
    for e in archive.entries()? {
        progress.inc();
        let mut e: tar::Entry<_> = e?;
        if tar_path_to_utf8_str(e.path_bytes().as_ref()) == "Cargo.toml" {
            e.read_to_end(&mut buf)?;
            let config = std::str::from_utf8(&buf).map(CargoConfig::from).unwrap_or_default();
            interesting_paths.push(config.actual_or_expected_build_script_path().to_owned());
            interesting_paths.push(config.lib_path().to_owned());
            interesting_paths.extend(config.bin_paths().into_iter().map(|s| s.to_owned()));
            break;
        }
    }
    // Second pass: rewind the underlying file and re-read the archive from the start.
    let mut archive = tar::Archive::new(libflate::gzip::Decoder::new(BufReader::new({
        let mut file = archive.into_inner().into_inner();
        file.seek(std::io::SeekFrom::Start(0))?;
        file
    }))?);
    let mut meta_data = Vec::new();
    let mut meta_count = 0;
    let mut file_count = 0;
    // 128kb scratch buffer limiting how much of a non-manifest file is stored.
    let mut max_storage_size = [0; 128 * 1024];
    for e in archive.entries()? {
        meta_count += 1;
        progress.set(meta_count);
        let mut e: tar::Entry<_> = e?;
        // Record meta-data for every entry, interesting or not.
        meta_data.push(model::TarHeader {
            path: e.path_bytes().to_vec(),
            size: e.header().size()?,
            entry_type: e.header().entry_type().as_byte(),
        });
        if interesting_paths
            .iter()
            .any(|p| p == tar_path_to_utf8_str(e.path_bytes().as_ref()))
            || standard_bin_path.is_match(tar_path_to_utf8_str(e.path_bytes().as_ref()))
        {
            file_count += 1;
            let slice = if tar_path_to_utf8_str(e.path_bytes().as_ref()) == "Cargo.toml"
                || tar_path_to_utf8_str(e.path_bytes().as_ref()) == "Cargo.lock"
            {
                // Manifests are stored in full.
                buf.clear();
                e.read_to_end(&mut buf)?;
                &buf
            } else {
                // NOTE(review): a single read() may return fewer bytes than the entry holds,
                // even below the 128kb cap - confirm partial contents are acceptable here.
                let bytes_read = e.read(&mut max_storage_size[..])?;
                &max_storage_size[..bytes_read]
            };
            files.push((
                meta_data.last().expect("to have pushed one just now").to_owned(),
                slice.to_owned(),
            ));
        }
    }
    progress.info(format!(
        "Recorded {} files and stored {} in full",
        meta_count, file_count
    ));
    let task_result = model::TaskResult::ExplodedCrate {
        entries_meta_data: meta_data,
        selected_entries: files,
    };
    results.insert(progress, &key, &task_result)?;
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/work/generic.rs | criner/src/engine/work/generic.rs | use crate::{model, persistence, persistence::TableAccess, Error, Result};
use async_trait::async_trait;
#[async_trait]
/// A unit of work driven by the generic `processor` loop: `set` prepares a request,
/// `process` performs it, and `schedule_next` optionally enqueues follow-up work.
pub trait Processor {
    /// The request type this processor consumes.
    type Item;
    /// Store `request` for later processing; returns the task template describing the work,
    /// the task's storage key, and a display name for the progress bar.
    fn set(&mut self, request: Self::Item, progress: &mut prodash::tree::Item)
        -> Result<(model::Task, String, String)>;
    /// The progress name to display while waiting for the next request.
    fn idle_message(&self) -> String;
    /// Perform the work stored by `set`; errors carry an additional context message.
    async fn process(&mut self, progress: &mut prodash::tree::Item) -> std::result::Result<(), (Error, String)>;
    /// Enqueue follow-up work after successful processing; a no-op by default.
    async fn schedule_next(&mut self, _progress: &mut prodash::tree::Item) -> Result<()> {
        Ok(())
    }
}
/// Generic worker loop: receive requests from `r`, let `agent` process each one, and
/// persist task state transitions (in-progress, failed attempts, complete) in `db`.
/// Timeout errors are retried up to `max_retries_on_timeout` times; the loop ends when
/// the channel is closed.
pub async fn processor<T: Clone>(
    db: persistence::Db,
    mut progress: prodash::tree::Item,
    r: async_channel::Receiver<T>,
    mut agent: impl Processor<Item = T> + Send,
    max_retries_on_timeout: usize,
) -> Result<()> {
    let tasks = db.open_tasks()?;
    while let Ok(request) = r.recv().await {
        let mut try_count = 0;
        let (task, task_key) = loop {
            let (dummy_task, task_key, progress_name) = agent.set(request.clone(), &mut progress)?;
            progress.set_name(progress_name);
            // Mark the task in-progress before working, so interrupted work is detectable later.
            let mut task = tasks.update(Some(&mut progress), &task_key, |mut t| {
                t.process = dummy_task.process.clone();
                t.version = dummy_task.version.clone();
                t.state.merge_with(&model::TaskState::InProgress(None));
                t
            })?;
            try_count += 1;
            progress.blocked("working", None);
            let res = agent.process(&mut progress).await;
            progress.running();
            task.state = match res {
                Err((err @ Error::Timeout(_, _), _)) if try_count < max_retries_on_timeout => {
                    progress.fail(format!("{} → retrying ({}/{})", err, try_count, max_retries_on_timeout));
                    continue;
                }
                Err((err, msg)) => {
                    progress.fail(format!("{}: {}", msg, err));
                    model::TaskState::AttemptsWithFailure(vec![err.to_string()])
                }
                Ok(_) => {
                    // Scheduling follow-up work is best effort - failures are ignored.
                    agent.schedule_next(&mut progress).await.ok();
                    model::TaskState::Complete
                }
            };
            break (task, task_key);
        };
        tasks.upsert(&mut progress, &task_key, &task)?;
        progress.set_name(agent.idle_message());
        progress.init(None, None);
    }
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/mod.rs | criner/src/engine/stage/mod.rs | pub mod changes;
pub mod db_download;
pub mod processing;
pub mod report;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/processing.rs | criner/src/engine/stage/processing.rs | use crate::persistence::{new_value_query_recent_first, value_iter, CrateVersionTable};
use crate::{
engine::work,
error::Result,
model::CrateVersion,
persistence::{Db, Keyed, TableAccess},
};
use futures_util::FutureExt;
use std::{path::PathBuf, time::SystemTime};
/// Spawn `cpu_bound_processors` extraction workers and `io_bound_processors` download
/// workers, then iterate all crate versions in the database (newest first, in chunks),
/// scheduling download/extraction tasks for each. The SQLite WAL is checkpointed after
/// every chunk to keep it from growing unboundedly.
pub async fn process(
    db: Db,
    mut progress: prodash::tree::Item,
    io_bound_processors: u32,
    cpu_bound_processors: u32,
    mut processing_progress: prodash::tree::Item,
    assets_dir: PathBuf,
    startup_time: SystemTime,
) -> Result<()> {
    processing_progress.set_name("Downloads and Extractors");
    // CPU-bound workers: extract downloaded crates on dedicated blocking threads.
    let tx_cpu = {
        let (tx_cpu, rx) = async_channel::bounded(1);
        for idx in 0..cpu_bound_processors {
            let max_retries_on_timeout = 0;
            let db = db.clone();
            let assets_dir = assets_dir.clone();
            let progress = processing_progress.add_child(format!("{}:CPU IDLE", idx + 1));
            let rx = rx.clone();
            crate::spawn(blocking::unblock(move || -> Result<_> {
                let agent = work::cpubound::Agent::new(assets_dir, &db)?;
                #[allow(clippy::unit_arg)] // don't know where the unit is supposed to be
                Ok(futures_lite::future::block_on(
                    work::generic::processor(db, progress, rx, agent, max_retries_on_timeout).map(|r| {
                        if let Err(e) = r {
                            log::warn!("CPU bound processor failed: {}", e);
                        }
                    }),
                ))
            }))
            .detach();
        }
        tx_cpu
    };
    // IO-bound workers: download crate archives; after a successful download they
    // forward an extraction request to the CPU workers via `tx_cpu`.
    let tx_io = {
        let (tx_io, rx) = async_channel::bounded(1);
        for idx in 0..io_bound_processors {
            let max_retries_on_timeout = 40;
            crate::spawn(
                work::generic::processor(
                    db.clone(),
                    processing_progress.add_child(format!("{}: ↓ IDLE", idx + 1)),
                    rx.clone(),
                    work::iobound::Agent::new(&db, tx_cpu.clone(), |crate_name_and_version, task, _| {
                        crate_name_and_version.map(|(crate_name, crate_version)| work::cpubound::ExtractRequest {
                            download_task: task.clone(),
                            crate_name,
                            crate_version,
                        })
                    })?,
                    max_retries_on_timeout,
                )
                .map(|r| {
                    if let Err(e) = r {
                        log::warn!("iobound processor failed: {}", e);
                    }
                }),
            )
            .detach();
        }
        tx_io
    };
    // Producer: walk all crate versions in chunks and schedule work for each one.
    blocking::unblock(move || {
        let versions = db.open_crate_versions()?;
        let num_versions = versions.count();
        progress.init(Some(num_versions as usize), Some("crate versions".into()));
        let auto_checkpoint_every = 10000;
        let checkpoint_connection = db.open_connection_with_busy_wait()?;
        let mut fetched_versions = 0;
        let mut versions = Vec::with_capacity(auto_checkpoint_every);
        let mut last_elapsed_for_checkpointing = None;
        let mut child_progress = progress.add_child("TBD");
        loop {
            let abort_loop = {
                progress.blocked("fetching chunk of version to schedule", None);
                let connection = db.open_connection_no_async_with_busy_wait()?;
                let mut statement = new_value_query_recent_first(
                    CrateVersionTable::table_name(),
                    &connection,
                    fetched_versions,
                    auto_checkpoint_every,
                )?;
                let iter = value_iter::<CrateVersion>(&mut statement)?;
                versions.clear();
                versions.extend(iter);
                fetched_versions += versions.len();
                // A short chunk means we reached the end of the table.
                versions.len() != auto_checkpoint_every
            };
            let tasks = db.open_tasks()?;
            for (vid, version) in versions.drain(..).enumerate() {
                let version = version?;
                // NOTE(review): `fetched_versions` was already advanced by this chunk's length,
                // so this progress value looks offset by one chunk - verify intended.
                progress.set(vid + fetched_versions + 1);
                progress.halted("wait for task consumers", None);
                child_progress.set_name(format!("schedule {}", version.key()));
                // TODO: with blocking:: API improvements, remove this block-on as all is async
                futures_lite::future::block_on(work::schedule::tasks(
                    &assets_dir,
                    &tasks,
                    &version,
                    &mut child_progress,
                    work::schedule::Scheduling::AtLeastOne,
                    &tx_io,
                    &tx_cpu,
                    startup_time,
                ))?;
            }
            // We have too many writers which cause the WAL to get so large that all reads are slowing to a crawl
            // Standard SQLITE autocheckpoints are passive, which are not effective in our case as they never
            // kick in with too many writers. There is no way to change the autocheckpoint mode to something more suitable… :/
            let start = SystemTime::now();
            progress.blocked(
                "checkpointing database",
                last_elapsed_for_checkpointing.map(|d| start + d),
            );
            checkpoint_connection
                .lock()
                .execute_batch("PRAGMA wal_checkpoint(TRUNCATE)")?;
            last_elapsed_for_checkpointing = Some(SystemTime::now().duration_since(start)?);
            if abort_loop {
                progress.running();
                break;
            }
        }
        Ok(())
    })
    .await
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/changes.rs | criner/src/engine/stage/changes.rs | use crate::persistence::{key_value_iter, new_key_value_query_old_to_new, CrateTable, Keyed};
use crate::{
error::{Error, Result},
model,
persistence::{self, new_key_value_insertion, CrateVersionTable, TableAccess},
utils::enforce_threaded,
};
use crates_index_diff::Index;
use rusqlite::params;
use std::convert::TryFrom;
use std::sync::atomic::AtomicBool;
use std::{
collections::BTreeMap,
ops::Add,
path::Path,
time::{Duration, SystemTime},
};
/// Fetch the set of changed crates from the crates.io git index and persist
/// them: each changed version is written to the crate-version table, and the
/// per-crate aggregate (its list of versions) is merged and written back.
///
/// `deadline` bounds each blocking phase; without one, cloning is capped at
/// one hour, the diff at ten minutes, and the database write at 24 hours.
pub async fn fetch(
    crates_io_path: impl AsRef<Path>,
    db: persistence::Db,
    mut progress: prodash::tree::Item,
    deadline: Option<SystemTime>,
) -> Result<()> {
    let start = SystemTime::now();
    let subprogress = progress.add_child("Fetching changes from crates.io index");
    subprogress.blocked("potentially cloning", None);
    // Open (or clone on first use) the crates.io index repository on a worker thread.
    let index = enforce_threaded(
        deadline.unwrap_or_else(|| SystemTime::now().add(Duration::from_secs(60 * 60))),
        {
            let path = crates_io_path.as_ref().to_path_buf();
            if !path.is_dir() {
                std::fs::create_dir(&path)?;
            }
            || Index::from_path_or_cloned(path)
        },
    )
    .await??;
    // Peek (not advance) the index state: the last-seen reference is only moved
    // forward after the database write below succeeds.
    let (crate_versions, last_seen_git_object) = enforce_threaded(
        deadline.unwrap_or_else(|| SystemTime::now().add(Duration::from_secs(10 * 60))),
        move || {
            index.peek_changes_with_options(
                subprogress,
                &AtomicBool::default(),
                crates_index_diff::index::diff::Order::ImplementationDefined,
            )
        },
    )
    .await??;
    progress.done(format!("Fetched {} changed crates", crate_versions.len()))
;
    let mut store_progress = progress.add_child("processing new crates");
    store_progress.init(Some(crate_versions.len()), Some("crate versions".into()));
    let without_time_limit_unless_one_is_set =
        deadline.unwrap_or_else(|| SystemTime::now().add(Duration::from_secs(24 * 60 * 60)));
    enforce_threaded(without_time_limit_unless_one_is_set, {
        let db = db.clone();
        let index_path = crates_io_path.as_ref().to_path_buf();
        move || {
            let mut connection = db.open_connection_no_async_with_busy_wait()?;
            // Cache every known crate in memory so versions can be merged into
            // their aggregate without a per-version database lookup.
            let mut crates_lut: BTreeMap<_, _> = {
                let transaction = connection.transaction()?;
                store_progress.blocked("caching crates", None);
                let mut statement = new_key_value_query_old_to_new(CrateTable::table_name(), &transaction)?;
                let iter = key_value_iter::<model::Crate>(&mut statement)?.flat_map(Result::ok);
                iter.collect()
            };
            let mut key_buf = String::new();
            let crate_versions_len = crate_versions.len();
            let mut new_crate_versions = 0;
            let mut new_crates = 0;
            store_progress.blocked("write lock for crate versions", None);
            let transaction = connection.transaction_with_behavior(rusqlite::TransactionBehavior::Immediate)?;
            {
                let mut statement = new_key_value_insertion(CrateVersionTable::table_name(), &transaction)?;
                for version in crate_versions
                    .into_iter()
                    .filter_map(|v| model::CrateVersion::try_from(v).ok())
                {
                    key_buf.clear();
                    version.key_buf(&mut key_buf);
                    statement.execute(params![&key_buf, rmp_serde::to_vec(&version)?])?;
                    new_crate_versions += 1;
                    key_buf.clear();
                    model::Crate::key_from_version_buf(&version, &mut key_buf);
                    // A crate counts as new when merging this version leaves the
                    // aggregate with exactly one version.
                    if crates_lut
                        .entry(key_buf.to_owned())
                        .or_default()
                        .merge_mut(&version)
                        .versions
                        .len()
                        == 1
                    {
                        new_crates += 1;
                    }
                    store_progress.inc();
                }
            }
            store_progress.blocked("commit crate versions", None);
            transaction.commit()?;
            let transaction = {
                store_progress.blocked("write lock for crates", None);
                let mut t = connection.transaction_with_behavior(rusqlite::TransactionBehavior::Immediate)?;
                t.set_drop_behavior(rusqlite::DropBehavior::Commit);
                t
            };
            {
                let mut statement = new_key_value_insertion(CrateTable::table_name(), &transaction)?;
                store_progress.init(Some(crates_lut.len()), Some("crates".into()));
                for (key, value) in crates_lut.into_iter() {
                    statement.execute(params![key, rmp_serde::to_vec(&value)?])?;
                    store_progress.inc();
                }
            }
            store_progress.blocked("commit crates", None);
            transaction.commit()?;
            // Everything is stored — only now advance the index to the state we processed.
            Index::from_path_or_cloned(index_path)?.set_last_seen_reference(last_seen_git_object)?;
            db.open_context()?.update_today(|c| {
                c.counts.crate_versions += new_crate_versions;
                c.counts.crates += new_crates;
                c.durations.fetch_crate_versions += SystemTime::now()
                    .duration_since(start)
                    .unwrap_or_else(|_| Duration::default())
            })?;
            store_progress.done(format!("Stored {} crate versions to database", crate_versions_len));
            Ok::<_, Error>(())
        }
    })
    .await??;
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/db_download/csv_model.rs | criner/src/engine/stage/db_download/csv_model.rs | use serde_derive::Deserialize;
use std::collections::BTreeMap;
use std::time::SystemTime;
// Numeric identifiers as used by the crates.io database dump tables.
type UserId = u32;
pub type Id = u32;
// GitHub ids are stored as signed integers in the dump.
pub type GitHubId = i32;
/// A row of the `keywords.csv` table of the crates.io database dump.
#[derive(Deserialize, Default, Clone)]
pub struct Keyword {
    pub id: Id,
    #[serde(rename = "keyword")]
    pub name: String,
    // amount of crates using the keyword
    #[serde(rename = "crates_cnt")]
    pub crates_count: u32,
}
/// A row of the `categories.csv` table of the crates.io database dump.
#[derive(Deserialize, Default, Clone)]
pub struct Category {
    pub id: Id,
    #[serde(rename = "category")]
    pub name: String,
    // amount of crates in this category
    #[serde(rename = "crates_cnt")]
    pub crates_count: u32,
    pub description: String,
    pub path: String,
    pub slug: String,
}
/// A row of the `crates.csv` table of the crates.io database dump.
#[derive(Deserialize)]
pub struct Crate {
    pub id: Id,
    pub name: String,
    // timestamps are `YYYY-MM-DD HH:MM:SS[.fraction]` strings in the dump
    #[serde(deserialize_with = "deserialize_timestamp")]
    pub created_at: SystemTime,
    #[serde(deserialize_with = "deserialize_timestamp")]
    pub updated_at: SystemTime,
    pub description: Option<String>,
    pub documentation: Option<String>,
    pub downloads: u64,
    pub homepage: Option<String>,
    pub readme: Option<String>,
    pub repository: Option<String>,
}
/// The two kinds of crate owners present in the dump (see `deserialize_owner_kind`).
pub enum UserKind {
    User,
    Team,
}
/// A row of the `users.csv` table; GitHub columns use the `gh_` prefix here.
#[derive(Deserialize)]
pub struct User {
    pub id: Id,
    #[serde(rename = "gh_avatar")]
    pub github_avatar_url: String,
    #[serde(rename = "gh_id")]
    pub github_id: GitHubId,
    #[serde(rename = "gh_login")]
    pub github_login: String,
    pub name: Option<String>,
}
/// A row of the `teams.csv` table; note the column names differ from `users.csv`
/// (`avatar`/`github_id`/`login` instead of the `gh_` prefixed variants).
#[derive(Deserialize)]
pub struct Team {
    pub id: Id,
    #[serde(rename = "avatar")]
    pub github_avatar_url: String,
    #[serde(rename = "github_id")]
    pub github_id: GitHubId,
    #[serde(rename = "login")]
    pub github_login: String,
    pub name: Option<String>,
}
/// Decode the numeric `owner_kind` column: `0` is a user, any other value a team.
fn deserialize_owner_kind<'de, D>(deserializer: D) -> Result<UserKind, D::Error>
where
    D: serde::Deserializer<'de>,
{
    use serde::Deserialize;
    let kind = match u8::deserialize(deserializer)? {
        0 => UserKind::User,
        _ => UserKind::Team,
    };
    Ok(kind)
}
fn deserialize_json_map<'de, D>(deserializer: D) -> Result<Vec<Feature>, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::Deserialize;
let val = std::borrow::Cow::<'de, str>::deserialize(deserializer)?;
let val: BTreeMap<String, Vec<String>> = serde_json::from_str(&val).map_err(serde::de::Error::custom)?;
Ok(val.into_iter().map(|(name, crates)| Feature { name, crates }).collect())
}
/// Decode the textual boolean used by the dump: `"t"` means true, anything else false.
fn deserialize_yanked<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
    D: serde::Deserializer<'de>,
{
    use serde::Deserialize;
    let flag = std::borrow::Cow::<'de, str>::deserialize(deserializer)?;
    Ok(matches!(flag.as_ref(), "t"))
}
/// Parse dump timestamps of the shape `2017-11-30 04:00:19.334919` into a
/// `SystemTime`, discarding fractional seconds and assuming UTC.
fn deserialize_timestamp<'de, D>(deserializer: D) -> Result<SystemTime, D::Error>
where
    D: serde::Deserializer<'de>,
{
    use serde::Deserialize;
    let val = std::borrow::Cow::<'de, str>::deserialize(deserializer)?;
    // 2017-11-30 04:00:19.334919
    // `split('.')` always yields at least one item, so the fallback to the
    // whole string is effectively unreachable and only kept for safety.
    let t = time::PrimitiveDateTime::parse(
        val.as_ref().split('.').next().unwrap_or_else(|| val.as_ref()),
        // 2015 -04 - 24 18 : 26 : 11
        &time::macros::format_description!("[year]-[month]-[day] [hour]:[minute]:[second]"),
    )
    .map_err(serde::de::Error::custom)?;
    Ok(t.assume_offset(time::UtcOffset::UTC).into())
}
/// A cargo feature as declared in a crate version's manifest.
pub struct Feature {
    pub name: String,
    /// The crates the feature depends on
    pub crates: Vec<String>,
}
/// A row of the `versions.csv` table of the crates.io database dump.
#[derive(Deserialize)]
pub struct Version {
    pub id: Id,
    pub crate_id: Id,
    // may be absent for some versions
    pub crate_size: Option<u32>,
    #[serde(deserialize_with = "deserialize_timestamp")]
    pub created_at: SystemTime,
    #[serde(deserialize_with = "deserialize_timestamp")]
    pub updated_at: SystemTime,
    pub downloads: u32,
    // a JSON object in the dump, flattened into a list of features
    #[serde(deserialize_with = "deserialize_json_map")]
    pub features: Vec<Feature>,
    pub license: String,
    #[serde(rename = "num")]
    pub semver: String,
    pub published_by: Option<UserId>,
    // encoded as text in the dump; "t" means yanked
    #[serde(deserialize_with = "deserialize_yanked", rename = "yanked")]
    pub is_yanked: bool,
}
/// A row of the `crate_owners.csv` table, linking a crate to one of its owners.
#[derive(Deserialize)]
pub struct CrateOwner {
    pub crate_id: Id,
    // actor that created the ownership entry; resolved to a user during conversion
    pub created_by: Option<UserId>,
    pub owner_id: UserId,
    // 0 = user, otherwise team (see `deserialize_owner_kind`)
    #[serde(deserialize_with = "deserialize_owner_kind")]
    pub owner_kind: UserKind,
}
/// Join-table row associating a crate with a category.
#[derive(Deserialize)]
pub struct CratesCategory {
    pub category_id: Id,
    pub crate_id: Id,
}
/// Join-table row associating a crate with a keyword.
#[derive(Deserialize)]
pub struct CratesKeyword {
    pub keyword_id: Id,
    pub crate_id: Id,
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/db_download/convert.rs | criner/src/engine/stage/db_download/convert.rs | use super::csv_model;
use crate::{model::db_dump, utils::parse_semver};
use std::collections::BTreeMap;
use std::time::SystemTime;
lazy_static! {
    // Parses author strings of the shape `Name <email>`; the email part is optional.
    // Note: the name class `[\w ]+` includes blanks, so a captured name can keep
    // the separating space before an `<email>` section.
    static ref PERSON: regex::Regex =
        regex::Regex::new("(?P<name>[\\w ]+)(<(?P<email>.*)>)?").expect("valid statically known regex");
}
impl From<csv_model::User> for db_dump::Actor {
    /// Convert a csv user row into the unified actor model, tagging it as a user.
    fn from(user: csv_model::User) -> Self {
        db_dump::Actor {
            crates_io_id: user.id,
            kind: db_dump::ActorKind::User,
            github_avatar_url: user.github_avatar_url,
            github_id: user.github_id,
            github_login: user.github_login,
            name: user.name,
        }
    }
}
impl From<csv_model::Team> for db_dump::Actor {
    /// Convert a csv team row into the unified actor model, tagging it as a team.
    fn from(team: csv_model::Team) -> Self {
        db_dump::Actor {
            crates_io_id: team.id,
            kind: db_dump::ActorKind::Team,
            github_avatar_url: team.github_avatar_url,
            github_id: team.github_id,
            github_login: team.github_login,
            name: team.name,
        }
    }
}
impl From<csv_model::Version> for db_dump::CrateVersion {
    /// Convert a csv version row into the model version.
    ///
    /// `crate_id` and `published_by` are intentionally dropped here: grouping
    /// by crate and resolving the publishing actor are done later in
    /// `into_versions_by_crate_id`, which owns the required lookup tables.
    fn from(
        csv_model::Version {
            id: _,
            crate_id: _,
            crate_size,
            created_at,
            updated_at,
            downloads,
            features,
            license,
            semver,
            published_by: _,
            is_yanked,
        }: csv_model::Version,
    ) -> Self {
        db_dump::CrateVersion {
            crate_size,
            created_at,
            updated_at,
            downloads,
            features: features
                .into_iter()
                .map(|f| db_dump::Feature {
                    name: f.name,
                    crates: f.crates,
                })
                .collect(),
            license,
            semver,
            published_by: None,
            is_yanked,
        }
    }
}
impl From<csv_model::Crate> for db_dump::Crate {
    /// Convert a csv crate row into the model crate.
    ///
    /// The associated collections (versions, keywords, categories, owners)
    /// start out empty and are filled in by `into_crates`; `stored_at` is a
    /// placeholder which `store` overwrites with the actual persistence time.
    fn from(
        csv_model::Crate {
            id: _,
            name,
            created_at,
            updated_at,
            description,
            documentation,
            downloads,
            homepage,
            readme,
            repository,
        }: csv_model::Crate,
    ) -> Self {
        db_dump::Crate {
            versions: Vec::new(),
            keywords: Vec::new(),
            categories: Vec::new(),
            owners: Vec::new(),
            stored_at: SystemTime::UNIX_EPOCH,
            created_by: None,
            name,
            created_at,
            updated_at,
            description,
            documentation,
            downloads,
            homepage,
            readme,
            repository,
        }
    }
}
impl From<csv_model::Keyword> for db_dump::Keyword {
    /// Drop the database id and keep the keyword payload.
    fn from(kw: csv_model::Keyword) -> Self {
        db_dump::Keyword {
            name: kw.name,
            crates_count: kw.crates_count,
        }
    }
}
impl From<csv_model::Category> for db_dump::Category {
    /// Drop the database id and carry over the category payload.
    fn from(cat: csv_model::Category) -> Self {
        db_dump::Category {
            name: cat.name,
            crates_count: cat.crates_count,
            description: cat.description,
            path: cat.path,
            slug: cat.slug,
        }
    }
}
impl From<String> for db_dump::Person {
    /// Split an author string of the shape `Name <email>` into name and email.
    ///
    /// Returns a default (empty) `Person` when the regex does not match at all.
    fn from(v: String) -> Self {
        PERSON
            .captures(&v)
            .map(|cap| db_dump::Person {
                // `[\w ]+` includes blanks, so the raw capture keeps the
                // separating space before an `<email>` part (and any leading
                // spaces) — trim so stored names are clean.
                name: cap
                    .name("name")
                    .expect("name should always exist")
                    .as_str()
                    .trim()
                    .to_owned(),
                email: cap.name("email").map(|e| e.as_str().to_owned()),
            })
            .unwrap_or_default()
    }
}
impl From<csv_model::UserKind> for db_dump::ActorKind {
    /// The two enums are structurally identical; map variant to variant.
    fn from(kind: csv_model::UserKind) -> Self {
        match kind {
            csv_model::UserKind::User => db_dump::ActorKind::User,
            csv_model::UserKind::Team => db_dump::ActorKind::Team,
        }
    }
}
/// Merge users and teams into a single lookup table keyed by
/// `(crates.io id, actor kind)` — ids alone are not unique across the two tables.
pub fn into_actors_by_id(
    users: BTreeMap<csv_model::Id, csv_model::User>,
    teams: BTreeMap<csv_model::Id, csv_model::Team>,
    progress: prodash::tree::Item,
) -> BTreeMap<(db_dump::Id, db_dump::ActorKind), db_dump::Actor> {
    progress.init(Some(users.len() + teams.len()), Some("users and teams".into()));
    // Convert users first, then teams, exactly as the two original loops did.
    let converted = users
        .into_iter()
        .map(|(id, user)| (id, db_dump::Actor::from(user)))
        .chain(
            teams
                .into_iter()
                .map(|(id, team)| (id, db_dump::Actor::from(team))),
        );
    let mut lut = BTreeMap::new();
    for (processed, (id, actor)) in converted.enumerate() {
        progress.set(processed + 1);
        lut.insert((id, actor.kind), actor);
    }
    lut
}
/// Convert raw csv versions into model versions, resolve each publishing actor
/// from `actors`, and group the results by the id of the crate they belong to.
/// Versions are processed in ascending version-id order, so each per-crate
/// vector preserves that order.
pub fn into_versions_by_crate_id(
    mut versions: Vec<csv_model::Version>,
    actors: &BTreeMap<(db_dump::Id, db_dump::ActorKind), db_dump::Actor>,
    mut progress: prodash::tree::Item,
) -> BTreeMap<db_dump::Id, Vec<db_dump::CrateVersion>> {
    progress.init(Some(versions.len()), Some("versions converted".into()));
    versions.sort_by_key(|v| v.id);
    let versions_len = versions.len();
    let mut version_by_id = BTreeMap::new();
    for version in versions.into_iter() {
        progress.inc();
        let crate_id = version.crate_id;
        let published_by = version.published_by;
        let version_id = version.id;
        let mut version: db_dump::CrateVersion = version.into();
        // The csv->model conversion dropped the publisher id; re-attach the
        // full actor here (publishers are looked up as users).
        version.published_by =
            published_by.and_then(|user_id| actors.get(&(user_id, db_dump::ActorKind::User)).cloned());
        version_by_id.insert(version_id, (crate_id, version));
    }
    progress.done(format!(
        "transformed {} crate versions and assigned publishing actor",
        version_by_id.len()
    ));
    let mut map = BTreeMap::new();
    progress.init(
        Some(version_by_id.len()),
        Some("version-crate associations made".into()),
    );
    for (_, (crate_id, version)) in version_by_id.into_iter() {
        progress.inc();
        map.entry(crate_id).or_insert_with(Vec::new).push(version);
    }
    progress.done(format!(
        "Associated {} crate versions to {} crates",
        versions_len,
        map.len()
    ));
    map
}
/// Assemble the final `db_dump::Crate` records by joining the previously
/// converted pieces: versions, keywords, categories and owners.
///
/// Crates without any version are skipped entirely; association rows that
/// point at a skipped or unknown crate are ignored.
#[allow(clippy::too_many_arguments)]
pub fn into_crates(
    crates: Vec<csv_model::Crate>,
    mut keywords_by_id: BTreeMap<csv_model::Id, csv_model::Keyword>,
    crates_keywords: Vec<csv_model::CratesKeyword>,
    mut categories_by_id: BTreeMap<csv_model::Id, csv_model::Category>,
    crates_categories: Vec<csv_model::CratesCategory>,
    actors_by_id: BTreeMap<(db_dump::Id, db_dump::ActorKind), db_dump::Actor>,
    crate_owners: Vec<csv_model::CrateOwner>,
    mut versions_by_crate_id: BTreeMap<db_dump::Id, Vec<db_dump::CrateVersion>>,
    mut progress: prodash::tree::Item,
) -> Vec<db_dump::Crate> {
    let mut crate_by_id = BTreeMap::new();
    progress.init(Some(crates.len()), Some("crates converted".into()));
    for krate in crates.into_iter() {
        progress.inc();
        let crate_id = krate.id;
        let mut krate: db_dump::Crate = krate.into();
        // Move the version list out of the map (leaving an empty Vec behind)
        // instead of cloning it.
        let mut versions: Vec<_> = std::mem::take(match versions_by_crate_id.get_mut(&crate_id) {
            Some(val) => val,
            None => {
                progress.fail(format!("Skipped crate {} without any version", crate_id));
                continue;
            }
        });
        versions.sort_by_key(|v| parse_semver(&v.semver));
        krate.versions = versions;
        crate_by_id.insert(crate_id, krate);
    }
    drop(versions_by_crate_id);
    progress.done(format!(
        "converted {} crates and assigned crate versions",
        crate_by_id.len()
    ));
    progress.init(Some(crates_keywords.len()), Some("crates keywords".into()));
    let crates_keywords_len = crates_keywords.len();
    for csv_model::CratesKeyword { keyword_id, crate_id } in crates_keywords.into_iter() {
        progress.inc();
        // Silently skip associations for crates that were dropped above.
        match crate_by_id.get_mut(&crate_id) {
            Some(val) => val,
            None => continue,
        }
        .keywords
        .push(
            keywords_by_id
                .get_mut(&keyword_id)
                .expect("keyword for id")
                .to_owned()
                .into(),
        )
    }
    progress.done(format!("assigned {} keywords", crates_keywords_len));
    progress.init(Some(crates_categories.len()), Some("crates categories".into()));
    let crates_categories_len = crates_categories.len();
    for csv_model::CratesCategory { category_id, crate_id } in crates_categories.into_iter() {
        progress.inc();
        match crate_by_id.get_mut(&crate_id) {
            Some(val) => val,
            None => continue,
        }
        .categories
        .push(
            categories_by_id
                .get_mut(&category_id)
                .expect("category for id")
                .to_owned()
                .into(),
        )
    }
    progress.done(format!("assigned {} categories", crates_categories_len));
    let crate_owners_len = crate_owners.len();
    progress.init(Some(crate_owners_len), Some("crates owners".into()));
    for csv_model::CrateOwner {
        crate_id,
        created_by,
        owner_id,
        owner_kind,
    } in crate_owners.into_iter()
    {
        progress.inc();
        if let Some(owner) = actors_by_id.get(&(owner_id, owner_kind.into())).map(ToOwned::to_owned) {
            let created_by = created_by.and_then(|id| actors_by_id.get(&(id, db_dump::ActorKind::User)).cloned());
            let krate = match crate_by_id.get_mut(&crate_id) {
                Some(val) => val,
                None => {
                    progress.fail(format!("Skipped crate {} as it doesn't seem to exist", crate_id));
                    continue;
                }
            };
            // The first ownership row with a known creator wins.
            if krate.created_by.is_none() {
                krate.created_by = created_by;
            }
            krate.owners.push(owner);
        }
    }
    progress.done(format!("assigned {} owners", crate_owners_len));
    crate_by_id.into_iter().map(|(_, v)| v).collect()
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/db_download/from_csv.rs | criner/src/engine/stage/db_download/from_csv.rs | use super::csv_model;
use std::collections::BTreeMap;
/// Types that carry a numeric primary key in the crates.io dump.
pub trait AsId {
    fn as_id(&self) -> csv_model::Id;
}
/// Implement `AsId` for a csv model type whose key lives in an `id` field.
macro_rules! impl_as_id {
    ($name:ident) => {
        impl AsId for csv_model::$name {
            fn as_id(&self) -> csv_model::Id {
                self.id
            }
        }
    };
}
impl_as_id!(Keyword);
impl_as_id!(Version);
impl_as_id!(Category);
impl_as_id!(User);
impl_as_id!(Team);
impl_as_id!(Crate);
/// Deserialize every record of a headered, comma-separated csv stream into `T`,
/// invoking `cb` per record and ticking `progress` once per row.
/// The reader is `flexible`, i.e. rows may have varying numbers of fields.
pub fn records<T>(
    csv: impl std::io::Read,
    progress: &mut prodash::tree::Item,
    mut cb: impl FnMut(T),
) -> crate::Result<()>
where
    T: serde::de::DeserializeOwned,
{
    let mut rd = csv::ReaderBuilder::new()
        .delimiter(b',')
        .has_headers(true)
        .flexible(true)
        .from_reader(csv);
    for item in rd.deserialize() {
        cb(item?);
        progress.inc();
    }
    Ok(())
}
/// Decode all csv records from `rd` into a map keyed by each record's own id.
/// `name` is used purely for progress reporting.
pub fn mapping<T>(
    rd: impl std::io::Read,
    name: &'static str,
    progress: &mut prodash::tree::Item,
) -> crate::Result<BTreeMap<csv_model::Id, T>>
where
    T: serde::de::DeserializeOwned + AsId,
{
    let mut decode_progress = progress.add_child("decoding");
    decode_progress.init(None, Some(name.into()));
    let mut by_id = BTreeMap::new();
    records(rd, &mut decode_progress, |record: T| {
        by_id.insert(record.as_id(), record);
    })?;
    decode_progress.info(format!("Decoded {} {} into memory", by_id.len(), name));
    Ok(by_id)
}
/// Decode all csv records from `rd` into a vector, preserving record order.
/// `name` is used purely for progress reporting.
pub fn vec<T>(rd: impl std::io::Read, name: &'static str, progress: &mut prodash::tree::Item) -> crate::Result<Vec<T>>
where
    T: serde::de::DeserializeOwned,
{
    let mut decode_progress = progress.add_child("decoding");
    decode_progress.init(None, Some(name.into()));
    let mut out = Vec::new();
    records(rd, &mut decode_progress, |record: T| out.push(record))?;
    out.shrink_to_fit();
    decode_progress.info(format!("Decoded {} {} into memory", out.len(), name));
    Ok(out)
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/db_download/mod.rs | criner/src/engine/stage/db_download/mod.rs | use crate::model::db_dump;
use crate::{
engine::work, persistence::new_key_value_insertion, persistence::Db, persistence::TableAccess, Error, Result,
};
use bytesize::ByteSize;
use futures_util::FutureExt;
use rusqlite::params;
use rusqlite::TransactionBehavior;
use std::{collections::BTreeMap, fs::File, io::BufReader, path::PathBuf};
mod convert;
mod csv_model;
mod from_csv;
/// Write all converted crates into the `crates.io-crate` key-value table in a
/// single immediate transaction, stamping each record with the current time.
fn store(db: Db, crates: Vec<db_dump::Crate>, mut progress: prodash::tree::Item) -> Result<()> {
    let now = std::time::SystemTime::now();
    let crates_len = crates.len();
    progress.init(Some(crates_len), Some("crates stored".into()));
    let mut connection = db.open_connection_no_async_with_busy_wait()?;
    let transaction = connection.transaction_with_behavior(TransactionBehavior::Immediate)?;
    {
        let mut insert = new_key_value_insertion("crates.io-crate", &transaction)?;
        for mut krate in crates.into_iter() {
            progress.inc();
            // `stored_at` was a placeholder during conversion; set it now.
            krate.stored_at = now;
            let data = rmp_serde::to_vec(&krate)?;
            insert.execute(params![krate.name, data])?;
        }
    }
    transaction.commit()?;
    progress.done(format!("Stored {} crates in database", crates_len));
    Ok(())
}
/// Stream the gzipped tar archive at `db_file_path`, decode all whitelisted
/// csv tables into memory, join them into `db_dump::Crate` records and store
/// those in the database.
fn extract_and_ingest(db: Db, mut progress: prodash::tree::Item, db_file_path: PathBuf) -> Result<()> {
    progress.init(None, Some("csv files".into()));
    let mut archive = tar::Archive::new(libflate::gzip::Decoder::new(BufReader::new(File::open(db_file_path)?))?);
    // Only these tables are consumed; everything else in the archive is skipped.
    let whitelist_names = [
        "crates",
        "crate_owners",
        "versions",
        "crates_categories",
        "categories",
        "crates_keywords",
        "keywords",
        "users",
        "teams",
    ];
    let mut num_files_seen = 0;
    let mut num_bytes_seen = 0;
    // Tables can appear in any order within the archive, so collect them into
    // optionals first and validate completeness afterwards.
    let mut teams = None::<BTreeMap<csv_model::Id, csv_model::Team>>;
    let mut categories = None::<BTreeMap<csv_model::Id, csv_model::Category>>;
    let mut versions = None::<Vec<csv_model::Version>>;
    let mut keywords = None::<BTreeMap<csv_model::Id, csv_model::Keyword>>;
    let mut users = None::<BTreeMap<csv_model::Id, csv_model::User>>;
    let mut crates = None::<Vec<csv_model::Crate>>;
    let mut crate_owners = None::<Vec<csv_model::CrateOwner>>;
    let mut crates_categories = None::<Vec<csv_model::CratesCategory>>;
    let mut crates_keywords = None::<Vec<csv_model::CratesKeyword>>;
    for (eid, entry) in archive.entries()?.enumerate() {
        num_files_seen = eid + 1;
        progress.set(eid);
        let entry = entry?;
        let entry_size = entry.header().size()?;
        num_bytes_seen += entry_size;
        if let Some(name) = entry
            .path()
            .ok()
            .and_then(|p| whitelist_names.iter().find(|n| p.ends_with(format!("{}.csv", n))))
        {
            let done_msg = format!(
                "extracted '{}' with size {}",
                entry.path()?.display(),
                ByteSize(entry_size)
            );
            match *name {
                "teams" => teams = Some(from_csv::mapping(entry, name, &mut progress)?),
                "categories" => {
                    categories = Some(from_csv::mapping(entry, "categories", &mut progress)?);
                }
                "versions" => {
                    versions = Some(from_csv::vec(entry, "versions", &mut progress)?);
                }
                "keywords" => {
                    keywords = Some(from_csv::mapping(entry, "keywords", &mut progress)?);
                }
                "users" => {
                    users = Some(from_csv::mapping(entry, "users", &mut progress)?);
                }
                "crates" => {
                    crates = Some(from_csv::vec(entry, "crates", &mut progress)?);
                }
                "crate_owners" => {
                    crate_owners = Some(from_csv::vec(entry, "crate_owners", &mut progress)?);
                }
                "crates_categories" => {
                    crates_categories = Some(from_csv::vec(entry, "crates_categories", &mut progress)?);
                }
                "crates_keywords" => {
                    crates_keywords = Some(from_csv::vec(entry, "crates_keywords", &mut progress)?);
                }
                _ => progress.fail(format!("bug or oversight: Could not parse table of type {:?}", name)),
            }
            progress.done(done_msg);
        }
    }
    progress.done(format!(
        "Saw {} files and a total of {}",
        num_files_seen,
        ByteSize(num_bytes_seen)
    ));
    // A missing table means a truncated or incompatible dump — abort loudly.
    let users = users.ok_or(Error::Bug("expected users.csv in crates-io db dump"))?;
    let teams = teams.ok_or(Error::Bug("expected teams.csv in crates-io db dump"))?;
    let versions = versions.ok_or(Error::Bug("expected versions.csv in crates-io db dump"))?;
    let crates = crates.ok_or(Error::Bug("expected crates.csv in crates-io db dump"))?;
    let keywords = keywords.ok_or(Error::Bug("expected keywords.csv in crates-io db dump"))?;
    let crates_keywords = crates_keywords.ok_or(Error::Bug("expected crates_keywords.csv in crates-io db dump"))?;
    let categories = categories.ok_or(Error::Bug("expected categories.csv in crates-io db dump"))?;
    let crates_categories =
        crates_categories.ok_or(Error::Bug("expected crates_categories.csv in crates-io db dump"))?;
    let crate_owners = crate_owners.ok_or(Error::Bug("expected crate_owners.csv in crates-io db dump"))?;
    progress.init(Some(4), Some("conversion steps".into()));
    progress.set_name("transform actors");
    progress.set(1);
    let actors_by_id = convert::into_actors_by_id(users, teams, progress.add_child("actors"));
    progress.set_name("transform versions");
    progress.set(2);
    let versions_by_crate_id =
        convert::into_versions_by_crate_id(versions, &actors_by_id, progress.add_child("versions"));
    progress.set_name("transform crates");
    progress.set(3);
    let crates = convert::into_crates(
        crates,
        keywords,
        crates_keywords,
        categories,
        crates_categories,
        actors_by_id,
        crate_owners,
        versions_by_crate_id,
        progress.add_child("crates"),
    );
    progress.set_name("storing crates");
    progress.set(4);
    store(db, crates, progress.add_child("persist"))
}
/// Delete all date-prefixed db-dump files next to `db_file_path`, keeping only
/// `db_file_path` itself (i.e. the current day's dump).
fn cleanup(db_file_path: PathBuf, mut progress: prodash::tree::Item) -> Result<()> {
    let glob_pattern = db_file_path
        .parent()
        .expect("parent directory for db dump")
        .join("[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-*")
        .with_extension(db_file_path.extension().expect("file extension"));
    let pattern = glob::Pattern::new(glob_pattern.to_str().expect("db dump path is valid utf8 string"))?;
    // Sanity check: the pattern must at least match the file we want to keep —
    // otherwise it could not possibly match its siblings either.
    if !pattern.matches_path(&db_file_path) {
        return Err(crate::Error::Message(format!(
            "BUG: Pattern {} did not match the original database path '{}'",
            pattern,
            db_file_path.display()
        )));
    }
    for file in glob::glob(pattern.as_str())? {
        let file = file?;
        if file != db_file_path {
            std::fs::remove_file(&file)?;
            progress.done(format!("Deleted old db-dump at '{}'", file.display()));
        }
    }
    Ok(())
}
/// Schedule the daily crates.io database-dump download and, once the file has
/// arrived, extract and ingest it; finally remove any previous day's dump files.
pub async fn schedule(
    db: Db,
    assets_dir: PathBuf,
    mut progress: prodash::tree::Item,
    startup_time: std::time::SystemTime,
) -> Result<()> {
    let (tx_result, rx_result) = async_channel::bounded(1);
    // Spawn a single download processor; it reports the downloaded file's path
    // back through `tx_result`.
    let tx_io = {
        let (tx_io, rx) = async_channel::bounded(1);
        let max_retries_on_timeout = 80;
        crate::spawn(
            work::generic::processor(
                db.clone(),
                progress.add_child("↓ IDLE"),
                rx,
                work::iobound::Agent::new(&db, tx_result, {
                    move |_, _, output_file_path| Some(output_file_path.to_path_buf())
                })?,
                max_retries_on_timeout,
            )
            .map(|r| {
                if let Err(e) = r {
                    log::warn!("db download: iobound processor failed: {}", e);
                }
            }),
        )
        .detach();
        tx_io
    };
    let today_yyyy_mm_dd = time::OffsetDateTime::now_local()
        .unwrap_or_else(|_| time::OffsetDateTime::now_utc())
        .format(&time::macros::format_description!("[year]-[month]-[day]"))
        .expect("formattable");
    let file_suffix = "db-dump.tar.gz";
    // Task key and file name are both date-scoped so work repeats at most daily.
    let task_key = format!(
        "{}{}{}",
        "crates-io-db-dump",
        crate::persistence::KEY_SEP_CHAR,
        today_yyyy_mm_dd
    );
    let db_file_path = assets_dir
        .join("crates-io-db")
        .join(format!("{}-{}", today_yyyy_mm_dd, file_suffix));
    let tasks = db.open_tasks()?;
    if tasks
        .get(&task_key)?
        .map(|t| t.can_be_started(startup_time) || t.state.is_complete()) // always allow the extractor to run - must be idempotent
        .unwrap_or(true)
    {
        tx_io
            .send(work::iobound::DownloadRequest {
                output_file_path: db_file_path.clone(),
                progress_name: "db dump".to_string(),
                task_key,
                crate_name_and_version: None,
                kind: "tar.gz",
                url: "https://static.crates.io/db-dump.tar.gz".to_string(),
            })
            .await
            .map_err(Error::send_msg("Download Request"))?;
        // Drop the only sender so the processor's input channel closes once the
        // request is handled.
        drop(tx_io);
        if let Ok(db_file_path) = rx_result.recv().await {
            // Ingestion is CPU/IO heavy — run it on a blocking thread.
            blocking::unblock({
                let progress = progress.add_child("ingest");
                move || extract_and_ingest(db, progress, db_file_path)
            })
            .await
            .map_err(|err| {
                progress.fail(format!("ingestion failed: {}", err));
                err
            })?;
        }
    }
    blocking::unblock(move || cleanup(db_file_path, progress.add_child("removing old db-dumps"))).await?;
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/report/git.rs | criner/src/engine/stage/report/git.rs | use crate::utils::enforce_threaded;
use crate::{
engine::report::generic::{WriteCallback, WriteCallbackState, WriteInstruction, WriteRequest},
{Error, Result},
};
use futures_util::{future::BoxFuture, FutureExt};
use std::{
path::{Path, PathBuf},
sync::atomic::{AtomicU64, Ordering},
time::{SystemTime, UNIX_EPOCH},
};
// Running total of loose blob objects written since program start, for reporting only.
static TOTAL_LOOSE_OBJECTS_WRITTEN: AtomicU64 = AtomicU64::new(0);
/// Create an in-memory git index entry for `path` with the given blob size.
/// Timestamps, device/inode, uid/gid and flags are zeroed; mode `0o100644`
/// marks a regular, non-executable file, and the object id is left as zero.
fn file_index_entry(path: PathBuf, file_size: usize) -> git2::IndexEntry {
    use std::os::unix::ffi::OsStringExt;
    git2::IndexEntry {
        ctime: git2::IndexTime::new(0, 0),
        mtime: git2::IndexTime::new(0, 0),
        dev: 0,
        ino: 0,
        mode: 0o100_644,
        uid: 0,
        gid: 0,
        // git index entries store a 32-bit size — report files are small
        file_size: file_size as u32,
        id: git2::Oid::zero(),
        flags: 0,
        flags_extended: 0,
        path: path.into_os_string().into_vec(),
    }
}
fn env_var(name: &str) -> Result<String> {
std::env::var(name).map_err(|e| match e {
std::env::VarError::NotPresent => crate::Error::Message(format!("environment variable {:?} must be set", name)),
std::env::VarError::NotUnicode(_) => crate::Error::Message(format!(
"environment variable {:?} was set but couldn't be decoded as UTF-8",
name
)),
})
}
/// Decide how report writes are persisted, depending on whether `report_dir`
/// is a git repository.
///
/// Returns the write callback to use, the optional channel that feeds a
/// background git thread, and that thread's join handle. Without a repository,
/// files are simply written to disk.
///
/// The background thread collects all written files into the git index, writes
/// a tree and commit (skipped when nothing changed), and pushes `HEAD` to
/// `refs/heads/main` with credentials taken from the
/// `CRINER_REPORT_PUSH_HTTP_USERNAME`/`CRINER_REPORT_PUSH_HTTP_PASSWORD`
/// environment variables.
pub fn select_callback(
    processors: u32,
    report_dir: &Path,
    mut progress: prodash::tree::Item,
) -> (
    WriteCallback,
    WriteCallbackState,
    Option<std::thread::JoinHandle<Result<()>>>,
) {
    match git2::Repository::open(report_dir) {
        Ok(repo) => {
            let (tx, rx) = async_channel::bounded(processors as usize);
            let is_bare_repo = repo.is_bare();
            let report_dir = report_dir.to_owned();
            let handle = std::thread::spawn(move || -> Result<()> {
                // Run the pipeline in a closure so any error can be surfaced to
                // `progress` before being returned from the thread.
                let res = (|| {
                    progress.init(None, Some("files stored in index".into()));
                    let mut index = {
                        let mut i = repo.index()?;
                        if is_bare_repo {
                            // Bare repos have no worktree-backed index content;
                            // seed the in-memory index from the current HEAD tree.
                            if let Ok(tree_oid) = repo
                                .head()
                                .and_then(|h| h.resolve())
                                .and_then(|h| h.peel_to_tree())
                                .map(|t| t.id())
                            {
                                progress.info(format!("reading latest tree into in-memory index: {}", tree_oid));
                                progress.blocked("reading tree into in-memory index", None);
                                i.read_tree(&repo.find_tree(tree_oid).expect("a tree object to exist"))?;
                                progress.done("read tree into memory index");
                            }
                        }
                        i
                    };
                    // Drain write requests until every sender is dropped.
                    let mut req_count = 0u64;
                    while let Ok(WriteRequest { path, content }) = futures_lite::future::block_on(rx.recv()) {
                        let path = path.strip_prefix(&report_dir)?;
                        req_count += 1;
                        let entry = file_index_entry(path.to_owned(), content.len());
                        index.add_frombuffer(&entry, &content)?;
                        progress.set(req_count as usize);
                    }
                    progress.init(Some(5), Some("steps".into()));
                    let tree_oid = {
                        progress.set(1);
                        progress.blocked("writing tree", None);
                        progress.info(format!(
                            "writing tree with {} new entries and a total of {} entries",
                            req_count,
                            index.len()
                        ));
                        let oid = index.write_tree()?;
                        progress.done("Tree written successfully");
                        oid
                    };
                    TOTAL_LOOSE_OBJECTS_WRITTEN.fetch_add(req_count, Ordering::SeqCst);
                    progress.info(format!(
                        "Wrote {} loose blob objects since program start",
                        TOTAL_LOOSE_OBJECTS_WRITTEN.load(Ordering::Relaxed)
                    ));
                    if !is_bare_repo {
                        progress.set(2);
                        progress.blocked("writing new index", None);
                        repo.set_index(&mut index)?;
                    }
                    drop(index);
                    // If HEAD already points at the tree we just wrote, there is
                    // nothing to commit or push.
                    if let Ok(current_tree) = repo
                        .head()
                        .and_then(|h| h.resolve())
                        .and_then(|h| h.peel_to_tree())
                        .map(|t| t.id())
                    {
                        if current_tree == tree_oid {
                            progress.info("Skipping git commit as there was no change");
                            return Ok(());
                        }
                    }
                    {
                        progress.set(3);
                        progress.blocked("writing commit", None);
                        let current_time =
                            git2::Time::new(SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64, 0);
                        let signature =
                            git2::Signature::new("Criner", "https://github.com/the-lean-crate/criner", &current_time)?;
                        let parent = repo
                            .head()
                            .and_then(|h| h.resolve())
                            .and_then(|h| h.peel_to_commit())
                            .ok();
                        // First commit in a fresh repo has no parent.
                        let mut parent_store = Vec::with_capacity(1);
                        repo.commit(
                            Some("HEAD"),
                            &signature,
                            &signature,
                            &format!("update {} reports", req_count),
                            &repo.find_tree(tree_oid).expect("tree just written to be found"),
                            match parent.as_ref() {
                                Some(parent) => {
                                    parent_store.push(parent);
                                    &parent_store
                                }
                                None => &[],
                            },
                        )?;
                        progress.done("Commit created");
                    }
                    progress.set(4);
                    progress.blocked("pushing changes", None);
                    // Push to the branch's configured upstream remote, falling
                    // back to 'origin'.
                    let remote_name = repo
                        .branch_upstream_remote(
                            repo.head()
                                .and_then(|h| h.resolve())?
                                .name()
                                .expect("branch name is valid utf8"),
                        )
                        .map(|b| b.as_str().expect("valid utf8").to_string())
                        .unwrap_or_else(|_| "origin".into());
                    futures_lite::future::block_on(enforce_threaded(
                        SystemTime::now() + std::time::Duration::from_secs(60 * 60),
                        {
                            let mut progress = progress.add_child("git push");
                            move || -> crate::Result<_> {
                                let mut remote = repo.find_remote(&remote_name)?;
                                let mut callbacks = git2::RemoteCallbacks::new();
                                let mut subprogress = progress.add_child("git credentials");
                                let sideband = progress.add_child("git sideband");
                                let username = env_var("CRINER_REPORT_PUSH_HTTP_USERNAME")?;
                                let password = env_var("CRINER_REPORT_PUSH_HTTP_PASSWORD")?;
                                callbacks
                                    .transfer_progress(|p| {
                                        progress.set_name(format!(
                                            "Git pushing changes ({} received)",
                                            bytesize::ByteSize(p.received_bytes() as u64)
                                        ));
                                        progress.init(
                                            Some(p.total_deltas() + p.total_objects()),
                                            Some("objects".into()),
                                        );
                                        progress
                                            .set(p.indexed_deltas() + p.received_objects() );
                                        true
                                    })
                                    .sideband_progress(move |line| {
                                        sideband.set_name(std::str::from_utf8(line).map(|s| s.trim()).unwrap_or(""));
                                        true
                                    }).credentials(move |url, username_from_url, allowed_types| {
                                        subprogress.info(format!("Setting userpass plaintext credentials, allowed are {:?} for {:?} (username = {:?}", allowed_types, url, username_from_url));
                                        git2::Cred::userpass_plaintext(&username, &password)
                                    });
                                remote.push(
                                    &["+HEAD:refs/heads/main"],
                                    Some(
                                        git2::PushOptions::new()
                                            .packbuilder_parallelism(0)
                                            .remote_callbacks(callbacks),
                                    ),
                                )?;
                                Ok(())
                            }
                        },
                    ))??;
                    progress.done("Pushed changes");
                    Ok(())
                })();
                res.map_err(|err| {
                    progress.fail(format!("{}", err));
                    err
                })
            });
            (
                if is_bare_repo {
                    log::info!("Writing into bare git repo only, local writes disabled");
                    repo_bare
                } else {
                    log::info!("Writing into git repo and working dir");
                    repo_with_working_dir
                },
                Some(tx),
                Some(handle),
            )
        }
        Err(err) => {
            log::info!(
                "no git available in '{}', will write files only (error is '{}')",
                report_dir.display(),
                err,
            );
            (not_available, None, None)
        }
    }
}
/// Write callback for repositories with a working directory: forward the
/// request to the git thread (which stores the content in the index) and also
/// instruct the caller to write the file to disk.
pub fn repo_with_working_dir(req: WriteRequest, send: &WriteCallbackState) -> BoxFuture<Result<WriteInstruction>> {
    async move {
        send.as_ref()
            .expect("send to be available if a repo is available")
            .send(req.clone())
            .await
            .map_err(Error::send_msg("Git Write Request"))?;
        Ok(WriteInstruction::DoWrite(req))
    }
    .boxed()
}
/// Write callback for bare repositories: the git thread persists the content,
/// so the caller is told to skip the filesystem write entirely.
pub fn repo_bare(req: WriteRequest, send: &WriteCallbackState) -> BoxFuture<Result<WriteInstruction>> {
    async move {
        send.as_ref()
            .expect("send to be available if a repo is available")
            .send(req)
            .await
            .map_err(Error::send_msg("Git Write Request"))?;
        Ok(WriteInstruction::Skip)
    }
    .boxed()
}
/// Write callback used when no git repository is available: the caller performs
/// the write itself, nothing is forwarded anywhere.
pub fn not_available(req: WriteRequest, _state: &WriteCallbackState) -> BoxFuture<Result<WriteInstruction>> {
    let instruction = WriteInstruction::DoWrite(req);
    async move { Ok(instruction) }.boxed()
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/stage/report/mod.rs | criner/src/engine/stage/report/mod.rs | use crate::{
engine::report,
persistence::{self, new_key_value_query_old_to_new_filtered, TableAccess},
utils::check,
{Error, Result},
};
use futures_util::FutureExt;
use std::{path::PathBuf, time::SystemTime};
mod git;
/// Generate the waste report for all crates (or those matching `glob`) in `db`,
/// writing HTML into `<assets_dir>/../reports/waste` and optionally pushing the
/// result through a git writer thread.
///
/// Work is chunked: crates are fetched from the database in chunks of up to 500,
/// each chunk is turned into a report-writing future and fanned out to
/// `cpu_o_bound_processors` blocking workers; per-chunk results are merged into
/// one top-level report by a separate aggregator task.
pub async fn generate(
    db: persistence::Db,
    mut progress: prodash::tree::Item,
    assets_dir: PathBuf,
    glob: Option<String>,
    deadline: Option<SystemTime>,
    cpu_o_bound_processors: u32,
) -> Result<()> {
    use report::generic::Generator;
    let krates = db.open_crates()?;
    // Reports live next to the assets directory, inside criner.db.
    let output_dir = assets_dir
        .parent()
        .expect("assets directory to be in criner.db")
        .join("reports");
    let glob_str = glob.as_deref();
    let num_crates = krates.count_filtered(glob_str) as usize;
    let chunk_size = 500.min(num_crates);
    if chunk_size == 0 {
        // Nothing matches the filter — nothing to do.
        return Ok(());
    }
    progress.init(Some(num_crates), Some("crates".into()));
    // Spawn a small pool of blocking workers that pull report futures off a
    // bounded channel, drive them to completion and forward the results.
    let (processors, rx_result) = {
        let (tx_task, rx_task) = async_channel::bounded(1);
        let (tx_result, rx_result) = async_channel::bounded(cpu_o_bound_processors as usize * 2);
        for _ in 0..cpu_o_bound_processors {
            let task = rx_task.clone();
            let result = tx_result.clone();
            crate::spawn(blocking::unblock(move || {
                futures_lite::future::block_on(async move {
                    while let Ok(f) = task.recv().await {
                        result.send(f.await).await.map_err(Error::send_msg("send CPU result"))?;
                    }
                    Ok::<_, Error>(())
                })
            }))
            .detach();
        }
        (tx_task, rx_result)
    };
    let waste_report_dir = output_dir.join(report::waste::Generator::name());
    blocking::unblock({
        let dir = waste_report_dir.clone();
        move || std::fs::create_dir_all(dir)
    })
    .await?;
    use crate::engine::report::generic::WriteCallback;
    // With a glob we run in 'testing' mode: no incremental cache and no git,
    // reports are simply written to disk.
    let (cache_dir, (git_handle, git_state, maybe_join_handle)) = match glob.as_ref() {
        Some(_) => (None, (git::not_available as WriteCallback, None, None)),
        None => {
            let cd = waste_report_dir.join("__incremental_cache__");
            blocking::unblock({
                let cd = cd.clone();
                move || std::fs::create_dir_all(cd)
            })
            .await?;
            (
                Some(cd),
                git::select_callback(cpu_o_bound_processors, &waste_report_dir, progress.add_child("git")),
            )
        }
    };
    // Aggregator task: merges per-chunk reports arriving on `rx_result` into the
    // final top-level report.
    let merge_reports = crate::spawn({
        let merge_progress = progress.add_child("report aggregator");
        merge_progress.init(Some(num_crates / chunk_size), Some("Reports".into()));
        report::waste::Generator::merge_reports(
            waste_report_dir.clone(),
            cache_dir.clone(),
            merge_progress,
            rx_result,
            git_handle,
            git_state.clone(),
        )
        .map(|_| ())
        .boxed()
    });
    let mut fetched_crates = 0;
    let mut chunk = Vec::<(String, Vec<u8>)>::with_capacity(chunk_size as usize);
    let mut cid = 0;
    loop {
        // Fetch the next chunk of (name, serialized crate) pairs; a short chunk
        // means we reached the end of the table.
        let abort_loop = {
            progress.blocked("fetching chunk of crates to schedule", None);
            let connection = db.open_connection_no_async_with_busy_wait()?;
            let mut statement = new_key_value_query_old_to_new_filtered(
                persistence::CrateTable::table_name(),
                glob_str,
                &connection,
                Some((fetched_crates, chunk_size as usize)),
            )?;
            chunk.clear();
            chunk.extend(
                statement
                    .query_map([], |r| Ok((r.get(0)?, r.get(1)?)))?
                    .filter_map(|r| r.ok()),
            );
            fetched_crates += chunk.len();
            chunk.len() != chunk_size as usize
        };
        cid += 1;
        check(deadline)?;
        progress.set(cid * chunk_size);
        progress.halted("write crate report", None);
        processors
            .send(report::waste::Generator::write_files(
                db.clone(),
                waste_report_dir.clone(),
                cache_dir.clone(),
                chunk,
                progress.add_child(""),
                git_handle,
                git_state.clone(),
            ))
            .await
            .map_err(Error::send_msg("Chunk of files to write"))?;
        chunk = Vec::with_capacity(chunk_size as usize);
        if abort_loop {
            break;
        }
    }
    // Dropping the senders closes the channels, letting workers and the git
    // writer thread shut down once drained.
    drop(git_state);
    drop(processors);
    progress.set(num_crates);
    merge_reports.await;
    progress.done("Generating and merging waste report done");
    if let Some(handle) = maybe_join_handle {
        progress.blocked("waiting for git to finish", None);
        if handle.join().is_err() {
            progress.fail("git failed with unknown error");
        }
    };
    Ok(())
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/report/mod.rs | criner/src/engine/report/mod.rs | pub mod generic;
pub mod waste;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/report/generic.rs | criner/src/engine/report/generic.rs | use crate::persistence::{CrateVersionTable, TableAccess};
use crate::{
error::Result,
model, persistence,
persistence::{new_key_insertion, ReportsTree},
};
use async_trait::async_trait;
use rusqlite::{params, TransactionBehavior};
use std::path::{Path, PathBuf};
/// Count how many leading versions of `crate_name` should be processed,
/// excluding the run of most-recent versions that are all yanked.
///
/// Versions missing from the database are treated as yanked. `key_buf` is a
/// scratch buffer reused for key construction.
fn all_but_recently_yanked(
    crate_name: &str,
    versions: &[String],
    table: &CrateVersionTable,
    key_buf: &mut String,
) -> Result<usize> {
    let mut keep = versions.len();
    // Walk from newest to oldest; stop at the first version known not yanked.
    for version in versions.iter().rev() {
        key_buf.clear();
        model::CrateVersion::key_from(crate_name, version, key_buf);
        let is_yanked = match table.get(&key_buf)? {
            Some(v) => v.kind == crate::model::ChangeKind::Yanked,
            None => true,
        };
        if !is_yanked {
            break;
        }
        keep -= 1;
    }
    Ok(keep)
}
/// A request to persist `content` at `path`; routed to the git writer and/or
/// written directly to disk depending on the active write callback.
#[derive(Clone)]
pub struct WriteRequest {
    /// Destination path of the report file.
    pub path: PathBuf,
    /// Raw bytes to write.
    pub content: Vec<u8>,
}
/// What the caller should do with a [`WriteRequest`] after the callback ran.
#[derive(Clone)]
pub enum WriteInstruction {
    /// The write was handled elsewhere (e.g. by git); do not touch the filesystem.
    Skip,
    /// Perform the filesystem write with the (possibly forwarded) request.
    DoWrite(WriteRequest),
}
/// State shared with every write callback: a channel to the git writer thread,
/// or `None` when no repository is in use.
pub type WriteCallbackState = Option<async_channel::Sender<WriteRequest>>;
/// Function deciding how a [`WriteRequest`] is handled; returns an instruction
/// telling the caller whether it still needs to write the file itself.
pub type WriteCallback =
    fn(WriteRequest, &WriteCallbackState) -> futures_util::future::BoxFuture<Result<WriteInstruction>>;
/// A report value that can be merged with others of its kind and persisted and
/// restored as cached state between runs.
#[async_trait]
pub trait Aggregate
where
    Self: Sized,
{
    /// Combine `self` with `other` into a single aggregated value.
    fn merge(self, other: Self) -> Self;
    /// Finalize the report and render it into `out`.
    async fn complete(&mut self, progress: &mut prodash::tree::Item, out: &mut Vec<u8>) -> Result<()>;
    /// Load the previously stored state matching `self` from `out_dir`, if any.
    async fn load_previous_state(&self, out_dir: &Path, progress: &mut prodash::tree::Item) -> Option<Self>;
    /// Load the previously stored top-level (all-crates) state from `out_dir`, if any.
    async fn load_previous_top_level_state(out_dir: &Path, progress: &mut prodash::tree::Item) -> Option<Self>;
    /// Persist the current state into `out_dir` for future incremental runs.
    async fn store_current_state(&self, out_dir: &Path, progress: &mut prodash::tree::Item) -> Result<()>;
}
/// A report generator: turns per-crate-version database results into
/// [`Aggregate`] reports, writes HTML per version, per crate and for the whole
/// collection, and maintains incremental caches plus done-markers.
#[async_trait]
pub trait Generator {
    /// Aggregated report type produced by this generator.
    type Report: Aggregate + Send + Sync + Clone;
    /// Raw database result a single report is generated from.
    type DBResult: Send;
    /// Short name of the generator; used in output paths and keys.
    fn name() -> &'static str;
    /// Generator version; bumping it invalidates done-markers (see `fq_report_key`).
    fn version() -> &'static str;
    /// Write the fully-qualified key of the stored task result into `key_buf`.
    fn fq_result_key(crate_name: &str, crate_version: &str, key_buf: &mut String);
    /// Write the fully-qualified done-marker key for this generator into `key_buf`.
    fn fq_report_key(crate_name: &str, crate_version: &str, key_buf: &mut String) {
        ReportsTree::key_buf(crate_name, crate_version, Self::name(), Self::version(), key_buf);
    }
    /// Fetch the database result for the given crate version, if present.
    fn get_result(
        connection: persistence::ThreadSafeConnection,
        crate_name: &str,
        crate_version: &str,
        key_buf: &mut String,
    ) -> Result<Option<Self::DBResult>>;
    /// Drain `reports`, merge everything into one top-level report, combine it
    /// with previously cached state (if any), render `index.html` via `write`,
    /// and store the merged state back into `cache_dir`.
    async fn merge_reports(
        out_dir: PathBuf,
        cache_dir: Option<PathBuf>,
        mut progress: prodash::tree::Item,
        reports: async_channel::Receiver<Result<Option<Self::Report>>>,
        write: WriteCallback,
        write_state: WriteCallbackState,
    ) -> Result<()> {
        let mut report = None::<Self::Report>;
        let mut count = 0;
        while let Ok(result) = reports.recv().await {
            count += 1;
            progress.set(count);
            match result {
                Ok(Some(new_report)) => {
                    // Merge incrementally; the first report is the seed.
                    report = Some(match report {
                        Some(report) => report.merge(new_report),
                        None => new_report,
                    })
                }
                Ok(None) => {}
                Err(err) => {
                    // A failed chunk is reported but does not abort aggregation.
                    progress.fail(format!("report failed: {}", err));
                }
            };
        }
        if let Some(mut report) = report {
            // Prefer the dedicated top-level cache; fall back to the report's
            // own previous state.
            let previous_report = match cache_dir.as_ref() {
                Some(cd) => match Self::Report::load_previous_top_level_state(cd, &mut progress).await {
                    Some(r) => Some(r),
                    None => report.load_previous_state(cd, &mut progress).await,
                },
                None => None,
            };
            report = match previous_report {
                Some(previous_report) => previous_report.merge(report),
                None => report,
            };
            {
                complete_and_write_report(
                    &mut report,
                    Vec::new(),
                    &mut progress,
                    out_dir.join("index.html"),
                    write,
                    &write_state,
                )
                .await?;
            }
            if let Some(cd) = cache_dir {
                report.store_current_state(&cd, &mut progress).await?;
            }
        }
        Ok(())
    }
    /// Produce a report for a single crate version from its database result.
    async fn generate_report(
        crate_name: &str,
        crate_version: &str,
        result: Self::DBResult,
        progress: &mut prodash::tree::Item,
    ) -> Result<Self::Report>;
    /// Generate and write reports for a chunk of crates: one HTML file per
    /// version plus one per crate, merging with cached per-crate state when
    /// available, and return the merged report of the whole chunk.
    async fn write_files(
        db: persistence::Db,
        out_dir: PathBuf,
        cache_dir: Option<PathBuf>,
        krates: Vec<(String, Vec<u8>)>,
        mut progress: prodash::tree::Item,
        write: WriteCallback,
        write_state: WriteCallbackState,
    ) -> Result<Option<Self::Report>> {
        let mut chunk_report = None::<Self::Report>;
        let crate_versions = db.open_crate_versions()?;
        let mut reports_to_mark_done = Vec::new();
        // Render buffer, reused across versions to avoid reallocation.
        let mut out_buf = Vec::new();
        {
            let connection = db.open_connection()?;
            let reports = db.open_reports()?;
            let mut key_buf = String::with_capacity(32);
            // delaying writes works because we don't have overlap on work
            for (name, krate) in krates.into_iter() {
                let c: model::Crate = krate.as_slice().into();
                let crate_dir = crate_dir(&out_dir, &name);
                progress.init(Some(c.versions.len()), Some("versions".into()));
                progress.set_name(&name);
                let mut crate_report = None::<Self::Report>;
                // Skip the trailing run of yanked versions entirely.
                for version in c.versions.iter().take(all_but_recently_yanked(
                    &name,
                    &c.versions,
                    &crate_versions,
                    &mut key_buf,
                )?) {
                    progress.inc();
                    key_buf.clear();
                    Self::fq_report_key(&name, version, &mut key_buf);
                    // If we have no cache, assume we are globbed (yes, I know…sigh), so always produce reports
                    // but don't invalidate data in caches by reading or writing them. Mostly used for testing
                    // as it creates a sub-report, every time without having to fiddle with the
                    // reports_done marker table.
                    if cache_dir.is_none() || !reports.is_done(&key_buf) {
                        let reports_key = key_buf.clone();
                        key_buf.clear();
                        if let Some(result) = Self::get_result(connection.clone(), &name, version, &mut key_buf)? {
                            let mut version_report =
                                Self::generate_report(&name, version, result, &mut progress).await?;
                            out_buf = complete_and_write_report(
                                &mut version_report,
                                out_buf,
                                &mut progress,
                                version_html_path(&crate_dir, version),
                                write,
                                &write_state,
                            )
                            .await?;
                            crate_report = Some(match crate_report {
                                Some(crate_report) => crate_report.merge(version_report),
                                None => version_report,
                            });
                            reports_to_mark_done.push(reports_key);
                        }
                    }
                }
                if let Some(mut crate_report) = crate_report {
                    // Merge with per-crate cached state so the crate page shows
                    // absolute numbers, not just this run's delta.
                    let previous_state = match cache_dir.as_ref() {
                        Some(cd) => crate_report.load_previous_state(cd, &mut progress).await,
                        None => None,
                    };
                    match previous_state {
                        Some(previous_state) => {
                            let mut absolute_state = previous_state.merge(crate_report.clone());
                            out_buf = complete_and_write_report(
                                &mut absolute_state,
                                out_buf,
                                &mut progress,
                                crate_html_path(&crate_dir),
                                write,
                                &write_state,
                            )
                            .await?;
                            if let Some(cd) = cache_dir.as_ref() {
                                absolute_state.store_current_state(cd, &mut progress).await?;
                            };
                        }
                        None => {
                            out_buf = complete_and_write_report(
                                &mut crate_report,
                                out_buf,
                                &mut progress,
                                crate_html_path(&crate_dir),
                                write,
                                &write_state,
                            )
                            .await?;
                            if let Some(cd) = cache_dir.as_ref() {
                                crate_report.store_current_state(cd, &mut progress).await?;
                            }
                        }
                    }
                    chunk_report = Some(match chunk_report {
                        Some(chunk_report) => chunk_report.merge(crate_report),
                        None => crate_report,
                    });
                }
            }
        }
        // Persist all done-markers in a single immediate transaction at the end.
        if !reports_to_mark_done.is_empty() {
            let mut connection = db.open_connection_no_async_with_busy_wait()?;
            progress.blocked("wait for write lock", None);
            let transaction = connection.transaction_with_behavior(TransactionBehavior::Immediate)?;
            progress.init(
                Some(reports_to_mark_done.len()),
                Some("report done markers written".into()),
            );
            {
                let mut statement = new_key_insertion(ReportsTree::table_name(), &transaction)?;
                for key in reports_to_mark_done.iter() {
                    statement.execute(params![key])?;
                    progress.inc();
                }
            }
            transaction.commit()?;
        }
        Ok(chunk_report)
    }
}
/// Directory holding all report files of `crate_name`, directly under `base`.
fn crate_dir(base: &Path, crate_name: &str) -> PathBuf {
    let mut dir = base.to_path_buf();
    dir.push(crate_name);
    dir
}
/// Path of the per-version report page, `<crate_dir>/<version>.html`.
fn version_html_path(crate_dir: &Path, version: &str) -> PathBuf {
    let mut file_name = String::with_capacity(version.len() + 5);
    file_name.push_str(version);
    file_name.push_str(".html");
    crate_dir.join(file_name)
}
/// Path of the per-crate overview page, `<crate_dir>/index.html`.
fn crate_html_path(crate_dir: &Path) -> PathBuf {
    let mut page = crate_dir.to_path_buf();
    page.push("index.html");
    page
}
/// Render `report` into the reusable buffer `out`, hand the result to the
/// `write` callback and, if instructed, write it to `path` on disk.
///
/// Returns the render buffer so callers can reuse its allocation; when the
/// write was skipped (content handed off to git), a fresh empty buffer is
/// returned instead.
async fn complete_and_write_report(
    report: &mut impl Aggregate,
    mut out: Vec<u8>,
    progress: &mut prodash::tree::Item,
    path: impl AsRef<Path>,
    write: WriteCallback,
    write_state: &WriteCallbackState,
) -> Result<Vec<u8>> {
    out.clear();
    report.complete(progress, &mut out).await?;
    progress.blocked("sending report to writer", None);
    match write(
        WriteRequest {
            path: path.as_ref().to_path_buf(),
            content: out,
        },
        write_state,
    )
    .await?
    {
        WriteInstruction::DoWrite(WriteRequest { path, content }) => {
            // Ensure the parent directory exists before writing.
            blocking::unblock({
                let path = path.clone();
                move || std::fs::create_dir_all(path.parent().expect("file path with parent directory"))
            })
            .await?;
            progress.halted("writing report to disk", None);
            // `map(|_| content)` returns the buffer to the caller for reuse.
            let content = blocking::unblock(move || std::fs::write(path, &content).map(|_| content)).await?;
            Ok(content)
        }
        WriteInstruction::Skip => Ok(Vec::new()),
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/report/waste/merge.rs | criner/src/engine/report/waste/merge.rs | use super::{AggregateFileInfo, AggregateVersionInfo, Dict, Fix, Report, VersionInfo, WastedFile};
use crate::Result;
use async_trait::async_trait;
use criner_waste_report::{add_optional_aggregate, html::NO_EXT_MARKER};
use std::{
collections::BTreeMap,
ops::AddAssign,
path::{Path, PathBuf},
};
// Cache-file prefix used for the aggregated report spanning all crates.
const TOP_LEVEL_REPORT_NAME: &str = "__top-level-report__";
/// Cache-file path for `prefix`: the generator's name and version are encoded
/// in the file name so stale caches are ignored after a generator upgrade.
fn path_from_prefix(out_dir: &Path, prefix: &str) -> PathBuf {
    use crate::engine::report::generic::Generator;
    let file_name = format!(
        "{}-{}-{}.rmp",
        prefix,
        super::Generator::name(),
        super::Generator::version()
    );
    out_dir.join(file_name)
}
fn path_to_storage_location(report: &Report, out_dir: &Path) -> PathBuf {
use Report::*;
let prefix = match report {
Version { crate_name, .. } | Crate { crate_name, .. } => crate_name.as_str(),
CrateCollection { .. } => TOP_LEVEL_REPORT_NAME,
};
path_from_prefix(out_dir, prefix)
}
/// Accumulate wasted files into `initial`, grouped by file extension; files
/// without an extension are collected under the `NO_EXT_MARKER` key.
pub fn vec_into_map_by_extension(initial: Dict<AggregateFileInfo>, from: Vec<WastedFile>) -> Dict<AggregateFileInfo> {
    let mut by_extension = initial;
    for file in from {
        let extension = PathBuf::from(file.0)
            .extension()
            .and_then(|oss| oss.to_str().map(|s| s.to_string()))
            .unwrap_or_else(|| NO_EXT_MARKER.to_string());
        let info = by_extension.entry(extension).or_insert_with(Default::default);
        info.total_bytes += file.1;
        info.total_files += 1;
    }
    by_extension
}
/// Aggregate the potential savings of a suggested fix into file/byte totals.
///
/// Only `Fix::ImprovedInclude` with known potential waste yields a value;
/// every other fix (or no fix at all) has no quantifiable potential.
pub fn fix_to_wasted_files_aggregate(fix: Option<Fix>) -> Option<AggregateFileInfo> {
    let files = match fix {
        Some(Fix::ImprovedInclude {
            potential: Some(potential),
            ..
        }) => potential.potential_waste,
        _ => return None,
    };
    let mut aggregate = AggregateFileInfo::default();
    for entry in files {
        aggregate.total_files += 1;
        aggregate.total_bytes += entry.size;
    }
    Some(aggregate)
}
pub fn into_map_by_extension(from: Vec<WastedFile>) -> Dict<AggregateFileInfo> {
vec_into_map_by_extension(BTreeMap::new(), from)
}
/// Merge `rhs` into `lhs`, adding values of equal keys together.
pub fn map_into_map<T>(lhs: Dict<T>, rhs: Dict<T>) -> Dict<T>
where
    T: std::ops::AddAssign + Default,
{
    let mut merged = lhs;
    for (key, value) in rhs {
        // Missing keys start from T::default() before the addition.
        *merged.entry(key).or_default() += value;
    }
    merged
}
/// Total size in bytes of all wasted files.
pub fn byte_count(files: &[WastedFile]) -> u64 {
    let mut total = 0u64;
    for file in files {
        total += file.1;
    }
    total
}
/// Build a single-entry version map summarizing one crate version: overall
/// file/byte totals, waste totals and the optional potential gains.
pub fn version_to_new_version_map(
    crate_version: String,
    total_size_in_bytes: u64,
    total_files: u64,
    wasted_files: &[WastedFile],
    potential_gains: Option<AggregateFileInfo>,
) -> Dict<VersionInfo> {
    let info = VersionInfo {
        all: AggregateFileInfo {
            total_bytes: total_size_in_bytes,
            total_files,
        },
        waste: AggregateFileInfo {
            total_bytes: byte_count(wasted_files),
            total_files: wasted_files.len() as u64,
        },
        potential_gains,
        // Only meaningful at crate/collection level; always None here.
        waste_latest_version: None,
    };
    let mut map = BTreeMap::new();
    map.insert(crate_version, info);
    map
}
/// Collapse all versions of one crate into a single aggregate entry keyed by
/// `crate_name`, remembering the waste of the lexicographically highest version
/// name as `waste_latest_version`.
pub fn crate_collection_info_from_version_info(
    crate_name: String,
    info_by_version: Dict<VersionInfo>,
) -> Dict<AggregateVersionInfo> {
    let mut highest_version = String::new();
    let mut aggregate = AggregateVersionInfo::default();
    for (version_name, info) in info_by_version {
        let VersionInfo {
            waste,
            all,
            potential_gains,
            waste_latest_version: _unused_and_always_none,
        } = info;
        aggregate.waste.add_assign(waste.clone());
        aggregate.all.add_assign(all);
        aggregate.potential_gains = add_optional_aggregate(aggregate.potential_gains.clone(), potential_gains);
        // Track the latest (highest-sorting) version's waste separately.
        if version_name > highest_version {
            highest_version = version_name.clone();
            aggregate.waste_latest_version = Some((version_name, waste));
        }
    }
    let mut result = BTreeMap::new();
    result.insert(crate_name, aggregate);
    result
}
/// Lift a single crate's data into a `CrateCollection` report.
pub fn collection_from_crate(
    crate_name: String,
    total_size_in_bytes: u64,
    total_files: u64,
    info_by_version: Dict<VersionInfo>,
    wasted_by_extension: Dict<AggregateFileInfo>,
) -> Report {
    let info_by_crate = crate_collection_info_from_version_info(crate_name, info_by_version);
    Report::CrateCollection {
        total_size_in_bytes,
        total_files,
        info_by_crate,
        wasted_by_extension,
    }
}
/// Lift a `Report::Version` into a single-version `Report::Crate`.
///
/// Panics when called with any other variant — callers must guarantee the input
/// is a `Version`.
pub fn crate_from_version(version: Report) -> Report {
    if let Report::Version {
        crate_name,
        crate_version,
        total_size_in_bytes,
        total_files,
        wasted_files,
        suggested_fix,
    } = version
    {
        let info_by_version = version_to_new_version_map(
            crate_version,
            total_size_in_bytes,
            total_files,
            &wasted_files,
            fix_to_wasted_files_aggregate(suggested_fix),
        );
        Report::Crate {
            crate_name,
            info_by_version,
            total_size_in_bytes,
            total_files,
            wasted_by_extension: into_map_by_extension(wasted_files),
        }
    } else {
        unreachable!("must only be called with version variant")
    }
}
#[async_trait]
impl crate::engine::report::generic::Aggregate for Report {
    /// Merge two reports of any granularity (Version, Crate, CrateCollection).
    /// Mixed granularities are first lifted to the coarser variant, so the
    /// result always has the coarsest granularity of the two inputs.
    fn merge(self, other: Self) -> Self {
        use Report::*;
        match (self, other) {
            // Two versions: lift the left one to a Crate, then merge.
            (lhs @ Version { .. }, rhs @ Version { .. }) => crate_from_version(lhs).merge(rhs),
            // Order-normalizing arms: put the coarser variant on the left and
            // delegate to the arms below.
            (version @ Version { .. }, krate @ Crate { .. }) => krate.merge(version),
            (version @ Version { .. }, collection @ CrateCollection { .. }) => collection.merge(version),
            (collection @ CrateCollection { .. }, version @ Version { .. }) => {
                collection.merge(crate_from_version(version))
            }
            (krate @ Crate { .. }, collection @ CrateCollection { .. }) => collection.merge(krate),
            // Crate + Version: same crate folds the version in; different crate
            // promotes the left side to a collection first.
            (
                Crate {
                    crate_name: lhs_crate_name,
                    total_size_in_bytes: lhs_tsb,
                    total_files: lhs_tf,
                    info_by_version,
                    wasted_by_extension,
                },
                Version {
                    crate_name: rhs_crate_name,
                    crate_version,
                    total_size_in_bytes: rhs_tsb,
                    total_files: rhs_tf,
                    wasted_files,
                    suggested_fix,
                },
            ) => {
                if lhs_crate_name == rhs_crate_name {
                    Crate {
                        crate_name: lhs_crate_name,
                        total_size_in_bytes: lhs_tsb + rhs_tsb,
                        total_files: lhs_tf + rhs_tf,
                        info_by_version: map_into_map(
                            info_by_version,
                            version_to_new_version_map(
                                crate_version,
                                rhs_tsb,
                                rhs_tf,
                                &wasted_files,
                                fix_to_wasted_files_aggregate(suggested_fix),
                            ),
                        ),
                        wasted_by_extension: vec_into_map_by_extension(wasted_by_extension, wasted_files),
                    }
                } else {
                    collection_from_crate(lhs_crate_name, lhs_tsb, lhs_tf, info_by_version, wasted_by_extension).merge(
                        Version {
                            crate_name: rhs_crate_name,
                            crate_version,
                            total_size_in_bytes: rhs_tsb,
                            total_files: rhs_tf,
                            wasted_files,
                            suggested_fix,
                        },
                    )
                }
            }
            // Crate + Crate: merge per-version maps if it's the same crate,
            // otherwise promote the left side to a collection.
            (
                Crate {
                    crate_name: lhs_crate_name,
                    total_size_in_bytes: lhs_tsb,
                    total_files: lhs_tf,
                    info_by_version: lhs_ibv,
                    wasted_by_extension: lhs_wbe,
                },
                Crate {
                    crate_name: rhs_crate_name,
                    total_size_in_bytes: rhs_tsb,
                    total_files: rhs_tf,
                    info_by_version: rhs_ibv,
                    wasted_by_extension: rhs_wbe,
                },
            ) => {
                if lhs_crate_name != rhs_crate_name {
                    collection_from_crate(lhs_crate_name, lhs_tsb, lhs_tf, lhs_ibv, lhs_wbe).merge(Crate {
                        crate_name: rhs_crate_name,
                        total_size_in_bytes: rhs_tsb,
                        total_files: rhs_tf,
                        info_by_version: rhs_ibv,
                        wasted_by_extension: rhs_wbe,
                    })
                } else {
                    Crate {
                        crate_name: lhs_crate_name,
                        total_size_in_bytes: lhs_tsb + rhs_tsb,
                        total_files: lhs_tf + rhs_tf,
                        info_by_version: map_into_map(lhs_ibv, rhs_ibv),
                        wasted_by_extension: map_into_map(lhs_wbe, rhs_wbe),
                    }
                }
            }
            // Collection + Collection: element-wise addition of all maps/totals.
            (
                CrateCollection {
                    total_size_in_bytes: lhs_tsb,
                    total_files: lhs_tf,
                    info_by_crate: lhs_ibc,
                    wasted_by_extension: lhs_wbe,
                },
                CrateCollection {
                    total_size_in_bytes: rhs_tsb,
                    total_files: rhs_tf,
                    info_by_crate: rhs_ibc,
                    wasted_by_extension: rhs_wbe,
                },
            ) => CrateCollection {
                total_size_in_bytes: lhs_tsb + rhs_tsb,
                total_files: lhs_tf + rhs_tf,
                info_by_crate: map_into_map(lhs_ibc, rhs_ibc),
                wasted_by_extension: map_into_map(lhs_wbe, rhs_wbe),
            },
            // Collection + Crate: fold the crate's versions into the collection.
            (
                CrateCollection {
                    total_size_in_bytes: lhs_tsb,
                    total_files: lhs_tf,
                    info_by_crate,
                    wasted_by_extension: lhs_wbe,
                },
                Crate {
                    crate_name,
                    total_size_in_bytes: rhs_tsb,
                    total_files: rhs_tf,
                    info_by_version,
                    wasted_by_extension: rhs_wbe,
                },
            ) => CrateCollection {
                total_size_in_bytes: lhs_tsb + rhs_tsb,
                total_files: lhs_tf + rhs_tf,
                wasted_by_extension: map_into_map(lhs_wbe, rhs_wbe),
                info_by_crate: map_into_map(
                    info_by_crate,
                    crate_collection_info_from_version_info(crate_name, info_by_version),
                ),
            },
        }
    }
    /// Render the report as HTML into `out`.
    async fn complete(&mut self, _progress: &mut prodash::tree::Item, out: &mut Vec<u8>) -> Result<()> {
        use horrorshow::Template;
        // Rendering consumes the report, so work on a clone.
        let report = self.clone();
        report.write_to_io(out)?;
        Ok(())
    }
    /// Load the cached all-crates report from `out_dir`, if present and readable.
    async fn load_previous_top_level_state(out_dir: &Path, progress: &mut prodash::tree::Item) -> Option<Self> {
        let path = path_from_prefix(out_dir, TOP_LEVEL_REPORT_NAME);
        progress.blocked("loading previous top-level waste report from disk", None);
        // Any read or decode failure is treated as 'no previous state'.
        blocking::unblock(move || std::fs::read(path))
            .await
            .ok()
            .and_then(|v| rmp_serde::from_read(v.as_slice()).ok())
    }
    /// Load the cached state matching this report's storage location, if any.
    async fn load_previous_state(&self, out_dir: &Path, progress: &mut prodash::tree::Item) -> Option<Self> {
        let path = path_to_storage_location(self, out_dir);
        progress.blocked("loading previous waste report from disk", None);
        blocking::unblock(move || std::fs::read(path))
            .await
            .ok()
            .and_then(|v| rmp_serde::from_read(v.as_slice()).ok())
    }
    /// Serialize this report (MessagePack) to its storage location in `out_dir`.
    async fn store_current_state(&self, out_dir: &Path, progress: &mut prodash::tree::Item) -> Result<()> {
        let path = path_to_storage_location(self, out_dir);
        progress.blocked("storing current waste report to disk", None);
        let data = rmp_serde::to_vec(self)?;
        blocking::unblock(move || std::fs::write(path, data))
            .await
            .map_err(Into::into)
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/report/waste/mod.rs | criner/src/engine/report/waste/mod.rs | use crate::persistence::TableAccess;
use crate::{error::Result, model::TaskResult, persistence};
use async_trait::async_trait;
pub use criner_waste_report::*;
mod merge;
/// The 'waste' report generator; builds reports from exploded crate archives.
pub struct Generator;
// NOTE: When multiple reports should be combined, this must become a compound generator which combines
// multiple implementations into one, statically.
#[async_trait]
impl super::generic::Generator for Generator {
    type Report = Report;
    type DBResult = TaskResult;
    fn name() -> &'static str {
        "waste"
    }
    fn version() -> &'static str {
        "1.0.0"
    }
    /// Build the key under which the `ExplodedCrate` task result is stored.
    fn fq_result_key(crate_name: &str, crate_version: &str, key_buf: &mut String) {
        // Dummy instances are only used to derive the key layout — their field
        // values do not matter for key construction.
        let dummy_task = crate::engine::work::cpubound::default_persisted_extraction_task();
        let dummy_result = TaskResult::ExplodedCrate {
            entries_meta_data: Default::default(),
            selected_entries: Default::default(),
        };
        dummy_result.fq_key(crate_name, crate_version, &dummy_task, key_buf);
    }
    /// Look up the extraction result for the given crate version, if present.
    fn get_result(
        connection: persistence::ThreadSafeConnection,
        crate_name: &str,
        crate_version: &str,
        key_buf: &mut String,
    ) -> Result<Option<TaskResult>> {
        Self::fq_result_key(crate_name, crate_version, key_buf);
        let table = persistence::TaskResultTable { inner: connection };
        table.get(&key_buf)
    }
    /// Turn an `ExplodedCrate` result into a per-version waste report.
    async fn generate_report(
        crate_name: &str,
        crate_version: &str,
        result: TaskResult,
        _progress: &mut prodash::tree::Item,
    ) -> Result<Self::Report> {
        Ok(match result {
            TaskResult::ExplodedCrate {
                entries_meta_data,
                selected_entries,
            } => Report::from_package(
                crate_name,
                crate_version,
                TarPackage {
                    entries_meta_data,
                    entries: selected_entries,
                },
            ),
            _ => unreachable!("caller must assure we are always an exploded entry"),
        })
    }
}
#[cfg(test)]
mod report_test;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/report/waste/report_test/merge.rs | criner/src/engine/report/waste/report_test/merge.rs | use crate::{
engine::report::generic::Aggregate,
engine::report::waste::{AggregateFileInfo, Fix, PotentialWaste, Report, VersionInfo},
model::TarHeader,
};
use common_macros::b_tree_map;
use std::collections::BTreeMap;
// Aggregate::merge must be symmetric for a Version and a Crate of the same name.
#[test]
fn crate_merging_version_equivalent_to_version_merging_crate() {
    let version = Report::Version {
        crate_name: "a".into(),
        crate_version: "1".into(),
        total_size_in_bytes: 1,
        total_files: 4,
        wasted_files: vec![("a.a".into(), 20)],
        suggested_fix: Some(Fix::RemoveExclude),
    };
    let krate = Report::Crate {
        crate_name: "a".into(),
        total_size_in_bytes: 3,
        total_files: 9,
        info_by_version: BTreeMap::new(),
        wasted_by_extension: b_tree_map! {
            "a".into() => AggregateFileInfo {total_files: 2, total_bytes: 60},
            "b".into() => AggregateFileInfo {total_files: 3, total_bytes: 80},
            "c".into() => AggregateFileInfo {total_files: 1, total_bytes: 90},
        },
    };
    assert_eq!(version.clone().merge(krate.clone()), krate.merge(version));
}
// Merging two crates with different names must promote the result to a
// CrateCollection that sums totals and keys per-crate aggregates by name.
#[test]
fn crate_and_crate_of_different_name() {
    assert_eq!(
        Report::Crate {
            crate_name: "a".into(),
            total_size_in_bytes: 3,
            total_files: 9,
            info_by_version: b_tree_map! {
                "1".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 4, total_bytes: 1 },
                    waste: AggregateFileInfo { total_files: 3, total_bytes: 50 },
                    potential_gains: Some(AggregateFileInfo {
                        total_bytes: 2,
                        total_files: 8
                    }),
                    waste_latest_version: None,
                },
                "2".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 4, total_bytes: 1 },
                    waste: AggregateFileInfo { total_files: 3, total_bytes: 50 },
                    potential_gains: None,
                    waste_latest_version: None,
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
                "b".into() => AggregateFileInfo {total_files: 2, total_bytes: 20},
                "c".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
            },
        }
        .merge(Report::Crate {
            crate_name: "b".into(),
            total_size_in_bytes: 9,
            total_files: 3,
            info_by_version: b_tree_map! {
                "2".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 8, total_bytes: 10 },
                    waste: AggregateFileInfo { total_files: 6, total_bytes: 150 },
                    potential_gains: None,
                    waste_latest_version: None,
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
                "b".into() => AggregateFileInfo {total_files: 2, total_bytes: 20},
                "d".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
            },
        }),
        Report::CrateCollection {
            total_size_in_bytes: 12,
            total_files: 12,
            info_by_crate: b_tree_map! {
                "a".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 4*2, total_bytes: 1*2},
                    waste: AggregateFileInfo { total_files: 3*2, total_bytes: 50*2},
                    potential_gains: Some(AggregateFileInfo {
                        total_bytes: 2,
                        total_files: 8
                    }),
                    waste_latest_version: Some(("2".into(), AggregateFileInfo { total_files: 3, total_bytes: 50 }))
                },
                "b".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 8, total_bytes: 10 },
                    waste: AggregateFileInfo { total_files: 6, total_bytes: 150 },
                    potential_gains: None,
                    waste_latest_version: Some(("2".into(), AggregateFileInfo { total_files: 6, total_bytes: 150 }))
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
                "b".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
                "c".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
                "d".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
            },
        }
    );
}
// Merging two collections adds all totals and merges per-crate entries;
// waste_latest_version keeps the entry of the higher-sorting version name.
#[test]
fn two_crate_collections() {
    let lhs_collection = Report::CrateCollection {
        total_size_in_bytes: 12,
        total_files: 10,
        info_by_crate: b_tree_map! {
            "a".into() => VersionInfo {
                all: AggregateFileInfo { total_files: 4, total_bytes: 1},
                waste: AggregateFileInfo { total_files: 3, total_bytes: 50},
                potential_gains: Some(AggregateFileInfo {
                    total_files: 5,
                    total_bytes: 10,
                }),
                waste_latest_version: Some(("3".into(), AggregateFileInfo { total_files: 1, total_bytes: 20},))
            },
        },
        wasted_by_extension: b_tree_map! {
            "a".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
            "b".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
            "c".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
            "d".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
        },
    };
    let rhs_collection = Report::CrateCollection {
        total_size_in_bytes: 12,
        total_files: 10,
        info_by_crate: b_tree_map! {
            "a".into() => VersionInfo {
                all: AggregateFileInfo { total_files: 40, total_bytes: 10},
                waste: AggregateFileInfo { total_files: 30, total_bytes: 500},
                potential_gains: Some(AggregateFileInfo {
                    total_files: 50,
                    total_bytes: 100,
                }),
                waste_latest_version: Some(("4".into(), AggregateFileInfo { total_files: 2, total_bytes: 40}))
            },
            "b".into() => VersionInfo {
                all: AggregateFileInfo { total_files: 8, total_bytes: 10 },
                waste: AggregateFileInfo { total_files: 6, total_bytes: 150 },
                potential_gains: None,
                waste_latest_version: Some(("1".into(), AggregateFileInfo { total_files: 3, total_bytes: 50}))
            },
        },
        wasted_by_extension: b_tree_map! {
            "a".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
            "b".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
            "c".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
            "d".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
            "e".into() => AggregateFileInfo {total_files: 4, total_bytes: 2},
        },
    };
    assert_eq!(
        lhs_collection.merge(rhs_collection),
        Report::CrateCollection {
            total_size_in_bytes: 24,
            total_files: 20,
            info_by_crate: b_tree_map! {
                "a".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 40+4, total_bytes: 10 +1},
                    waste: AggregateFileInfo { total_files: 30+3, total_bytes: 500+50},
                    potential_gains: Some(AggregateFileInfo {
                        total_files: 55,
                        total_bytes: 110
                    }),
                    waste_latest_version: Some(("4".into(), AggregateFileInfo { total_files: 2, total_bytes: 40}))
                },
                "b".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 8, total_bytes: 10 },
                    waste: AggregateFileInfo { total_files: 6, total_bytes: 150 },
                    potential_gains: None,
                    waste_latest_version: Some(("1".into(), AggregateFileInfo { total_files: 3, total_bytes: 50}))
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 4*2, total_bytes: 40*2},
                "b".into() => AggregateFileInfo {total_files: 4*2, total_bytes: 40*2},
                "c".into() => AggregateFileInfo {total_files: 3*2, total_bytes: 30*2},
                "d".into() => AggregateFileInfo {total_files: 1*2, total_bytes: 10*2},
                "e".into() => AggregateFileInfo {total_files: 4, total_bytes: 2},
            },
        }
    );
}
// Merging two crates of the same name stays a Crate: totals add up and the
// per-version maps are unioned.
#[test]
fn crate_and_crate_of_same_name() {
    assert_eq!(
        Report::Crate {
            crate_name: "a".into(),
            total_size_in_bytes: 3,
            total_files: 9,
            info_by_version: b_tree_map! {
                "1".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 4, total_bytes: 1 },
                    waste: AggregateFileInfo { total_files: 3, total_bytes: 50 },
                    potential_gains: Some(AggregateFileInfo {
                        total_files: 50,
                        total_bytes: 100
                    }),
                    waste_latest_version: None,
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
                "b".into() => AggregateFileInfo {total_files: 2, total_bytes: 20},
                "c".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
            },
        }
        .merge(Report::Crate {
            crate_name: "a".into(),
            total_size_in_bytes: 9,
            total_files: 3,
            info_by_version: b_tree_map! {
                "2".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 8, total_bytes: 10 },
                    waste: AggregateFileInfo { total_files: 6, total_bytes: 150 },
                    potential_gains: Some(AggregateFileInfo {
                        total_files: 5,
                        total_bytes: 10
                    }),
                    waste_latest_version: None,
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
                "b".into() => AggregateFileInfo {total_files: 2, total_bytes: 20},
                "d".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
            },
        }),
        Report::Crate {
            crate_name: "a".to_string(),
            total_size_in_bytes: 12,
            total_files: 12,
            info_by_version: b_tree_map! {
                "1".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 4, total_bytes: 1 },
                    waste: AggregateFileInfo { total_files: 3, total_bytes: 50 },
                    potential_gains:Some(AggregateFileInfo {
                        total_files: 50,
                        total_bytes: 100
                    }),
                    waste_latest_version: None,
                },
                "2".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 8, total_bytes: 10 },
                    waste: AggregateFileInfo { total_files: 6, total_bytes: 150 },
                    potential_gains: Some(AggregateFileInfo {
                        total_files: 5,
                        total_bytes: 10
                    }),
                    waste_latest_version: None,
                }
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
                "b".into() => AggregateFileInfo {total_files: 4, total_bytes: 40},
                "c".into() => AggregateFileInfo {total_files: 3, total_bytes: 30},
                "d".into() => AggregateFileInfo {total_files: 1, total_bytes: 10},
            },
        }
    );
}
// Merging two Versions of the same crate yields a Crate whose per-version map
// has both entries and whose waste is re-grouped by file extension.
#[test]
fn two_versions_of_same_crate() {
    assert_eq!(
        Report::Version {
            crate_name: "a".into(),
            crate_version: "1".into(),
            total_size_in_bytes: 1,
            total_files: 4,
            wasted_files: vec![("a.a".into(), 20), ("b/a.b".into(), 20), ("c/a.b".into(), 10)],
            suggested_fix: Some(Fix::ImprovedInclude {
                include: vec![],
                include_removed: vec![],
                potential: Some(PotentialWaste {
                    patterns_to_fix: vec![],
                    potential_waste: vec![TarHeader {
                        path: (&b"a/d.c"[..]).into(),
                        size: 10,
                        entry_type: 0
                    }]
                }),
                has_build_script: false
            })
        }
        .merge(Report::Version {
            crate_name: "a".into(),
            crate_version: "2".into(),
            total_size_in_bytes: 2,
            total_files: 5,
            wasted_files: vec![("a.a".into(), 40), ("c/a.b".into(), 50), ("d/a.c".into(), 90)],
            suggested_fix: Some(Fix::ImprovedInclude {
                include: vec![],
                include_removed: vec![],
                potential: Some(PotentialWaste {
                    patterns_to_fix: vec![],
                    potential_waste: vec![TarHeader {
                        path: (&b"a/d.c"[..]).into(),
                        size: 100,
                        entry_type: 0
                    }]
                }),
                has_build_script: false
            })
        }),
        Report::Crate {
            crate_name: "a".into(),
            total_size_in_bytes: 3,
            total_files: 9,
            info_by_version: b_tree_map! {
                "1".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 4, total_bytes: 1 },
                    waste: AggregateFileInfo { total_files: 3, total_bytes: 50 },
                    potential_gains: Some(AggregateFileInfo {total_files: 1, total_bytes: 10}),
                    waste_latest_version: None,
                },
                "2".into() => VersionInfo {
                    all: AggregateFileInfo { total_files: 5, total_bytes: 2 },
                    waste: AggregateFileInfo { total_files: 3, total_bytes: 180 },
                    potential_gains: Some(AggregateFileInfo {total_files: 1, total_bytes: 100}),
                    waste_latest_version: None,
                },
            },
            wasted_by_extension: b_tree_map! {
                "a".into() => AggregateFileInfo {total_files: 2, total_bytes: 60},
                "b".into() => AggregateFileInfo {total_files: 3, total_bytes: 80},
                "c".into() => AggregateFileInfo {total_files: 1, total_bytes: 90},
            },
        }
    );
}
#[test]
fn two_versions_of_different_crate() {
assert_eq!(
Report::Version {
crate_name: "a".into(),
crate_version: "1".into(),
total_size_in_bytes: 1,
total_files: 4,
wasted_files: vec![("a.a".into(), 20), ("b/a.b".into(), 20), ("c/a.b".into(), 10)],
suggested_fix: Some(Fix::ImprovedInclude {
include: vec![],
include_removed: vec![],
potential: Some(PotentialWaste {
patterns_to_fix: vec![],
potential_waste: vec![TarHeader {
path: (&b"a/b.c"[..]).into(),
size: 10,
entry_type: 0
}]
}),
has_build_script: false
})
}
.merge(Report::Version {
crate_name: "b".into(),
crate_version: "1".into(),
total_size_in_bytes: 2,
total_files: 5,
wasted_files: vec![("a.a".into(), 40), ("c/a.b".into(), 50), ("d/a.c".into(), 90)],
suggested_fix: Some(Fix::ImprovedInclude {
include: vec![],
include_removed: vec![],
potential: Some(PotentialWaste {
patterns_to_fix: vec![],
potential_waste: vec![TarHeader {
path: (&b"a/d.c"[..]).into(),
size: 100,
entry_type: 0
}]
}),
has_build_script: false
})
}),
Report::CrateCollection {
total_size_in_bytes: 3,
total_files: 9,
info_by_crate: b_tree_map! {
"a".into() => VersionInfo {
all: AggregateFileInfo { total_files: 4, total_bytes: 1 },
waste: AggregateFileInfo { total_files: 3, total_bytes: 50 },
potential_gains: Some(AggregateFileInfo{total_files: 1, total_bytes: 10}),
waste_latest_version: Some(("1".into(), AggregateFileInfo { total_files: 3, total_bytes: 50 }))
},
"b".into() => VersionInfo {
all: AggregateFileInfo { total_files: 5, total_bytes: 2 },
waste: AggregateFileInfo { total_files: 3, total_bytes: 180 },
potential_gains: Some(AggregateFileInfo{total_files: 1, total_bytes: 100}),
waste_latest_version: Some(("1".into(), AggregateFileInfo { total_files: 3, total_bytes: 180 }))
},
},
wasted_by_extension: b_tree_map! {
"a".into() => AggregateFileInfo {total_files: 2, total_bytes: 60},
"b".into() => AggregateFileInfo {total_files: 3, total_bytes: 80},
"c".into() => AggregateFileInfo {total_files: 1, total_bytes: 90},
},
}
);
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner/src/engine/report/waste/report_test/mod.rs | criner/src/engine/report/waste/report_test/mod.rs | mod merge;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner-waste-report/src/html.rs | criner-waste-report/src/html.rs | use super::{AggregateFileInfo, Dict, Fix, Report, VersionInfo};
use bytesize::ByteSize;
use dia_semver::Semver;
use horrorshow::{box_html, helper::doctype, html, Render, RenderBox, RenderOnce, TemplateBuffer};
use std::time::SystemTime;
/// Translate a suggested `Fix` into an aggregate (file count plus byte count)
/// of the files it could potentially save. Only `ImprovedInclude` fixes carry
/// such a potential; any other fix, or no fix at all, yields `None`.
pub fn fix_to_wasted_files_aggregate(fix: Option<Fix>) -> Option<AggregateFileInfo> {
    let potential_waste = match fix {
        Some(Fix::ImprovedInclude {
            potential: Some(potential),
            ..
        }) => potential.potential_waste,
        _ => return None,
    };
    let mut aggregate = AggregateFileInfo::default();
    for header in potential_waste {
        aggregate.total_files += 1;
        aggregate.total_bytes += header.size;
    }
    Some(aggregate)
}
pub const NO_EXT_MARKER: &str = "<NO_EXT>";
/// Parse `version` as semver, tolerating certain malformed pre-release
/// versions by retrying with everything from the first `-` or `+` stripped.
///
/// Panics (via the `expect` calls) if `version` is unparseable and contains
/// neither `-` nor `+`, or if the truncated form still does not parse —
/// callers are expected to pass versions as found on crates.io.
fn parse_semver(version: &str) -> Semver {
    use std::str::FromStr;
    Semver::from_str(version)
        .or_else(|_| {
            Semver::from_str(
                // NOTE(review): `find('-')` scans the whole string, so a version
                // like "1.0.0+a-b" is cut at the '-' inside the build metadata —
                // presumably acceptable for this lossy fallback; confirm.
                &version[..version
                    .find('-')
                    .or_else(|| version.find('+'))
                    .expect("some prerelease version")],
            )
        })
        .expect("semver parsing to work if violating prerelease versions are stripped")
}
/// Sum the `potential_gains` of all entries in the map, returning `None` when
/// there are no bytes to gain at all.
///
/// Iterates by reference and adds the two counters directly, which avoids
/// cloning an `AggregateFileInfo` per entry (partially addressing the TODO
/// about unnecessary clones in this module).
fn potential_savings(info_by_crate: &Dict<VersionInfo>) -> Option<AggregateFileInfo> {
    let mut gains = AggregateFileInfo::default();
    for info in info_by_crate.values() {
        if let Some(g) = info.potential_gains.as_ref() {
            gains.total_bytes += g.total_bytes;
            gains.total_files += g.total_files;
        }
    }
    // Only a non-zero byte gain counts as savings worth displaying.
    if gains.total_bytes > 0 {
        Some(gains)
    } else {
        None
    }
}
/// Render the "total uncompressed bytes" and "total files" sections shown on
/// every report page.
fn total_section(bytes: u64, files: u64) -> Box<dyn Render> {
    box_html! {
        section(id="total-bytes") {
            h3: "total uncompressed bytes";
            p: format!("{}", ByteSize(bytes))
        }
        section(id="total-files") {
            h3: "total files";
            p: files
        }
    }
}
/// Render the "potential savings" section, or nothing when no savings are known.
fn savings_section(d: Option<AggregateFileInfo>) -> Box<dyn Render> {
    box_html! {
        @ if let Some(all) = d.as_ref() {
            section(id="potential-savings") {
                h3: "potential savings";
                p: format!("{} total in {} files", ByteSize(all.total_bytes), all.total_files);
            }
        }
    }
}
/// Render a `title` element with the given text.
fn title_section(title: impl Into<String>) -> Box<dyn RenderBox> {
    let title = title.into();
    box_html! {
        title: title
    }
}
/// Render the HTML `head` with the page title and the alpha-version banner.
fn page_head(title: impl Into<String>) -> Box<dyn RenderBox> {
    let title = title.into();
    box_html! {
        head {
            title: title;
            span(style="position: fixed; top: 1em; right: 1em; color: pink"): "Ugly Alpha 1";
        }
    }
}
/// Render the per-child summary (totals, waste across versions, waste of the
/// latest version with a link, and potential gains) shown under each entry of
/// a version or crate listing. `name` is used to build the link target.
fn info_section(name: String, info: VersionInfo) -> Box<dyn RenderBox> {
    let VersionInfo {
        all,
        waste,
        potential_gains,
        waste_latest_version,
    } = info;
    box_html! {
        section(id="child-total") {
            h3: "Total";
            p: format!("{} total in {} files", ByteSize(all.total_bytes), all.total_files);
        }
        section(id="child-waste") {
            h3: "Waste in all versions";
            p: format!("{} wasted in {} files", ByteSize(waste.total_bytes), waste.total_files);
        }
        @ if let Some((child_name, info)) = waste_latest_version {
            section(id="child-waste-latest-version") {
                h3 {
                    : "Waste in ";
                    a(href=format!("{}/{}.html", name, child_name)): child_name;
                }
                p: format!("{} wasted in {} files", ByteSize(info.total_bytes), info.total_files);
            }
        }
        @ if let Some(gains) = potential_gains {
            section(id="child-gains") {
                h3: "Potential Gains";
                p: format!("{} potentially gained in {} files", ByteSize(gains.total_bytes), gains.total_files);
            }
        }
    }
}
/// Render the footer shown on every page: author credit, a feedback link, and
/// the generation timestamp.
fn page_footer() -> impl Render {
    html! {
        footer {
            span {
                : "Created by ";
                a(href="https://github.com/Byron/"): "Byron";
            }
            : " | ";
            span {
                a(href="https://github.com/the-lean-crate/criner/issues/new?labels=bug&template=waste-report-incorrect.md&title=%5BINCORRECT+WASTE+REPORT%5D+%3CCrate+Name%3E",
                target="_blank",
                rel="noopener noreferrer"): "Provide feedback";
            }
            : " | ";
            span {
                : "Generated by ";
                a(href="https://github.com/the-lean-crate/criner#the-criner-waste-report"): "Criner";
                : format!(" at {}", humantime::format_rfc3339_seconds(SystemTime::now()));
            }
        }
    }
}
/// How `child_items_section` orders its entries.
#[derive(Clone, Copy)]
enum SortOrder {
    /// By semantic version of the entry name (used when listing versions of a crate).
    Semver,
    /// By wasted bytes (used when listing crates of a collection).
    Waste,
}
/// Render an ordered list of child items (versions or crates), largest/newest
/// first, each linking to `{prefix}{name}{suffix}` and followed by its
/// `info_section` summary.
fn child_items_section(
    title: impl Into<String>,
    info_by_child: Dict<VersionInfo>,
    prefix: String,
    suffix: impl Into<String>,
    order: SortOrder,
) -> Box<dyn RenderBox> {
    let title = title.into();
    let suffix = suffix.into();
    let mut sorted: Vec<_> = info_by_child.into_iter().collect();
    // Sort ascending here; rendering below iterates in reverse to show the
    // greatest element first.
    sorted.sort_by(|(ln, le), (rn, re)| match order {
        SortOrder::Semver => parse_semver(ln).cmp(&parse_semver(rn)),
        // Prefer comparing waste of the latest version when both sides have
        // it, otherwise fall back to overall waste.
        SortOrder::Waste => match (&le.waste_latest_version, &re.waste_latest_version) {
            (Some(le), Some(re)) => le.1.total_bytes.cmp(&re.1.total_bytes),
            _ => le.waste.total_bytes.cmp(&re.waste.total_bytes),
        },
    });
    box_html! {
        section(id="children") {
            h1: title;
            ol {
                @ for (name, info) in sorted.into_iter().rev() {
                    li {
                        h3 {
                            a(href=format!("{}{}{}", prefix, name, suffix)) {
                                : name.clone()
                            }
                        }
                        : info_section(name, info);
                    }
                }
            }
        }
    }
}
/// Render the "Waste by Extension" ranking: at most the top 20 extensions by
/// wasted bytes, plus a summary line for whatever was skipped.
fn by_extension_section(wasted_by_extension: Dict<AggregateFileInfo>) -> Box<dyn RenderBox> {
    let mut sorted: Vec<_> = wasted_by_extension.into_iter().collect();
    // Ascending sort; rendering iterates in reverse for biggest-first display.
    sorted.sort_by_key(|(_, e)| e.total_bytes);
    let top_list = 20;
    // Number of entries that fall off the list and their combined file/byte totals.
    let skip_info = if sorted.len() > top_list {
        Some((
            sorted.len() - top_list,
            sorted
                .iter()
                .rev()
                .skip(top_list)
                .fold((0, 0), |(tf, tb), e| (tf + e.1.total_files, tb + e.1.total_bytes)),
        ))
    } else {
        None
    };
    box_html! {
        section {
            h1: "Waste by Extension";
            ol {
                @ for (name, info) in sorted.into_iter().rev().take(top_list) {
                    li {
                        h3 {
                            @ if name.ends_with(NO_EXT_MARKER) {
                                : "no extension"
                            } else {
                                : &format!("*.{}", name)
                            }
                        }
                        p: format!("{} waste in {} files", ByteSize(info.total_bytes), info.total_files);
                    }
                }
            }
            @ if let Some((num_skipped, (tf, tb))) = skip_info {
                p: format!("Skipped {} extensions totalling {} files and {}", num_skipped, tf, ByteSize(tb))
            }
        }
    }
}
impl RenderOnce for Report {
fn render_once(self, tmpl: &mut TemplateBuffer<'_>)
where
Self: Sized,
{
use super::Report::*;
match self {
Version {
crate_name,
crate_version,
total_files,
total_size_in_bytes,
mut wasted_files,
suggested_fix,
} => {
wasted_files.sort_by_key(|(_, s)| *s);
let title = format!("{}:{}", crate_name, crate_version);
tmpl << html! {
: doctype::HTML;
html {
: page_head(title.clone());
body {
article {
: title_section(title);
: total_section(total_size_in_bytes, total_files);
: savings_section(fix_to_wasted_files_aggregate(suggested_fix.clone()));
@ if let Some(suggested_fix) = suggested_fix {
section {
h3: "Fix";
section {
|t| write!(t, "{:#?}", suggested_fix)
}
}
} else {
p: "Perfectly lean!"
}
@ if !wasted_files.is_empty() {
section {
h3: format!("{} wasted files", wasted_files.len());
p: format!("total waste: {}", ByteSize(wasted_files.iter().map(|(_, s)| *s).sum::<u64>()));
ol {
@ for (path, size) in wasted_files.into_iter().rev() {
li : format_args!("{} : {}", path, ByteSize(size))
}
}
}
}
}
}
: page_footer();
}
}
}
Crate {
crate_name,
total_size_in_bytes,
total_files,
info_by_version,
wasted_by_extension,
} => {
let gains = potential_savings(&info_by_version);
let no_prefix = String::new();
tmpl << html! {
: doctype::HTML;
html {
: page_head(crate_name.clone());
body {
article {
: title_section(crate_name.clone());
: total_section(total_size_in_bytes, total_files);
: savings_section(gains);
: by_extension_section(wasted_by_extension);
: child_items_section("Versions", info_by_version, no_prefix, ".html", SortOrder::Semver);
}
}
: page_footer();
}
}
}
CrateCollection {
total_size_in_bytes,
total_files,
info_by_crate,
wasted_by_extension,
} => {
let title = "Crates.io Waste Report";
let no_prefix = String::new();
let no_suffix = String::new();
let gains = potential_savings(&info_by_crate);
let (waste_in_bytes, wasted_files_count) = wasted_by_extension
.iter()
.fold((0, 0), |(waste_bytes, waste_files), e| {
(waste_bytes + e.1.total_bytes, waste_files + e.1.total_files)
});
tmpl << html! {
: doctype::HTML;
html {
: page_head(title);
body {
article {
: title_section(title);
: total_section(total_size_in_bytes, total_files);
section {
h3: format!("{} wasted in {} files", ByteSize(waste_in_bytes), wasted_files_count);
}
: savings_section(gains);
: by_extension_section(wasted_by_extension);
: child_items_section("Crates", info_by_crate, no_prefix, no_suffix, SortOrder::Waste);
}
}
: page_footer();
}
}
}
}
}
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner-waste-report/src/lib.rs | criner-waste-report/src/lib.rs | #![deny(unsafe_code)]
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "html")]
pub mod html;
pub mod result;
#[cfg(test)]
mod test;
use serde_derive::{Deserialize, Serialize};
use std::collections::BTreeMap;
pub use result::{globset_from_patterns, tar_path_to_utf8_str};
pub type Patterns = Vec<String>;
/// An entry in a tar archive, including the most important meta-data
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct TarHeader {
/// The normalized path of the entry. May not be unicode encoded.
pub path: Vec<u8>,
/// The size of the file in bytes
pub size: u64,
/// The type of entry, to be analyzed with tar::EntryType
pub entry_type: u8,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct TarPackage {
/// Meta data of all entries in the crate
pub entries_meta_data: Vec<TarHeader>,
/// The actual content of selected files, Cargo.*, build.rs and lib/main
/// IMPORTANT: This file may be partial and limited in size unless it is Cargo.toml, which
/// is always complete.
/// Note that these are also present in entries_meta_data.
pub entries: Vec<(TarHeader, Vec<u8>)>,
}
/// A set of include patterns that could be tightened, along with the files
/// that would no longer be packaged if they were.
#[derive(PartialEq, Eq, Debug, Clone, Deserialize, Serialize)]
pub struct PotentialWaste {
    /// Include patterns that would have to change to realize the waste below.
    pub patterns_to_fix: Patterns,
    /// The files that would be saved by fixing the patterns above.
    pub potential_waste: Vec<TarHeader>,
}
/// A suggested change to a crate manifest's `include`/`exclude` configuration.
#[derive(PartialEq, Eq, Debug, Clone, Deserialize, Serialize)]
pub enum Fix {
    /// An existing include list, rewritten to be tighter.
    ImprovedInclude {
        include: Patterns,
        include_removed: Patterns,
        potential: Option<PotentialWaste>,
        has_build_script: bool,
    },
    /// An existing exclude list with additional patterns appended.
    EnrichedExclude {
        exclude: Patterns,
        exclude_added: Patterns,
        has_build_script: bool,
    },
    /// A brand new include list for a manifest that declared neither include nor exclude.
    NewInclude {
        include: Patterns,
        has_build_script: bool,
    },
    /// Replace an existing exclude list with an include list.
    RemoveExcludeAndUseInclude {
        include_added: Patterns,
        include: Patterns,
        include_removed: Patterns,
    },
    /// Drop the exclude list entirely.
    RemoveExclude,
}
impl Fix {
    /// Fold `rhs` and the waste it implies into this fix.
    ///
    /// Only `NewInclude` absorbs a `PotentialWaste`: its patterns are appended
    /// to the include list and its files to `waste`. Every other fix (or a
    /// missing `rhs`) is returned unchanged together with the given `waste`.
    pub fn merge(self, rhs: Option<PotentialWaste>, mut waste: Vec<TarHeader>) -> (Fix, Vec<TarHeader>) {
        match (self, rhs) {
            (
                Fix::NewInclude {
                    mut include,
                    has_build_script,
                },
                Some(potential),
            ) => {
                include.extend(potential.patterns_to_fix);
                waste.extend(potential.potential_waste);
                (
                    Fix::NewInclude {
                        include,
                        has_build_script,
                    },
                    waste,
                )
            }
            (fix, _) => (fix, waste),
        }
    }
}
/// The subset of a `Cargo.toml` manifest that matters for waste analysis.
#[derive(Default, Deserialize)]
pub struct CargoConfig {
    pub package: Option<PackageSection>,
    pub lib: Option<SectionWithPath>,
    pub bin: Option<Vec<SectionWithPath>>,
}
impl CargoConfig {
    /// The build script path: the one declared in the manifest, or the
    /// conventional `build.rs` when none is declared.
    pub fn actual_or_expected_build_script_path(&self) -> &str {
        match self.build_script_path() {
            Some(path) => path,
            None => "build.rs",
        }
    }
    /// The build script path explicitly declared in `[package]`, if any.
    pub fn build_script_path(&self) -> Option<&str> {
        self.package.as_ref()?.build_script_path()
    }
    /// The library entry point: declared `[lib] path`, or the default `src/lib.rs`.
    pub fn lib_path(&self) -> &str {
        match self.lib.as_ref().and_then(|l| l.path.as_deref()) {
            Some(path) => path,
            None => "src/lib.rs",
        }
    }
    /// All declared `[[bin]] path`s, or the default `src/main.rs` when no
    /// `[[bin]]` sections exist at all.
    pub fn bin_paths(&self) -> Vec<&str> {
        match self.bin.as_ref() {
            Some(bins) => bins.iter().filter_map(|b| b.path.as_deref()).collect(),
            None => vec!["src/main.rs"],
        }
    }
}
/// Parse a `Cargo.toml` string, falling back to an empty config when it is
/// not valid TOML.
impl From<&str> for CargoConfig {
    fn from(v: &str) -> Self {
        toml::from_str::<CargoConfig>(v).unwrap_or_default() // you would think all of them parse OK, but that's wrong :D
    }
}
/// A `[lib]`/`[[bin]]` style manifest section of which only `path` matters here.
#[derive(Default, Deserialize)]
pub struct SectionWithPath {
    pub path: Option<String>,
}
/// The `[package]` manifest section: include/exclude lists and the `build` key.
#[derive(Default, Deserialize)]
pub struct PackageSection {
    pub include: Option<Patterns>,
    pub exclude: Option<Patterns>,
    // `build` may be a string path or a boolean, hence the generic toml value.
    pub build: Option<toml::value::Value>,
}
impl PackageSection {
    /// The explicit build script path, present only when `package.build` is a string.
    pub fn build_script_path(&self) -> Option<&str> {
        match self.build.as_ref() {
            Some(value) => value.as_str(),
            None => None,
        }
    }
}
pub type WastedFile = (String, u64);
/// A count of files together with their cumulative size in bytes.
#[derive(Default, Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
pub struct AggregateFileInfo {
    pub total_bytes: u64,
    pub total_files: u64,
}
impl std::ops::AddAssign for AggregateFileInfo {
    /// Accumulate another aggregate into this one, field by field.
    fn add_assign(&mut self, rhs: Self) {
        self.total_bytes += rhs.total_bytes;
        self.total_files += rhs.total_files;
    }
}
/// Merge another version summary into this one: plain sums for `all` and
/// `waste`, `Option`-aware merges for the two optional fields.
impl std::ops::AddAssign for VersionInfo {
    fn add_assign(&mut self, rhs: Self) {
        let Self {
            all,
            waste,
            potential_gains,
            waste_latest_version,
        } = rhs;
        self.all += all;
        self.waste += waste;
        // The helpers take owned values, hence the clones of the current state.
        self.potential_gains = add_optional_aggregate(self.potential_gains.clone(), potential_gains);
        self.waste_latest_version =
            add_named_optional_aggregate(self.waste_latest_version.clone(), waste_latest_version);
    }
}
/// Combine two optional `(version name, aggregate)` pairs, keeping the pair
/// with the greater name; `Some` always wins over `None`.
///
/// NOTE(review): names are compared as plain strings, so "0.9" beats "0.10"
/// lexically even though 0.10 is the newer semver — confirm this is intended
/// for the "waste of latest version" merge.
fn add_named_optional_aggregate(
    lhs: Option<(String, AggregateFileInfo)>,
    rhs: Option<(String, AggregateFileInfo)>,
) -> Option<(String, AggregateFileInfo)> {
    Some(match (lhs, rhs) {
        (Some((lhs_name, lhs)), Some((rhs_name, _))) if lhs_name > rhs_name => (lhs_name, lhs),
        (Some(_), Some((rhs_name, rhs))) => (rhs_name, rhs),
        (Some(v), None) => v,
        (None, Some(v)) => v,
        (None, None) => return None,
    })
}
/// Combine two optional aggregates: both present means their sum, one present
/// means that one, and two `None`s stay `None`.
pub fn add_optional_aggregate(
    lhs: Option<AggregateFileInfo>,
    rhs: Option<AggregateFileInfo>,
) -> Option<AggregateFileInfo> {
    match (lhs, rhs) {
        (Some(mut sum), Some(other)) => {
            sum += other;
            Some(sum)
        }
        (lhs, rhs) => lhs.or(rhs),
    }
}
/// Aggregated information about a single crate version — or, once merged,
/// about all versions of a crate.
#[derive(Default, Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
pub struct VersionInfo {
    /// All files of the version(s).
    pub all: AggregateFileInfo,
    /// The files considered wasted.
    pub waste: AggregateFileInfo,
    /// Waste of the most recent version, keyed by its version string.
    pub waste_latest_version: Option<(String, AggregateFileInfo)>,
    /// What could additionally be saved by applying the suggested fix.
    pub potential_gains: Option<AggregateFileInfo>,
}
pub type AggregateVersionInfo = VersionInfo;
pub type Dict<T> = BTreeMap<String, T>;
/// The aggregation levels of a waste report: one crate version, all versions
/// of one crate merged, or a whole collection of crates merged.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
pub enum Report {
    /// Data about a single crate version.
    Version {
        crate_name: String,
        crate_version: String,
        total_size_in_bytes: u64,
        total_files: u64,
        wasted_files: Vec<WastedFile>,
        suggested_fix: Option<Fix>,
    },
    /// All versions of one crate, merged.
    Crate {
        crate_name: String,
        total_size_in_bytes: u64,
        total_files: u64,
        info_by_version: Dict<VersionInfo>,
        wasted_by_extension: Dict<AggregateFileInfo>,
    },
    /// Many crates, merged.
    CrateCollection {
        total_size_in_bytes: u64,
        total_files: u64,
        info_by_crate: Dict<AggregateVersionInfo>,
        wasted_by_extension: Dict<AggregateFileInfo>,
    },
}
/// Drop the entries cargo adds to every package on its own
/// (`.cargo_vcs_info.json` and `Cargo.toml.orig`) — they are not the crate
/// author's doing and must not count as waste.
fn remove_implicit_entries(entries: &mut Vec<TarHeader>) {
    entries.retain(|entry| {
        !matches!(
            tar_path_to_utf8_str(&entry.path),
            ".cargo_vcs_info.json" | "Cargo.toml.orig"
        )
    });
}
impl Report {
    /// Analyze a crate archive and produce a `Report::Version` carrying the
    /// wasted files found and a suggested `Fix`, if any.
    ///
    /// The analysis strategy depends on which of `include`/`exclude` the
    /// manifest declares: both, only one, or neither each take a different path.
    pub fn from_package(
        crate_name: &str,
        crate_version: &str,
        TarPackage {
            mut entries_meta_data,
            entries,
        }: TarPackage,
    ) -> Report {
        // Files cargo adds on its own must not be counted against the author.
        remove_implicit_entries(&mut entries_meta_data);
        let total_size_in_bytes = entries_meta_data.iter().map(|e| e.size).sum();
        let total_files = entries_meta_data.len() as u64;
        let cargo_config = Self::cargo_config_from_entries(&entries);
        let (includes, excludes, compile_time_includes, build_script_name) =
            Self::cargo_config_into_includes_excludes(cargo_config, &entries, &entries_meta_data);
        // Dispatch on the declared include/exclude combination.
        let (suggested_fix, wasted_files) = match (includes, excludes, build_script_name, compile_time_includes) {
            (Some(includes), Some(excludes), _presence_of_build_script_not_relevant, _) => {
                Self::compute_includes_from_includes_and_excludes(entries_meta_data, includes, excludes)
            }
            (Some(includes), None, build_script_name, _) => {
                Self::enrich_includes(entries_meta_data, includes, build_script_name.is_some())
            }
            (None, Some(excludes), build_script_name, compile_time_includes) => Self::enrich_excludes(
                entries_meta_data,
                excludes,
                compile_time_includes,
                build_script_name.is_some(),
            ),
            (None, None, build_script_name, compile_time_includes) => {
                Self::standard_includes(entries_meta_data, build_script_name, compile_time_includes)
            }
        };
        let wasted_files = Self::convert_to_wasted_files(wasted_files);
        Report::Version {
            crate_name: crate_name.into(),
            crate_version: crate_version.into(),
            total_size_in_bytes,
            total_files,
            wasted_files,
            suggested_fix,
        }
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner-waste-report/src/result.rs | criner-waste-report/src/result.rs | use super::{CargoConfig, Fix, Patterns, PotentialWaste, Report, TarHeader, WastedFile};
use std::{collections::BTreeSet, path::Path, path::PathBuf};
lazy_static! {
    // Captures the path argument of `include_str!("...")`/`include_bytes!("...")`
    // occurrences in source files.
    static ref COMPILE_TIME_INCLUDE: regex::bytes::Regex =
        regex::bytes::Regex::new(r##"include_(str|bytes)!\("(?P<include>.+?)"\)"##)
            .expect("valid statically known regex");
    // Captures `cargo:rerun-if-changed=` paths as well as any other quoted,
    // path-looking string found in a build script.
    static ref BUILD_SCRIPT_PATHS: regex::bytes::Regex =
        regex::bytes::Regex::new(r##""cargo:rerun-if-changed=(?P<path>.+?)"|"(?P<path_like>.+?)""##)
            .expect("valid statically known regex");
    // Pre-compiled forms of the standard exclude/include pattern lists below,
    // both as batch glob sets and as (pattern, matcher) pairs.
    static ref STANDARD_EXCLUDES_GLOBSET: globset::GlobSet = globset_from_patterns(standard_exclude_patterns());
    static ref STANDARD_EXCLUDE_MATCHERS: Vec<(&'static str, globset::GlobMatcher)> = standard_exclude_patterns()
        .iter()
        .cloned()
        .map(|p| (p, make_glob(p).compile_matcher()))
        .collect();
    static ref STANDARD_INCLUDE_GLOBS: Vec<globset::Glob> =
        standard_include_patterns().iter().map(|p| make_glob(p)).collect();
    static ref STANDARD_INCLUDE_MATCHERS: Vec<(&'static str, globset::GlobMatcher)> = standard_include_patterns()
        .iter()
        .cloned()
        .map(|p| (p, make_glob(p).compile_matcher()))
        .collect();
}
/// Return the crate-relative form of a tar entry path.
///
/// Entries in a crate tarball live under a `<name>-<version>/` top-level
/// directory; everything up to and including the first `/` or `\` is dropped.
/// Paths without a separator are returned unchanged.
pub fn tar_path_to_utf8_str(bytes: &[u8]) -> &str {
    let mut parts = bytes.splitn(2, |&b| b == b'/' || b == b'\\');
    let first = parts.next().unwrap_or(bytes);
    let relative = parts.next().unwrap_or(first);
    std::str::from_utf8(relative).expect("valid utf8 paths in crate archive")
}
/// Like `tar_path_to_utf8_str`, but as a `Path` (crate-relative).
fn tar_path_to_path(bytes: &[u8]) -> &Path {
    tar_path_to_utf8_str(bytes).as_ref()
}
/// Interpret the raw tar path as a `Path` without stripping the
/// `<name>-<version>/` archive prefix.
fn tar_path_to_path_no_strip(bytes: &[u8]) -> &Path {
    let utf8 = std::str::from_utf8(bytes).expect("valid utf8 paths in crate archive");
    Path::new(utf8)
}
// NOTE: Actually there only seem to be files in these archives, but let's be safe
// There are definitely no directories
// True for the tar entry-type bytes that denote a regular file
// (NUL in old-style archives, ASCII '0' in ustar archives).
fn entry_is_file(entry_type: u8) -> bool {
    matches!(entry_type, b'\x00' | b'0')
}
/// Partition `entries` into those whose crate-relative path matches `globset`
/// (first tuple field) and those that do not (second field), preserving the
/// original order in both halves.
fn split_to_matched_and_unmatched(
    entries: Vec<TarHeader>,
    globset: &globset::GlobSet,
) -> (Vec<TarHeader>, Vec<TarHeader>) {
    // `Iterator::partition` keeps the relative order of both halves, exactly
    // like the previous hand-rolled filter_map with a side-effecting push —
    // but without needing a clippy suppression.
    entries
        .into_iter()
        .partition(|e| globset.is_match(tar_path_to_utf8_str(&e.path)))
}
/// Synthesize a directory entry (tar type `5`, size 0) for the parent
/// directory of every file in `entries`; duplicates are collapsed by the set.
fn directories_of(entries: &[TarHeader]) -> Vec<TarHeader> {
    let mut directories = BTreeSet::new();
    for e in entries {
        if entry_is_file(e.entry_type) {
            if let Some(parent) = tar_path_to_path_no_strip(&e.path).parent() {
                directories.insert(parent);
            }
        }
    }
    // ustar type flag for directories
    let tar_directory_entry = b'5';
    directories
        .into_iter()
        .map(|k| TarHeader {
            path: k.to_str().expect("utf8 paths").as_bytes().to_owned(),
            size: 0,
            entry_type: tar_directory_entry,
        })
        .collect()
}
/// Path globs that are typically not needed to build a crate and are thus
/// candidates for exclusion: images, docs, tests, benchmarks, examples,
/// fixtures and similar auxiliary files and directories.
///
/// Fix: the list previously contained `"**/*.jpeg"` twice; the duplicate is
/// removed so downstream matcher lists do not carry a redundant entry.
fn standard_exclude_patterns() -> &'static [&'static str] {
    &[
        "**/*.jpg",
        "**/*.jpeg",
        "**/*.png",
        "**/*.gif",
        "**/*.bmp",
        "**/doc/**/*",
        "**/docs/**/*",
        "**/benches/**/*",
        "**/benchmark/**/*",
        "**/benchmarks/**/*",
        "**/test/**/*",
        "**/*_test.*",
        "**/*_test/**/*",
        "**/tests/**/*",
        "**/*_tests.*",
        "**/*_tests/**/*",
        "**/testing/**/*",
        "**/spec/**/*",
        "**/*_spec.*",
        "**/*_spec/**/*",
        "**/specs/**/*",
        "**/*_specs.*",
        "**/*_specs/**/*",
        "**/example/**/*",
        "**/examples/**/*",
        "**/target/**/*",
        "**/build/**/*",
        "**/out/**/*",
        "**/tmp/**/*",
        "**/etc/**/*",
        "**/testdata/**/*",
        "**/samples/**/*",
        "**/assets/**/*",
        "**/maps/**/*",
        "**/media/**/*",
        "**/fixtures/**/*",
        "**/node_modules/**/*",
    ]
}
/// File patterns that should always be packaged: sources, the manifest files,
/// and the customary authors/license/readme/changelog files in their common
/// spellings.
fn standard_include_patterns() -> &'static [&'static str] {
    &[
        "src/**/*",
        "Cargo.*",
        "authors",
        "AUTHORS",
        "license.*",
        "license-*",
        "LICENSE.*",
        "LICENSE-*",
        "license",
        "LICENSE",
        "readme.*",
        "README.*",
        "readme",
        "README",
        "changelog.*",
        "CHANGELOG.*",
        "changelog",
        "CHANGELOG",
    ]
}
/// Compile all `patterns` into a single `GlobSet` for batch matching.
pub fn globset_from_patterns(patterns: impl IntoIterator<Item = impl AsRef<str>>) -> globset::GlobSet {
    let mut builder = globset::GlobSetBuilder::new();
    for pattern in patterns.into_iter() {
        builder.add(make_glob(pattern.as_ref()));
    }
    builder.build().expect("multiple globs to always fit into a globset")
}
/// Build a `GlobSet` from already-compiled `globs` plus additional textual
/// `patterns` compiled on the fly.
pub fn globset_from_globs_and_patterns(
    globs: &[globset::Glob],
    patterns: impl IntoIterator<Item = impl AsRef<str>>,
) -> globset::GlobSet {
    let mut builder = globset::GlobSetBuilder::new();
    for glob in globs.iter() {
        builder.add(glob.clone());
    }
    for pattern in patterns.into_iter() {
        builder.add(make_glob(pattern.as_ref()));
    }
    builder.build().expect("multiple globs to always fit into a globset")
}
/// Split `entries` into those contained in one of the given `directories`
/// (matched via a `<dir>/**` glob per directory) and the rest.
fn split_by_matching_directories(
    entries: Vec<TarHeader>,
    directories: &[TarHeader],
) -> (Vec<TarHeader>, Vec<TarHeader>) {
    // Shortcut: we assume '/' as path separator, which is true for all paths in crates.io except for 214 :D - it's OK to not find things in that case.
    let globs = globset_from_patterns(directories.iter().map(|e| {
        let mut s = tar_path_to_utf8_str(&e.path).to_string();
        s.push_str("/**");
        s
    }));
    split_to_matched_and_unmatched(entries, &globs)
}
/// Remove include patterns that cargo treats as implicit anyway
/// (`Cargo.toml`/`Cargo.lock` in their common spellings plus
/// `Cargo.toml.orig`), appending every removed pattern to
/// `removed_include_patterns` in order of appearance.
///
/// Fix: the previous implementation repeatedly rescanned the vector from the
/// start and removed one element per pass (O(n²) with O(n) shifts per
/// removal); a single `retain` pass keeps the same result and order in O(n).
fn remove_implicit_includes(include_patterns: &mut Patterns, mut removed_include_patterns: impl AsMut<Patterns>) {
    let removed_include_patterns = removed_include_patterns.as_mut();
    include_patterns.retain(|pattern| {
        let implicit = matches!(
            pattern.as_str(),
            "Cargo.toml.orig" | "Cargo.toml" | "Cargo.lock" | "./Cargo.toml" | "./Cargo.lock"
        );
        if implicit {
            removed_include_patterns.push(pattern.clone());
        }
        !implicit
    });
}
/// These input patterns are **file paths**, and we would like to make them into the smallest possible amount of **include patterns**.
/// These patterns must not accidentally match the 'pattern_to_not_match', as it is the pattern they are supposed to replace.
/// Note that one could also use negated patterns, so keep the 'pattern to not match', but add a specific negation.
/// HELP WANTED: This could be done by finding the common ancestors and resolve to patterns that match their children most specifically
/// See https://github.com/the-lean-crate/criner/issues/2
fn turn_file_paths_into_patterns(added_include_patterns: Patterns, _pattern_to_not_match: &str) -> Patterns {
    // Identity for now: each file path is itself a valid, maximally specific
    // include pattern. See the HELP WANTED note above for the planned merging
    // of common ancestors.
    added_include_patterns
}
/// For every include pattern that also matches one of `entries_to_exclude`,
/// replace it with the concrete paths of the `entries_to_include` it covers;
/// patterns matching no excluded entry are kept as-is. Implicit includes are
/// stripped from the result.
///
/// Returns `(all resulting patterns, patterns added, patterns removed)`.
fn find_include_patterns_that_incorporate_exclude_patterns(
    entries_to_exclude: &[TarHeader],
    entries_to_include: &[TarHeader],
    include_patterns: Patterns,
) -> (Patterns, Patterns, Patterns) {
    let mut added_include_patterns = Vec::new();
    let mut removed_include_patterns = Vec::new();
    let mut all_include_patterns = Vec::with_capacity(include_patterns.len());
    for pattern in include_patterns {
        let glob = make_glob(&pattern);
        let include = glob.compile_matcher();
        if entries_to_exclude
            .iter()
            .any(|e| include.is_match(tar_path_to_path(&e.path)))
        {
            // This pattern would pull in files meant to be excluded — replace
            // it with the specific included files it covers.
            let added_includes = turn_file_paths_into_patterns(
                entries_to_include
                    .iter()
                    .filter(|e| include.is_match(tar_path_to_path(&e.path)))
                    .map(|e| tar_path_to_utf8_str(&e.path).to_string())
                    .collect(),
                &pattern,
            );
            removed_include_patterns.push(pattern);
            added_include_patterns.extend(added_includes.clone().into_iter());
            all_include_patterns.extend(added_includes.into_iter());
        } else {
            all_include_patterns.push(pattern);
        }
    }
    remove_implicit_includes(&mut all_include_patterns, &mut removed_include_patterns);
    (all_include_patterns, added_include_patterns, removed_include_patterns)
}
/// Build a glob with the settings used throughout this module: `*` may cross
/// path separators, matching is case sensitive, and backslashes escape.
fn make_glob(pattern: &str) -> globset::Glob {
    globset::GlobBuilder::new(pattern)
        .literal_separator(false)
        .case_insensitive(false)
        .backslash_escape(true) // most paths in crates.io are forward slashes, there are only 214 or so with backslashes
        .build()
        .expect("valid include patterns")
}
/// Reduce the given `(pattern, matcher)` pairs to a small set of include
/// patterns covering `entries`: a pattern matching exactly one remaining file
/// is replaced by that file's path, one matching several files is kept as-is,
/// and patterns matching nothing are dropped. Implicit includes
/// (Cargo.toml/.lock) are removed from the result.
fn simplify_includes<'a>(
    include_patterns_and_matchers: impl Iterator<Item = &'a (&'a str, globset::GlobMatcher)>,
    mut entries: Vec<TarHeader>,
) -> Patterns {
    let mut out_patterns = Vec::new();
    // Reused scratch buffer of paths matched by the current pattern.
    let mut matched = Vec::<String>::new();
    for (pattern, glob) in include_patterns_and_matchers {
        matched.clear();
        matched.extend(
            entries
                .iter()
                .filter(|e| glob.is_match(tar_path_to_utf8_str(&e.path)))
                .map(|e| tar_path_to_utf8_str(&e.path).to_string()),
        );
        match matched.len() {
            0 => {}
            1 => {
                out_patterns.push(matched[0].clone());
            }
            _ => {
                out_patterns.push((*pattern).to_string());
            }
        }
        // Entries already covered must not influence later patterns.
        entries.retain(|e| !matched.iter().any(|p| p == tar_path_to_utf8_str(&e.path)));
    }
    remove_implicit_includes(&mut out_patterns, Vec::new());
    out_patterns
}
/// Look up the entry whose crate-relative path equals `name`.
///
/// Entries with buffered contents are searched first and yield the data
/// alongside the header; otherwise the plain meta-data entries are consulted
/// and the data slot is `None`.
fn find_in_entries<'buffer>(
    entries_with_buffer: &'buffer [(TarHeader, Vec<u8>)],
    entries: &[TarHeader],
    name: &str,
) -> Option<(TarHeader, Option<&'buffer [u8]>)> {
    let with_data = entries_with_buffer
        .iter()
        .find(|(header, _)| tar_path_to_utf8_str(&header.path) == name)
        .map(|(header, data)| (header.clone(), Some(data.as_slice())));
    with_data.or_else(|| {
        entries
            .iter()
            .find(|header| tar_path_to_utf8_str(&header.path) == name)
            .map(|header| (header.clone(), None))
    })
}
/// Extend `patterns_to_amend` with those patterns of `set_a` that match at
/// least one of `entries`, unless `set_b` is non-empty and no remaining entry
/// matches `set_b` (in which case the pattern is not added).
///
/// Returns `(entries matched by the newly added patterns, the amended pattern
/// list, just the newly added patterns)`.
fn matches_in_set_a_but_not_in_set_b(
    mut patterns_to_amend: Patterns,
    set_a: &[(&str, globset::GlobMatcher)],
    set_b: &globset::GlobSet,
    mut entries: Vec<TarHeader>,
) -> (Vec<TarHeader>, Patterns, Patterns) {
    // Remember where the pre-existing patterns end so the added ones can be
    // sliced off afterwards.
    let set_a_len = patterns_to_amend.len();
    let all_entries = entries.clone();
    for (pattern_a, glob_a) in set_a {
        if entries.iter().any(|e| glob_a.is_match(tar_path_to_utf8_str(&e.path))) {
            // Consume the entries claimed by this pattern so later patterns
            // only see what is left.
            entries.retain(|e| !glob_a.is_match(tar_path_to_utf8_str(&e.path)));
            if entries.is_empty() {
                break;
            }
            if set_b.is_empty() {
                patterns_to_amend.push((*pattern_a).to_string());
                continue;
            }
            if entries.iter().any(|e| set_b.is_match(tar_path_to_utf8_str(&e.path))) {
                patterns_to_amend.push((*pattern_a).to_string());
            }
        }
    }
    let patterns_in_set_a_which_do_not_match_a_pattern_in_set_b = patterns_to_amend
        .get(set_a_len..)
        .map(|v| v.to_vec())
        .unwrap_or_else(Vec::new);
    // Re-evaluate the added patterns against the full original entry list to
    // report which entries they cover.
    let (entries, _) = split_to_matched_and_unmatched(
        all_entries,
        &globset_from_patterns(&patterns_in_set_a_which_do_not_match_a_pattern_in_set_b),
    );
    (
        entries,
        patterns_to_amend,
        patterns_in_set_a_which_do_not_match_a_pattern_in_set_b,
    )
}
/// Resolve `relative_path` the way `include_str!`/`include_bytes!` would when
/// written inside `source_file_path`, and return the result relative to the
/// crate root. E.g. `"src/deep/lib.rs"` + `"../data/foo.bin"` yields
/// `"src/data/foo.bin"`.
fn to_crate_relative_path(source_file_path: impl AsRef<Path>, relative_path: impl AsRef<Path>) -> String {
    use std::path::Component::*;
    let relative_path = relative_path.as_ref();
    let base_dir = source_file_path
        .as_ref()
        .parent()
        .expect("directory containing the file");
    // Count the leading `..` components; interleaved `.` components are ignored.
    let parent_jumps = relative_path
        .components()
        .take_while(|c| matches!(c, ParentDir | CurDir))
        .filter(|c| matches!(c, ParentDir))
        .count();
    // Keep as much of the source directory as the `..` jumps leave intact.
    let kept_base_components = base_dir.components().count().saturating_sub(parent_jumps);
    let mut result = PathBuf::new();
    for component in base_dir.components().take(kept_base_components) {
        result.push(component);
    }
    for component in relative_path
        .components()
        .filter(|c| !matches!(c, CurDir))
        .skip(parent_jumps)
    {
        result.push(component);
    }
    result.to_str().expect("utf8 only").to_string()
}
/// Amend `existing_exclude` with those standard exclude patterns whose files
/// are not claimed by the standard includes or the compile-time includes.
fn simplify_standard_excludes_and_match_against_standard_includes(
    potential_waste: Vec<TarHeader>,
    existing_exclude: Patterns,
    compile_time_include: Option<Patterns>,
) -> (Vec<TarHeader>, Patterns, Patterns) {
    let compile_time_include = compile_time_include.unwrap_or_default();
    // One glob set combining the standard includes with whatever the sources
    // pull in via include_str!/include_bytes!.
    let include_iter =
        globset_from_globs_and_patterns(&STANDARD_INCLUDE_GLOBS, compile_time_include.iter().map(|s| s.as_str()));
    matches_in_set_a_but_not_in_set_b(
        existing_exclude,
        &STANDARD_EXCLUDE_MATCHERS,
        &include_iter,
        potential_waste,
    )
}
/// Extract the paths referenced by compile-time include statements in the given file,
/// converted to crate-relative form. Yields nothing when the file or its data is absent.
fn included_paths_of(file: Option<(TarHeader, Option<&[u8]>)>) -> Vec<String> {
    let (header, data) = match file {
        Some((header, Some(data))) => (header, data),
        _ => return Vec::new(),
    };
    COMPILE_TIME_INCLUDE
        .captures_iter(data)
        .map(|cap| {
            // Include paths are interpreted relative to the file they appear in.
            to_crate_relative_path(
                tar_path_to_utf8_str(&header.path),
                std::str::from_utf8(&cap["include"]).expect("utf8 path"),
            )
        })
        .collect()
}
/// HELP WANTED find the largest common ancestors (e.g. curl/* for curl/foo/* and curl/bar/*) and return these
/// instead of the ones they contain. This can help speeding up later use of the patterns, as there are less of them.
fn optimize_directories(dir_patterns: Vec<String>) -> Vec<String> {
    // Deliberate identity for now: no common-ancestor folding is implemented yet.
    dir_patterns
}
/// Heuristically extract file and directory paths referenced by a build script's source and
/// turn them into glob patterns. Returns an empty list if the script or its data is absent.
fn find_paths_mentioned_in_build_script(build: Option<(TarHeader, Option<&[u8]>)>) -> Vec<String> {
    build
        .and_then(|(header, maybe_data)| maybe_data.map(|d| (header, d)))
        .map(|(_, data)| {
            // All regex captures that look like paths, with obviously-not-a-path
            // candidates filtered out below.
            let mut v: Vec<_> = BUILD_SCRIPT_PATHS
                .captures_iter(data)
                .map(|cap| {
                    std::str::from_utf8(
                        cap.name("path")
                            .or_else(|| cap.name("path_like"))
                            .expect("one of the two matches")
                            .as_bytes(),
                    )
                    .expect("valid utf8")
                    .to_string()
                })
                .filter(|p| {
                    // Reject interpolations, flags, cargo directives and all-uppercase
                    // digit-free tokens (probably environment variable names).
                    !(p.contains('{')
                        || p.contains(' ')
                        || p.contains('@')
                        || (!p.as_bytes().iter().any(|b| b.is_ascii_digit()) && &p.to_uppercase() == p) // probably environment variable
                        || p.starts_with("cargo:")
                        || p.starts_with('-'))
                })
                .collect();
            // Parent directories of the detected paths (paths without a parent are skipped).
            let dirs: BTreeSet<_> = v
                .iter()
                .filter_map(|p| {
                    Path::new(p)
                        .parent()
                        .filter(|p| p.components().count() > 0)
                        .and_then(|p| p.to_str().map(|s| s.to_string()))
                })
                .collect();
            let possible_patterns = if dirs.is_empty() {
                // No parents: keep each bare path and additionally try it as a directory glob.
                v.extend(v.clone().into_iter().map(|p| format!("{}/*", p)));
                v
            } else {
                // NOTE(review): in this branch the bare paths are replaced by `<path>/*` globs
                // only — a plain file path like `data/foo.bin` then relies on its parent's
                // `<dir>/*` pattern to be matched. Confirm this asymmetry is intended.
                let mut dirs = optimize_directories(dirs.into_iter().map(|d| format!("{}/*", d)).collect());
                dirs.extend(v.into_iter().map(|p| format!("{}/*", p)));
                dirs
            };
            // Keep only candidates that parse as valid globs.
            possible_patterns
                .into_iter()
                .filter_map(|p| globset::Glob::new(&p).ok().map(|_| p))
                .collect()
        })
        .unwrap_or_default()
}
/// Turn entries matching the standard excludes (but none of `patterns_to_avoid`) into
/// negated include patterns; `None` when nothing would change.
fn potential_negated_includes(entries: Vec<TarHeader>, patterns_to_avoid: globset::GlobSet) -> Option<PotentialWaste> {
    let (would_be_removed, matched_patterns, _) =
        matches_in_set_a_but_not_in_set_b(Vec::new(), &STANDARD_EXCLUDE_MATCHERS, &patterns_to_avoid, entries);
    if matched_patterns.is_empty() {
        None
    } else {
        Some(PotentialWaste {
            // A leading '!' negates the pattern within an include list.
            patterns_to_fix: matched_patterns.into_iter().map(|p| format!("!{}", p)).collect(),
            potential_waste: would_be_removed,
        })
    }
}
/// If `file_path`'s directory is not already covered by a standard `<dir>/**/*` include
/// pattern, push a `<dir>/**/*.rs` pattern onto `include`.
///
/// NOTE(review): the containment check uses `**/*` while the pattern added is `**/*.rs` —
/// presumably intentional (only Rust sources matter at compile time); confirm with callers.
fn add_to_includes_if_non_default(file_path: &str, include: &mut Patterns) {
    let recursive_pattern = Path::new(file_path).parent().expect("file path as input").join("**");
    if !standard_include_patterns().contains(&recursive_pattern.join("*").to_str().expect("utf8 only")) {
        // Fix: expect message normalized to "utf8 only" for consistency with the rest of the file.
        include.push(recursive_pattern.join("*.rs").to_str().expect("utf8 only").to_string())
    }
}
/// Keep only patterns that are anchored at the start (no leading `*`) yet end in a `*`,
/// i.e. patterns rooted at a specific prefix that match everything beneath it.
fn non_greedy_patterns<S: AsRef<str>>(patterns: impl IntoIterator<Item = S>) -> impl Iterator<Item = S> {
    patterns.into_iter().filter(|pattern| {
        let p = pattern.as_ref();
        p.ends_with('*') && !p.starts_with('*')
    })
}
impl Report {
    /// Parse the crate's `Cargo.toml` out of `entries`, falling back to the default config
    /// when the manifest is missing or not valid UTF-8.
    pub(crate) fn cargo_config_from_entries(entries: &[(TarHeader, Vec<u8>)]) -> CargoConfig {
        find_in_entries(entries, &[], "Cargo.toml")
            .and_then(|(_e, v)| v.and_then(|v| std::str::from_utf8(v).ok().map(CargoConfig::from)))
            .unwrap_or_default()
    }
    /// Convert raw tar headers into `(path, size)` pairs for reporting.
    pub(crate) fn convert_to_wasted_files(entries: Vec<TarHeader>) -> Vec<WastedFile> {
        entries
            .into_iter()
            .map(|e| (tar_path_to_utf8_str(&e.path).to_owned(), e.size))
            .collect()
    }
    /// Propose a brand-new `include` list based on the standard include globs plus any
    /// compile-time includes; returns the fix (if any) and the entries considered waste.
    pub(crate) fn standard_includes(
        entries: Vec<TarHeader>,
        build_script_name: Option<String>,
        compile_time_include: Option<Patterns>,
    ) -> (Option<Fix>, Vec<TarHeader>) {
        let mut compile_time_include = compile_time_include.unwrap_or_default();
        // A build script must stay in the package, so treat it like a compile-time include.
        let has_build_script = match build_script_name {
            Some(build_script_name) => {
                compile_time_include.push(build_script_name);
                true
            }
            None => false,
        };
        let include_globs =
            globset_from_globs_and_patterns(&STANDARD_INCLUDE_GLOBS, compile_time_include.iter().map(|s| s.as_str()))&#59;
        let (included_entries, excluded_entries) = split_to_matched_and_unmatched(entries, &include_globs);
        let compile_time_include_matchers: Vec<_> = compile_time_include
            .iter()
            .map(|s| (s.as_str(), make_glob(s).compile_matcher()))
            .collect();
        // Simplify the candidate include patterns against what is actually in the archive
        // (presumably dropping patterns that match nothing — confirm in simplify_includes).
        let include_patterns = simplify_includes(
            STANDARD_INCLUDE_MATCHERS
                .iter()
                .chain(compile_time_include_matchers.iter()),
            included_entries.clone(),
        );
        // Among the files we would keep, check whether negated patterns could still
        // remove standard waste.
        let potential = potential_negated_includes(
            included_entries,
            globset_from_patterns(non_greedy_patterns(&compile_time_include)),
        );
        if excluded_entries.is_empty() && potential.is_none() {
            (None, Vec::new())
        } else {
            let (fix, waste) = Fix::NewInclude {
                include: include_patterns,
                has_build_script,
            }
            .merge(potential, excluded_entries);
            (Some(fix), waste)
        }
    }
    /// When both `include` and `exclude` are present, propose dropping `exclude` and folding
    /// its effect into the `include` list instead.
    pub(crate) fn compute_includes_from_includes_and_excludes(
        entries: Vec<TarHeader>,
        include: Patterns,
        exclude: Patterns,
    ) -> (Option<Fix>, Vec<TarHeader>) {
        let exclude_globs = globset_from_patterns(&exclude);
        let directories = directories_of(&entries);
        let (mut entries_that_should_be_excluded, remaining_entries) =
            split_to_matched_and_unmatched(entries, &exclude_globs);
        let (directories_that_should_be_excluded, _remaining_directories) =
            split_to_matched_and_unmatched(directories, &exclude_globs);
        // Files inside an excluded directory count as excluded too, even if no pattern
        // names them directly.
        let (entries_that_should_be_excluded_by_directory, remaining_entries) =
            split_by_matching_directories(remaining_entries, &directories_that_should_be_excluded);
        entries_that_should_be_excluded.extend(entries_that_should_be_excluded_by_directory.into_iter());
        let fix = if entries_that_should_be_excluded.is_empty() {
            // The exclude patterns match nothing, so they can simply be removed.
            Some(Fix::RemoveExclude)
        } else {
            let (include, include_added, include_removed) = find_include_patterns_that_incorporate_exclude_patterns(
                &entries_that_should_be_excluded,
                &remaining_entries,
                include,
            );
            if include_added.is_empty() && include_removed.is_empty() {
                None
            } else {
                Some(Fix::RemoveExcludeAndUseInclude {
                    include_added,
                    include,
                    include_removed,
                })
            }
        };
        (fix, entries_that_should_be_excluded)
    }
    /// This implementation respects all explicitly given includes but proposes potential excludes based on our exclude list.
    pub(crate) fn enrich_includes(
        entries: Vec<TarHeader>,
        mut include: Patterns,
        has_build_script: bool,
    ) -> (Option<Fix>, Vec<TarHeader>) {
        let mut include_removed = Vec::new();
        remove_implicit_includes(&mut include, &mut include_removed);
        (
            if include_removed.is_empty() {
                None
            } else {
                let potential =
                    potential_negated_includes(entries, globset_from_patterns(non_greedy_patterns(&include)));
                Some(Fix::ImprovedInclude {
                    include,
                    include_removed,
                    has_build_script,
                    potential,
                })
            },
            // Only the pattern list is improved here; no files are reported as waste.
            Vec::new(),
        )
    }
    /// Augment an existing `exclude` list with the standard excludes, reporting the files
    /// that would be saved by the enriched list.
    pub(crate) fn enrich_excludes(
        entries: Vec<TarHeader>,
        exclude: Patterns,
        compile_time_include: Option<Patterns>,
        has_build_script: bool,
    ) -> (Option<Fix>, Vec<TarHeader>) {
        let (potential_waste, _remaining) = split_to_matched_and_unmatched(entries, &STANDARD_EXCLUDES_GLOBSET);
        let (wasted_files, exclude, exclude_added) = simplify_standard_excludes_and_match_against_standard_includes(
            potential_waste,
            exclude,
            compile_time_include,
        );
        if wasted_files.is_empty() {
            (None, Vec::new())
        } else {
            (
                // NOTE(review): when nothing new would be added to `exclude`, the waste is
                // still reported but without a fix — presumably there is no actionable
                // change in that case; confirm.
                if exclude_added.is_empty() {
                    None
                } else {
                    Some(Fix::EnrichedExclude {
                        exclude,
                        exclude_added,
                        has_build_script,
                    })
                },
                wasted_files,
            )
        }
    }
    /// Derive `(include, exclude, compile-time includes, build-script path)` from the parsed
    /// Cargo configuration and the archive contents.
    pub(crate) fn cargo_config_into_includes_excludes(
        config: CargoConfig,
        entries_with_buffer: &[(TarHeader, Vec<u8>)],
        entries: &[TarHeader],
    ) -> (Option<Patterns>, Option<Patterns>, Option<Patterns>, Option<String>) {
        let mut maybe_build_script_path = config.build_script_path().map(|s| s.to_owned());
        let compile_time_includes = {
            let mut includes_parsed_from_files = Vec::new();
            // Scan the library entry point for compile-time include statements.
            includes_parsed_from_files.extend(included_paths_of(find_in_entries(
                entries_with_buffer,
                entries,
                config.lib_path(),
            )));
            add_to_includes_if_non_default(config.lib_path(), &mut includes_parsed_from_files);
            // Do the same for every binary target.
            for path in config.bin_paths() {
                includes_parsed_from_files.extend(included_paths_of(find_in_entries(
                    entries_with_buffer,
                    entries,
                    path,
                )));
                add_to_includes_if_non_default(path, &mut includes_parsed_from_files);
            }
            let build_script_name = config.actual_or_expected_build_script_path();
            let maybe_data = find_in_entries(entries_with_buffer, entries, build_script_name);
            // If the manifest did not name a build script but one exists in the archive,
            // record its path.
            maybe_build_script_path =
                maybe_build_script_path.or_else(|| maybe_data.as_ref().map(|_| build_script_name.to_owned()));
            includes_parsed_from_files.extend(find_paths_mentioned_in_build_script(maybe_data));
            if includes_parsed_from_files.is_empty() {
                None
            } else {
                Some(includes_parsed_from_files)
            }
        };
        let package = config.package.unwrap_or_default();
        (
            package.include,
            package.exclude,
            compile_time_includes,
            maybe_build_script_path,
        )
    }
}
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner-waste-report/src/test/from_package.rs | criner-waste-report/src/test/from_package.rs | use super::super::{Fix, Report, TarPackage};
use std::path::Path;
/// Load a fixture package by deserializing `tests/fixtures/<file_name>.package.rmp`
/// (MessagePack) from the crate's manifest directory.
fn tar_package(file_name: &str) -> TarPackage {
    let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR"))
        .join("tests/fixtures")
        .join(format!("{}.package.rmp", file_name));
    let bytes = std::fs::read(fixture_path).unwrap();
    rmp_serde::from_slice(bytes.as_slice()).unwrap()
}
#[test]
fn ripgrep_perfectly_lean_which_is_unexpected_actually() {
    // The ripgrep 12.0.0 fixture yields no wasted files and no suggested fix.
    assert_eq!(
        Report::from_package("a", "1", tar_package("ripgrep-12.0.0-extract_crate-1.0.0")),
        Report::Version {
            crate_name: "a".into(),
            crate_version: "1".to_string(),
            total_size_in_bytes: 1369472,
            total_files: 89,
            wasted_files: vec![],
            suggested_fix: None
        }
    );
}
#[test]
fn avr_libc_missing_build_script_even_though_it_is_there() {
assert_eq!(
Report::from_package("a", "1", tar_package("avr_libc-0.1.3extract_crate-1.0.0")),
Report::Version {
crate_name: "a".into(),
crate_version: "1".to_string(),
total_size_in_bytes: 69665780,
total_files: 1399,
wasted_files: [
(".gitignore", 528),
("HACKING.md", 123),
("Xargo.toml", 118),
("avr-atmega328p.json", 542),
("examples/eeprom.rs", 1396),
("avr-libc/doc/CHANGES.old", 7735),
("avr-libc/doc/ChangeLog", 70),
("avr-libc/doc/INSTALL", 1365),
("avr-libc/doc/LICENSE-Changes/PERM-Bob-Paddock", 1591),
("avr-libc/doc/LICENSE-Changes/PERM-Colin-OFlynn", 1632),
("avr-libc/doc/LICENSE-Changes/PERM-Joerg-Wunsch", 1566),
("avr-libc/doc/LICENSE-Changes/PERM-Juergen-Schilling", 1805),
("avr-libc/doc/LICENSE-Changes/PERM-Keith-Gudger", 1611),
("avr-libc/doc/LICENSE-Changes/PERM-Marek-Michalkiewicz", 2702),
("avr-libc/doc/LICENSE-Changes/PERM-Micheal-Stumpf", 2366),
("avr-libc/doc/LICENSE-Changes/PERM-Nils-Kristian-Strom", 1017),
("avr-libc/doc/LICENSE-Changes/PERM-Peter-Jansen", 2161),
("avr-libc/doc/LICENSE-Changes/PERM-Reinhard-Jessich", 1734),
("avr-libc/doc/LICENSE-Changes/PERM-Steinar-Haugen", 1660),
("avr-libc/doc/LICENSE-Changes/PERM-Theodore-A-Roth", 1025),
("avr-libc/doc/Makefile.am", 1750),
("avr-libc/doc/TODO", 1122),
("avr-libc/doc/api/Makefile.am", 8998),
("avr-libc/doc/api/acknowledge.dox", 3581),
("avr-libc/doc/api/assembler.dox", 12752),
("avr-libc/doc/api/avrs.png-save", 7027),
("avr-libc/doc/api/bench-libc.dox", 4826),
("avr-libc/doc/api/bench-libm.dox", 3050),
("avr-libc/doc/api/bench.dox", 2529),
("avr-libc/doc/api/dox.css", 99),
("avr-libc/doc/api/dox_html_footer", 715),
("avr-libc/doc/api/dox_html_header", 2779),
("avr-libc/doc/api/doxygen.config.in", 105138),
("avr-libc/doc/api/faq.dox", 64562),
("avr-libc/doc/api/inline_asm.dox", 29514),
("avr-libc/doc/api/interrupts.dox", 9502),
("avr-libc/doc/api/library.dox", 8552),
("avr-libc/doc/api/main_page.dox", 11446),
("avr-libc/doc/api/malloc-std.fig", 2763),
("avr-libc/doc/api/malloc-x1.fig", 2951),
("avr-libc/doc/api/malloc-x2.fig", 3230),
("avr-libc/doc/api/malloc.dox", 12692),
("avr-libc/doc/api/optimize.dox", 5888),
("avr-libc/doc/api/overview.dox", 12806),
("avr-libc/doc/api/pgmspace.dox", 12442),
("avr-libc/doc/api/porting.dox", 8999),
("avr-libc/doc/api/rel-method.dox", 7341),
("avr-libc/doc/api/releases.fig", 2691),
("avr-libc/doc/api/sections.dox", 9870),
("avr-libc/doc/api/sfr.dox", 3665),
("avr-libc/doc/api/tools-install.dox", 27559),
("avr-libc/doc/api/unjs.pl", 3210),
("avr-libc/doc/api/using-avrprog.dox", 4900),
("avr-libc/doc/api/using-tools.dox", 40797),
("avr-libc/doc/api/vectortable.dox", 39573),
("avr-libc/doc/examples/Makefile.am", 3992),
("avr-libc/doc/examples/all-demos.dox", 4732),
("avr-libc/doc/examples/asmdemo/Makefile", 5434),
("avr-libc/doc/examples/asmdemo/asmdemo.c", 3703),
("avr-libc/doc/examples/asmdemo/asmdemo.dox", 11406),
("avr-libc/doc/examples/asmdemo/isrs.S", 3095),
("avr-libc/doc/examples/asmdemo/project.h", 871),
("avr-libc/doc/examples/demo/Makefile", 3385),
("avr-libc/doc/examples/demo/demo.c", 2128),
("avr-libc/doc/examples/demo/demo.dox", 13569),
("avr-libc/doc/examples/demo/demo.fig", 8895),
("avr-libc/doc/examples/demo/iocompat.h", 5665),
("avr-libc/doc/examples/largedemo/Makefile", 2046),
("avr-libc/doc/examples/largedemo/largedemo-setup.jpg", 71742),
("avr-libc/doc/examples/largedemo/largedemo-wiring.jpg", 45703),
("avr-libc/doc/examples/largedemo/largedemo-wiring2.jpg", 46337),
("avr-libc/doc/examples/largedemo/largedemo.c", 12200),
("avr-libc/doc/examples/largedemo/largedemo.dox", 17503),
("avr-libc/doc/examples/stdiodemo/Makefile", 1817),
("avr-libc/doc/examples/stdiodemo/defines.h", 942),
("avr-libc/doc/examples/stdiodemo/hd44780.c", 5320),
("avr-libc/doc/examples/stdiodemo/hd44780.h", 2803),
("avr-libc/doc/examples/stdiodemo/lcd.c", 1993),
("avr-libc/doc/examples/stdiodemo/lcd.h", 719),
("avr-libc/doc/examples/stdiodemo/stdiodemo-setup.jpg", 100670),
("avr-libc/doc/examples/stdiodemo/stdiodemo.c", 2065),
("avr-libc/doc/examples/stdiodemo/stdiodemo.dox", 21760),
("avr-libc/doc/examples/stdiodemo/uart.c", 4215),
("avr-libc/doc/examples/stdiodemo/uart.h", 974),
("avr-libc/doc/examples/twitest/Makefile", 635),
("avr-libc/doc/examples/twitest/twitest.c", 14345),
("avr-libc/doc/examples/twitest/twitest.dox", 12732),
("avr-libc/tests/simulate/avr/eeprom-1.c", 4511),
("avr-libc/tests/simulate/avr/eeprom-2.c", 2139),
("avr-libc/tests/simulate/avr/eeprom-3.c", 4751),
("avr-libc/tests/simulate/avr/sfr-1.c", 6365),
("avr-libc/tests/simulate/avr/sfr-2.c", 8083),
("avr-libc/tests/simulate/avr/sfr-3.c", 7690),
("avr-libc/tests/simulate/avr/sfrasm-1.S", 5102),
("avr-libc/tests/simulate/avr/sfrasm-2.S", 5695),
("avr-libc/tests/simulate/avr/sfrasm-3.S", 1737),
("avr-libc/tests/simulate/fplib/add-01.c", 4872),
("avr-libc/tests/simulate/fplib/add-02.c", 5413),
("avr-libc/tests/simulate/fplib/add-03.c", 3750),
("avr-libc/tests/simulate/fplib/add-500.c", 52893),
("avr-libc/tests/simulate/fplib/add-inf.c", 5575),
("avr-libc/tests/simulate/fplib/add-nan.c", 4308),
("avr-libc/tests/simulate/fplib/cmp-01.c", 11218),
("avr-libc/tests/simulate/fplib/cmp-02.c", 5425),
("avr-libc/tests/simulate/fplib/cmp-03.c", 5702),
("avr-libc/tests/simulate/fplib/div-01.c", 5662),
("avr-libc/tests/simulate/fplib/div-02.c", 3744),
("avr-libc/tests/simulate/fplib/div-03.c", 3771),
("avr-libc/tests/simulate/fplib/div-500.c", 52831),
("avr-libc/tests/simulate/fplib/div-inf.c", 5178),
("avr-libc/tests/simulate/fplib/div-nan.c", 4569),
("avr-libc/tests/simulate/fplib/flt2llng-01.c", 7666),
("avr-libc/tests/simulate/fplib/flt2long-01.c", 5974),
("avr-libc/tests/simulate/fplib/flt2ulng-01.c", 6253),
("avr-libc/tests/simulate/fplib/flt2unll-01.c", 7941),
("avr-libc/tests/simulate/fplib/llng2flt-01.c", 13149),
("avr-libc/tests/simulate/fplib/llng2flt-500.c", 46407),
("avr-libc/tests/simulate/fplib/mul-01.c", 7492),
("avr-libc/tests/simulate/fplib/mul-02.c", 5524),
("avr-libc/tests/simulate/fplib/mul-03.c", 3070),
("avr-libc/tests/simulate/fplib/mul-500.c", 52902),
("avr-libc/tests/simulate/fplib/mul-inf.c", 5050),
("avr-libc/tests/simulate/fplib/mul-nan.c", 4575),
("avr-libc/tests/simulate/fplib/neg-01.c", 2996),
("avr-libc/tests/simulate/fplib/ulng2flt-01.c", 6817),
("avr-libc/tests/simulate/fplib/unll2flt-01.c", 12851),
("avr-libc/tests/simulate/fplib/unord-01.c", 8674),
("avr-libc/tests/simulate/fplib/unord-02.c", 4352),
("avr-libc/tests/simulate/fplib/unord-03.c", 4569),
("avr-libc/tests/simulate/known-to-fail.txt", 959),
("avr-libc/tests/simulate/math/acos-01.c", 3175),
("avr-libc/tests/simulate/math/acos-500.c", 38559),
("avr-libc/tests/simulate/math/all-float.c", 4688),
("avr-libc/tests/simulate/math/asin-01.c", 3296),
("avr-libc/tests/simulate/math/asin-500.c", 38563),
("avr-libc/tests/simulate/math/atan-01.c", 3333),
("avr-libc/tests/simulate/math/atan-500.c", 38563),
("avr-libc/tests/simulate/math/atan2-01.c", 5087),
("avr-libc/tests/simulate/math/atan2-500.c", 51660),
("avr-libc/tests/simulate/math/cbrt-01.c", 3209),
("avr-libc/tests/simulate/math/cbrt-02.c", 23035),
("avr-libc/tests/simulate/math/cbrt-03.c", 23032),
("avr-libc/tests/simulate/math/cbrt-500.c", 38800),
("avr-libc/tests/simulate/math/ceil-01.c", 7414),
("avr-libc/tests/simulate/math/copysign-01.c", 3355),
("avr-libc/tests/simulate/math/cos-01.c", 3284),
("avr-libc/tests/simulate/math/cos-02.c", 3433),
("avr-libc/tests/simulate/math/cos-500.c", 38576),
("avr-libc/tests/simulate/math/cosh-01.c", 3871),
("avr-libc/tests/simulate/math/cosh-02.c", 3074),
("avr-libc/tests/simulate/math/cosh-500.c", 38564),
("avr-libc/tests/simulate/math/exp-01.c", 3883),
("avr-libc/tests/simulate/math/exp-500.c", 38559),
("avr-libc/tests/simulate/math/fdim-01.c", 4334),
("avr-libc/tests/simulate/math/fdim-500.c", 51427),
("avr-libc/tests/simulate/math/floor-01.c", 7430),
("avr-libc/tests/simulate/math/fma-01.c", 3532),
("avr-libc/tests/simulate/math/fmax-01.c", 6519),
("avr-libc/tests/simulate/math/fmax-500.c", 51472),
("avr-libc/tests/simulate/math/fmin-01.c", 8171),
("avr-libc/tests/simulate/math/fmin-500.c", 51472),
("avr-libc/tests/simulate/math/fmod-01.c", 6593),
("avr-libc/tests/simulate/math/fmod-500.c", 51489),
("avr-libc/tests/simulate/math/fmod-nan.c", 3910),
("avr-libc/tests/simulate/math/fp_mpack-01.c", 4402),
("avr-libc/tests/simulate/math/frexp-01.c", 5753),
("avr-libc/tests/simulate/math/hypot-01.c", 7762),
("avr-libc/tests/simulate/math/hypot-500.c", 51614),
("avr-libc/tests/simulate/math/isfinite-01.c", 3592),
("avr-libc/tests/simulate/math/isinf-01.c", 4177),
("avr-libc/tests/simulate/math/isnan-01.c", 2983),
("avr-libc/tests/simulate/math/ldexp-01.c", 5690),
("avr-libc/tests/simulate/math/ldexp-nan.c", 2681),
("avr-libc/tests/simulate/math/log-01.c", 2684),
("avr-libc/tests/simulate/math/log-02.c", 4112),
("avr-libc/tests/simulate/math/log-500.c", 38558),
("avr-libc/tests/simulate/math/log-nan.c", 2778),
("avr-libc/tests/simulate/math/log10-01.c", 2748),
("avr-libc/tests/simulate/math/lrint-01.c", 9598),
("avr-libc/tests/simulate/math/lround-01.c", 8666),
("avr-libc/tests/simulate/math/modf-01.c", 8483),
("avr-libc/tests/simulate/math/modf-nan.c", 2535),
("avr-libc/tests/simulate/math/modf-np.c", 2121),
("avr-libc/tests/simulate/math/pow-01.c", 20000),
("avr-libc/tests/simulate/math/pow-02.c", 4072),
("avr-libc/tests/simulate/math/pow-500.c", 51689),
("avr-libc/tests/simulate/math/pow-nan.c", 6011),
("avr-libc/tests/simulate/math/round-01.c", 7883),
("avr-libc/tests/simulate/math/signbit-01.c", 4234),
("avr-libc/tests/simulate/math/sin-01.c", 3399),
("avr-libc/tests/simulate/math/sin-500.c", 38575),
("avr-libc/tests/simulate/math/sinh-01.c", 3744),
("avr-libc/tests/simulate/math/sinh-02.c", 3445),
("avr-libc/tests/simulate/math/sinh-500.c", 38564),
("avr-libc/tests/simulate/math/sqrt-01.c", 5447),
("avr-libc/tests/simulate/math/sqrt-500.c", 39434),
("avr-libc/tests/simulate/math/tan-01.c", 3322),
("avr-libc/tests/simulate/math/tan-500.c", 38558),
("avr-libc/tests/simulate/math/tanh-01.c", 3544),
("avr-libc/tests/simulate/math/tanh-02.c", 3251),
("avr-libc/tests/simulate/math/tanh-500.c", 38563),
("avr-libc/tests/simulate/math/trunc-01.c", 7465),
("avr-libc/tests/simulate/math/xxx-inf.c", 2415),
("avr-libc/tests/simulate/math/xxx-nan.c", 2816),
("avr-libc/tests/simulate/other/alloca.c", 3445),
("avr-libc/tests/simulate/other/progtype-1.c", 7449),
("avr-libc/tests/simulate/other/progtype-2.c", 2372),
("avr-libc/tests/simulate/other/progtype-3.c", 2207),
("avr-libc/tests/simulate/pmstring/memccpy_P.c", 6198),
("avr-libc/tests/simulate/pmstring/memchr_P.c", 3276),
("avr-libc/tests/simulate/pmstring/memcmp_P.c", 3509),
("avr-libc/tests/simulate/pmstring/memmem_P.c", 7678),
("avr-libc/tests/simulate/pmstring/memrchr_P.c", 4535),
("avr-libc/tests/simulate/pmstring/strcasecmp_P.c", 3164),
("avr-libc/tests/simulate/pmstring/strcasestr_P.c", 5931),
("avr-libc/tests/simulate/pmstring/strchr_P.c", 2995),
("avr-libc/tests/simulate/pmstring/strchrnul_P.c", 4070),
("avr-libc/tests/simulate/pmstring/strcmp_P.c", 3012),
("avr-libc/tests/simulate/pmstring/strcspn_P.c", 4380),
("avr-libc/tests/simulate/pmstring/strlcat_P.c", 12894),
("avr-libc/tests/simulate/pmstring/strlcpy_P.c", 7569),
("avr-libc/tests/simulate/pmstring/strncasecmp_P.c", 4952),
("avr-libc/tests/simulate/pmstring/strpbrk_P.c", 3192),
("avr-libc/tests/simulate/pmstring/strrchr_P.c", 3087),
("avr-libc/tests/simulate/pmstring/strsep_P.c", 5235),
("avr-libc/tests/simulate/pmstring/strspn_P.c", 4067),
("avr-libc/tests/simulate/pmstring/strstr_P.c", 3456),
("avr-libc/tests/simulate/pmstring/strtok_P.c", 5527),
("avr-libc/tests/simulate/printf/snprintf_all-P.c", 1671),
("avr-libc/tests/simulate/printf/snprintf_all.c", 4380),
("avr-libc/tests/simulate/printf/sprintf-1.c", 4990),
("avr-libc/tests/simulate/printf/sprintf-2.c", 5974),
("avr-libc/tests/simulate/printf/sprintf-3.c", 5989),
("avr-libc/tests/simulate/printf/sprintf-4.c", 6025),
("avr-libc/tests/simulate/printf/sprintf-5.c", 3078),
("avr-libc/tests/simulate/printf/sprintf-inv.c", 3942),
("avr-libc/tests/simulate/printf/sprintf_flt-big.c", 6981),
("avr-libc/tests/simulate/printf/sprintf_flt-e01.c", 4800),
("avr-libc/tests/simulate/printf/sprintf_flt-f01.c", 5796),
("avr-libc/tests/simulate/printf/sprintf_flt-g01.c", 5927),
("avr-libc/tests/simulate/printf/sprintf_flt-g02.c", 7912),
("avr-libc/tests/simulate/printf/sprintf_flt-nan.c", 3836),
("avr-libc/tests/simulate/printf/sprintf_min-1.c", 4850),
("avr-libc/tests/simulate/printf/sprintf_min-2.c", 5311),
("avr-libc/tests/simulate/printf/sprintf_min-3.c", 5331),
("avr-libc/tests/simulate/printf/sprintf_min-4.c", 5344),
("avr-libc/tests/simulate/printf/sprintf_min-5.c", 3436),
("avr-libc/tests/simulate/printf/sprintf_min-int.c", 1656),
("avr-libc/tests/simulate/printf/sprintf_min-inv.c", 4771),
("avr-libc/tests/simulate/printf/sprintf_std-int.c", 4977),
("avr-libc/tests/simulate/printf/sprintf_std-inv.c", 3640),
("avr-libc/tests/simulate/printf/vsnprintf_all-P.c", 1673),
("avr-libc/tests/simulate/printf/vsnprintf_all.c", 4296),
("avr-libc/tests/simulate/progmem.h", 2365),
("avr-libc/tests/simulate/readcore.py", 6618),
("avr-libc/tests/simulate/regression/20080322-isinf.c", 2416),
("avr-libc/tests/simulate/regression/20080323-jmpbuf.c", 2252),
("avr-libc/tests/simulate/regression/20080405-eeprom.c", 1797),
("avr-libc/tests/simulate/regression/20081221-ffs.c", 1901),
("avr-libc/tests/simulate/regression/bug-01929.c", 1799),
("avr-libc/tests/simulate/regression/bug-11511.c", 1842),
("avr-libc/tests/simulate/regression/bug-11820.c", 1905),
("avr-libc/tests/simulate/regression/bug-13330.c", 1845),
("avr-libc/tests/simulate/regression/bug-18899.c", 1844),
("avr-libc/tests/simulate/regression/bug-19079.c", 2232),
("avr-libc/tests/simulate/regression/bug-19134.c", 1935),
("avr-libc/tests/simulate/regression/bug-19135.c", 1848),
("avr-libc/tests/simulate/regression/bug-19280.c", 1855),
("avr-libc/tests/simulate/regression/bug-19281.c", 1770),
("avr-libc/tests/simulate/regression/bug-21872-1.c", 2841),
("avr-libc/tests/simulate/regression/bug-21872-2.c", 2852),
("avr-libc/tests/simulate/regression/bug-21905-scanf_flt.c", 2093),
("avr-libc/tests/simulate/regression/bug-21906-scanf_flt.c", 1938),
("avr-libc/tests/simulate/regression/bug-21955.c", 5407),
("avr-libc/tests/simulate/regression/bug-22593.c", 2407),
("avr-libc/tests/simulate/regression/bug-22800.c", 2759),
("avr-libc/tests/simulate/regression/bug-22828.c", 2209),
("avr-libc/tests/simulate/regression/bug-25048.cpp", 1984),
("avr-libc/tests/simulate/regression/bug-25723.c", 2315),
("avr-libc/tests/simulate/regression/bug-27235-1.c", 2473),
("avr-libc/tests/simulate/regression/bug-27242.c", 2783),
("avr-libc/tests/simulate/regression/bug-28135.c", 2904),
("avr-libc/tests/simulate/regression/bug-31644.c", 3294),
("avr-libc/tests/simulate/regression/bug-35093.c", 2978),
("avr-libc/tests/simulate/regression/bug-35366-1-printf_flt.c", 2598),
("avr-libc/tests/simulate/regression/bug-35366-2-printf_flt.c", 2363),
("avr-libc/tests/simulate/regression/bug-37778.c", 1857),
("avr-libc/tests/simulate/runtest.sh", 9619),
("avr-libc/tests/simulate/scanf/scanf-nul.c", 4179),
("avr-libc/tests/simulate/scanf/scanf_brk-nul.c", 3850),
("avr-libc/tests/simulate/scanf/scanf_flt-nul.c", 3586),
("avr-libc/tests/simulate/scanf/sscanf-1.c", 4522),
("avr-libc/tests/simulate/scanf/sscanf-2.c", 3683),
("avr-libc/tests/simulate/scanf/sscanf-c1.c", 4730),
("avr-libc/tests/simulate/scanf/sscanf-c2.c", 4281),
("avr-libc/tests/simulate/scanf/sscanf-d1.c", 5321),
("avr-libc/tests/simulate/scanf/sscanf-d2.c", 4829),
("avr-libc/tests/simulate/scanf/sscanf-eof.c", 3761),
("avr-libc/tests/simulate/scanf/sscanf-eon.c", 3706),
("avr-libc/tests/simulate/scanf/sscanf-h.c", 4550),
("avr-libc/tests/simulate/scanf/sscanf-hh.c", 5108),
("avr-libc/tests/simulate/scanf/sscanf-i.c", 4307),
("avr-libc/tests/simulate/scanf/sscanf-l.c", 5499),
("avr-libc/tests/simulate/scanf/sscanf-o1.c", 5064),
("avr-libc/tests/simulate/scanf/sscanf-o2.c", 4797),
("avr-libc/tests/simulate/scanf/sscanf-s1.c", 4967),
("avr-libc/tests/simulate/scanf/sscanf-s2.c", 5222),
("avr-libc/tests/simulate/scanf/sscanf-x1.c", 5746),
("avr-libc/tests/simulate/scanf/sscanf-x2.c", 4584),
("avr-libc/tests/simulate/scanf/sscanf-x3.c", 4527),
("avr-libc/tests/simulate/scanf/sscanf_brk-1.c", 4805),
("avr-libc/tests/simulate/scanf/sscanf_brk-2.c", 4120),
("avr-libc/tests/simulate/scanf/sscanf_brk-3.c", 5088),
("avr-libc/tests/simulate/scanf/sscanf_brk-4.c", 4956),
("avr-libc/tests/simulate/scanf/sscanf_flt-f1.c", 5398),
("avr-libc/tests/simulate/scanf/sscanf_flt-f2.c", 5639),
("avr-libc/tests/simulate/scanf/sscanf_flt-f3.c", 5269),
("avr-libc/tests/simulate/scanf/sscanf_flt-fnn.c", 5375),
("avr-libc/tests/simulate/scanf/sscanf_flt-fw.c", 4499),
("avr-libc/tests/simulate/stdlib/abort-1.c", 1911),
("avr-libc/tests/simulate/stdlib/atoi-1.c", 3094),
("avr-libc/tests/simulate/stdlib/atol-1.c", 3298),
("avr-libc/tests/simulate/stdlib/atol-2.c", 3533),
("avr-libc/tests/simulate/stdlib/bsearch-1.c", 2657),
("avr-libc/tests/simulate/stdlib/bsearch-2.c", 3034),
("avr-libc/tests/simulate/stdlib/bsearch-3.c", 2533),
("avr-libc/tests/simulate/stdlib/dtostre-01.c", 2154),
("avr-libc/tests/simulate/stdlib/dtostre-02.c", 6260),
("avr-libc/tests/simulate/stdlib/dtostre-03.c", 5274),
("avr-libc/tests/simulate/stdlib/dtostre-04.c", 5269),
("avr-libc/tests/simulate/stdlib/dtostre-05.c", 4885),
("avr-libc/tests/simulate/stdlib/dtostre-06.c", 3788),
("avr-libc/tests/simulate/stdlib/dtostre-expm00.c", 2604),
("avr-libc/tests/simulate/stdlib/dtostre-minmax.c", 2961),
("avr-libc/tests/simulate/stdlib/dtostre-nans.c", 2857),
("avr-libc/tests/simulate/stdlib/dtostre-subnrm.c", 2678),
("avr-libc/tests/simulate/stdlib/dtostre-zero.c", 2512),
("avr-libc/tests/simulate/stdlib/dtostre.h", 3867),
("avr-libc/tests/simulate/stdlib/dtostrf-01.c", 2411),
("avr-libc/tests/simulate/stdlib/dtostrf-big.c", 3942),
("avr-libc/tests/simulate/stdlib/dtostrf-minmax.c", 3196),
("avr-libc/tests/simulate/stdlib/dtostrf-nans.c", 2823),
("avr-libc/tests/simulate/stdlib/dtostrf-round.c", 3260),
("avr-libc/tests/simulate/stdlib/dtostrf-width.c", 3083),
("avr-libc/tests/simulate/stdlib/dtostrf.h", 3464),
("avr-libc/tests/simulate/stdlib/exit-1.c", 1869),
("avr-libc/tests/simulate/stdlib/isalnum-1.c", 2884),
("avr-libc/tests/simulate/stdlib/isalpha-1.c", 2847),
("avr-libc/tests/simulate/stdlib/isascii-1.c", 2817),
("avr-libc/tests/simulate/stdlib/isblank-1.c", 2820),
("avr-libc/tests/simulate/stdlib/iscntrl-1.c", 2831),
("avr-libc/tests/simulate/stdlib/isdigit-1.c", 2828),
("avr-libc/tests/simulate/stdlib/isgraph-1.c", 2813),
("avr-libc/tests/simulate/stdlib/islower-1.c", 2819),
("avr-libc/tests/simulate/stdlib/isprint-1.c", 2815),
("avr-libc/tests/simulate/stdlib/ispunct-1.c", 2915),
("avr-libc/tests/simulate/stdlib/isspace-1.c", 2824),
("avr-libc/tests/simulate/stdlib/isupper-1.c", 2819),
("avr-libc/tests/simulate/stdlib/isxdigit-1.c", 2847),
("avr-libc/tests/simulate/stdlib/itoa-1.c", 4214),
("avr-libc/tests/simulate/stdlib/itoa-2.c", 3869),
("avr-libc/tests/simulate/stdlib/ltoa-1.c", 4242),
("avr-libc/tests/simulate/stdlib/ltoa-2.c", 4131),
("avr-libc/tests/simulate/stdlib/malloc-1.c", 3193),
("avr-libc/tests/simulate/stdlib/malloc-2.c", 2818),
("avr-libc/tests/simulate/stdlib/malloc-3.c", 3037),
("avr-libc/tests/simulate/stdlib/malloc-4.c", 2596),
("avr-libc/tests/simulate/stdlib/malloc-5.c", 2552),
("avr-libc/tests/simulate/stdlib/malloc-6.c", 2505),
("avr-libc/tests/simulate/stdlib/malloc-7.c", 3122),
("avr-libc/tests/simulate/stdlib/malloc-8.c", 7073),
("avr-libc/tests/simulate/stdlib/realloc-1.c", 2912),
("avr-libc/tests/simulate/stdlib/realloc-2.c", 3502),
("avr-libc/tests/simulate/stdlib/realloc-3.c", 6506),
("avr-libc/tests/simulate/stdlib/setjmp-1.c", 3110),
("avr-libc/tests/simulate/stdlib/setjmp-2.c", 3600),
("avr-libc/tests/simulate/stdlib/setjmp-3.c", 2550),
("avr-libc/tests/simulate/stdlib/setjmp-4.c", 2441),
("avr-libc/tests/simulate/stdlib/setjmp-5.c", 2214),
("avr-libc/tests/simulate/stdlib/strtod-1.c", 4827),
("avr-libc/tests/simulate/stdlib/strtod-2.c", 4694),
("avr-libc/tests/simulate/stdlib/strtod-3.c", 3771),
("avr-libc/tests/simulate/stdlib/strtol-1.c", 3411),
("avr-libc/tests/simulate/stdlib/strtol-2.c", 2752),
("avr-libc/tests/simulate/stdlib/strtol-3.c", 2842),
("avr-libc/tests/simulate/stdlib/strtol-4.c", 3230),
("avr-libc/tests/simulate/stdlib/strtol.h", 2412),
("avr-libc/tests/simulate/stdlib/strtoul-1.c", 3784),
("avr-libc/tests/simulate/stdlib/strtoul-2.c", 4227),
("avr-libc/tests/simulate/stdlib/strtoul-3.c", 3253),
("avr-libc/tests/simulate/stdlib/strtoul.h", 2407),
("avr-libc/tests/simulate/stdlib/tolower-1.c", 2838),
("avr-libc/tests/simulate/stdlib/toupper-1.c", 2838),
("avr-libc/tests/simulate/stdlib/ultoa-1.c", 4190),
("avr-libc/tests/simulate/stdlib/ultoa-2.c", 6306),
("avr-libc/tests/simulate/stdlib/ultoa-3.c", 4050),
("avr-libc/tests/simulate/stdlib/utoa-1.c", 4124),
("avr-libc/tests/simulate/stdlib/utoa-2.c", 4798),
("avr-libc/tests/simulate/stdlib/utoa-3.c", 3985),
("avr-libc/tests/simulate/string/ffs-1.c", 2830),
("avr-libc/tests/simulate/string/ffs_macro.c", 3581),
("avr-libc/tests/simulate/string/ffsl-1.c", 2368),
("avr-libc/tests/simulate/string/ffsll-1.c", 3102),
("avr-libc/tests/simulate/string/memccpy.c", 6271),
("avr-libc/tests/simulate/string/memchr.c", 3319),
("avr-libc/tests/simulate/string/memcmp.c", 3555),
("avr-libc/tests/simulate/string/memmem.c", 6118),
("avr-libc/tests/simulate/string/memrchr.c", 4700),
("avr-libc/tests/simulate/string/strcasecmp.c", 3209),
("avr-libc/tests/simulate/string/strcasestr.c", 7203),
("avr-libc/tests/simulate/string/strchr.c", 2964),
("avr-libc/tests/simulate/string/strchrnul.c", 4225),
("avr-libc/tests/simulate/string/strcmp.c", 3091),
("avr-libc/tests/simulate/string/strcspn.c", 4431),
("avr-libc/tests/simulate/string/strlcat.c", 12885),
("avr-libc/tests/simulate/string/strlcpy.c", 7678),
("avr-libc/tests/simulate/string/strlwr.c", 2950),
("avr-libc/tests/simulate/string/strncasecmp.c", 3740),
("avr-libc/tests/simulate/string/strpbrk.c", 3243),
("avr-libc/tests/simulate/string/strrchr.c", 3078),
("avr-libc/tests/simulate/string/strrev.c", 5109),
("avr-libc/tests/simulate/string/strsep.c", 5280),
("avr-libc/tests/simulate/string/strspn.c", 4120),
("avr-libc/tests/simulate/string/strstr.c", 3509),
("avr-libc/tests/simulate/string/strtok.c", 5521),
("avr-libc/tests/simulate/string/strupr.c", 2950),
("avr-libc/tests/simulate/time/aux.c", 1808),
("avr-libc/tests/simulate/time/declination.c", 2955),
("avr-libc/tests/simulate/time/equation.c", 2837),
("avr-libc/tests/simulate/time/isotime.c", 2183),
("avr-libc/tests/simulate/time/mktime.c", 1911),
("avr-libc/tests/simulate/time/sidereal.c", 2130),
("avr-libc/tests/simulate/time/tick.c", 1987),
("avr-libc/tests/simulate/util/crc16-1.c", 2681),
("avr-libc/tests/simulate/util/crc16-2.c", 2533),
("avr-libc/tests/simulate/util/crc16-3.c", 2421),
("avr-libc/tests/simulate/util/crc16-4.c", 2441),
("avr-libc/tests/simulate/util/crc16-5.c", 2535)
]
.iter()
.map(|(p, s)| (p.to_string(), *s))
.collect(),
suggested_fix: Some(Fix::NewInclude {
include: vec![
"src/**/*",
"LICENSE",
"README.md",
"avr-libc/*",
"build.rs",
"!**/*.jpg",
"!**/doc/**/*",
"!**/tests/**/*",
]
.iter()
.map(|s| s.to_string())
.collect(),
has_build_script: true
})
}
);
}
#[test]
fn fermium_build_script() {
assert_eq!(
Report::from_package("a", "1", tar_package("fermium-20.12.0-alpha2-extract_crate-1.0.0")),
Report::Version {
crate_name: "a".into(),
crate_version: "1".to_string(),
total_size_in_bytes: 73827916,
total_files: 1345,
wasted_files: [
(".gitignore", 107),
(".travis.yml", 1116),
("appveyor.yml", 1124),
("examples/window.rs", 4394),
("linux_bindings.sh", 82),
("mac_bindings.sh", 127),
("tests/functions.rs", 78203),
("tests/notes.txt", 2164),
("win32-dynamic-link-files/x64/SDL2.dll", 1471488),
("win32-dynamic-link-files/x64/SDL2.lib", 150888),
("win32-dynamic-link-files/x86/SDL2.dll", 1220096),
("win32-dynamic-link-files/x86/SDL2.lib", 154422),
("win32_bindings.bat", 191),
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | true |
the-lean-crate/criner | https://github.com/the-lean-crate/criner/blob/a075e734dede8e1de5fe1652ec86f42da0162c41/criner-waste-report/src/test/mod.rs | criner-waste-report/src/test/mod.rs | mod from_package;
| rust | MIT | a075e734dede8e1de5fe1652ec86f42da0162c41 | 2026-01-04T20:17:28.169370Z | false |
MaikKlein/pyro | https://github.com/MaikKlein/pyro/blob/5b32b724c4e4656160248164a251a62e29954c5f/pyro/src/slice.rs | pyro/src/slice.rs | //! Temporary helper module until raw slices `*mut [T]` are on stable, or until `&[T]` is not UB
//! anymore for unitialized memory.
use std::marker::PhantomData;
/// Marker type for raw slices that may hand out mutable pointers.
pub enum Mutable {}
/// Marker type for raw slices that only hand out shared pointers.
pub enum Immutable {}
mod sealed {
    /// Prevents downstream code from implementing [`Mutability`](super::Mutability).
    pub trait Sealed {}
}
/// Sealed marker trait implemented only by [`Mutable`] and [`Immutable`].
pub trait Mutability: sealed::Sealed {}
impl sealed::Sealed for Mutable {}
impl sealed::Sealed for Immutable {}
impl Mutability for Mutable {}
impl Mutability for Immutable {}
/// A borrowed raw view over `len` consecutive `T`s starting at `start`.
///
/// The lifetime `'a` ties the view to the borrowed allocation and the marker
/// `M` records whether mutable access is allowed. Unlike `&[T]`, no reference
/// to the elements is formed until a caller dereferences a returned pointer,
/// which sidesteps `&[T]`'s initialization requirements.
pub struct RawSlice<'a, M: Mutability, T> {
    /// Pointer to the first element; must point at `len` valid `T` slots.
    pub start: *mut T,
    /// Number of elements reachable from `start`.
    pub len: usize,
    _marker: PhantomData<&'a M>,
}
// SAFETY: a `RawSlice` is only a borrowed view over `T`s, so it is exactly as
// thread-safe as the element type it points to.
unsafe impl<M: Mutability, T: Send> Send for RawSlice<'_, M, T> {}
unsafe impl<M: Mutability, T: Sync> Sync for RawSlice<'_, M, T> {}
/// Shared (read-only) raw slice.
pub type Slice<'a, T> = RawSlice<'a, Immutable, T>;
/// Exclusive (read-write) raw slice.
pub type SliceMut<'a, T> = RawSlice<'a, Mutable, T>;
impl<'a, M, T> RawSlice<'a, M, T>
where
    M: Mutability,
{
    /// Returns a pointer to element `idx` without bounds checking.
    ///
    /// # Safety
    /// `idx` must be strictly less than `self.len`.
    #[inline]
    pub unsafe fn get_unchecked(&self, idx: usize) -> *const T {
        self.start.add(idx) as *const T
    }
    /// Returns a pointer to element `idx`.
    ///
    /// # Panics
    /// Panics when `idx` is out of bounds.
    #[inline]
    pub fn get(&self, idx: usize) -> *const T {
        assert!(idx < self.len);
        // SAFETY: the assert above guarantees `idx` is in bounds.
        unsafe { self.get_unchecked(idx) }
    }
    /// Returns a pointer to element `idx`, or `None` when out of bounds.
    #[inline]
    pub fn try_get(&self, idx: usize) -> Option<*const T> {
        if idx >= self.len {
            return None;
        }
        // SAFETY: `idx < self.len` was just checked.
        Some(unsafe { self.get_unchecked(idx) })
    }
}
impl<'a, T> RawSlice<'a, Immutable, T> {
    /// Splits the view into `[0, idx)` and `[idx, len)`.
    pub fn split_at(self, idx: usize) -> (Self, Self) {
        // Splitting past the end would make the right half dangle.
        debug_assert!(idx <= self.len);
        unsafe {
            let left = Slice::from_raw(self.start, idx);
            let right = Slice::from_raw(self.start.add(idx), self.len - idx);
            (left, right)
        }
    }
    /// Borrows a regular slice as an immutable raw slice.
    pub fn from_slice(slice: &'a [T]) -> Self {
        Self::from_raw(slice.as_ptr(), slice.len())
    }
    /// Builds a view from raw parts; `start` must point at `len` valid `T`s.
    pub fn from_raw(start: *const T, len: usize) -> Self {
        Self {
            start: start as _,
            len,
            _marker: PhantomData,
        }
    }
}
impl<'a, T> RawSlice<'a, Mutable, T> {
    /// Returns a mutable pointer to element `idx` without bounds checking.
    ///
    /// # Safety
    /// `idx` must be strictly less than `self.len`.
    #[inline]
    pub unsafe fn get_unchecked_mut(&self, idx: usize) -> *mut T {
        self.start.add(idx)
    }
    /// Returns a mutable pointer to element `idx`.
    ///
    /// # Panics
    /// Panics when `idx` is out of bounds.
    #[inline]
    pub fn get_mut(&self, idx: usize) -> *mut T {
        assert!(idx < self.len);
        // SAFETY: the assert above guarantees `idx` is in bounds.
        unsafe { self.get_unchecked_mut(idx) }
    }
    /// Returns a mutable pointer to element `idx`, or `None` when out of bounds.
    ///
    /// Takes `&self` (like [`RawSlice::get_mut`]) rather than the previous
    /// `&mut self`: handing out a raw pointer requires no exclusive borrow of
    /// the view itself, and the relaxation is backward compatible.
    #[inline]
    pub fn try_get_mut(&self, idx: usize) -> Option<*mut T> {
        if idx >= self.len {
            return None;
        }
        // SAFETY: `idx < self.len` was just checked.
        Some(unsafe { self.get_unchecked_mut(idx) })
    }
    /// Borrows a regular mutable slice as a mutable raw slice.
    pub fn from_slice(slice: &'a mut [T]) -> Self {
        Self::from_raw(slice.as_mut_ptr(), slice.len())
    }
    /// Builds a view from raw parts; `start` must point at `len` valid `T`s.
    pub fn from_raw(start: *mut T, len: usize) -> Self {
        Self {
            start,
            len,
            _marker: PhantomData,
        }
    }
    /// Splits the view into `[0, idx)` and `[idx, len)`.
    pub fn split_at_mut(self, idx: usize) -> (Self, Self) {
        // Splitting past the end would make the right half dangle.
        debug_assert!(idx <= self.len);
        unsafe {
            let left = SliceMut::from_raw(self.start, idx);
            let right = SliceMut::from_raw(self.start.add(idx), self.len - idx);
            (left, right)
        }
    }
}
| rust | MIT | 5b32b724c4e4656160248164a251a62e29954c5f | 2026-01-04T20:17:30.804484Z | false |
MaikKlein/pyro | https://github.com/MaikKlein/pyro/blob/5b32b724c4e4656160248164a251a62e29954c5f/pyro/src/lib.rs | pyro/src/lib.rs | //! # What is an Entity Component System?
//! An Entity Component System or *ECS* is very similar to a relational database like *SQL*. The
//! [`World`] is the data store where game objects (also known as [`Entity`]) live. An [`Entity`]
//! contains data or [`Component`]s.
//! The *ECS* can efficiently query those components.
//!
//! > Give me all entities that have a position and velocity component, and then update the position
//! based on the velocity.
//!
//! ```rust,ignore
//! type PosVelQuery = (Write<Pos>, Read<Vel>);
//! // ^^^^^ ^^^^
//! // Mutable Immutable
//! world.matcher::<PosVelQuery>().for_each(|(pos, vel)|{
//! pos += vel;
//! })
//! ```
//!
//! # Internals
//! ## Overview
//! * Iteration is always **linear**.
//! * Different component combinations live in a separate storage
//! * Removing entities does not create holes.
//! * All operations are designed to be used in bulk.
//! * Borrow rules are enforced at runtime. See [`RuntimeBorrow`]
//! * [`Entity`] is using a wrapping generational index. See [`Entity::version`]
//!
//! ```ignore
//!// A Storage that contains `Pos`, `Vel`, `Health`.
//!(
//! [Pos1, Pos2, Pos3, .., PosN],
//! [Vel1, Vel2, Vel3, .., VelN],
//! [Health1, Health2, Health3, .., HealthN],
//!)
//!
//!// A Storage that contains `Pos`, `Vel`.
//!(
//! [Pos1, Pos2, Pos3, .., PosM]
//! [Vel1, Vel2, Vel3, .., VelM]
//!)
//!
//! ```
//!
//! Iteration is fully linear with the exception of jumping to different storages.
//!
//! The iteration pattern from the query above would be
//!
//!
//! ```ignore
//! positions: [Pos1, Pos2, Pos3, .., PosN], [Pos1, Pos2, Pos3, .., PosM]
//! velocities: [Vel1, Vel2, Vel3, .., VelN], [Vel1, Vel2, Vel3, .., VelM]
//! ^
//! Jump occurs here
//! ```
//! The jump is something like a chain of two iterators. We look at all the storages
//! that match specific query. If the query would be `Write<Position>`, then we would
//! look for all the storages that contain a position array, extract the iterators and chain them
//!
//! Every combination of components will be in a separate storage. This guarantees that iteration
//! will always be linear.
//!
//! # Benchmarks
//!
//! 
//!
//! # Getting started
//!
//! ```
//! extern crate pyro;
//! use pyro::{ World, Entity, Read, Write};
//! struct Position;
//! struct Velocity;
//!
//!
//! let mut world: World = World::new();
//! let add_pos_vel = (0..99).map(|_| (Position{}, Velocity{}));
//! // ^^^^^^^^^^^^^^^^^^^^^^^^
//! // A tuple of (Position, Velocity),
//! // Note: Order does *not* matter
//!
//! // Appends 99 entities with a Position and Velocity component.
//! world.append_components(add_pos_vel);
//!
//! // Appends a single entity
//! world.append_components(Some((Position{}, Velocity{})));
//!
//! // Requests a mutable borrow to Position, and an immutable borrow to Velocity.
//! // Common queries can be reused with a typedef like this but it is not necessary.
//! type PosVelQuery = (Write<Position>, Read<Velocity>);
//!
//! // Retrieves all entities that have a Position and Velocity component as an iterator.
//! world.matcher::<PosVelQuery>().for_each(|(pos, vel)|{
//! // ...
//! });
//!
//! // The same query as above but also retrieves the entities and collects the entities into a
//! // `Vec<Entity>`.
//! let entities: Vec<Entity> =
//! world.matcher_with_entities::<PosVelQuery>()
//! .filter_map(|(entity, (pos, vel))|{
//! Some(entity)
//! }).collect();
//!
//! // Removes all the entities
//! world.remove_entities(entities);
//! let count = world.matcher::<PosVelQuery>().count();
//! assert_eq!(count, 0);
//! ```
mod chunk;
mod slice;
mod zip;
use chunk::{MetadataMap, Storage};
use log::debug;
use parking_lot::Mutex;
#[cfg(feature = "threading")]
pub use rayon::iter::{plumbing::UnindexedConsumer, IntoParallelRefIterator, ParallelIterator};
use slice::{Slice, SliceMut};
use std::{
collections::HashSet,
iter::{ExactSizeIterator, FusedIterator, IntoIterator},
marker::PhantomData,
num::Wrapping,
};
use typedef::TypeDef;
use vec_map::VecMap;
use zip::ZipSlice;
/// Index of a [`Storage`] inside [`World::storages`].
pub type StorageId = u16;
/// Index of an entity/component slot within a single storage.
pub type ComponentId = u32;
/// Generation counter used to detect stale [`Entity`] handles.
pub type Version = u16;
/// Random-access abstraction over the raw columns produced by [`Fetch`];
/// implemented for [`Slice`] and [`SliceMut`] and consumed by the zip iterator.
pub trait Index<'a>: Sized {
    /// The element handed out per index (e.g. `&T` or `&mut T`).
    type Item;
    /// Returns the element at `idx` without bounds checking.
    ///
    /// # Safety
    /// `idx` must be strictly less than [`Index::len`].
    unsafe fn get_unchecked(&self, idx: usize) -> Self::Item;
    /// Splits into the ranges `[0, idx)` and `[idx, len)`.
    fn split_at(self, idx: usize) -> (Self, Self);
    fn len(&self) -> usize;
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// The [`Iterator`] is used to end a borrow from a query like [`World::matcher`].
pub struct BorrowIter<'s, I> {
    /// Kept so that `Drop` can pop the borrow that the query registered.
    world: &'s World,
    iter: I,
}
#[cfg(feature = "threading")]
impl<'s, I> ParallelIterator for BorrowIter<'s, Option<I>>
where
    I: ParallelIterator,
{
    type Item = I::Item;
    fn drive_unindexed<C>(mut self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        // The inner iterator is wrapped in an `Option` so it can be moved out
        // here by value while `self` (and therefore the `Drop` impl that pops
        // the borrow) stays intact.
        self.iter.take().unwrap().drive_unindexed(consumer)
    }
}
impl<'s, I> Iterator for BorrowIter<'s, I>
where
    I: Iterator,
{
    type Item = I::Item;
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}
impl<'s, I> FusedIterator for BorrowIter<'s, I> where I: FusedIterator {}
impl<'s, I> ExactSizeIterator for BorrowIter<'s, I>
where
    I: ExactSizeIterator,
{
    fn len(&self) -> usize {
        self.iter.len()
    }
}
impl<'s, I> Drop for BorrowIter<'s, I> {
    /// Ends the borrow that was registered when this query started by popping
    /// it off the runtime borrow stack.
    fn drop(&mut self) {
        self.world.runtime_borrow.lock().pop_access();
    }
}
/// Serves as an ID to lookup components for entities which can be in
/// different storages.
// [TODO]: Make `Entity` generic.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Entity {
    /// Removing entities will increment the versioning. Accessing an [`Entity`] with an
    /// outdated version will result in a `panic`. `version` does wrap on overflow.
    version: Wrapping<Version>,
    /// The id of the storage where the [`Entity`] lives.
    storage_id: StorageId,
    /// The actual slot id inside that storage's component map.
    id: ComponentId,
}
/// [`World`] is the heart of this library. It owns all the [`Component`]s and [`Storage`]s.
/// It also manages entities and allows [`Component`]s to be safely queried.
pub struct World {
    /// Storages need to be linear, that is why deletion will use [`Vec::swap_remove`] under the
    /// hood. But this moves the components around and we need to keep track of those swaps. This
    /// map is then used to find the correct [`ComponentId`] for an [`Entity`]. This maps the
    /// entity id to the real storage id. Outer index: [`StorageId`].
    component_map: Vec<VecMap<ComponentId>>,
    /// This is the opposite of `component_map`. This maps the storage id to the entity id.
    component_map_inv: Vec<VecMap<ComponentId>>,
    /// When we remove an [`Entity`], we will put it in this free map to be reused.
    free_map: Vec<Vec<ComponentId>>,
    /// Per-storage, per-entity-slot generation counters; bumped on removal.
    version: Vec<Vec<Wrapping<Version>>>,
    storages: Vec<Storage>,
    /// The runtime borrow system. See [`RuntimeBorrow`] for more information. It is also wrapped
    /// in a Mutex so that we can keep track of multiple borrows on different threads.
    runtime_borrow: Mutex<RuntimeBorrow>,
}
impl Default for World {
fn default() -> Self {
Self::new()
}
}
impl World {
/// Creates an empty [`World`].
pub fn new() -> Self {
World {
runtime_borrow: Mutex::new(RuntimeBorrow::new()),
component_map: Vec::new(),
component_map_inv: Vec::new(),
free_map: Vec::new(),
version: Vec::new(),
storages: Vec::new(),
}
}
}
impl World {
    /// Creates an `Iterator` over every [`Entity`] inside [`World`]. The entities are
    /// not ordered.
    pub fn entities<'s>(&'s self) -> impl Iterator<Item = Entity> + 's {
        self.component_map
            .iter()
            .enumerate()
            .flat_map(move |(idx, inner)| {
                let storage_id = idx as StorageId;
                // `component_map` is keyed by entity id, so `keys()` yields the
                // public-facing ids; the current version is looked up alongside.
                inner.keys().map(move |component_id| Entity {
                    storage_id,
                    id: component_id as ComponentId,
                    version: self.version[storage_id as usize][component_id as usize],
                })
            })
    }
    /// Iterates the entities of a single storage in the order of the real
    /// (linear) storage, so it can be zipped with a component query.
    fn entities_storage<'s>(&'s self, storage_id: StorageId) -> impl Iterator<Item = Entity> + 's {
        // We iterate with the `component_map_inv`, because that is the order of the real storage.
        self.component_map_inv[storage_id as usize]
            .values()
            .map(move |&id| Entity {
                storage_id,
                id,
                version: self.version[storage_id as usize][id as usize],
            })
    }
    /// Pushes a new borrow on the stack and checks if there are any illegal overlapping borrows
    /// such as Write/Write and Read/Write.
    ///
    /// # Panics
    /// Panics when the new borrow conflicts with a still-active one.
    fn borrow_and_validate<Borrow: RegisterBorrow>(&self) {
        let mut borrow = self.runtime_borrow.lock();
        borrow.push_access::<Borrow>();
        // TODO: Implement a better error message.
        if let Err(overlapping_borrows) = borrow.validate() {
            panic!("Detected multiple active borrows of: {:?}", {
                overlapping_borrows
                    .iter()
                    .map(|ty| ty.get_str())
                    .collect::<Vec<_>>()
            });
        }
    }
    /// Parallel version of [`World::matcher`]; requires the `threading` feature.
    #[cfg(feature = "threading")]
    pub fn par_matcher<'s, Q>(
        &'s self,
    ) -> impl ParallelIterator<Item = <<Q as ParQuery<'s>>::Iter as ParallelIterator>::Item> + 's
    where
        Q: ParQuery<'s> + Matcher,
        Q::Borrow: RegisterBorrow,
    {
        // FIX: the parallel path previously skipped borrow registration even
        // though `BorrowIter::drop` unconditionally pops an access. That
        // unbalanced the runtime borrow stack (popping some other query's
        // borrow). Register and validate exactly like the sequential path.
        self.borrow_and_validate::<Q::Borrow>();
        let iter = unsafe {
            self.storages
                .par_iter()
                .filter(|&storage| Q::is_match(storage))
                .map(|storage| Q::query(storage))
                .flat_map(|iter| iter)
        };
        BorrowIter {
            world: self,
            iter: Some(iter),
        }
    }
    /// Uses [`Query`] and [`Matcher`] to access the correct components. [`Read`] will borrow the
    /// component immutable while [`Write`] will borrow the component mutable.
    /// ```rust,ignore
    /// fn update(world: &mut World) {
    ///     world
    ///         .matcher::<(Write<Position>, Read<Velocity>)>()
    ///         .for_each(|(p, v)| {
    ///             p.x += v.dx;
    ///             p.y += v.dy;
    ///         });
    /// }
    /// ```
    pub fn matcher<'s, Q>(
        &'s self,
    ) -> impl Iterator<Item = <<Q as Query<'s>>::Iter as Iterator>::Item> + 's
    where
        Q: Query<'s> + Matcher,
        Q::Borrow: RegisterBorrow,
    {
        self.borrow_and_validate::<Q::Borrow>();
        // SAFETY: `is_match` guarantees every queried storage contains all the
        // requested components; aliasing was validated above.
        let iter = unsafe {
            self.storages
                .iter()
                .filter(|&storage| Q::is_match(storage))
                .flat_map(|storage| Q::query(storage))
        };
        BorrowIter { world: self, iter }
    }
    /// Same as [`World::matcher`] but also returns the corresponding [`Entity`].
    /// ```rust,ignore
    /// fn update(world: &mut World) {
    ///     world
    ///         .matcher_with_entities::<(Write<Position>, Read<Velocity>)>()
    ///         .for_each(|(entity, (p, v))| {
    ///             p.x += v.dx;
    ///             p.y += v.dy;
    ///         });
    /// }
    /// ```
    pub fn matcher_with_entities<'s, Q>(
        &'s self,
    ) -> impl Iterator<Item = (Entity, <<Q as Query<'s>>::Iter as Iterator>::Item)> + 's
    where
        Q: Query<'s> + Matcher,
        Q::Borrow: RegisterBorrow,
    {
        self.borrow_and_validate::<Q::Borrow>();
        let iter = self
            .storages
            .iter()
            .enumerate()
            .filter(|&(_, storage)| Q::is_match(storage))
            .flat_map(move |(id, storage)| {
                // SAFETY: `is_match` just confirmed this storage holds every
                // component the query fetches.
                let query = unsafe { Q::query(storage) };
                // `entities_storage` yields ids in storage order, so zipping
                // pairs each entity with its own components.
                let entities = self.entities_storage(id as StorageId);
                Iterator::zip(entities, query)
            });
        BorrowIter { world: self, iter }
    }
}
impl World {
    /// Appends the components and also creates the necessary [`Entity`]s behind the scenes.
    /// If you only want to append a single set of components then you can do
    /// ```rust,ignore
    /// world.append_components(Some((a, b, c)));
    /// ```
    pub fn append_components<A, I>(&mut self, i: I)
    where
        A: AppendComponents + BuildStorage,
        I: IntoIterator<Item = A>,
    {
        // Try to find a matching storage, and insert the components
        let (storage_id, insert_count) = if let Some((id, storage)) = self
            .storages
            .iter_mut()
            .enumerate()
            .find(|(_, storage)| A::is_match(storage))
        {
            let len = A::append_components(i, storage);
            (id as StorageId, len)
        } else {
            // if we did not find a storage, we need to create one
            let id = self.storages.len() as StorageId;
            let mut storage = <A as BuildStorage>::build();
            let len = A::append_components(i, &mut storage);
            self.storages.push(storage);
            // Also we need to add an entity Vec for that storage
            self.component_map.push(VecMap::default());
            self.component_map_inv.push(VecMap::default());
            self.free_map.push(Vec::new());
            self.version.push(Vec::new());
            (id, len)
        };
        let storage_index = storage_id as usize;
        if insert_count == 0 {
            return;
        }
        // Inserting components is not enough, we also need to create the entity ids
        // for those components. The new components occupy the slots
        // `[start_len, end_len)` at the tail of the linear storage.
        let start_len = self.component_map[storage_index].len() as ComponentId;
        let end_len = start_len + insert_count as ComponentId;
        debug!("Append to Storage: {}", storage_id);
        debug!("- Insert count: {}", insert_count);
        debug!(
            "- Map length before: {}",
            self.component_map[storage_id as usize].len()
        );
        for component_id in start_len..end_len {
            if let Some(insert_at) = self.free_map[storage_index].pop() {
                // Reuse a previously freed entity id. Its version was already
                // bumped when the old entity was removed, so stale handles to
                // the old entity stay invalid.
                self.insert_component_map(storage_id, insert_at, component_id);
            } else {
                // If the free list is empty, then we can insert it at the end.
                let insert_at = self.component_map[storage_index].len() as ComponentId;
                self.version[storage_index].push(Wrapping(0));
                self.insert_component_map(storage_id, insert_at, component_id);
            }
        }
        assert_eq!(
            self.component_map[storage_index].len(),
            self.storages[storage_index].len(),
            "The size of the component map and storage map should be equal"
        );
    }
    /// Compares the version of the entity with the version in [`World`] and returns true if they
    /// match. Because `version` wraps around this is not a hard guarantee.
    pub fn is_entity_valid(&self, entity: Entity) -> bool {
        self.version[entity.storage_id as usize]
            .get(entity.id as usize)
            .map(|&version| version == entity.version)
            .unwrap_or(false)
    }
    /// Records the two-way mapping between an entity id (`id`) and the slot the
    /// components actually occupy inside the linear storage (`component_id`).
    fn insert_component_map(
        &mut self,
        storage_id: StorageId,
        id: ComponentId,
        component_id: ComponentId,
    ) {
        self.component_map[storage_id as usize].insert(id as usize, component_id);
        self.component_map_inv[storage_id as usize].insert(component_id as usize, id);
    }
    /// Returns true if the entity owns the requested component.
    pub fn has_component<C: Component>(&self, e: Entity) -> bool {
        self.get_component::<C>(e).is_some()
    }
    /// Retrieves a component for a specific [`Entity`], or `None` when the
    /// entity is stale or its storage has no `C` column.
    pub fn get_component<C: Component>(&self, e: Entity) -> Option<&C> {
        let storage = &self.storages[e.storage_id as usize];
        if !storage.contains::<C>() || !self.is_entity_valid(e) {
            return None;
        }
        // Translate the stable entity id into the current physical slot.
        let component_id = self.component_map[e.storage_id as usize][e.id as usize];
        storage
            .components_raw::<C>()
            .try_get(component_id as usize)
            .map(|ptr| unsafe { &*ptr })
    }
    /// Same as [`World::get_component`] but mutable.
    // [TODO]: Possibly make this immutable and add the runtime borrow system if &mut isn't
    // flexible enough.
    pub fn get_component_mut<C: Component>(&mut self, e: Entity) -> Option<&mut C> {
        let storage = &self.storages[e.storage_id as usize];
        if !storage.contains::<C>() || !self.is_entity_valid(e) {
            return None;
        }
        let component_id = self.component_map[e.storage_id as usize][e.id as usize];
        storage
            .components_mut_raw::<C>()
            .try_get_mut(component_id as usize)
            .map(|ptr| unsafe { &mut *ptr })
    }
    /// Removes the specified entities from [`World`]. Those entities are now considered invalid,
    /// which can be checked with [`World::is_entity_valid`]. Already-invalid
    /// entities are silently skipped.
    pub fn remove_entities<I>(&mut self, entities: I)
    where
        I: IntoIterator<Item = Entity>,
    {
        for entity in entities {
            debug!("Removing {:?}", entity);
            let storage_id = entity.storage_id as usize;
            let is_valid = self.is_entity_valid(entity);
            if !is_valid {
                continue;
            }
            let component_id = *self.component_map[storage_id]
                .get(entity.id as usize)
                .expect("component id");
            // [FIXME]: This uses dynamic dispatch so we might want to batch entities
            // together to reduce the overhead.
            let swap = self.storages[storage_id].swap_remove(component_id as _) as ComponentId;
            // We need to keep track which entity was deleted and which was swapped.
            debug!(
                "- Entitiy id: {}, Component id: {}, Swap: {}, Map length: {}, Storage length: {}",
                entity.id,
                component_id,
                swap,
                self.storages[storage_id].len() + 1,
                self.component_map[storage_id].len()
            );
            // We need to look up the id that got swapped
            let key = self.component_map_inv[storage_id][swap as usize];
            debug!("- Updating {} to {}", key, component_id);
            // The id that was swapped should now point to the component_id that was removed
            self.insert_component_map(storage_id as StorageId, key, component_id);
            debug!("- Removing {} from `component_map`", entity.id);
            // Now we consider the id to be deleted and remove it from the `component_map`.
            self.component_map[storage_id as usize].remove(entity.id as usize);
            // We also need to update our `component_inverse_map`. `swap` was the real location
            // that was deleted in the underlying `storage` and we need to remove it.
            self.component_map_inv[storage_id as usize].remove(swap as usize);
            // And we need to append the remove id to the free map so we can reuse it again when we
            // `append_components`.
            self.free_map[storage_id].push(entity.id);
            // Bump the generation so outstanding handles become invalid.
            self.version[storage_id][entity.id as usize] += Wrapping(1);
        }
    }
}
/// Produces the [`Borrow`] set (reads and writes) that a query registers with
/// the [`RuntimeBorrow`] tracker before it starts iterating.
pub trait RegisterBorrow {
    /// Creates a new borrow
    fn register_borrow() -> Borrow;
}
/// Is implemented for [`Read`] and [`Write`] and is used to insert reads and writes into the
/// correct [`HashSet`].
pub trait PushBorrow {
    /// Inserts a new borrow and returns true if it was successful.
    // Parameter renamed from the misspelled `acccess`; Rust parameter names
    // are not part of the public API, so callers are unaffected.
    fn push_borrow(borrow: &mut Borrow) -> bool;
}
impl<T: Component> PushBorrow for Write<T> {
    /// If a `Write` was already in the set, then we have detected multiple writes, which is not
    /// allowed (`HashSet::insert` returns false on duplicates).
    fn push_borrow(borrow: &mut Borrow) -> bool {
        borrow.writes.insert(TypeDef::of::<T>())
    }
}
impl<T: Component> PushBorrow for &'_ mut T {
    /// If a `Write` was already in the set, then we have detected multiple writes, which is not
    /// allowed (`HashSet::insert` returns false on duplicates).
    fn push_borrow(borrow: &mut Borrow) -> bool {
        borrow.writes.insert(TypeDef::of::<T>())
    }
}
impl<T: Component> PushBorrow for Read<T> {
    /// Multiple reads are always allowed and therefore we can always return true.
    fn push_borrow(borrow: &mut Borrow) -> bool {
        borrow.reads.insert(TypeDef::of::<T>());
        true
    }
}
impl<T: Component> PushBorrow for &'_ T {
    /// Multiple reads are always allowed and therefore we can always return true.
    fn push_borrow(borrow: &mut Borrow) -> bool {
        borrow.reads.insert(TypeDef::of::<T>());
        true
    }
}
/// Recursively expands a macro `$m` with a shrinking list of type parameters:
/// `expand!(m, A, B, C)` invokes `m!{A, B, C}`, `m!{B, C}` and `m!{C}`, so one
/// call generates an impl for every tuple arity.
#[macro_export]
macro_rules! expand {
    ($m: ident, $ty: ident) => {
        $m!{$ty}
    };
    ($m: ident, $ty: ident, $($tt: ident),*) => {
        $m!{$ty, $($tt),*}
        expand!{$m, $($tt),*}
    };
}
// Implements `RegisterBorrow` for tuples of fetches; every element contributes
// its component type to the read or write set of the resulting `Borrow`.
macro_rules! impl_register_borrow{
    ($($ty: ident),*) => {
        impl<$($ty,)*> RegisterBorrow for ($($ty,)*)
        where
            $(
                $ty: PushBorrow,
            )*
        {
            fn register_borrow() -> Borrow {
                let mut borrow = Borrow::new();
                // `&&` short-circuits, so a duplicate write aborts the chain early.
                let success =
                    $(
                        $ty::push_borrow(&mut borrow)
                    )&&*;
                // TODO: Output a more meaningful error
                assert!(success, "Detected multiple writes");
                borrow
            }
        }
    }
}
expand!(impl_register_borrow, A, B, C, D, E, F, G, H);
/// Rust's borrowing rules are not flexible enough for an *ECS*. Often it would be preferred to
/// nest multiple queries like [`World::matcher`], but this is not possible if both borrows would
/// be mutable. Instead we track active borrows at runtime. Multiple reads are allowed but
/// `read/write` and `write/write` are not.
pub struct RuntimeBorrow {
    /// Stack of active borrow sets, one per live [`BorrowIter`].
    borrows: Vec<Borrow>,
}
impl Default for RuntimeBorrow {
    fn default() -> Self {
        Self::new()
    }
}
impl RuntimeBorrow {
    /// Creates an empty borrow stack.
    pub fn new() -> Self {
        Self {
            borrows: Vec::new(),
        }
    }
    /// Creates and pushes a [`Borrow`] on to the stack.
    pub fn push_access<R: RegisterBorrow>(&mut self) {
        let borrow = R::register_borrow();
        self.borrows.push(borrow);
    }
    /// Removes the latest [`Borrow`]. This is usually called when a [`BorrowIter`] is dropped.
    pub fn pop_access(&mut self) {
        self.borrows.pop();
    }
    /// Validates the borrows. Multiple reads are allowed but Read/Write and Write/Write are not.
    /// On failure, returns every type that is borrowed in a conflicting way.
    pub fn validate(&self) -> Result<(), Vec<TypeDef>> {
        let overlapping_borrows: Vec<_> = self
            .borrows
            .iter()
            .enumerate()
            .flat_map(|(idx, borrow)| {
                // A single query that both writes and reads the same type
                // conflicts with itself.
                let reads = borrow.writes.intersection(&borrow.reads).cloned();
                // Compare against every borrow pushed after this one; earlier
                // pairs were already covered when `idx` was smaller.
                let rest: Vec<_> = self
                    .borrows
                    .iter()
                    .skip(idx + 1)
                    .flat_map(|next_access| {
                        let writes = borrow.writes.intersection(&next_access.writes).cloned();
                        let reads = borrow.writes.intersection(&next_access.reads).cloned();
                        // FIX: an earlier *read* overlapping a later *write* is
                        // just as conflicting as a write followed by a read,
                        // but was previously never checked.
                        let read_write = borrow.reads.intersection(&next_access.writes).cloned();
                        writes.chain(reads).chain(read_write)
                    })
                    .collect();
                reads.chain(rest)
            })
            .collect();
        if overlapping_borrows.is_empty() {
            Ok(())
        } else {
            Err(overlapping_borrows)
        }
    }
}
/// The set of component types that one active query reads and writes.
pub struct Borrow {
    reads: HashSet<TypeDef>,
    writes: HashSet<TypeDef>,
}
impl Default for Borrow {
    fn default() -> Self {
        Self::new()
    }
}
impl Borrow {
    /// Creates an empty borrow set.
    pub fn new() -> Self {
        Self {
            reads: HashSet::new(),
            writes: HashSet::new(),
        }
    }
}
/// Anything that is `Send + 'static` can be a component; see the blanket impl below.
pub trait Component: Send + 'static {}
impl<C: 'static + Send> Component for C {}
/// Implements [`Fetch`] and allows components to be borrowed immutable.
pub struct Read<C>(PhantomData<C>);
/// Implements [`Fetch`] and allows components to be borrowed mutable.
pub struct Write<C>(PhantomData<C>);
/// A helper trait that works in lockstep with [`Read`] and [`Write`] to borrow components either
/// mutable or immutable.
pub trait Fetch<'s> {
    type Component: Component;
    type Iter: Index<'s>;
    /// Returns the raw column of `Self::Component` inside `storage`.
    ///
    /// # Safety
    /// `storage` must contain `Self::Component`, and the caller must uphold
    /// the aliasing rules that [`RuntimeBorrow`] polices.
    unsafe fn fetch(storage: &'s Storage) -> Self::Iter;
}
// `Read<C>` and `&C` both fetch the column as a shared raw slice.
impl<'s, C: Component> Fetch<'s> for Read<C> {
    type Component = C;
    type Iter = Slice<'s, C>;
    unsafe fn fetch(storage: &'s Storage) -> Self::Iter {
        storage.components_raw::<C>()
    }
}
impl<'s, C: Component> Fetch<'s> for &'_ C {
    type Component = C;
    type Iter = Slice<'s, C>;
    unsafe fn fetch(storage: &'s Storage) -> Self::Iter {
        storage.components_raw::<C>()
    }
}
// `Write<C>` and `&mut C` both fetch the column as a mutable raw slice.
impl<'s, C: Component> Fetch<'s> for Write<C> {
    type Component = C;
    type Iter = SliceMut<'s, C>;
    unsafe fn fetch(storage: &'s Storage) -> Self::Iter {
        storage.components_mut_raw::<C>()
    }
}
impl<'s, C: Component> Fetch<'s> for &'_ mut C {
    type Component = C;
    type Iter = SliceMut<'s, C>;
    unsafe fn fetch(storage: &'s Storage) -> Self::Iter {
        storage.components_mut_raw::<C>()
    }
}
/// Allows to match over different [`Storage`]s.
pub trait Matcher {
    /// Returns true when `storage` contains every component this query needs.
    fn is_match(storage: &Storage) -> bool;
}
/// Allows to query multiple components from a [`Storage`].
pub trait Query<'s> {
    /// The borrow set this query registers with [`RuntimeBorrow`].
    type Borrow;
    type Iter: Iterator + 's;
    /// # Safety
    /// `storage` must satisfy [`Matcher::is_match`] for this query.
    unsafe fn query(storage: &'s Storage) -> Self::Iter;
}
/// Allows to query multiple components from a [`Storage`] in parallel.
#[cfg(feature = "threading")]
pub trait ParQuery<'s> {
    /// The borrow set this query registers with [`RuntimeBorrow`].
    type Borrow;
    type Iter: ParallelIterator + 's;
    /// # Safety
    /// `storage` must satisfy [`Matcher::is_match`] for this query.
    unsafe fn query(storage: &'s Storage) -> Self::Iter;
}
impl<'a, T> Index<'a> for Slice<'a, T>
where
    T: 'a,
{
    type Item = &'a T;
    /// # Safety
    /// `idx` must be in bounds and the element initialized.
    #[inline]
    unsafe fn get_unchecked(&self, idx: usize) -> Self::Item {
        &*self.start.add(idx)
    }
    #[inline]
    fn len(&self) -> usize {
        self.len
    }
    fn split_at(self, idx: usize) -> (Self, Self) {
        Slice::split_at(self, idx)
    }
}
impl<'a, T> Index<'a> for SliceMut<'a, T>
where
    T: 'a,
{
    type Item = &'a mut T;
    /// # Safety
    /// `idx` must be in bounds, the element initialized, and the reference
    /// must not alias another live reference to the same element.
    #[inline]
    unsafe fn get_unchecked(&self, idx: usize) -> Self::Item {
        &mut *self.start.add(idx)
    }
    #[inline]
    fn len(&self) -> usize {
        self.len
    }
    fn split_at(self, idx: usize) -> (Self, Self) {
        SliceMut::split_at_mut(self, idx)
    }
}
// Generates `Matcher`, `Query` and (with the `threading` feature) `ParQuery`
// impls for tuples of fetches; `expand!` below instantiates it for every arity.
macro_rules! impl_matcher_default {
    ($($ty: ident),*) => {
        impl<$($ty,)*> Matcher for ($($ty,)*)
        where
            $(
                $ty: for<'s> Fetch<'s>,
            )*
        {
            // A storage matches when it contains *every* fetched component.
            fn is_match(storage: &Storage) -> bool {
                $(
                    storage.contains::<$ty::Component>()
                )&&*
            }
        }
        impl<'s, $($ty,)*> Query<'s> for ($($ty,)*)
        where
            $(
                $ty: Fetch<'s> + 's,
            )*
        {
            type Borrow = ($($ty,)*);
            type Iter = ZipSlice<'s, ($($ty::Iter,)*)>;
            // Safety contract: see `Query::query` — the storage must match.
            unsafe fn query(storage: &'s Storage) -> Self::Iter {
                ZipSlice::new(($($ty::fetch(storage),)*))
            }
        }
        #[cfg(feature = "threading")]
        impl<'s, $($ty,)*> ParQuery<'s> for ($($ty,)*)
        where
            $(
                $ty: Fetch<'s> + Send + Sync + 's,
                <$ty as Fetch<'s>>::Iter: Send + Sync,
                <<$ty as Fetch<'s>>::Iter as Index<'s>>::Item: Send + Sync,
            )*
        {
            type Borrow = ($($ty,)*);
            type Iter = ZipSlice<'s, ($($ty::Iter,)*)>;
            // Safety contract: see `ParQuery::query` — the storage must match.
            unsafe fn query(storage: &'s Storage) -> Self::Iter {
                ZipSlice::new(($($ty::fetch(storage),)*))
            }
        }
    }
}
expand!(impl_matcher_default, A, B, C, D, E, F, G, H, I);
// Single-fetch queries (e.g. `world.matcher::<Read<Pos>>()`) work without
// tuple syntax via these blanket impls.
impl<'s, A> Query<'s> for A
where
    A: Fetch<'s> + 's,
{
    type Borrow = A;
    type Iter = ZipSlice<'s, (A::Iter,)>;
    /// # Safety
    /// `storage` must contain `A::Component` (see [`Query::query`]).
    unsafe fn query(storage: &'s Storage) -> Self::Iter {
        ZipSlice::new((A::fetch(storage),))
    }
}
impl<A> Matcher for A
where
    A: for<'s> Fetch<'s>,
{
    fn is_match(storage: &Storage) -> bool {
        storage.contains::<A::Component>()
    }
}
/// [`BuildStorage`] is used to create different [`Storage`]s at runtime. See also
/// [`AppendComponents`] and [`World::append_components`]
pub trait BuildStorage {
    /// Builds an empty storage whose metadata describes `Self`'s component set.
    fn build() -> Storage;
}
// Generates `BuildStorage` for component tuples: register each element's
// layout/destructor metadata, then construct the storage from it.
macro_rules! impl_build_storage {
    ($($ty: ident),*) => {
        impl<$($ty),*> BuildStorage for ($($ty,)*)
        where
            $(
                $ty:Component,
            )*
        {
            fn build() -> Storage {
                let mut meta = MetadataMap::new();
                $(
                    meta.insert::<$ty>();
                )*
                Storage::new(meta)
            }
        }
    }
}
expand!(impl_build_storage, A, B, C, D, E, F, G, H, I);
/// Bulk-inserts tuples of components into a matching [`Storage`].
pub trait AppendComponents: Sized {
    /// Returns true when `storage` can accept `Self`'s component set.
    fn is_match(storage: &Storage) -> bool;
    /// Appends every tuple from `items` and returns how many were inserted.
    fn append_components<I>(items: I, storage: &mut Storage) -> usize
    where
        I: IntoIterator<Item = Self>;
}
macro_rules! impl_append_components {
    ($($ty: ident),*) => {
        impl<$($ty),*> AppendComponents for ($($ty,)*)
        where
            $(
                $ty: Component,
            )*
        {
            // NOTE(review): this only checks that each component is *present*;
            // a storage with extra component types would also match, and
            // appending to it would leave those extra columns uninitialized
            // (see the [UNSOUND] note on `impl_extend!` in chunk.rs). An exact
            // set comparison would close that hole — confirm intent.
            fn is_match(storage: &Storage) -> bool{
                $(
                    storage.contains::<$ty>()
                )&&*
            }
            fn append_components<Iter>(items: Iter, storage: &mut Storage) -> usize
            where
                Iter: IntoIterator<Item = Self>,
            {
                // Count lazily as `extend` drives the iterator, since
                // `IntoIterator` gives no reliable length upfront.
                let mut count = 0;
                let iter = items.into_iter().map(|item| {
                    count += 1;
                    item
                });
                storage.extend(iter);
                count
            }
        }
    }
}
expand!(impl_append_components, A, B, C, D, E, F, G);
| rust | MIT | 5b32b724c4e4656160248164a251a62e29954c5f | 2026-01-04T20:17:30.804484Z | false |
MaikKlein/pyro | https://github.com/MaikKlein/pyro/blob/5b32b724c4e4656160248164a251a62e29954c5f/pyro/src/chunk.rs | pyro/src/chunk.rs | use crate::{expand, Component, Slice, SliceMut};
use std::{
alloc::{alloc, dealloc, realloc, Layout},
any::TypeId,
collections::{HashMap, HashSet},
ptr::NonNull,
};
/// Drops the value of type `T` stored at `ptr` in place (type-erased helper).
///
/// # Safety
/// `ptr` must point to a valid, initialized `T` that has not been dropped yet.
unsafe fn drop_generic<T>(ptr: *mut u8) {
    std::ptr::drop_in_place(ptr as *mut T);
}
/// Type-erased layout and destructor for a single component type.
#[derive(Copy, Clone)]
pub struct Metadata {
    /// Layout of one element of the component type.
    layout: Layout,
    /// Type-erased `drop_in_place` for the component type.
    drop_fn: unsafe fn(ptr: *mut u8),
}
impl Metadata {
    /// Captures the layout and destructor of component type `C`.
    pub fn of<C: Component>() -> Self {
        Self {
            layout: Layout::new::<C>(),
            drop_fn: drop_generic::<C>,
        }
    }
}
/// Maps each component's [`TypeId`] to its [`Metadata`].
#[derive(Default, Clone)]
pub struct MetadataMap(HashMap<TypeId, Metadata>);
impl MetadataMap {
    /// Creates an empty metadata map.
    pub fn new() -> Self {
        MetadataMap(HashMap::new())
    }
    /// Registers component type `C`, recording its layout and destructor.
    pub fn insert<C: Component>(&mut self) {
        let key = TypeId::of::<C>();
        self.0.insert(key, Metadata::of::<C>());
    }
}
/// A runtime SoA storage. It stands for **S**tructure **o**f **A**rrays.
///
/// ```rust,ignore
/// struct Test {
///     foo: Foo,
///     bar: Bar,
///     baz: Baz,
/// }
/// let test: Vec<Test> = ...; // Array of Structs (*AoS*) layout
///
/// struct Test {
///     foo: Vec<Foo>,
///     bar: Vec<Bar>,
///     baz: Vec<Baz>,
/// }
/// let test: Test = ...; // SoA layout
/// ```
/// Users do not interact with this storage directly, instead [`World`] will use this storage
/// behind the scenes. In the future there will be other storages such as *AoSoA*, which is a fancy
/// way of saying *SoA* but with arrays that are limited to a size of `8`.
pub struct Storage {
    /// Component types present in this storage (fast membership checks).
    types: HashSet<TypeId>,
    /// Layout/destructor metadata per component type.
    meta: MetadataMap,
    /// One type-erased heap allocation ("column") per component type.
    storages: HashMap<TypeId, NonNull<u8>>,
    // The maximum number of **elements** the storage can hold.
    capacity: usize,
    // The number of elements in the storage. `len <= capacity`
    len: usize,
}
// SAFETY: shared access only hands out raw pointers; aliasing is policed at a
// higher level by `RuntimeBorrow`, and all component types are `Send + 'static`.
// NOTE(review): only `Sync` is asserted, not `Send` — confirm whether moving a
// `Storage` across threads was deliberately ruled out.
unsafe impl Sync for Storage {}
impl Storage {
pub fn meta(&self) -> &MetadataMap {
&self.meta
}
pub fn contains<C: Component>(&self) -> bool {
self.types.contains(&TypeId::of::<C>())
}
pub fn new(meta: MetadataMap) -> Self {
Self {
types: meta.0.keys().copied().collect(),
meta,
storages: HashMap::new(),
capacity: 0,
len: 0,
}
}
pub fn components_raw<C: Component>(&self) -> Slice<C> {
let allocation = self.storages.get(&TypeId::of::<C>()).unwrap();
Slice::from_raw(allocation.as_ptr() as *const C, self.len())
}
pub fn components_mut_raw<C: Component>(&self) -> SliceMut<C> {
let allocation = self
.storages
.get(&TypeId::of::<C>())
.expect("Unknown type id");
SliceMut::from_raw(allocation.as_ptr() as *mut C, self.len())
}
fn grow(&mut self) {
if self.capacity == 0 {
self.capacity = 1;
} else {
self.capacity *= 2;
}
let capacity = self.capacity;
for (&id, &meta) in &self.meta.0 {
let allocation = self
.storages
.entry(id)
.or_insert_with(|| NonNull::new(unsafe { alloc(meta.layout) }).unwrap());
if self.capacity > 1 {
let size = meta.layout.size() * capacity;
unsafe {
let new_allocation =
NonNull::new(realloc(allocation.as_ptr(), meta.layout, size)).unwrap();
*allocation = new_allocation;
}
}
}
}
pub fn len(&self) -> usize {
self.len
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
// TODO: Opt
unsafe fn get_dyanmic_unchecked(&self, id: TypeId, idx: usize) -> *mut u8 {
let size = self.meta.0.get(&id).unwrap().layout.size();
let allocation = self.storages.get(&id).unwrap().as_ptr();
allocation.add(size * idx)
}
pub fn swap_remove(&mut self, idx: usize) -> usize {
assert!(!self.is_empty());
let last_idx = self.len - 1;
for (&id, meta) in &self.meta.0 {
// First we swap the elements
let ptr_to_delete = unsafe {
let last_ptr = self.get_dyanmic_unchecked(id, last_idx);
let cur_ptr = self.get_dyanmic_unchecked(id, idx);
std::ptr::swap(last_ptr, cur_ptr);
last_ptr
};
// Then we call drop on the last element
unsafe {
(meta.drop_fn)(ptr_to_delete);
}
}
self.len -= 1;
last_idx
}
}
impl Drop for Storage {
fn drop(&mut self) {
for (ty, &meta) in &self.meta.0 {
let allocation = self.storages.get(ty).unwrap();
let size = meta.layout.size();
// First we need to try to call drop on all the elements inside an allocation
for idx in 0..self.len {
unsafe {
let ptr = allocation.as_ptr().add(size * idx);
(meta.drop_fn)(ptr);
}
}
unsafe {
dealloc(allocation.as_ptr(), meta.layout);
}
}
}
}
// [UNSOUND]: Unsound if not all components are initialized. Eg Storage with (A, B ,C), but extend
// is called with (A, B). This leaves C to be uninitialized, which is unsound in this abstraction
macro_rules! impl_extend {
($($ty: ident),*) => {
impl<$($ty,)*> std::iter::Extend<($($ty,)*)> for Storage
where
$($ty: Component,)* {
#[allow(non_snake_case)]
fn extend<T>(&mut self, iter: T)
where
T: IntoIterator<Item = ($($ty,)*)> {
let mut idx = self.len();
for ($($ty,)*) in iter {
// TODO: Specialize on known length
if idx >= self.capacity {
self.grow();
}
//TODO Opt lookup
$(
unsafe {
let ptr = self.components_mut_raw::<$ty>().get_unchecked_mut(idx);
ptr.write($ty);
}
)*
idx +=1;
}
self.len = idx;
}
}
}
}
expand!(impl_extend, A, B, C, D, E, F, G, H);
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn swap_remove() {
use std::sync::atomic::{AtomicUsize, Ordering};
static mut DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
struct Int(u32);
impl Drop for Int {
fn drop(&mut self) {
unsafe {
DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
}
}
}
{
let mut meta = MetadataMap::new();
meta.insert::<u32>();
meta.insert::<Int>();
let mut storage = Storage::new(meta);
storage.extend((0..10).map(|i| (i as u32, Int(i as _))));
// We remove the first one which swaps it with the last one
storage.swap_remove(0);
unsafe {
let last: u32 = *storage.components_raw().get(8);
assert_eq!(last, 8);
let first: u32 = *storage.components_raw().get(0);
assert_eq!(first, 9);
assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 1)
}
}
unsafe { assert_eq!(DROP_COUNTER.load(Ordering::SeqCst), 10) }
}
#[test]
fn storage_drop() {
use std::sync::atomic::{AtomicUsize, Ordering};
static mut COUNTER: AtomicUsize = AtomicUsize::new(0);
{
let mut meta = MetadataMap::new();
meta.insert::<Int>();
meta.insert::<f32>();
let mut storage = Storage::new(meta);
struct Int(u32);
impl Drop for Int {
fn drop(&mut self) {
unsafe {
COUNTER.fetch_add(1, Ordering::SeqCst);
}
}
}
storage.extend((0..10).map(|i| (Int(i), i as f32)));
}
unsafe {
assert_eq!(COUNTER.load(Ordering::SeqCst), 10);
}
}
}
| rust | MIT | 5b32b724c4e4656160248164a251a62e29954c5f | 2026-01-04T20:17:30.804484Z | false |
MaikKlein/pyro | https://github.com/MaikKlein/pyro/blob/5b32b724c4e4656160248164a251a62e29954c5f/pyro/src/zip.rs | pyro/src/zip.rs | use crate::{expand, Index};
#[cfg(feature = "threading")]
use rayon::iter::{
plumbing::{bridge, Consumer, Producer, ProducerCallback, UnindexedConsumer},
IndexedParallelIterator, ParallelIterator,
};
use std::marker::PhantomData;
pub struct ZipSlice<'a, Tuple> {
tuple: Tuple,
idx: usize,
_m: std::marker::PhantomData<&'a ()>,
}
impl<Tuple> ZipSlice<'_, Tuple> {
pub fn new(tuple: Tuple) -> Self {
ZipSlice {
tuple,
idx: 0,
_m: PhantomData,
}
}
}
#[cfg(feature = "threading")]
struct ZipProducer<'a, Tuple>(ZipSlice<'a, Tuple>);
macro_rules! impl_zip_iterator {
($($ty: ident),*) => {
impl<'a, $($ty,)*> Iterator for ZipSlice<'a, ($($ty,)*)>
where
$(
$ty: Index<'a>,
)*
{
type Item = ($($ty::Item,)*);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
let idx = self.idx;
let len = self.tuple.0.len();
if idx >= len {
return None;
}
#[allow(non_snake_case)]
let ($($ty,)*) = &self.tuple;
let r = unsafe {
Some(
($($ty.get_unchecked(idx),)*)
)
};
self.idx += 1;
r
}
}
impl<'a, $($ty,)*> ExactSizeIterator for ZipSlice<'a, ($($ty,)*)>
where
$(
$ty: Index<'a>,
)*
{
fn len(&self) -> usize {
self.tuple.0.len()
}
}
impl<'a, $($ty,)*> DoubleEndedIterator for ZipSlice<'a, ($($ty,)*)>
where
$(
$ty: Index<'a>,
)*
{
fn next_back(&mut self) -> Option<Self::Item> {
unimplemented!()
}
}
#[cfg(feature = "threading")]
impl<'a, $($ty,)*> Producer for ZipProducer<'a, ($($ty,)*)>
where
$(
$ty: Index<'a> + Send + Sync,
)*
{
type Item = <Self::IntoIter as Iterator>::Item;
type IntoIter = ZipSlice<'a, ($($ty,)*)>;
fn into_iter(self) -> Self::IntoIter {
self.0
}
fn split_at(self, index: usize) -> (Self, Self) {
#[allow(non_snake_case)]
let ($($ty,)*) = self.0.tuple;
$(
#[allow(non_snake_case)]
let $ty = $ty.split_at(index);
)*
let left = ZipProducer(ZipSlice::new(($($ty.0,)*)));
let right = ZipProducer(ZipSlice::new(($($ty.1,)*)));
(left, right)
}
}
#[cfg(feature = "threading")]
impl<'a, $($ty,)*> ParallelIterator for ZipSlice<'a, ($($ty,)*)>
where
$(
$ty: Index<'a> + Sync + Send,
$ty::Item: Sync + Send,
)*
<Self as Iterator>::Item: Sync + Send,
{
type Item = <Self as Iterator>::Item;
fn drive_unindexed<C1>(self, consumer: C1) -> C1::Result
where
C1: UnindexedConsumer<Self::Item>,
{
bridge(self, consumer)
}
fn opt_len(&self) -> Option<usize> {
Some(ExactSizeIterator::len(self))
}
}
#[cfg(feature = "threading")]
impl<'a, $($ty,)*> IndexedParallelIterator for ZipSlice<'a, ($($ty,)*)>
where
$(
$ty: Index<'a> + Sync + Send,
$ty::Item: Sync + Send,
)*
{
fn drive<C1>(self, consumer: C1) -> C1::Result
where
C1: Consumer<Self::Item>,
{
bridge(self, consumer)
}
fn len(&self) -> usize {
ExactSizeIterator::len(self)
}
fn with_producer<CB>(self, callback: CB) -> CB::Output
where
CB: ProducerCallback<Self::Item>,
{
callback.callback(ZipProducer(self))
}
}
};
}
expand!(impl_zip_iterator, A, B, C, D, E, F, G, H, I);
| rust | MIT | 5b32b724c4e4656160248164a251a62e29954c5f | 2026-01-04T20:17:30.804484Z | false |
MaikKlein/pyro | https://github.com/MaikKlein/pyro/blob/5b32b724c4e4656160248164a251a62e29954c5f/pyro/src/bin/example.rs | pyro/src/bin/example.rs | extern crate pyro;
use pyro::{Entity, Read, World, Write};
#[derive(Debug)]
struct Position(f32);
struct Velocity;
fn main() {
// By default creates a world backed by a [`SoaStorage`]
let mut world: World = World::new();
let add_pos_vel = (0..10).map(|i| (Position(i as f32), Velocity {}));
// ^^^^^^^^^^^^^^^^^^^^^^^^
// A tuple of (Position, Velocity),
// Note: Order does *not* matter
// Appends 99 entities with a Position and Velocity component.
world.append_components(add_pos_vel);
// Appends a single entity
world.append_components(Some((Position(42.0), Velocity {})));
// // Requests a mutable borrow to Position, and an immutable borrow to Velocity.
// // Common queries can be reused with a typedef like this but it is not necessary.
type PosVelQuery = (Write<Position>, Read<Velocity>);
// Retrieves all entities that have a Position and Velocity component as an iterator.
world
.matcher::<(&mut Position, &Velocity)>()
.for_each(|(pos, _vel)| {
println!("{:?}", pos)
});
// The same query as above but also retrieves the entities and collects the entities into a
// `Vec<Entity>`.
let entities: Vec<Entity> = world
.matcher_with_entities::<PosVelQuery>()
.map(|(entity, _)| entity)
.collect();
// Removes all the entities
world.remove_entities(entities);
let count = world.matcher::<PosVelQuery>().count();
assert_eq!(count, 0);
}
| rust | MIT | 5b32b724c4e4656160248164a251a62e29954c5f | 2026-01-04T20:17:30.804484Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/build.rs | src-tauri/build.rs | fn main() {
tauri_build::build()
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/main.rs | src-tauri/src/main.rs | #![cfg_attr(
all(not(debug_assertions), target_os = "windows"),
windows_subsystem = "windows"
)]
mod data_harvester;
mod utils;
use std::sync::Mutex;
use crate::utils::error;
use data_harvester::{Data, DataCollector};
use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu};
#[cfg(target_family = "windows")]
pub type Pid = usize;
#[cfg(target_family = "unix")]
pub type Pid = libc::pid_t;
#[tauri::command]
fn collect_data(data_state: tauri::State<Mutex<DataCollector>>) -> Data {
futures::executor::block_on(data_state.lock().unwrap().update_data());
data_state.lock().unwrap().data.clone()
}
fn main() {
let mut data_state = DataCollector::new();
data_state.init();
let preferences = CustomMenuItem::new("preferences", "Open Preferences").accelerator("cmd+,");
let submenu = Submenu::new(
"Menu",
Menu::new()
.add_item(preferences)
.add_native_item(MenuItem::SelectAll)
.add_native_item(MenuItem::Copy)
.add_native_item(MenuItem::Quit)
.add_native_item(MenuItem::About(
"ToeRings".to_string(),
AboutMetadata::new()
.version(env!("CARGO_PKG_VERSION"))
.authors(
env!("CARGO_PKG_AUTHORS")
.split(":")
.map(String::from)
.collect(),
)
.license("MIT"),
)),
);
let menu = Menu::new().add_submenu(submenu);
tauri::Builder::default()
.manage(Mutex::new(data_state))
.menu(menu)
.on_menu_event(|event| match event.menu_item_id() {
"preferences" => event.window().emit("openPreferences", ()).unwrap(),
_ => {}
})
.invoke_handler(tauri::generate_handler![collect_data])
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester.rs | src-tauri/src/data_harvester.rs | //! This is the main file to house data collection functions.
use std::{
net::IpAddr,
time::{Duration, Instant},
};
use futures::join;
#[cfg(target_os = "linux")]
use fxhash::FxHashMap;
use serde::Serialize;
#[cfg(feature = "battery")]
use starship_battery::{Battery, Manager};
use sysinfo::{System, SystemExt};
#[cfg(feature = "nvidia")]
pub mod nvidia;
#[cfg(feature = "battery")]
pub mod batteries;
pub mod cpu;
pub mod disks;
pub mod memory;
pub mod network;
pub mod processes;
pub mod temperature;
#[derive(Clone, Debug, Serialize)]
pub struct Data {
#[serde(with = "serde_millis")]
pub last_collection_time: Instant,
pub cpu: Option<cpu::CpuHarvest>,
pub load_avg: Option<cpu::LoadAvgHarvest>,
pub memory: Option<memory::MemHarvest>,
pub swap: Option<memory::MemHarvest>,
pub temperature_sensors: Option<Vec<temperature::TempHarvest>>,
pub network: Option<network::NetworkHarvest>,
pub list_of_processes: Option<Vec<processes::ProcessHarvest>>,
pub disks: Option<Vec<disks::DiskHarvest>>,
pub io: Option<disks::IoHarvest>,
#[serde(with = "humantime_serde")]
#[serde(default)]
pub uptime: Duration,
pub hostname: Option<String>,
pub kernel_name: Option<String>,
pub kernel_version: Option<String>,
pub os_version: Option<String>,
pub local_ip: Option<IpAddr>,
#[cfg(feature = "battery")]
pub list_of_batteries: Option<Vec<batteries::BatteryHarvest>>,
#[cfg(feature = "zfs")]
pub arc: Option<memory::MemHarvest>,
#[cfg(feature = "gpu")]
pub gpu: Option<Vec<(String, memory::MemHarvest)>>,
}
impl Default for Data {
fn default() -> Self {
Data {
last_collection_time: Instant::now(),
cpu: None,
load_avg: None,
memory: None,
swap: None,
temperature_sensors: None,
list_of_processes: None,
disks: None,
io: None,
network: None,
uptime: Duration::ZERO,
hostname: None,
kernel_name: None,
kernel_version: None,
os_version: None,
local_ip: None,
#[cfg(feature = "battery")]
list_of_batteries: None,
#[cfg(feature = "zfs")]
arc: None,
#[cfg(feature = "gpu")]
gpu: None,
}
}
}
impl Data {
pub fn cleanup(&mut self) {
self.io = None;
self.temperature_sensors = None;
self.list_of_processes = None;
self.disks = None;
self.memory = None;
self.swap = None;
self.cpu = None;
self.load_avg = None;
if let Some(network) = &mut self.network {
network.first_run_cleanup();
}
#[cfg(feature = "zfs")]
{
self.arc = None;
}
#[cfg(feature = "gpu")]
{
self.gpu = None;
}
}
}
#[derive(Debug)]
pub struct DataCollector {
pub data: Data,
sys: System,
previous_cpu_times: Vec<(cpu::PastCpuWork, cpu::PastCpuTotal)>,
previous_average_cpu_time: Option<(cpu::PastCpuWork, cpu::PastCpuTotal)>,
#[cfg(target_os = "linux")]
pid_mapping: FxHashMap<crate::Pid, processes::PrevProcDetails>,
#[cfg(target_os = "linux")]
prev_idle: f64,
#[cfg(target_os = "linux")]
prev_non_idle: f64,
mem_total_kb: u64,
use_current_cpu_total: bool,
unnormalized_cpu: bool,
last_collection_time: Instant,
total_rx: u64,
total_tx: u64,
show_average_cpu: bool,
#[cfg(feature = "battery")]
battery_manager: Option<Manager>,
#[cfg(feature = "battery")]
battery_list: Option<Vec<Battery>>,
#[cfg(target_family = "unix")]
user_table: self::processes::UserTable,
}
impl DataCollector {
pub fn new() -> Self {
DataCollector {
data: Data::default(),
sys: System::new_with_specifics(sysinfo::RefreshKind::new()),
previous_cpu_times: vec![],
previous_average_cpu_time: None,
#[cfg(target_os = "linux")]
pid_mapping: FxHashMap::default(),
#[cfg(target_os = "linux")]
prev_idle: 0_f64,
#[cfg(target_os = "linux")]
prev_non_idle: 0_f64,
mem_total_kb: 0,
use_current_cpu_total: false,
unnormalized_cpu: false,
last_collection_time: Instant::now(),
total_rx: 0,
total_tx: 0,
show_average_cpu: false,
#[cfg(feature = "battery")]
battery_manager: None,
#[cfg(feature = "battery")]
battery_list: None,
#[cfg(target_family = "unix")]
user_table: Default::default(),
}
}
pub fn init(&mut self) {
#[cfg(target_os = "linux")]
{
futures::executor::block_on(self.initialize_memory_size());
}
#[cfg(not(target_os = "linux"))]
{
self.sys.refresh_memory();
self.mem_total_kb = self.sys.total_memory();
// TODO: Would be good to get this and network list running on a timer instead...?
// Refresh components list once...
self.sys.refresh_components_list();
// Refresh network list once...
if cfg!(target_os = "windows") {
self.sys.refresh_networks_list();
}
self.sys.refresh_cpu();
// Refresh disk list once...
if cfg!(target_os = "freebsd") {
self.sys.refresh_disks_list();
}
}
#[cfg(feature = "battery")]
{
if let Ok(battery_manager) = Manager::new() {
if let Ok(batteries) = battery_manager.batteries() {
let battery_list: Vec<Battery> = batteries.filter_map(Result::ok).collect();
if !battery_list.is_empty() {
self.battery_list = Some(battery_list);
self.battery_manager = Some(battery_manager);
}
}
}
}
futures::executor::block_on(self.update_data());
std::thread::sleep(std::time::Duration::from_millis(250));
self.data.cleanup();
}
#[cfg(target_os = "linux")]
async fn initialize_memory_size(&mut self) {
self.mem_total_kb = if let Ok(mem) = heim::memory::memory().await {
mem.total().get::<heim::units::information::kilobyte>()
} else {
1
};
}
pub async fn update_data(&mut self) {
#[cfg(not(target_os = "linux"))]
{
self.sys.refresh_cpu();
self.sys.refresh_processes();
self.sys.refresh_components();
#[cfg(target_os = "windows")]
{
self.sys.refresh_networks();
}
#[cfg(target_os = "freebsd")]
{
self.sys.refresh_disks();
self.sys.refresh_memory();
}
}
let current_instant = std::time::Instant::now();
// CPU
#[cfg(not(target_os = "freebsd"))]
{
if let Ok(cpu_data) = cpu::get_cpu_data_list(
self.show_average_cpu,
&mut self.previous_cpu_times,
&mut self.previous_average_cpu_time,
)
.await
{
self.data.cpu = Some(cpu_data);
}
}
#[cfg(target_os = "freebsd")]
{
if let Ok(cpu_data) = cpu::get_cpu_data_list(
&self.sys,
self.show_average_cpu,
&mut self.previous_cpu_times,
&mut self.previous_average_cpu_time,
)
.await
{
self.data.cpu = Some(cpu_data);
}
}
#[cfg(target_family = "unix")]
{
// Load Average
if let Ok(load_avg_data) = cpu::get_load_avg().await {
self.data.load_avg = Some(load_avg_data);
}
}
// Batteries
#[cfg(feature = "battery")]
{
if let Some(battery_manager) = &self.battery_manager {
if let Some(battery_list) = &mut self.battery_list {
self.data.list_of_batteries =
Some(batteries::refresh_batteries(battery_manager, battery_list));
}
}
}
if let Ok(mut process_list) = {
#[cfg(target_os = "linux")]
{
// Must do this here since we otherwise have to make `get_process_data` async.
use self::processes::CpuUsageStrategy;
let normalize_cpu = if self.unnormalized_cpu {
heim::cpu::logical_count()
.await
.map(|v| CpuUsageStrategy::NonNormalized(v as f64))
.unwrap_or(CpuUsageStrategy::Normalized)
} else {
CpuUsageStrategy::Normalized
};
processes::get_process_data(
&mut self.prev_idle,
&mut self.prev_non_idle,
&mut self.pid_mapping,
self.use_current_cpu_total,
normalize_cpu,
current_instant
.duration_since(self.last_collection_time)
.as_secs(),
self.mem_total_kb,
&mut self.user_table,
)
}
#[cfg(not(target_os = "linux"))]
{
#[cfg(target_family = "unix")]
{
processes::get_process_data(
&self.sys,
self.use_current_cpu_total,
self.unnormalized_cpu,
self.mem_total_kb,
&mut self.user_table,
)
}
#[cfg(not(target_family = "unix"))]
{
processes::get_process_data(
&self.sys,
self.use_current_cpu_total,
self.unnormalized_cpu,
self.mem_total_kb,
)
}
}
} {
// NB: To avoid duplicate sorts on rerenders/events, we sort the processes by PID here.
// We also want to avoid re-sorting *again* later on if we're sorting by PID, since we already
// did it here!
process_list.sort_unstable_by_key(|p| p.pid);
self.data.list_of_processes = Some(process_list);
}
#[cfg(not(target_os = "linux"))]
{
if let Ok(data) = temperature::get_temperature_data(&self.sys) {
self.data.temperature_sensors = data;
}
}
#[cfg(target_os = "linux")]
{
if let Ok(data) = temperature::get_temperature_data() {
self.data.temperature_sensors = data;
}
}
let network_data_fut = {
#[cfg(any(target_os = "windows", target_os = "freebsd"))]
{
network::get_network_data(
&self.sys,
self.last_collection_time,
&mut self.total_rx,
&mut self.total_tx,
current_instant,
)
}
#[cfg(not(any(target_os = "windows", target_os = "freebsd")))]
{
network::get_network_data(
self.last_collection_time,
&mut self.total_rx,
&mut self.total_tx,
current_instant,
)
}
};
let mem_data_fut = {
#[cfg(not(target_os = "freebsd"))]
{
memory::get_mem_data()
}
#[cfg(target_os = "freebsd")]
{
memory::get_mem_data(&self.sys)
}
};
let disk_data_fut = disks::get_disk_usage();
let disk_io_usage_fut = disks::get_io_usage();
let (net_data, mem_res, disk_res, io_res) = join!(
network_data_fut,
mem_data_fut,
disk_data_fut,
disk_io_usage_fut,
);
if let Ok(net_data) = net_data {
if let Some(net_data) = &net_data {
self.total_rx = net_data.total_rx;
self.total_tx = net_data.total_tx;
}
self.data.network = net_data;
}
if let Ok(memory) = mem_res.ram {
self.data.memory = memory;
}
if let Ok(swap) = mem_res.swap {
self.data.swap = swap;
}
#[cfg(feature = "zfs")]
if let Ok(arc) = mem_res.arc {
self.data.arc = arc;
}
#[cfg(feature = "gpu")]
if let Ok(gpu) = mem_res.gpus {
self.data.gpu = gpu;
}
if let Ok(disks) = disk_res {
self.data.disks = disks;
}
if let Ok(io) = io_res {
self.data.io = io;
}
self.data.uptime = Duration::from_secs(self.sys.uptime());
self.data.hostname = self.sys.host_name();
self.data.kernel_name = self.sys.name();
self.data.kernel_version = self.sys.kernel_version();
self.data.os_version = self.sys.long_os_version();
self.data.local_ip = local_ip_address::local_ip().ok();
// Update time
self.data.last_collection_time = current_instant;
self.last_collection_time = current_instant;
}
}
#[cfg(target_os = "freebsd")]
/// Deserialize [libxo](https://www.freebsd.org/cgi/man.cgi?query=libxo&apropos=0&sektion=0&manpath=FreeBSD+13.1-RELEASE+and+Ports&arch=default&format=html) JSON data
fn deserialize_xo<T>(key: &str, data: &[u8]) -> Result<T, std::io::Error>
where
T: serde::de::DeserializeOwned,
{
let mut value: serde_json::Value = serde_json::from_slice(data)?;
value
.as_object_mut()
.and_then(|map| map.remove(key))
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "key not found"))
.and_then(|val| serde_json::from_value(val).map_err(|err| err.into()))
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/utils/error.rs | src-tauri/src/utils/error.rs | use std::{borrow::Cow, result};
use thiserror::Error;
#[cfg(target_os = "linux")]
use procfs::ProcError;
/// A type alias for handling errors related to Bottom.
pub type Result<T> = result::Result<T, ToeError>;
/// An error that can occur while Bottom runs.
#[derive(Debug, Error, PartialEq, Eq)]
pub enum ToeError {
/// An error when there is an IO exception.
#[error("IO exception, {0}")]
InvalidIo(String),
/// An error when the heim library encounters a problem.
#[error("Error caused by Heim, {0}")]
InvalidHeim(String),
/// An error when the Crossterm library encounters a problem.
#[error("Error caused by Crossterm, {0}")]
CrosstermError(String),
/// An error to represent generic errors.
#[error("Error, {0}")]
GenericError(String),
/// An error to represent errors with fern.
#[error("Fern error, {0}")]
FernError(String),
/// An error to represent errors with the config.
#[error("Configuration file error, {0}")]
ConfigError(String),
/// An error to represent errors with converting between data types.
#[error("Conversion error, {0}")]
ConversionError(String),
/// An error to represent errors with querying.
#[error("Query error, {0}")]
QueryError(Cow<'static, str>),
/// An error that just signifies something minor went wrong; no message.
#[error("Minor error.")]
MinorError,
/// An error to represent errors with procfs
#[cfg(target_os = "linux")]
#[error("Procfs error, {0}")]
ProcfsError(String),
}
impl From<std::io::Error> for ToeError {
fn from(err: std::io::Error) -> Self {
ToeError::InvalidIo(err.to_string())
}
}
#[cfg(not(target_os = "freebsd"))]
impl From<heim::Error> for ToeError {
fn from(err: heim::Error) -> Self {
ToeError::InvalidHeim(err.to_string())
}
}
impl From<std::num::ParseIntError> for ToeError {
fn from(err: std::num::ParseIntError) -> Self {
ToeError::ConfigError(err.to_string())
}
}
impl From<std::string::String> for ToeError {
fn from(err: std::string::String) -> Self {
ToeError::GenericError(err)
}
}
#[cfg(feature = "fern")]
impl From<fern::InitError> for ToeError {
fn from(err: fern::InitError) -> Self {
ToeError::FernError(err.to_string())
}
}
impl From<std::str::Utf8Error> for ToeError {
fn from(err: std::str::Utf8Error) -> Self {
ToeError::ConversionError(err.to_string())
}
}
impl From<std::string::FromUtf8Error> for ToeError {
fn from(err: std::string::FromUtf8Error) -> Self {
ToeError::ConversionError(err.to_string())
}
}
#[cfg(target_os = "linux")]
impl From<ProcError> for ToeError {
fn from(err: ProcError) -> Self {
match err {
ProcError::PermissionDenied(p) => {
ToeError::ProcfsError(format!("Permission denied for {:?}", p))
}
ProcError::NotFound(p) => ToeError::ProcfsError(format!("{:?} not found", p)),
ProcError::Incomplete(p) => ToeError::ProcfsError(format!("{:?} incomplete", p)),
ProcError::Io(e, p) => ToeError::ProcfsError(format!("io error: {:?} for {:?}", e, p)),
ProcError::Other(s) => ToeError::ProcfsError(format!("Other procfs error: {}", s)),
ProcError::InternalError(e) => {
ToeError::ProcfsError(format!("procfs internal error: {:?}", e))
}
}
}
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/utils/mod.rs | src-tauri/src/utils/mod.rs | pub mod error;
pub mod logging;
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/utils/logging.rs | src-tauri/src/utils/logging.rs | #[cfg(feature = "fern")]
pub fn init_logger(
min_level: log::LevelFilter,
debug_file_name: &std::ffi::OsStr,
) -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| {
// Note we aren't using local time since it only works on single-threaded processes.
// If that ever does get patched in again, enable the "local-offset" feature.
let offset = time::OffsetDateTime::now_utc();
out.finish(format_args!(
"{}[{}][{}] {}",
offset
.format(&time::macros::format_description!(
// The weird "[[[" is because we need to escape a bracket ("[[") to show one "[".
// See https://time-rs.github.io/book/api/format-description.html
"[[[year]-[month]-[day]][[[hour]:[minute]:[second][subsecond digits:9]]"
))
.unwrap(),
record.target(),
record.level(),
message
))
})
.level(min_level)
.chain(fern::log_file(debug_file_name)?)
.apply()?;
Ok(())
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/network.rs | src-tauri/src/data_harvester/network.rs | //! Data collection for network usage/IO.
//!
//! For Linux and macOS, this is handled by Heim.
//! For Windows, this is handled by sysinfo.
use serde::Serialize;
cfg_if::cfg_if! {
if #[cfg(any(target_os = "linux", target_os = "macos"))] {
pub mod heim;
pub use self::heim::*;
} else if #[cfg(any(target_os = "freebsd", target_os = "windows"))] {
pub mod sysinfo;
pub use self::sysinfo::*;
}
}
#[derive(Default, Clone, Debug, Serialize)]
/// All units in bits.
pub struct NetworkHarvest {
pub rx: u64,
pub tx: u64,
pub total_rx: u64,
pub total_tx: u64,
}
impl NetworkHarvest {
pub fn first_run_cleanup(&mut self) {
self.rx = 0;
self.tx = 0;
}
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/memory.rs | src-tauri/src/data_harvester/memory.rs | //! Data collection for memory.
//!
//! For Linux, macOS, and Windows, this is handled by Heim. On FreeBSD it is handled by sysinfo.
cfg_if::cfg_if! {
if #[cfg(any(target_os = "freebsd", target_os = "linux", target_os = "macos", target_os = "windows"))] {
pub mod general;
pub use self::general::*;
}
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/disks.rs | src-tauri/src/data_harvester/disks.rs | //! Data collection for disks (IO, usage, space, etc.).
//!
//! For Linux, macOS, and Windows, this is handled by heim. For FreeBSD there is a custom
//! implementation.
use serde::Serialize;
cfg_if::cfg_if! {
if #[cfg(any(target_os = "linux", target_os = "macos", target_os = "windows"))] {
pub mod heim;
pub use self::heim::*;
} else if #[cfg(target_os = "freebsd")] {
pub mod freebsd;
pub use self::freebsd::*;
}
}
#[derive(Debug, Clone, Default, Serialize)]
pub struct DiskHarvest {
pub name: String,
pub mount_point: String,
pub free_space: Option<u64>,
pub used_space: Option<u64>,
pub total_space: Option<u64>,
}
#[derive(Clone, Debug, Serialize)]
pub struct IoData {
pub read_bytes: u64,
pub write_bytes: u64,
}
pub type IoHarvest = std::collections::HashMap<String, Option<IoData>>;
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/temperature.rs | src-tauri/src/data_harvester/temperature.rs | //! Data collection for temperature metrics.
//!
//! For Linux and macOS, this is handled by Heim.
//! For Windows, this is handled by sysinfo.
cfg_if::cfg_if! {
if #[cfg(target_os = "linux")] {
pub mod linux;
pub use self::linux::*;
} else if #[cfg(any(target_os = "freebsd", target_os = "macos", target_os = "windows"))] {
pub mod sysinfo;
pub use self::sysinfo::*;
}
}
#[cfg(feature = "nvidia")]
pub mod nvidia;
use serde::Serialize;
#[derive(Default, Debug, Clone, Serialize)]
pub struct TempHarvest {
pub name: String,
pub temperature: f32,
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/nvidia.rs | src-tauri/src/data_harvester/nvidia.rs | use nvml_wrapper::{error::NvmlError, Nvml};
use once_cell::sync::Lazy;
pub static NVML_DATA: Lazy<Result<Nvml, NvmlError>> = Lazy::new(Nvml::init);
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/processes.rs | src-tauri/src/data_harvester/processes.rs | //! Data collection for processes.
//!
//! For Linux, this is handled by a custom set of functions.
//! For Windows and macOS, this is handled by sysinfo.
cfg_if::cfg_if! {
if #[cfg(target_os = "linux")] {
pub mod linux;
pub use self::linux::*;
} else if #[cfg(target_os = "macos")] {
pub mod macos;
mod macos_freebsd;
pub use self::macos::*;
} else if #[cfg(target_os = "windows")] {
pub mod windows;
pub use self::windows::*;
} else if #[cfg(target_os = "freebsd")] {
pub mod freebsd;
mod macos_freebsd;
pub use self::freebsd::*;
}
}
cfg_if::cfg_if! {
if #[cfg(target_family = "unix")] {
pub mod unix;
pub use self::unix::*;
}
}
use serde::Serialize;
use crate::Pid;
#[derive(Debug, Clone, Default, Serialize)]
pub struct ProcessHarvest {
/// The pid of the process.
pub pid: Pid,
/// The parent PID of the process. Remember, parent_pid 0 is root.
pub parent_pid: Option<Pid>,
/// CPU usage as a percentage.
pub cpu_usage_percent: f64,
/// Memory usage as a percentage.
pub mem_usage_percent: f64,
/// Memory usage as bytes.
pub mem_usage_bytes: u64,
/// The name of the process.
pub name: String,
/// The exact command for the process.
pub command: String,
/// Bytes read per second.
pub read_bytes_per_sec: u64,
/// Bytes written per second.
pub write_bytes_per_sec: u64,
/// The total number of bytes read by the process.
pub total_read_bytes: u64,
/// The total number of bytes written by the process.
pub total_write_bytes: u64,
/// The current state of the process (e.g. zombie, asleep)
pub process_state: (String, char),
/// This is the *effective* user ID of the process. This is only used on Unix platforms.
#[cfg(target_family = "unix")]
pub uid: Option<libc::uid_t>,
/// This is the process' user. This is only used on Unix platforms.
#[cfg(target_family = "unix")]
pub user: std::borrow::Cow<'static, str>,
// TODO: Additional fields
// pub rss_kb: u64,
// pub virt_kb: u64,
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/cpu.rs | src-tauri/src/data_harvester/cpu.rs | //! Data collection for CPU usage and load average.
//!
//! For CPU usage, Linux, macOS, and Windows are handled by Heim, FreeBSD by sysinfo.
//!
//! For load average, macOS and Linux are supported through Heim, FreeBSD by sysinfo.
use serde::Serialize;
cfg_if::cfg_if! {
if #[cfg(any(target_os = "linux", target_os = "macos", target_os = "windows"))] {
pub mod heim;
pub use self::heim::*;
} else if #[cfg(target_os = "freebsd")] {
pub mod sysinfo;
pub use self::sysinfo::*;
}
}
pub type LoadAvgHarvest = [f32; 3];
#[derive(Debug, Clone, Copy, Serialize)]
pub enum CpuDataType {
Avg,
Cpu(usize),
}
#[derive(Debug, Clone, Serialize)]
pub struct CpuData {
pub data_type: CpuDataType,
pub cpu_usage: f64,
}
pub type CpuHarvest = Vec<CpuData>;
pub type PastCpuWork = f64;
pub type PastCpuTotal = f64;
pub type Point = (f64, f64);
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/batteries.rs | src-tauri/src/data_harvester/batteries.rs | //! Data collection for batteries.
//!
//! For Linux, macOS, Windows, FreeBSD, Dragonfly, and iOS, this is handled by the battery crate.
cfg_if::cfg_if! {
if #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux", target_os = "freebsd", target_os = "dragonfly", target_os = "ios"))] {
pub mod battery;
pub use self::battery::*;
}
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/batteries/battery.rs | src-tauri/src/data_harvester/batteries/battery.rs | //! Uses the battery crate from svartalf.
//! Covers battery usage for:
//! - Linux 2.6.39+
//! - MacOS 10.10+
//! - iOS
//! - Windows 7+
//! - FreeBSD
//! - DragonFlyBSD
//!
//! For more information, refer to the [starship_battery](https://github.com/starship/rust-battery) repo/docs.
use starship_battery::{
units::{power::watt, ratio::percent, time::second},
Battery, Manager,
};
#[derive(Debug, Clone, Serialize)]
pub struct BatteryHarvest {
pub charge_percent: f64,
pub secs_until_full: Option<i64>,
pub secs_until_empty: Option<i64>,
pub power_consumption_rate_watts: f64,
pub health_percent: f64,
}
pub fn refresh_batteries(manager: &Manager, batteries: &mut [Battery]) -> Vec<BatteryHarvest> {
batteries
.iter_mut()
.filter_map(|battery| {
if manager.refresh(battery).is_ok() {
Some(BatteryHarvest {
secs_until_full: {
let optional_time = battery.time_to_full();
optional_time.map(|time| f64::from(time.get::<second>()) as i64)
},
secs_until_empty: {
let optional_time = battery.time_to_empty();
optional_time.map(|time| f64::from(time.get::<second>()) as i64)
},
charge_percent: f64::from(battery.state_of_charge().get::<percent>()),
power_consumption_rate_watts: f64::from(battery.energy_rate().get::<watt>()),
health_percent: f64::from(battery.state_of_health().get::<percent>()),
})
} else {
None
}
})
.collect::<Vec<_>>()
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/cpu/heim.rs | src-tauri/src/data_harvester/cpu/heim.rs | //! CPU stats through heim.
//! Supports macOS, Linux, and Windows.
cfg_if::cfg_if! {
if #[cfg(target_os = "linux")] {
pub mod linux;
pub use linux::*;
} else if #[cfg(any(target_os = "macos", target_os = "windows"))] {
pub mod windows_macos;
pub use windows_macos::*;
}
}
cfg_if::cfg_if! {
if #[cfg(target_family = "unix")] {
pub mod unix;
pub use unix::*;
}
}
use std::collections::VecDeque;
use futures::StreamExt;
use crate::data_harvester::cpu::{
CpuData, CpuDataType, CpuHarvest, PastCpuTotal, PastCpuWork, Point,
};
pub async fn get_cpu_data_list(
show_average_cpu: bool,
previous_cpu_times: &mut Vec<(PastCpuWork, PastCpuTotal)>,
previous_average_cpu_time: &mut Option<(PastCpuWork, PastCpuTotal)>,
) -> crate::error::Result<CpuHarvest> {
fn calculate_cpu_usage_percentage(
(previous_working_time, previous_total_time): Point,
(current_working_time, current_total_time): Point,
) -> f64 {
((if current_working_time > previous_working_time {
current_working_time - previous_working_time
} else {
0.0
}) * 100.0)
/ (if current_total_time > previous_total_time {
current_total_time - previous_total_time
} else {
1.0
})
}
// Get all CPU times...
let cpu_times = heim::cpu::times().await?;
futures::pin_mut!(cpu_times);
let mut cpu_deque: VecDeque<CpuData> = if previous_cpu_times.is_empty() {
// Must initialize ourselves. Use a very quick timeout to calculate an initial.
futures_timer::Delay::new(std::time::Duration::from_millis(100)).await;
let second_cpu_times = heim::cpu::times().await?;
futures::pin_mut!(second_cpu_times);
let mut new_cpu_times: Vec<(PastCpuWork, PastCpuTotal)> = Vec::new();
let mut cpu_deque: VecDeque<CpuData> = VecDeque::new();
let mut collected_zip = cpu_times.zip(second_cpu_times).enumerate(); // Gotta move it here, can't on while line.
while let Some((itx, (past, present))) = collected_zip.next().await {
if let (Ok(past), Ok(present)) = (past, present) {
let present_times = convert_cpu_times(&present);
new_cpu_times.push(present_times);
cpu_deque.push_back(CpuData {
data_type: CpuDataType::Cpu(itx),
cpu_usage: calculate_cpu_usage_percentage(
convert_cpu_times(&past),
present_times,
),
});
} else {
new_cpu_times.push((0.0, 0.0));
cpu_deque.push_back(CpuData {
data_type: CpuDataType::Cpu(itx),
cpu_usage: 0.0,
});
}
}
*previous_cpu_times = new_cpu_times;
cpu_deque
} else {
let (new_cpu_times, cpu_deque): (Vec<(PastCpuWork, PastCpuTotal)>, VecDeque<CpuData>) =
cpu_times
.collect::<Vec<_>>()
.await
.iter()
.zip(&*previous_cpu_times)
.enumerate()
.map(|(itx, (current_cpu, (past_cpu_work, past_cpu_total)))| {
if let Ok(cpu_time) = current_cpu {
let present_times = convert_cpu_times(cpu_time);
(
present_times,
CpuData {
data_type: CpuDataType::Cpu(itx),
cpu_usage: calculate_cpu_usage_percentage(
(*past_cpu_work, *past_cpu_total),
present_times,
),
},
)
} else {
(
(*past_cpu_work, *past_cpu_total),
CpuData {
data_type: CpuDataType::Cpu(itx),
cpu_usage: 0.0,
},
)
}
})
.unzip();
*previous_cpu_times = new_cpu_times;
cpu_deque
};
// Get average CPU if needed... and slap it at the top
if show_average_cpu {
let cpu_time = heim::cpu::time().await?;
let (cpu_usage, new_average_cpu_time) = if let Some((past_cpu_work, past_cpu_total)) =
previous_average_cpu_time
{
let present_times = convert_cpu_times(&cpu_time);
(
calculate_cpu_usage_percentage((*past_cpu_work, *past_cpu_total), present_times),
present_times,
)
} else {
// Again, we need to do a quick timeout...
futures_timer::Delay::new(std::time::Duration::from_millis(100)).await;
let second_cpu_time = heim::cpu::time().await?;
let present_times = convert_cpu_times(&second_cpu_time);
(
calculate_cpu_usage_percentage(convert_cpu_times(&cpu_time), present_times),
present_times,
)
};
*previous_average_cpu_time = Some(new_average_cpu_time);
cpu_deque.push_front(CpuData {
data_type: CpuDataType::Avg,
cpu_usage,
})
}
// Ok(Vec::from(cpu_deque.drain(0..3).collect::<Vec<_>>())) // For artificially limiting the CPU results
Ok(Vec::from(cpu_deque))
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/cpu/sysinfo.rs | src-tauri/src/data_harvester/cpu/sysinfo.rs | //! CPU stats through sysinfo.
//! Supports FreeBSD.
use std::collections::VecDeque;
use sysinfo::{CpuExt, LoadAvg, System, SystemExt};
use super::{CpuData, CpuDataType, CpuHarvest, PastCpuTotal, PastCpuWork};
use crate::data_harvester::cpu::LoadAvgHarvest;
pub async fn get_cpu_data_list(
sys: &sysinfo::System,
show_average_cpu: bool,
_previous_cpu_times: &mut [(PastCpuWork, PastCpuTotal)],
_previous_average_cpu_time: &mut Option<(PastCpuWork, PastCpuTotal)>,
) -> crate::error::Result<CpuHarvest> {
let mut cpu_deque: VecDeque<_> = sys
.cpus()
.iter()
.enumerate()
.map(|(i, cpu)| CpuData {
data_type: CpuDataType::Cpu(i),
cpu_usage: cpu.cpu_usage() as f64,
})
.collect();
if show_average_cpu {
let cpu = sys.global_cpu_info();
cpu_deque.push_front(CpuData {
data_type: CpuDataType::Avg,
cpu_usage: cpu.cpu_usage() as f64,
})
}
Ok(Vec::from(cpu_deque))
}
pub async fn get_load_avg() -> crate::error::Result<LoadAvgHarvest> {
let sys = System::new();
let LoadAvg { one, five, fifteen } = sys.load_average();
Ok([one as f32, five as f32, fifteen as f32])
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/cpu/heim/unix.rs | src-tauri/src/data_harvester/cpu/heim/unix.rs | //! Unix-specific functions regarding CPU usage.
use crate::data_harvester::cpu::LoadAvgHarvest;
pub async fn get_load_avg() -> crate::error::Result<LoadAvgHarvest> {
let (one, five, fifteen) = heim::cpu::os::unix::loadavg().await?;
Ok([
one.get::<heim::units::ratio::ratio>(),
five.get::<heim::units::ratio::ratio>(),
fifteen.get::<heim::units::ratio::ratio>(),
])
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/cpu/heim/windows_macos.rs | src-tauri/src/data_harvester/cpu/heim/windows_macos.rs | //! Windows and macOS-specific functions regarding CPU usage.
use crate::data_harvester::cpu::Point;
pub fn convert_cpu_times(cpu_time: &heim::cpu::CpuTime) -> Point {
let working_time: f64 =
(cpu_time.user() + cpu_time.system()).get::<heim::units::time::second>();
(
working_time,
working_time + cpu_time.idle().get::<heim::units::time::second>(),
)
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
acarl005/toerings | https://github.com/acarl005/toerings/blob/24a55caf70cb19b0e7f5f88119012d8ae503b5d8/src-tauri/src/data_harvester/cpu/heim/linux.rs | src-tauri/src/data_harvester/cpu/heim/linux.rs | //! Linux-specific functions regarding CPU usage.
use heim::cpu::os::linux::CpuTimeExt;
use crate::data_harvester::cpu::Point;
pub fn convert_cpu_times(cpu_time: &heim::cpu::CpuTime) -> Point {
let working_time: f64 = (cpu_time.user()
+ cpu_time.nice()
+ cpu_time.system()
+ cpu_time.irq()
+ cpu_time.soft_irq()
+ cpu_time.steal())
.get::<heim::units::time::second>();
(
working_time,
working_time + (cpu_time.idle() + cpu_time.io_wait()).get::<heim::units::time::second>(),
)
}
| rust | MIT | 24a55caf70cb19b0e7f5f88119012d8ae503b5d8 | 2026-01-04T20:17:23.288089Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.