repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/dargo/docs.rs | src/dargo/docs.rs | use colored::Colorize;
use duckwind::EmitEnv;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use std::{
fs,
path::{Path, PathBuf},
sync::mpsc,
};
use crate::{
cli::go_cli::GoCliErrKind, dargo::cli::DocsGenerateArgs, lex, parse_src_file, tags::Tag,
typecheck,
};
/// Error categories produced while generating documentation.
#[derive(Debug)]
pub enum DocsErrKind {
    /// The source file's name is missing or is not valid UTF-8.
    CorruptedFileName,
    /// The provided path points at a directory (also reused for non-`.duck` files in `generate`).
    TargetPathIsDirectory,
    /// The source file does not exist.
    FileNotFound,
    /// The source file exists but could not be read.
    CannotReadFile,
    /// An error bubbled up from the Go CLI layer.
    GoCli(GoCliErrKind),
}
lazy_static! {
    /// Colored " docs " prefix prepended to every docs-related console message.
    static ref COMPILE_TAG: String = " docs ".on_bright_black().bright_white().to_string();
}
/// Result of a successful docs-generation run.
pub struct DocsOutput {
    /// Path to the JSON output. NOTE(review): currently a placeholder
    /// ("here") — `generate` never writes JSON to disk; confirm intent.
    pub json_output_path: PathBuf,
    /// Docs collected for free (global) functions.
    pub fn_docs: Vec<FunctionDoc>,
    /// Docs collected for struct definitions (including their methods).
    pub struct_docs: Vec<StructDoc>,
}
/// A single documented struct field: its name and rendered type.
#[derive(Debug, Serialize, Deserialize)]
pub struct DocsField {
    // pub comments: Vec<String>,
    /// Field identifier as written in the source.
    pub field_name: String,
    /// User-facing rendering of the field's type expression.
    pub type_annotation: String,
}
/// Documentation for an `extend` block: the target type plus its methods.
#[derive(Debug, Serialize, Deserialize)]
pub struct ExtensionsDoc {
    /// User-facing rendering of the extended type.
    pub target_type_annotation: String,
    /// Doc comments attached to the extension block itself.
    pub comments: Vec<String>,
    /// Docs for each commented method inside the extension.
    pub function_docs: Vec<FunctionDoc>,
}
/// Documentation for a struct definition: fields, doc comments and methods.
#[derive(Debug, Serialize, Deserialize)]
pub struct StructDoc {
    /// Struct identifier as written in the source.
    pub struct_name: String,
    /// One entry per struct field.
    pub fields: Vec<DocsField>,
    /// Doc comments attached to the struct itself.
    pub comments: Vec<String>,
    /// Docs for each commented method of the struct.
    pub function_docs: Vec<FunctionDoc>,
}
/// Documentation for a single function or method.
#[derive(Debug, Serialize, Deserialize)]
pub struct FunctionDoc {
    /// Function identifier as written in the source.
    pub function_name: String,
    /// User-facing rendering of the function's type signature.
    pub function_annotation: String,
    /// Doc comments attached to the function.
    pub comments: Vec<String>,
}
/// Generates HTML documentation for a single `.duck` source file.
///
/// Validates the target path, lexes/parses/typechecks the file, collects doc
/// comments for structs, extensions and global functions, and writes the
/// rendered page to `./docs_output.html`.
///
/// # Errors
/// Returns a human-readable message plus a [`DocsErrKind`] when the path is a
/// directory, has no `.duck` extension, has a corrupt filename, or cannot be
/// read.
///
/// # Panics
/// Panics if the HTML output file cannot be written (pre-existing behavior).
pub fn generate(generate_args: DocsGenerateArgs) -> Result<DocsOutput, (String, DocsErrKind)> {
    let src_file: PathBuf = generate_args.file;
    if src_file.is_dir() {
        let message = format!(
            "{}{} the path you provided is a directory. You need to provide a .duck file",
            *COMPILE_TAG,
            Tag::Err,
        );
        return Err((message, DocsErrKind::TargetPathIsDirectory));
    }
    // Bug fix: this used to build the error message with `ok_or_else` and then
    // immediately `.unwrap()` it, panicking on any file without an extension.
    // Return the error to the caller instead.
    let Some(extension) = src_file.extension() else {
        let message = format!(
            "{}{} couldn't extract file extension from provided source file",
            *COMPILE_TAG,
            Tag::Err,
        );
        return Err((message, DocsErrKind::CorruptedFileName));
    };
    if extension != "duck" {
        let message = format!(
            "{}{} the path you provided is not a valid duck source file. You need to provide a .duck file",
            *COMPILE_TAG,
            Tag::Err,
        );
        return Err((message, DocsErrKind::TargetPathIsDirectory));
    }
    // The name and contents are leaked because the lexer/parser want
    // `&'static str` inputs.
    let src_file_name: &'static str = src_file
        .file_name()
        .ok_or_else(|| {
            (
                format!(
                    "{}{} couldn't get the filename from given ",
                    *COMPILE_TAG,
                    Tag::Err
                ),
                DocsErrKind::CorruptedFileName,
            )
        })?
        .to_str()
        .ok_or_else(|| {
            (
                format!(
                    "{}{} the filename is an invalid utf-8 string",
                    *COMPILE_TAG,
                    Tag::Err
                ),
                DocsErrKind::CorruptedFileName,
            )
        })?
        .to_string()
        .leak();
    let src_file_file_contents: &'static str = fs::read_to_string(&src_file)
        .map_err(|err| {
            (
                format!(
                    "{}{} couldn't read file '{}'. msg='{}'",
                    *COMPILE_TAG,
                    Tag::Err,
                    src_file.to_string_lossy().bright_blue(),
                    err.to_string().bright_red()
                ),
                DocsErrKind::CannotReadFile,
            )
        })?
        .to_string()
        .leak();
    let tokens = lex(src_file_name, src_file_file_contents);
    let mut src_file_ast = parse_src_file(&src_file, src_file_name, src_file_file_contents, tokens);
    // The typechecker streams encountered tailwind class strings into this
    // worker thread; its stylesheet result is not consumed during docs
    // generation, but the channel must exist for `typecheck` to run.
    let (tailwind_worker_send, tailwind_worker_receive) = mpsc::channel::<String>();
    let (tailwind_result_send, _tailwind_result_receive) = mpsc::channel::<String>();
    let tailwind_prefix = None::<String>;
    std::thread::spawn(move || {
        let mut emit_env = EmitEnv::new_with_default_config();
        // Drain class strings until the sender side hangs up.
        while let Ok(s) = tailwind_worker_receive.recv() {
            emit_env.parse_full_string(tailwind_prefix.as_deref(), s.as_str());
        }
        let _ = tailwind_result_send.send(emit_env.to_css_stylesheet(true));
    });
    let mut fn_docs = vec![];
    let mut struct_docs = vec![];
    let mut extensions_docs = vec![];
    let type_env = typecheck(&mut src_file_ast, &tailwind_worker_send);
    // Collect struct docs: a struct is included when it has doc comments or
    // at least one commented method.
    type_env
        .struct_definitions
        .iter()
        .for_each(|struct_definition| {
            let mut fn_docs = vec![];
            struct_definition.methods.iter().for_each(|function_def| {
                if !function_def.comments.is_empty() {
                    fn_docs.push(FunctionDoc {
                        function_name: function_def.name.clone(),
                        function_annotation: function_def
                            .type_expr()
                            .0
                            .as_clean_user_faced_type_name(),
                        comments: function_def.comments.iter().map(|c| c.0.clone()).collect(),
                    });
                }
            });
            if !(fn_docs.is_empty() && struct_definition.doc_comments.is_empty()) {
                struct_docs.push(StructDoc {
                    function_docs: fn_docs,
                    struct_name: struct_definition.name.clone(),
                    comments: struct_definition
                        .doc_comments
                        .iter()
                        .map(|c| c.0.clone())
                        .collect(),
                    fields: struct_definition
                        .fields
                        .iter()
                        .map(|field| DocsField {
                            field_name: field.name.clone(),
                            type_annotation: field.type_expr.0.as_clean_user_faced_type_name(),
                        })
                        .collect(),
                });
            }
        });
    // Collect extension docs with the same inclusion rule.
    src_file_ast
        .extensions_defs
        .iter()
        .for_each(|extensions_def| {
            let mut fn_docs = vec![];
            extensions_def
                .function_definitions
                .iter()
                .for_each(|(function_def, _)| {
                    if !function_def.comments.is_empty() {
                        fn_docs.push(FunctionDoc {
                            function_name: function_def.name.clone(),
                            function_annotation: function_def
                                .type_expr()
                                .0
                                .as_clean_user_faced_type_name(),
                            comments: function_def.comments.iter().map(|c| c.0.clone()).collect(),
                        });
                    }
                });
            if !(fn_docs.is_empty() && extensions_def.doc_comments.is_empty()) {
                extensions_docs.push(ExtensionsDoc {
                    target_type_annotation: extensions_def
                        .target_type_expr
                        .0
                        .as_clean_user_faced_type_name(),
                    function_docs: fn_docs,
                    comments: extensions_def
                        .doc_comments
                        .iter()
                        .map(|c| c.0.clone())
                        .collect(),
                });
            }
        });
    // Collect global functions that carry doc comments.
    type_env
        .function_definitions
        .iter()
        .for_each(|function_def| {
            if !function_def.comments.is_empty() {
                fn_docs.push(FunctionDoc {
                    function_name: function_def.name.clone(),
                    function_annotation: function_def.type_expr().0.as_clean_user_faced_type_name(),
                    comments: function_def.comments.iter().map(|c| c.0.clone()).collect(),
                });
            }
        });
    println!(
        "{}{}{} Successfully generated docs",
        Tag::Dargo,
        *COMPILE_TAG,
        Tag::Check,
    );
    let html = layout_html(&fn_docs, &struct_docs, &extensions_docs);
    // TODO(review): write failures currently panic; consider a DocsErrKind
    // variant for output I/O errors.
    let file = Path::new("./docs_output.html");
    fs::write(file, html).expect("couldn't write docs");
    Ok(DocsOutput {
        // Placeholder until JSON output is actually written to disk.
        json_output_path: Path::new("here").to_path_buf(),
        fn_docs,
        struct_docs,
    })
}
/// Assembles the complete standalone HTML documentation page.
///
/// Renders the sidebar and the three content sections (structs, extensions,
/// global functions), runs duckwind over the generated markup to produce the
/// utility-class stylesheet, and inlines CSS plus a small search/sidebar
/// script into a single `<!doctype html>` document string.
fn layout_html(
    fn_docs: &[FunctionDoc],
    struct_docs: &[StructDoc],
    extensions_docs: &[ExtensionsDoc],
) -> String {
    let sidebar_html = render_sidebar(fn_docs, struct_docs, extensions_docs);
    let structs_html = struct_docs
        .iter()
        .map(render_struct)
        .collect::<Vec<_>>()
        .join("\n");
    let extensions_html = extensions_docs
        .iter()
        .map(render_extension)
        .collect::<Vec<_>>()
        .join("\n");
    let fns_html = fn_docs
        .iter()
        .map(|f| render_function(f, false))
        .collect::<Vec<_>>()
        .join("\n");
    // Hidden placeholder shown by the client-side search when no item matches.
    let empty_state_html = r#"
<div id="no-results" class="hidden text-center py-12 text-[#a89984]">
<p class="text-xl">No results found matching your search.</p>
</div>
"#;
    let body_content = format!(
        r#"
<div class="flex flex-col md:flex-row h-screen w-full bg-[#282828] overflow-hidden font-sans text-[#ebdbb2]">
<div class="md:hidden flex items-center justify-between p-4 bg-[#1d2021] border-b border-[#3c3836] shrink-0 z-40">
<h1 class="text-lg font-bold text-[#fabd2f] tracking-wide">Duck<span class="text-[#ebdbb2]">Docs</span></h1>
<button onclick="toggleSidebar()" class="text-[#ebdbb2] focus:outline-none p-2 rounded hover:bg-[#32302f]">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 6h16M4 12h16M4 18h16"></path>
</svg>
</button>
</div>
<div id="sidebar-overlay" onclick="closeSidebar()" class="fixed inset-0 bg-[#1d2021] bg-opacity-90 z-40 hidden transition-opacity opacity-0 md:hidden glass-blur"></div>
<aside id="sidebar" class="fixed inset-y-0 left-0 z-50 w-72 h-full bg-[#1d2021] border-r border-[#3c3836] flex flex-col transform -translate-x-full transition-transform duration-300 ease-in-out md:relative md:translate-x-0 md:inset-auto shadow-2xl md:shadow-none shrink-0">
<div class="p-5 border-b border-[#3c3836] flex justify-between items-center bg-[#1d2021] shrink-0">
<div class="w-full">
<div class="flex justify-between items-center mb-3">
<h1 class="text-xl font-bold text-[#fabd2f] tracking-wide hidden md:block">Duck<span class="text-[#ebdbb2]">Docs</span></h1>
<h1 class="text-xl font-bold text-[#fabd2f] tracking-wide md:hidden">Menu</h1>
<button onclick="closeSidebar()" class="md:hidden text-[#a89984] hover:text-[#fabd2f]">
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"></path>
</svg>
</button>
</div>
<input
type="text"
id="search-input"
placeholder="Search docs..."
class="w-full px-3 py-2 bg-[#32302f] border border-[#504945] rounded text-[#ebdbb2] placeholder-[#a89984] focus:outline-none focus:border-[#d79921] focus:ring-1 focus:ring-[#d79921] transition-all text-sm"
onkeyup="filterDocs()"
/>
</div>
</div>
<nav class="flex-1 overflow-y-auto p-4 space-y-8 scrollbar-thin scrollbar-thumb-[#504945] scrollbar-track-transparent" id="sidebar-nav">
{sidebar_html}
</nav>
</aside>
<main class="flex-1 h-full overflow-y-auto bg-[#282828] scroll-smooth scrollbar-thin scrollbar-thumb-[#504945] relative w-full">
<div class="max-w-5xl mx-auto p-6 md:p-10 space-y-12 md:space-y-16 pb-24" id="main-content">
<section>
<h2 class="text-2xl md:text-3xl font-bold text-[#fabd2f] mb-6 md:mb-8 border-b border-[#3c3836] pb-3">Structs</h2>
<div class="space-y-12 md:space-y-16">
{structs_html}
</div>
</section>
<section>
<h2 class="text-2xl md:text-3xl font-bold text-[#8ec07c] mb-6 md:mb-8 border-b border-[#3c3836] pb-3">Extensions</h2>
<div class="space-y-12 md:space-y-16">
{extensions_html}
</div>
</section>
<section>
<h2 class="text-2xl md:text-3xl font-bold text-[#b8bb26] mb-6 md:mb-8 border-b border-[#3c3836] pb-3">Global Functions</h2>
<div class="space-y-8 md:space-y-10">
{fns_html}
</div>
</section>
{empty_state_html}
</div>
<footer class="p-10 text-center text-[#a89984] text-sm border-t border-[#3c3836] mt-20">
Generated by Dargo
</footer>
</main>
</div>
"#
    );
    // Let duckwind scan the finished markup so the emitted stylesheet covers
    // every utility class actually used in the page.
    let mut duckwind_emit_env = duckwind::EmitEnv::new_with_default_config();
    duckwind_emit_env.parse_full_string(None, &body_content);
    let output_css = duckwind_emit_env.to_css_stylesheet(true);
    // Client-side behavior: sidebar search filtering plus mobile open/close.
    let script = r#"
<script>
function filterDocs() {
const input = document.getElementById('search-input');
const filter = input.value.toLowerCase();
const sidebarLinks = document.querySelectorAll('#sidebar-nav a');
sidebarLinks.forEach(link => {
const text = link.innerText.toLowerCase();
link.parentElement.style.display = text.includes(filter) ? "" : "none";
});
const contentItems = document.querySelectorAll('.doc-item');
let hasVisibleItems = false;
contentItems.forEach(item => {
const name = item.getAttribute('data-name').toLowerCase();
if (name.includes(filter)) {
item.style.display = "";
hasVisibleItems = true;
} else {
item.style.display = "none";
}
});
document.getElementById('no-results').style.display = hasVisibleItems ? 'none' : 'block';
}
const sidebar = document.getElementById('sidebar');
const overlay = document.getElementById('sidebar-overlay');
function toggleSidebar() {
const isClosed = sidebar.classList.contains('-translate-x-full');
if (isClosed) {
openSidebar();
} else {
closeSidebar();
}
}
function openSidebar() {
sidebar.classList.remove('-translate-x-full');
overlay.classList.remove('hidden');
setTimeout(() => {
overlay.classList.remove('opacity-0');
}, 10);
}
function closeSidebar() {
sidebar.classList.add('-translate-x-full');
overlay.classList.add('opacity-0');
setTimeout(() => {
overlay.classList.add('hidden');
}, 300);
}
document.querySelectorAll('#sidebar-nav a').forEach(link => {
link.addEventListener('click', () => {
if (window.innerWidth < 768) {
closeSidebar();
}
});
});
</script>
"#;
    // Outer shell: doubled braces escape literal CSS braces inside format!.
    format!(
        "<!doctype html>
<html lang='en'>
<head>
<meta charset='UTF-8'>
<meta name='viewport' content='width=device-width, initial-scale=1.0'>
<title>Duck Documentation</title>
<style>
body {{ margin: 0; background-color: #282828; color: #ebdbb2; }}
::-webkit-scrollbar {{ width: 8px; }}
::-webkit-scrollbar-track {{ background: #1d2021; }}
::-webkit-scrollbar-thumb {{ background: #504945; border-radius: 4px; }}
::-webkit-scrollbar-thumb:hover {{ background: #665c54; }}
{output_css}
</style>
</head>
<body>
{body_content}
{script}
</body>
</html>"
    )
}
/// Renders the three sidebar link lists (structs, extensions, functions).
///
/// Anchor targets mirror the ids emitted by `render_struct`,
/// `render_extension` and `render_function`.
///
/// NOTE(review): names/type annotations are interpolated into HTML without
/// escaping — fine for trusted source files, but worth confirming if docs
/// ever run on untrusted input.
fn render_sidebar(
    fn_docs: &[FunctionDoc],
    struct_docs: &[StructDoc],
    extensions_docs: &[ExtensionsDoc],
) -> String {
    let struct_links = struct_docs.iter().map(|s| {
        format!(
            "<li><a href='#struct-{}' class='block text-[#a89984] hover:text-[#fabd2f] hover:bg-[#32302f] px-2 py-1.5 rounded transition-colors duration-200'>{}</a></li>",
            s.struct_name, s.struct_name
        )
    }).collect::<Vec<_>>().join("");
    let extension_links = extensions_docs.iter().map(|e| {
        format!(
            "<li><a href='#ext-{}' class='block text-[#a89984] hover:text-[#8ec07c] hover:bg-[#32302f] px-2 py-1.5 rounded transition-colors duration-200'>{}</a></li>",
            e.target_type_annotation, e.target_type_annotation
        )
    }).collect::<Vec<_>>().join("");
    let fn_links = fn_docs.iter().map(|f| {
        format!(
            "<li><a href='#fn-{}' class='block text-[#a89984] hover:text-[#b8bb26] hover:bg-[#32302f] px-2 py-1.5 rounded transition-colors duration-200'>{}</a></li>",
            f.function_name, f.function_name
        )
    }).collect::<Vec<_>>().join("");
    format!(
        r#"
<div>
<h3 class="font-bold text-[#fabd2f] uppercase tracking-wider text-xs mb-3 ml-2">Structs</h3>
<ul class="space-y-0.5 text-sm">{struct_links}</ul>
</div>
<div>
<h3 class="font-bold text-[#8ec07c] uppercase tracking-wider text-xs mb-3 ml-2">Extensions</h3>
<ul class="space-y-0.5 text-sm">{extension_links}</ul>
</div>
<div>
<h3 class="font-bold text-[#b8bb26] uppercase tracking-wider text-xs mb-3 ml-2">Functions</h3>
<ul class="space-y-0.5 text-sm">{fn_links}</ul>
</div>
"#
    )
}
/// Renders one struct's documentation card: badge, doc comments, a field
/// table (omitted when the struct has no fields) and a methods section
/// (omitted when no commented methods exist).
fn render_struct(doc: &StructDoc) -> String {
    let comments_html = doc
        .comments
        .iter()
        .map(|c| format!("<p class='text-[#ebdbb2] opacity-80 mb-2 leading-relaxed'>{c}</p>"))
        .collect::<Vec<_>>()
        .join("");
    let fields_html = if doc.fields.is_empty() {
        String::new()
    } else {
        // One table row per field: name + rendered type.
        let rows = doc.fields.iter().map(|field| {
            format!(
                "<tr class='border-b border-[#3c3836] last:border-0 hover:bg-[#32302f] transition-colors'>
<td class='py-3 px-4 font-mono text-sm text-[#83a598]'>{}</td>
<td class='py-3 px-4 font-mono text-sm text-[#d3869b]'>{}</td>
</tr>",
                field.field_name, field.type_annotation
            )
        }).collect::<Vec<_>>().join("");
        format!(
            r#"
<div class="mt-5 mb-8 bg-[#1d2021] rounded border border-[#3c3836] overflow-x-auto">
<table class="w-full text-left min-w-[300px]">
<thead class="bg-[#32302f] border-b border-[#3c3836]">
<tr>
<th class="py-2 px-4 text-xs font-bold text-[#a89984] uppercase tracking-wider">Field</th>
<th class="py-2 px-4 text-xs font-bold text-[#a89984] uppercase tracking-wider">Type</th>
</tr>
</thead>
<tbody>
{rows}
</tbody>
</table>
</div>
"#
        )
    };
    let methods_html = if doc.function_docs.is_empty() {
        String::new()
    } else {
        let methods = doc
            .function_docs
            .iter()
            .map(|f| render_function(f, true))
            .collect::<Vec<_>>()
            .join("\n");
        format!(
            r#"
<div class="mt-8 pl-4 md:pl-5 border-l-2 border-[#504945]">
<h4 class="text-sm font-bold text-[#fe8019] uppercase tracking-widest mb-6">Methods</h4>
<div class="space-y-8">
{methods}
</div>
</div>
"#
        )
    };
    // `data-name` feeds the client-side search; `id` is the sidebar anchor.
    format!(
        r#"
<div id="struct-{name}" class="doc-item scroll-mt-24" data-name="{name}">
<div class="flex items-center gap-3 mb-4">
<span class="text-xs font-bold text-[#1d2021] bg-[#fabd2f] px-2 py-0.5 rounded uppercase tracking-wide">Struct</span>
<h3 class="text-xl md:text-2xl font-bold text-[#fbf1c7] tracking-tight break-all">{name}</h3>
</div>
<div class="prose max-w-none mb-4 text-[#ebdbb2]">
{comments}
</div>
{fields}
{methods}
</div>
"#,
        name = doc.struct_name,
        comments = comments_html,
        fields = fields_html,
        methods = methods_html
    )
}
/// Renders one extension block's documentation card: badge, the extended
/// type, doc comments, and an "Extension Methods" section (omitted when no
/// commented methods exist).
fn render_extension(doc: &ExtensionsDoc) -> String {
    let comments_html = doc
        .comments
        .iter()
        .map(|c| format!("<p class='text-[#ebdbb2] opacity-80 mb-2 leading-relaxed'>{c}</p>"))
        .collect::<Vec<_>>()
        .join("");
    let methods_html = if doc.function_docs.is_empty() {
        String::new()
    } else {
        let methods = doc
            .function_docs
            .iter()
            .map(|f| render_function(f, true))
            .collect::<Vec<_>>()
            .join("\n");
        format!(
            r#"
<div class="mt-8 pl-4 md:pl-5 border-l-2 border-[#504945]">
<h4 class="text-sm font-bold text-[#fe8019] uppercase tracking-widest mb-6">Extension Methods</h4>
<div class="space-y-8">
{methods}
</div>
</div>
"#
        )
    };
    // `data-name` feeds the client-side search; `id` is the sidebar anchor.
    format!(
        r#"
<div id="ext-{name}" class="doc-item scroll-mt-24" data-name="{name}">
<div class="flex items-center gap-3 mb-4">
<span class="text-xs font-bold text-[#1d2021] bg-[#8ec07c] px-2 py-0.5 rounded uppercase tracking-wide">Extension</span>
<h3 class="text-xl md:text-2xl font-bold text-[#8ec07c] tracking-tight break-all">on {name}</h3>
</div>
<div class="prose max-w-none mb-4 text-[#ebdbb2]">
{comments}
</div>
{methods}
</div>
"#,
        name = doc.target_type_annotation,
        comments = comments_html,
        methods = methods_html
    )
}
/// Renders one function's documentation card.
///
/// `is_method` switches the badge text/colors and the anchor id prefix
/// (`method-` vs `fn-`); the layout is otherwise identical.
fn render_function(doc: &FunctionDoc, is_method: bool) -> String {
    let comments_html = doc
        .comments
        .iter()
        .map(|c| format!("<p class='text-[#ebdbb2] opacity-80 mb-2 leading-relaxed'>{c}</p>"))
        .collect::<Vec<_>>()
        .join("");
    let (badge, title_color) = if is_method {
        (
            r#"<span class="text-xs font-bold text-[#a89984] bg-[#32302f] px-2 py-0.5 rounded border border-[#504945] uppercase tracking-wide">Method</span>"#,
            "text-[#ebdbb2]",
        )
    } else {
        (
            r#"<span class="text-xs font-bold text-[#1d2021] bg-[#b8bb26] px-2 py-0.5 rounded uppercase tracking-wide">Fn</span>"#,
            "text-[#fbf1c7]",
        )
    };
    let id_prefix = if is_method { "method" } else { "fn" };
    format!(
        r#"
<div id="{id_prefix}-{name}" class="doc-item scroll-mt-24" data-name="{name}">
<div class="flex flex-wrap items-center gap-3 mb-3">
{badge}
<h3 class="text-lg md:text-xl font-bold {title_color} font-mono tracking-tight break-all">{name}</h3>
</div>
<div class="bg-[#32302f] rounded p-4 font-mono text-sm overflow-x-auto shadow-sm mb-4 border border-[#3c3836]">
<code class="text-[#8ec07c] whitespace-pre">{annotation}</code>
</div>
<div class="prose max-w-none text-[#ebdbb2]">
{comments}
</div>
</div>
"#,
        id_prefix = id_prefix,
        name = doc.function_name,
        badge = badge,
        title_color = title_color,
        annotation = doc.function_annotation,
        comments = comments_html
    )
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/dargo/clean.rs | src/dargo/clean.rs | use std::{fs, io::ErrorKind as IOErrKind};
use crate::{DARGO_DOT_DIR, tags::Tag};
/// Error categories for `dargo clean`.
#[derive(Debug)]
pub enum CleanErrKind {
    /// Underlying filesystem error kind from removing the dargo directory.
    IOErr(IOErrKind),
}
/// Removes the dargo working directory (`DARGO_DOT_DIR`) and all of its
/// contents.
///
/// # Errors
/// Returns a formatted console message together with the underlying
/// [`IOErrKind`] when the directory cannot be removed.
pub fn clean() -> Result<(), (String, CleanErrKind)> {
    match fs::remove_dir_all(DARGO_DOT_DIR.as_path()) {
        Ok(()) => Ok(()),
        Err(io_error) => {
            let message = format!("{}{} couldn't clean directory", Tag::IO, Tag::Err,);
            Err((message, CleanErrKind::IOErr(io_error.kind())))
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/dargo/new.rs | src/dargo/new.rs | use colored::Colorize;
use lazy_static::lazy_static;
use std::{
fs::{self, create_dir},
io,
path::{Path, PathBuf},
};
use crate::{dargo::cli::NewArgs, duck_with_message, tags::Tag};
/// Error categories for `dargo new`.
#[derive(Debug)]
pub enum NewErrKind {
    /// Reading the project name from stdin failed.
    CannotPrompt,
    /// Writing the default `dargo.toml` failed.
    CannotWriteFile,
    /// `./<project_name>` already exists in the current directory.
    DirectoryAlreadyExists,
    /// Creating the project directory failed.
    CannotCreateDir,
}
lazy_static! {
    /// Colored " new " prefix used in `dargo new` console messages.
    static ref NEW_TAG: String = " new ".on_green().bright_white().to_string();
}
/// Renders the default `dargo.toml` contents for a freshly created project.
///
/// The template is written as an indented raw string; every line is trimmed
/// individually and surrounding blank lines are stripped before returning.
pub fn generate_default_dargo_toml(project_name: impl Into<String>) -> String {
    let rendered = format!(
        r#"
name = "{}"
version = "0.0.1"
[dependencies]
"#,
        project_name.into()
    );
    let normalized: Vec<&str> = rendered.lines().map(str::trim).collect();
    normalized.join("\n").trim().to_string()
}
/// Returns the default `src/main.duck` hello-world program for new projects.
pub fn generate_default_main_duck() -> String {
    String::from("use std::io::{println};\n\nfn main() {\n println(\"Hello, World!\");\n}")
}
/// Creates a new duck project at `./<project_name>` with a default
/// `dargo.toml` and `src/main.duck`.
///
/// When `new_args.project_name` is `None` the name is read interactively
/// from stdin. `custom_dargo_toml_path` overrides where the manifest is
/// written (defaults to `<project>/dargo.toml`).
///
/// # Errors
/// Returns a console message plus a [`NewErrKind`] when prompting fails, the
/// target directory already exists, or a directory/file cannot be created.
pub fn new_project(
    custom_dargo_toml_path: Option<PathBuf>,
    new_args: NewArgs,
) -> Result<(), (String, NewErrKind)> {
    let project_name = if let Some(project_name) = new_args.project_name {
        project_name
    } else {
        println!("what do you want the project to be called?");
        let mut buffer = String::new();
        io::stdin().read_line(&mut buffer).map_err(|_| {
            (
                "couldn't prompt for a project name, please try again providing the projects name"
                    .to_string(),
                NewErrKind::CannotPrompt,
            )
        })?;
        // Fixed: the buffer was needlessly cloned here before.
        buffer
    };
    // Leaked so the name can be embedded in `'static` paths/messages below.
    let project_name: &'static str = String::from(project_name.trim()).leak();
    let target_dir = Path::new(format!("./{project_name}/").leak());
    if target_dir.exists() {
        let message = format!(
            "{}{} ./{project_name} already exists in current directory. Did you want to run init inside ./{project_name}?",
            *NEW_TAG,
            Tag::Err
        );
        return Err((message, NewErrKind::DirectoryAlreadyExists));
    }
    if create_dir(target_dir).is_err() {
        let message = format!(
            "{}{} couldn't create ./{project_name} directory, please try again.",
            *NEW_TAG,
            Tag::Err
        );
        return Err((message, NewErrKind::CannotCreateDir));
    }
    let dargo_toml_path =
        custom_dargo_toml_path.unwrap_or_else(|| target_dir.join("./dargo.toml"));
    let dargo_toml_content = generate_default_dargo_toml(project_name);
    fs::write(&dargo_toml_path, dargo_toml_content).map_err(|write_error| {
        let message = format!(
            "{}{} Failed to create default dargo.toml file '{}': {}",
            Tag::Err,
            *NEW_TAG,
            dargo_toml_path.display(),
            write_error
        );
        (message, NewErrKind::CannotWriteFile)
    })?;
    // Best-effort scaffolding of src/main.duck: failures here are not fatal.
    let src_dir = target_dir.join("./src");
    if !src_dir.exists() && create_dir(&src_dir).is_ok() {
        let main_src_file = src_dir.join("main.duck");
        if !main_src_file.exists() {
            // todo: this is currently a silent error - if there's one
            let _ = fs::write(&main_src_file, generate_default_main_duck());
            duck_with_message(
                format!("You've successfully created a new project in ./{project_name}").leak(),
            );
            println!();
            println!("Run following commands to get started");
            println!(" cd {project_name}");
            println!(" dargo run");
        }
    }
    Ok(())
}
#[cfg(test)]
pub mod test {
    // NOTE(review): this imports from `dargo::init` although the function is
    // also defined in this file (`new.rs`) — verify which module is canonical.
    use crate::dargo::init::generate_default_dargo_toml;
    /// Smoke-tests the generated manifest: contains the project name, a
    /// version, a dependencies section, and a fixed line count.
    #[test]
    pub fn test_dargo_toml_generation() {
        let output = generate_default_dargo_toml("test");
        assert!(output.contains("test"));
        assert!(output.contains("0.0.1"));
        assert!(output.contains("dependencies"));
        // NOTE(review): 4 lines implies a blank line inside the template —
        // confirm against the template literal in generate_default_dargo_toml.
        assert_eq!(output.lines().count(), 4);
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/duckx_component_parser.rs | src/parse/duckx_component_parser.rs | use std::collections::HashSet;
use chumsky::{input::BorrowInput, prelude::*};
use tree_sitter::{Node, Parser as TSParser};
use crate::{
parse::{
SS, Spanned,
type_parser::{Duck, TypeExpr, type_expression_parser},
value_parser::{ValHtmlStringContents, ValueExpr, value_expr_parser},
},
semantics::type_resolve::TypeEnv,
};
use super::lexer::Token;
/// A parsed `template` (duckx) component: its name, the declared props type
/// (defaults to an empty duck type when omitted) and its body expression.
#[derive(Debug, Clone, PartialEq)]
pub struct DuckxComponent {
    pub name: String,
    pub props_type: Spanned<TypeExpr>,
    pub value_expr: Spanned<ValueExpr>,
}
/// Names of client components a duckx component depends on.
#[derive(Debug, Clone, Default)]
pub struct DuckxComponentDependencies {
    pub client_components: Vec<String>,
}
/// Kinds of units encountered while scanning an HTML string's source.
/// NOTE(review): currently unreferenced in this file — confirm it is used
/// elsewhere before relying on its variants' semantics.
#[derive(Debug, Clone)]
pub enum HtmlStringSourceUnit {
    JsxOpen,
    JsxClose,
    Ident,
}
// pub fn transform_html_string(html_string: &ValueExpr) -> ValueExpr {
// let ValueExpr::HtmlString(contents) = html_string else {
// panic!("not a html string")
// };
// let mut out = Vec::new();
// }
/// Scans the contents of an HTML string literal for component tags and
/// inserts the full dependency closure of every referenced client component
/// into `out`.
///
/// Two passes happen: a textual scan for `<Name`-style tags as string pieces
/// arrive, then a tree-sitter HTML parse of the accumulated text looking for
/// self-closing tags.
pub fn find_client_components(
    obj: &Vec<ValHtmlStringContents>,
    out: &mut HashSet<String>,
    type_env: &mut TypeEnv,
) {
    // Walk the tree-sitter HTML tree; self-closing tags are treated as
    // component references whose dependency closure is added to `out`.
    fn trav(n: &Node, t: &[u8], out: &mut HashSet<String>, type_env: &mut TypeEnv) {
        if n.grammar_name() == "self_closing_tag" {
            for comp in
                type_env.get_full_component_dependencies(n.child(1).unwrap().utf8_text(t).unwrap())
            {
                out.insert(comp);
            }
        } else {
            for i in 0..n.child_count() {
                trav(&n.child(i).unwrap(), t, out, type_env);
            }
        }
    }
    let mut s = String::new();
    for c in obj {
        match c {
            ValHtmlStringContents::Expr((e, _)) => {
                // Nested HTML strings are scanned recursively; the expression
                // itself is replaced by a placeholder in the accumulated text.
                if let ValueExpr::HtmlString(contents) = e {
                    find_client_components(contents, out, type_env);
                }
                s.push_str("\"\"");
            }
            ValHtmlStringContents::String(s_) => {
                // NOTE(review): this scans the previously accumulated `s`,
                // not the newly arrived `s_` — earlier pieces get rescanned
                // and `s_` is only seen textually on the NEXT iteration (and
                // by the tree-sitter pass below). Confirm this is intended.
                let mut x = s.as_str();
                while let Some(idx) = x.find("<") {
                    x = &x[idx + 1..];
                    let end = x.find([' ', '>']).unwrap_or(x.len());
                    let mut between = &x[..end];
                    if between.ends_with("/") {
                        between = &between[..between.len() - 1];
                    }
                    if type_env.get_component(between).is_some() {
                        type_env
                            .get_full_component_dependencies(between)
                            .into_iter()
                            .for_each(|s| {
                                out.insert(s);
                            })
                    }
                }
                s.push_str(s_.as_str());
            }
        }
    }
    // Second pass: parse the accumulated text as HTML and collect
    // dependencies from self-closing tags.
    let mut parser = TSParser::new();
    parser
        .set_language(&tree_sitter_html::LANGUAGE.into())
        .expect("Couldn't set js grammar");
    let src = parser.parse(s.as_bytes(), None).unwrap();
    let root_node = src.root_node();
    trav(&root_node, s.as_bytes(), out, type_env);
}
/// Chumsky parser for a `template Name(props: Type) { ... }` component
/// definition.
///
/// The `props:` annotation is optional; when missing, the props type
/// defaults to an empty duck type spanned at the component name. The parsed
/// body is wrapped in an implicit `return`.
pub fn duckx_component_parser<'src, I, M>(
    make_input: M,
) -> impl Parser<'src, I, DuckxComponent, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
    M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
    just(Token::Template)
        .ignore_then(
            select_ref! { Token::Ident(identifier) => identifier.clone() }
                .map_with(|ident, e| (ident, e.span())),
        )
        .then(
            // Optional `(props: <type>)` — the parens are required, the
            // annotation inside them is not.
            just(Token::Ident("props".to_string()))
                .ignore_then(just(Token::ControlChar(':')))
                .ignore_then(type_expression_parser())
                .or_not()
                .delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')'))),
        )
        .then(value_expr_parser(make_input.clone()))
        .map(
            |(((ident, ident_span), props_type), src_tokens)| DuckxComponent {
                name: ident.clone(),
                props_type: props_type
                    .unwrap_or((TypeExpr::Duck(Duck { fields: Vec::new() }), ident_span)),
                // Wrap the body so evaluating the component returns its value.
                value_expr: (
                    ValueExpr::Return(Some(Box::new(src_tokens.clone()))),
                    src_tokens.1,
                ),
            },
        )
}
#[cfg(test)]
mod tests {
    use crate::parse::{
        lexer::lex_parser,
        make_input,
        value_parser::{
            IntoReturn, empty_range, type_expr_into_empty_range, value_expr_into_empty_range,
        },
    };
    use super::*;
    /// Round-trips valid component definitions through lexer + parser and
    /// checks the produced AST, then verifies that invalid inputs fail.
    #[test]
    fn test_component_parser() {
        let src_and_expected_ast = vec![(
            "template T() duckx {}",
            DuckxComponent {
                name: "T".to_string(),
                props_type: TypeExpr::Duck(Duck { fields: Vec::new() }).into_empty_span(),
                value_expr: ValueExpr::Block(vec![]).into_empty_span().into_return(),
            },
        )];
        for (src, expected_ast) in src_and_expected_ast {
            println!("lexing {src}");
            let lexer_parse_result = lex_parser("test", "").parse(src);
            assert_eq!(lexer_parse_result.has_errors(), false);
            assert_eq!(lexer_parse_result.has_output(), true);
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("parsing component statement {src}");
            let component_parse_result = duckx_component_parser(make_input)
                .parse(make_input(empty_range(), tokens.as_slice()));
            assert_eq!(component_parse_result.has_errors(), false);
            assert_eq!(component_parse_result.has_output(), true);
            let Some(mut ast) = component_parse_result.into_output() else {
                unreachable!()
            };
            // Spans are normalized so the comparison ignores source positions.
            value_expr_into_empty_range(&mut ast.value_expr);
            type_expr_into_empty_range(&mut ast.props_type);
            assert_eq!(ast, expected_ast);
        }
        // NOTE(review): these inputs look copied from the use-statement
        // parser tests — they do exercise the failure path, but
        // component-specific invalid inputs (e.g. "template {}") would be
        // more targeted.
        let invalid_component_statements = vec![
            "use x::;",
            "use y::{};",
            "use std::{}",
            "use ::;",
            "use :std:;",
            "use :std::{};",
            "use go x;",
            "use go;",
            "use go \"fmt\" as;",
            "use go fmt as x",
            "use go::x;",
            "use go::x;",
            "use go as;",
        ];
        for invalid_component_statement in invalid_component_statements {
            println!("lexing {invalid_component_statement}");
            let lexer_parse_result = lex_parser("test", "").parse(invalid_component_statement);
            assert_eq!(lexer_parse_result.has_errors(), false);
            assert_eq!(lexer_parse_result.has_output(), true);
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("component parser try invalid {invalid_component_statement}");
            let component_parse_result = duckx_component_parser(make_input)
                .parse(make_input(empty_range(), tokens.as_slice()));
            assert_eq!(component_parse_result.has_errors(), true);
            assert_eq!(component_parse_result.has_output(), false);
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/lexer.rs | src/parse/lexer.rs | use std::fmt::Display;
use chumsky::{prelude::*, text::whitespace};
use crate::parse::{Context, SS, Spanned, value_parser::empty_range};
/// Raw (pre-collapse) pieces of a format string: individual characters
/// interleaved with already-lexed interpolation tokens.
#[derive(Debug, PartialEq, Clone)]
pub enum RawFmtStringContents {
    Char(&'static str),
    Tokens(Vec<Spanned<Token>>),
}
/// Pieces of a format string after adjacent characters are collapsed into
/// `String` runs, interleaved with interpolation tokens.
#[derive(Debug, PartialEq, Clone)]
pub enum FmtStringContents {
    String(String),
    Tokens(Vec<Spanned<Token>>),
}
/// Pieces of an HTML string literal: literal markup runs interleaved with
/// embedded token interpolations.
#[derive(Debug, Clone, PartialEq)]
pub enum HtmlStringContents {
    String(String),
    Tokens(Vec<Spanned<Token>>),
}
/// Raw (pre-collapse) pieces of an HTML string literal.
#[derive(Debug, Clone, PartialEq)]
pub enum RawHtmlStringContents {
    Char(char),
    Tokens(Vec<Spanned<Token>>),
    /// A single substituted token. NOTE(review): semantics vs `Tokens` not
    /// evident from this file — confirm at the construction sites.
    Sub(Token),
}
/// All lexical tokens of the duck language.
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    // Declaration / item keywords.
    Static,
    Mut,
    Use,
    Type,
    Go,
    Struct,
    Schema,
    Impl,
    With,
    Extend,
    Duck,
    Function,
    Test,
    RefMut,
    Return,
    // Identifiers, punctuation and literals (payload carries the value).
    Ident(String),
    ControlChar(char),
    StringLiteral(String),
    FormatStringLiteral(Vec<FmtStringContents>),
    IntLiteral(u64),
    BoolLiteral(bool),
    CharLiteral(char),
    HtmlString(Vec<HtmlStringContents>),
    // Comparison and logical operators.
    Equals,
    NotEquals,
    LessThanOrEquals,
    GreaterThanOrEquals,
    And,
    Or,
    // Expression / control-flow keywords.
    Match,
    If,
    TypeOf,
    KeyOf,
    Else,
    Let,
    Const,
    While,
    Break,
    Continue,
    As,
    // Embedded foreign-language blocks (payload is the raw source).
    InlineGo(String),
    InlineJsx(String),
    InlineDuckx(Vec<Spanned<Token>>),
    Module,
    // Multi-character punctuation.
    ScopeRes,
    ThinArrow,
    ThickArrow,
    // Comments are kept as tokens so doc comments can be attached to items.
    Comment(String),
    DocComment(String),
    Async,
    Component,
    Template,
    For,
    In,
    Defer,
    // Compound assignment operators.
    PlusEquals,
    SubEquals,
    MulEquals,
    DivEquals,
    ModEquals,
    ShiftRightEquals,
    ShiftLeftEquals,
}
/// Human-readable token names, used primarily in parser error messages
/// ("expected X, found Y"). Most arms print a fixed keyword/description;
/// value-carrying literals include their payload.
impl Display for Token {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `&format!(...)` arms rely on temporary lifetime extension to the
        // end of the `let` statement.
        let t = match self {
            Token::Static => "static",
            Token::Defer => "defer",
            Token::For => "for",
            Token::In => "in",
            Token::Mut => "mut",
            Token::Const => "const",
            Token::RefMut => "&mut",
            Token::FormatStringLiteral(s) => &format!("f-string {s:?}"),
            Token::Impl => "impl",
            Token::With => "with",
            Token::Extend => "extend",
            Token::ScopeRes => "::",
            Token::ThinArrow => "->",
            Token::ThickArrow => "=>",
            Token::Use => "use",
            Token::Type => "type",
            Token::Test => "test",
            Token::Go => "go",
            Token::TypeOf => "typeof",
            Token::KeyOf => "keyof",
            Token::Struct => "struct",
            Token::Schema => "schema",
            Token::Duck => "duck",
            Token::Component => "component",
            Token::Template => "template",
            Token::Function => "fn",
            Token::Return => "return",
            Token::Ident(_) => "identifier",
            Token::ControlChar(c) => &format!("{c}"),
            Token::StringLiteral(s) => &format!("string {s}"),
            Token::IntLiteral(_) => "int",
            Token::BoolLiteral(_) => "bool",
            Token::CharLiteral(_) => "char",
            Token::Equals => "==",
            Token::NotEquals => "!=",
            Token::LessThanOrEquals => "<=",
            Token::GreaterThanOrEquals => ">=",
            Token::And => "and",
            Token::Or => "or",
            Token::If => "if",
            Token::Else => "else",
            Token::Let => "let",
            Token::While => "while",
            Token::Break => "break",
            Token::Continue => "continue",
            Token::As => "as",
            Token::InlineGo(_) => "inline go",
            Token::InlineJsx(_) => "inline jsx",
            Token::InlineDuckx(_) => "inline duckx",
            Token::Module => "module",
            Token::Match => "match",
            Token::HtmlString(..) => "html string",
            Token::DocComment(comment) => &format!("/// {comment}"),
            Token::Comment(comment) => &format!("// {comment}"),
            Token::Async => "async",
            Token::PlusEquals => "+=",
            Token::SubEquals => "-=",
            Token::MulEquals => "*=",
            Token::DivEquals => "/=",
            Token::ModEquals => "%=",
            Token::ShiftLeftEquals => "<<=",
            Token::ShiftRightEquals => ">>=",
        };
        write!(f, "{t}")
    }
}
/// Lexes a `{ ... }` group into its token stream, including the surrounding
/// brace tokens, while handling arbitrarily nested brace groups recursively.
///
/// The returned vector starts with the `{` token and ends with a `}` token
/// whose span is adjusted to the closing brace's position (keeping the
/// opening brace's context).
pub fn tokens_in_curly_braces<'a>(
    lexer: impl Parser<'a, &'a str, Spanned<Token>, extra::Err<Rich<'a, char>>> + Clone + 'a,
) -> impl Parser<'a, &'a str, Vec<Spanned<Token>>, extra::Err<Rich<'a, char>>> + Clone {
    recursive(|e| {
        lexer
            .clone()
            .filter(|x| matches!(x.0, Token::ControlChar('{')))
            .then(
                // Inside the braces: nested groups recurse, whitespace is
                // skipped, and any other character is lexed as one token.
                choice((
                    just("{").rewind().ignore_then(e.clone()),
                    any().filter(|c: &char| c.is_whitespace()).to(Vec::new()),
                    any()
                        .filter(|c| *c != '{' && *c != '}')
                        .rewind()
                        .ignore_then(lexer.clone())
                        .map(|x| vec![x]),
                ))
                .repeated()
                .collect::<Vec<_>>(),
            )
            .then(just("}").map_with(|_, e| e.span()))
            .map(|((a, x), b)| {
                let mut v = Vec::new();
                // Synthesize the closing-brace token at the actual closing
                // position, reusing the opening token's context.
                let b = (
                    Token::ControlChar('}'),
                    SS {
                        start: b.start,
                        end: b.end,
                        context: a.1.context,
                    },
                );
                v.push(a);
                v.extend(x.into_iter().flatten());
                v.push(b);
                v
            })
    })
}
/// A parsed HTML attribute. Only the name is tracked here.
#[derive(Debug, Clone)]
pub struct HtmlAttribute {
    pub name: String,
}
/// Parses a closing tag such as `</div>` or `</>` and returns it verbatim,
/// including the `</` and `>` delimiters.
pub fn closing_tag<'a>() -> impl Parser<'a, &'a str, String, extra::Err<Rich<'a, char>>> + Clone {
    just("</")
        .then(
            any()
                .filter(|c: &char| *c != '>')
                .repeated()
                .collect::<String>(),
        )
        .then(just(">"))
        .map(|((pre, main), close)| format!("{pre}{main}{close}"))
}
/// Parses the start of a self-closing tag (the `<Name` of `<Name ... />`).
///
/// The first pass is a lookahead that checks a `/>` terminator exists; the
/// input is then rewound and only the tag name portion (up to the first
/// space or `/>`) is consumed. The attributes and the `/>` itself are
/// deliberately left in the input for the caller to handle.
pub fn opening_self_closing<'a>()
-> impl Parser<'a, &'a str, String, extra::Err<Rich<'a, char>>> + Clone {
    just("<")
        .and_is(just("</").not())
        .ignore_then(
            any()
                .and_is(just(">").not())
                .and_is(just("/>").not())
                .repeated()
                .collect::<String>(),
        )
        .then_ignore(just("/>"))
        // Everything above was validation only; rewind and lex just `<Name`.
        .rewind()
        .ignore_then(
            just("<").and_is(just("</").not()).ignore_then(
                any()
                    .and_is(just(" ").not())
                    .and_is(just("/>").not())
                    .repeated()
                    .collect::<String>(),
            ),
        )
        .map(|x| {
            let complete = format!("<{x}");
            complete
        })
}
/// Parses the start of a regular (non-self-closing) opening tag.
///
/// A complete `<...>` tag is matched as a lookahead (with self-closing tags
/// explicitly excluded), then the input is rewound and only `<Name` (up to
/// the first space or `>`) is consumed; attributes and `>` stay in the input.
pub fn opening_tag<'a>() -> impl Parser<'a, &'a str, String, extra::Err<Rich<'a, char>>> + Clone {
    just("<")
        .and_is(just("</").not())
        .and_is(opening_self_closing().not())
        .then(
            any()
                .filter(|c: &char| *c != '>')
                .repeated()
                .collect::<String>(),
        )
        .then(just(">"))
        .rewind()
        .then(
            just("<").and_is(just("</").not()).then(
                any()
                    .filter(|c: &char| *c != ' ' && *c != '>')
                    .repeated()
                    .collect::<String>(),
            ),
        )
        .map(|(((_pre, _main), _close), (_, x))| {
            let complete = format!("<{x}");
            complete
        })
}
/// Parses a "special" tag: an opening tag whose text, lowered to ASCII
/// lowercase, begins with `<!doctype`.
pub fn special_tag<'a>() -> impl Parser<'a, &'a str, String, extra::Err<Rich<'a, char>>> + Clone {
    opening_tag().filter(|tag: &String| {
        let lowered = tag.to_ascii_lowercase();
        lowered.starts_with("<!doctype")
    })
}
/// Parses one complete HTML element (opening tag through matching closing
/// tag) inside a `duckx` block into a `Token::HtmlString`.
///
/// Interpolated `{ ... }` regions are lexed with `duckx_lexer` and kept as
/// token groups; nested elements recurse; all other characters are collected
/// verbatim. Adjacent string fragments are coalesced at the end so the
/// resulting `HtmlString` alternates between `String` and `Tokens` parts.
pub fn duckx_parse_html_string<'a>(
    duckx_lexer: impl Parser<'a, &'a str, Vec<Spanned<Token>>, extra::Err<Rich<'a, char>>> + Clone + 'a,
    _context: Context,
) -> impl Parser<'a, &'a str, Token, extra::Err<Rich<'a, char>>> + Clone {
    recursive(|e| {
        opening_tag()
            .then(
                choice((
                    // `{ ... }` interpolation: peek, then lex the group.
                    just("{")
                        .rewind()
                        .ignore_then(duckx_lexer.clone())
                        .map(RawHtmlStringContents::Tokens),
                    special_tag().map(|x| {
                        RawHtmlStringContents::Sub(Token::HtmlString(vec![
                            HtmlStringContents::String(x),
                        ]))
                    }),
                    opening_self_closing().map(|in_html| {
                        RawHtmlStringContents::Sub(Token::HtmlString(vec![
                            HtmlStringContents::String(in_html),
                        ]))
                    }),
                    // Nested element: recurse on this same parser.
                    opening_tag()
                        .rewind()
                        .ignore_then(e.clone())
                        .map(RawHtmlStringContents::Sub),
                    any()
                        .and_is(closing_tag().not())
                        // .filter(|c: &char| *c != '{' && *c != '<')
                        .map(RawHtmlStringContents::Char),
                ))
                .repeated()
                .collect::<Vec<_>>(),
            )
            .then(closing_tag())
            .map(
                |((opening_tag, template_contents), closing_tag): (
                    (String, Vec<RawHtmlStringContents>),
                    String,
                )| {
                    // First pass: flatten raw pieces into HtmlStringContents.
                    let mut new_out = Vec::new();
                    new_out.push(HtmlStringContents::String(opening_tag));
                    for c in template_contents {
                        match c {
                            RawHtmlStringContents::Tokens(t) => {
                                new_out.push(HtmlStringContents::Tokens(t));
                            }
                            RawHtmlStringContents::Char(c) => {
                                new_out.push(HtmlStringContents::String(c.to_string()));
                            }
                            RawHtmlStringContents::Sub(Token::HtmlString(sub)) => {
                                new_out.extend(sub);
                            }
                            // `Sub` is only ever built with an HtmlString above.
                            _ => panic!("invalid"),
                        }
                    }
                    new_out.push(HtmlStringContents::String(closing_tag));
                    // Second pass: merge runs of adjacent string fragments.
                    let mut s_buf = String::new();
                    let mut final_out = Vec::new();
                    for c in new_out {
                        match c {
                            HtmlStringContents::String(s) => {
                                s_buf.push_str(&s);
                            }
                            HtmlStringContents::Tokens(tok) => {
                                if !s_buf.is_empty() {
                                    final_out.push(HtmlStringContents::String(s_buf));
                                    s_buf = String::new();
                                }
                                final_out.push(HtmlStringContents::Tokens(tok));
                            }
                        }
                    }
                    if !s_buf.is_empty() {
                        final_out.push(HtmlStringContents::String(s_buf));
                    }
                    Token::HtmlString(final_out)
                },
            )
    })
}
/// Lexes the `{ ... }` body of a `duckx` block: like
/// `tokens_in_curly_braces`, but additionally recognizes HTML content
/// (doctype tags, self-closing tags, full elements) and lexes it into
/// `Token::HtmlString` tokens.
pub fn duckx_contents_in_curly_braces<'a>(
    file_name: &'static str,
    file_contents: &'static str,
    lexer: impl Parser<'a, &'a str, Spanned<Token>, extra::Err<Rich<'a, char>>> + Clone + 'a,
) -> impl Parser<'a, &'a str, Vec<Spanned<Token>>, extra::Err<Rich<'a, char>>> + Clone {
    recursive(|duckx_lexer| {
        let context = Context {
            file_name,
            file_contents,
        };
        lexer
            .clone()
            .filter(|c: &Spanned<Token>| matches!(c.0, Token::ControlChar('{')))
            .then(
                choice((
                    // Nested `{ ... }` group: recurse on this duckx lexer.
                    just("{").rewind().ignore_then(duckx_lexer.clone()),
                    // `<!doctype ...>`-style tag becomes a plain HtmlString.
                    special_tag().map_with(move |x, e| {
                        vec![(
                            Token::HtmlString(vec![HtmlStringContents::String(x.to_string())]),
                            SS {
                                start: e.span().start,
                                end: e.span().end,
                                context,
                            },
                        )]
                    }),
                    // Self-closing tag, possibly with `{ ... }` interpolations
                    // in its attributes.
                    opening_self_closing()
                        .then(
                            choice((
                                just("{")
                                    .rewind()
                                    .ignore_then(duckx_lexer.clone())
                                    .map(RawHtmlStringContents::Tokens),
                                any()
                                    .and_is(just("/>").not())
                                    .map(RawHtmlStringContents::Char),
                            ))
                            .repeated()
                            .collect::<Vec<_>>(),
                        )
                        .map_with(move |(x1, x2), e| {
                            // Flatten raw pieces, then coalesce adjacent
                            // string fragments (same scheme as
                            // duckx_parse_html_string).
                            let mut new_out = Vec::new();
                            new_out.push(HtmlStringContents::String(x1));
                            for c in x2 {
                                match c {
                                    RawHtmlStringContents::Tokens(t) => {
                                        new_out.push(HtmlStringContents::Tokens(t));
                                    }
                                    RawHtmlStringContents::Char(c) => {
                                        new_out.push(HtmlStringContents::String(c.to_string()));
                                    }
                                    RawHtmlStringContents::Sub(Token::HtmlString(sub)) => {
                                        new_out.extend(sub);
                                    }
                                    _ => panic!("invalid"),
                                }
                            }
                            new_out.push(HtmlStringContents::String("/>".to_string()));
                            let mut s_buf = String::new();
                            let mut final_out = Vec::new();
                            for c in new_out {
                                match c {
                                    HtmlStringContents::String(s) => {
                                        s_buf.push_str(&s);
                                    }
                                    HtmlStringContents::Tokens(tok) => {
                                        if !s_buf.is_empty() {
                                            final_out.push(HtmlStringContents::String(s_buf));
                                            s_buf = String::new();
                                        }
                                        final_out.push(HtmlStringContents::Tokens(tok));
                                    }
                                }
                            }
                            if !s_buf.is_empty() {
                                final_out.push(HtmlStringContents::String(s_buf));
                            }
                            vec![(
                                Token::HtmlString(final_out),
                                SS {
                                    start: e.span().start,
                                    end: e.span().end,
                                    context,
                                },
                            )]
                        })
                        .then_ignore(just("/>")),
                    // Full element: delegate to the dedicated HTML parser.
                    opening_tag()
                        .rewind()
                        .ignore_then(duckx_parse_html_string(duckx_lexer.clone(), context))
                        .map_with(move |x, e| {
                            vec![(
                                x,
                                SS {
                                    start: e.span().start,
                                    end: e.span().end,
                                    context,
                                },
                            )]
                        }),
                    any().filter(|c: &char| c.is_whitespace()).to(Vec::new()),
                    any()
                        .and_is(choice((just("{"), just("}"))).not())
                        // .filter(|c| *c != '{' && *c != '}')
                        .rewind()
                        .ignore_then(lexer.clone())
                        .map(|x| vec![x]),
                ))
                .repeated()
                .collect::<Vec<_>>(),
            )
            .then(just("}").map_with(|_, e| e.span()))
            .map(move |((a, x), b_span)| {
                // Flatten, then wrap with the opening token and a synthesized
                // closing-brace token.
                let mut v = x.into_iter().flatten().collect::<Vec<_>>();
                v.insert(0, a);
                let b = (
                    Token::ControlChar('}'),
                    SS {
                        start: b_span.start,
                        end: b_span.end,
                        context,
                    },
                );
                v.push(b);
                v
            })
    })
}
/// Lexes a single octal digit (`0` through `7`).
pub fn oct_digit<'a>() -> impl Parser<'a, &'a str, char, extra::Err<Rich<'a, char>>> + Clone {
    one_of(['0', '1', '2', '3', '4', '5', '6', '7'])
}
/// Lexes a single spanned token of the duck language.
///
/// The parser is recursive because format strings and `duckx` blocks embed
/// full token streams. The order of the final `.or(...)` chain matters:
/// multi-character operators, keywords, and comments must be tried before
/// the single-character / identifier fallbacks.
pub fn lex_single<'a>(
    file_name: &'static str,
    file_contents: &'static str,
) -> impl Parser<'a, &'a str, Spanned<Token>, extra::Err<Rich<'a, char>>> + Clone {
    recursive(|lexer| {
        // Keywords first; any other identifier-shaped text becomes Ident.
        let keyword_or_ident = text::ident().map(|str| match str {
            "static" => Token::Static,
            "defer" => Token::Defer,
            "for" => Token::For,
            "in" => Token::In,
            "mut" => Token::Mut,
            "module" => Token::Module,
            "use" => Token::Use,
            "typeof" => Token::TypeOf,
            "keyof" => Token::KeyOf,
            "impl" => Token::Impl,
            "extend" => Token::Extend,
            "with" => Token::With,
            "test" => Token::Test,
            "type" => Token::Type,
            "duck" => Token::Duck,
            "go" => Token::Go,
            "struct" => Token::Struct,
            "schema" => Token::Schema,
            "fn" => Token::Function,
            "return" => Token::Return,
            "component" => Token::Component,
            "let" => Token::Let,
            "const" => Token::Const,
            "if" => Token::If,
            "else" => Token::Else,
            "while" => Token::While,
            "break" => Token::Break,
            "continue" => Token::Continue,
            "as" => Token::As,
            "match" => Token::Match,
            "async" => Token::Async,
            "and" => Token::And,
            "or" => Token::Or,
            "template" => Token::Template,
            _ => Token::Ident(str.to_string()),
        });
        // Single-character punctuation/operators.
        let ctrl = one_of("`!=:{};,&()-<>.+-*/%|[]@~^").map(Token::ControlChar);
        let string = string_lexer();
        let r#bool = choice((
            just("true").to(Token::BoolLiteral(true)),
            just("false").to(Token::BoolLiteral(false)),
        ));
        let r#char = char_lexer();
        let num = num_literal();
        let equals = just("==").to(Token::Equals);
        let not_equals = just("!=").to(Token::NotEquals);
        let less_than_or_equals = just("<=").to(Token::LessThanOrEquals);
        let greater_than_or_equals = just(">=").to(Token::GreaterThanOrEquals);
        let scope_res = just("::").to(Token::ScopeRes);
        let thin_arrow = just("->").to(Token::ThinArrow);
        let thick_arrow = just("=>").to(Token::ThickArrow);
        let assign_equals = choice((
            just("+=").to(Token::PlusEquals),
            just("-=").to(Token::SubEquals),
            just("*=").to(Token::MulEquals),
            just("/=").to(Token::DivEquals),
            just("%=").to(Token::ModEquals),
            just(">>=").to(Token::ShiftRightEquals),
            just("<<=").to(Token::ShiftLeftEquals),
        ));
        // `/// ...` — trimmed doc comment (must be tried before `//`).
        let doc_comment = just("///")
            .ignore_then(
                any()
                    .and_is(just('\n').not())
                    .repeated()
                    .collect::<Vec<_>>(),
            )
            .padded()
            .map(|comment| {
                Token::DocComment(comment.iter().collect::<String>().trim().to_string())
            });
        // `// ...` — trimmed line comment.
        let comment = just("//")
            .ignore_then(
                any()
                    .and_is(just('\n').not())
                    .repeated()
                    .collect::<Vec<_>>(),
            )
            .padded()
            .map(|comment| Token::Comment(comment.iter().collect::<String>().trim().to_string()));
        // `f"..."` format string: `{ ... }` regions are lexed as token
        // groups, everything else (including escapes) as string fragments.
        let fmt_string = just("f")
            .ignore_then(just('"'))
            .ignore_then(
                choice((
                    just("\\{").to(RawFmtStringContents::Char("{")),
                    just("{")
                        .rewind()
                        .ignore_then(tokens_in_curly_braces(lexer.clone()))
                        .map(|e| RawFmtStringContents::Tokens(e.to_vec())),
                    none_of("\\\"")
                        .map(|c: char| c.to_string().leak() as &'static str)
                        .or(choice((
                            just("\\0").to("\0"),
                            just("\\o")
                                .ignore_then(oct_digit())
                                .then(oct_digit())
                                .then(oct_digit())
                                .map(|((a, b), c)| format!("\\o{a}{b}{c}").leak() as &'static str),
                            just("\\\\").to("\\"),
                            just("\\{").to("{"),
                            just("\\n").to("\n"),
                            just("\\t").to("\t"),
                            just("\\\"").to("\""),
                        )))
                        .map(RawFmtStringContents::Char),
                ))
                .repeated()
                .collect::<Vec<_>>(),
            )
            .then_ignore(just('"'))
            .map(|x| {
                // Coalesce adjacent character fragments into single strings.
                let mut s = String::new();
                let mut xx = Vec::new();
                for e in x {
                    match e {
                        RawFmtStringContents::Tokens(t) => {
                            if !s.is_empty() {
                                xx.push(FmtStringContents::String(s.clone()));
                                s.clear();
                            }
                            xx.push(FmtStringContents::Tokens(t));
                        }
                        RawFmtStringContents::Char(c) => s.push_str(c),
                    }
                }
                if !s.is_empty() {
                    xx.push(FmtStringContents::String(s));
                }
                Token::FormatStringLiteral(xx)
            });
        // Alternative order is significant: e.g. `//`-comments before `/`,
        // `==` before `=`, keywords before idents, `&mut` before `&`.
        let token = inline_go_parser()
            .or(inline_jsx_parser())
            .or(just("duckx")
                .ignore_then(whitespace().at_least(1))
                .ignore_then(just("{").rewind())
                .ignore_then(duckx_contents_in_curly_braces(
                    file_name,
                    file_contents,
                    lexer.clone(),
                ))
                .map(Token::InlineDuckx))
            .or(just("&mut")
                .then_ignore(whitespace().at_least(1))
                .to(Token::RefMut))
            .or(doc_comment)
            .or(assign_equals)
            .or(comment)
            .or(fmt_string)
            .or(thin_arrow)
            .or(thick_arrow)
            .or(scope_res)
            .or(r#bool)
            .or(equals)
            .or(not_equals)
            .or(less_than_or_equals)
            .or(greater_than_or_equals)
            .or(keyword_or_ident)
            .or(num)
            .or(ctrl)
            .or(string)
            .or(r#char);
        token
            .map_with(move |t, e| {
                (
                    t,
                    SS {
                        start: e.span().start,
                        end: e.span().end,
                        context: Context {
                            file_name,
                            file_contents,
                        },
                    },
                )
            })
            .padded()
    })
}
/// Lexes an entire source string into a flat list of spanned tokens by
/// repeating the single-token lexer until the input is exhausted.
pub fn lex_parser<'src>(
    file_name: &'static str,
    file_contents: &'static str,
) -> impl Parser<'src, &'src str, Vec<Spanned<Token>>, extra::Err<Rich<'src, char>>> + Clone {
    let single_token = lex_single(file_name, file_contents);
    single_token.repeated().collect::<Vec<_>>()
}
/// Consumes a brace-balanced `{ ... }` region as raw text, recursing on
/// nested braces, and returns the region verbatim including the outer
/// braces.
fn go_text_parser<'src>()
-> impl Parser<'src, &'src str, String, extra::Err<Rich<'src, char>>> + Clone {
    recursive(|e| {
        just("{")
            .ignore_then(
                ((just("{").rewind().ignore_then(e.clone()))
                    .or(any().filter(|c| *c != '{' && *c != '}').map(String::from)))
                .repeated()
                .collect::<Vec<_>>(),
            )
            .then_ignore(just("}"))
            .map(|x| {
                let x = x.join("");
                // Re-wrap with literal braces, passed as separate args so the
                // format string itself needs no `{{`/`}}` escapes.
                format!("{}{x}{}", "{", "}")
            })
    })
}
/// Lexes `go { ... }` into `Token::InlineGo`, keeping the body verbatim
/// without the surrounding braces.
fn inline_go_parser<'src>()
-> impl Parser<'src, &'src str, Token, extra::Err<Rich<'src, char>>> + Clone {
    let braced_body = just("go")
        .ignore_then(whitespace().at_least(1))
        .ignore_then(just("{").rewind())
        .ignore_then(go_text_parser());
    braced_body.map(|braced| {
        // go_text_parser always yields "{...}", so trim one brace per side.
        Token::InlineGo(braced[1..braced.len() - 1].to_owned())
    })
}
/// Lexes a base-10 integer literal into `Token::IntLiteral`, rejecting
/// values that do not fit in a `u64`.
fn num_literal<'src>() -> impl Parser<'src, &'src str, Token, extra::Err<Rich<'src, char>>> + Clone
{
    text::int(10)
        .to_slice()
        .try_map(|digits: &str, span| {
            digits
                .parse::<u64>()
                .map(Token::IntLiteral)
                .map_err(|_| Rich::custom(span, "Invalid integer"))
        })
}
/// Lexes `jsx { ... }` into `Token::InlineJsx`, keeping the body verbatim
/// without the surrounding braces.
fn inline_jsx_parser<'src>()
-> impl Parser<'src, &'src str, Token, extra::Err<Rich<'src, char>>> + Clone {
    let braced_body = just("jsx")
        .ignore_then(whitespace().at_least(1))
        .ignore_then(just("{").rewind())
        .ignore_then(go_text_parser());
    braced_body.map(|braced| {
        // go_text_parser always yields "{...}", so trim one brace per side.
        Token::InlineJsx(braced[1..braced.len() - 1].to_owned())
    })
}
/// Lexes a single-quoted character literal, supporting the escapes
/// `\0`, `\\`, `\n`, `\t`, and `\'`. Raw backslash, newline, tab, and `'`
/// are rejected outside of escapes.
fn char_lexer<'src>() -> impl Parser<'src, &'src str, Token, extra::Err<Rich<'src, char>>> + Clone {
    just("'")
        .ignore_then(none_of("\\\n\t'").or(choice((
            just("\\0").to('\0'),
            just("\\\\").to('\\'),
            just("\\n").to('\n'),
            just("\\t").to('\t'),
            just("\\'").to('\''),
        ))))
        .then_ignore(just("'"))
        .map(Token::CharLiteral)
}
/// Lexes a double-quoted string literal into `Token::StringLiteral`.
/// The escape alternatives must come before the `any()` fallback so that
/// e.g. `\"` is decoded rather than terminating the string.
fn string_lexer<'a>() -> impl Parser<'a, &'a str, Token, extra::Err<Rich<'a, char>>> + Clone {
    just('"')
        .ignore_then(
            choice((
                just("\\0").to('\0'),
                just("\\\\").to('\\'),
                just("\\n").to('\n'),
                just("\\t").to('\t'),
                just("\\\"").to('"'),
                any().and_is(just("\"").not()),
            ))
            .repeated()
            .collect::<String>(),
        )
        .then_ignore(just('"'))
        .map(Token::StringLiteral)
}
/// Recursively replaces every span in a token (including spans of tokens
/// nested inside format strings, `duckx` blocks, and HTML strings) with the
/// empty/dummy range. Used by tests to compare tokens structurally.
pub fn token_empty_range(token_span: &mut Spanned<Token>) {
    token_span.1 = empty_range();
    match &mut token_span.0 {
        Token::FormatStringLiteral(parts) => {
            for part in parts {
                if let FmtStringContents::Tokens(tokens) = part {
                    tokens.iter_mut().for_each(token_empty_range);
                }
            }
        }
        Token::InlineDuckx(tokens) => {
            tokens.iter_mut().for_each(token_empty_range);
        }
        Token::HtmlString(parts) => {
            for part in parts {
                if let HtmlStringContents::Tokens(tokens) = part {
                    tokens.iter_mut().for_each(token_empty_range);
                }
            }
        }
        // All other token kinds carry no nested tokens.
        _ => {}
    }
}
impl Token {
    /// Clones this token and pairs it with the empty/dummy span.
    /// NOTE(review): despite the `into_` prefix this borrows and clones
    /// rather than consuming `self`; renaming would break callers.
    pub fn into_empty_span(&self) -> Spanned<Token> {
        (self.clone(), empty_range())
    }
}
#[cfg(test)]
mod tests {
use crate::{lex, parse::value_parser::empty_range};
use super::*;
fn all_empty(v: Vec<Token>) -> Vec<Spanned<Token>> {
v.iter().map(|x| x.into_empty_span()).collect()
}
fn ctrl(c: char) -> Token {
Token::ControlChar(c)
}
fn left_brace() -> Token {
ctrl('{')
}
fn right_brace() -> Token {
ctrl('}')
}
#[test]
fn test_lex() {
let test_cases = vec![
(
"duckx {<>{{}}</>}",
vec![Token::InlineDuckx(vec![
Token::ControlChar('{').into_empty_span(),
Token::HtmlString(vec![
HtmlStringContents::String("<>".to_string()),
HtmlStringContents::Tokens(vec![
Token::ControlChar('{').into_empty_span(),
Token::ControlChar('{').into_empty_span(),
Token::ControlChar('}').into_empty_span(),
Token::ControlChar('}').into_empty_span(),
]),
HtmlStringContents::String("</>".to_string()),
])
.into_empty_span(),
Token::ControlChar('}').into_empty_span(),
])],
),
(
"duckx {<Counter initial={10} />}",
vec![Token::InlineDuckx(
vec![
Token::ControlChar('{'),
Token::HtmlString(vec![
HtmlStringContents::String("<Counter initial=".to_string()),
HtmlStringContents::Tokens(all_empty(vec![
Token::ControlChar('{'),
Token::IntLiteral(10),
Token::ControlChar('}'),
])),
HtmlStringContents::String(" />".to_string()),
]),
Token::ControlChar('}'),
]
.into_iter()
.map(|x| x.into_empty_span())
.collect(),
)],
),
(
"duckx {let hello = <> <!doctype html>{<Counter initial={100}/>} </>;}",
vec![Token::InlineDuckx(all_empty(vec![
left_brace(),
Token::Let,
Token::Ident("hello".to_string()),
ctrl('='),
Token::HtmlString(vec![
HtmlStringContents::String("<> <!doctype html>".to_string()),
HtmlStringContents::Tokens(all_empty(vec![
left_brace(),
Token::HtmlString(vec![
HtmlStringContents::String("<Counter initial=".to_string()),
HtmlStringContents::Tokens(all_empty(vec![
left_brace(),
Token::IntLiteral(100),
right_brace(),
])),
HtmlStringContents::String("/>".to_string()),
]),
right_brace(),
])),
HtmlStringContents::String(" </>".to_string()),
]),
ctrl(';'),
right_brace(),
]))],
),
(
"duckx {let hello = <> {ti <span id={props.id} hello={123}></span> tle} <h1> hallo moin 123</h1> abc </>;}",
vec![Token::InlineDuckx(all_empty(vec![
left_brace(),
Token::Let,
Token::Ident("hello".to_string()),
ctrl('='),
Token::HtmlString(vec![
HtmlStringContents::String("<> ".to_string()),
HtmlStringContents::Tokens(all_empty(vec![
left_brace(),
Token::Ident("ti".to_string()),
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/use_statement_parser.rs | src/parse/use_statement_parser.rs | use chumsky::{input::BorrowInput, prelude::*};
use crate::parse::{SS, failure_with_occurence};
use super::lexer::Token;
/// One resolved `use` statement.
#[derive(Debug, Clone, PartialEq)]
pub enum UseStatement {
    /// Duck-level import: the bool records whether the path started with a
    /// leading `::` (rooted), followed by the path segments.
    Regular(bool, Vec<String>),
    /// Go import: package path plus optional `as` alias.
    Go(String, Option<String>),
}
/// Parses a regular `use a::b::{c, d};` statement, expanding a trailing
/// `{...}` group into one `UseStatement::Regular` per imported name.
///
/// Only the last path segment may be a group; a group in any earlier
/// position is reported via `failure_with_occurence`.
fn regular_use_parser<'src, I>()
-> impl Parser<'src, I, Vec<UseStatement>, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
{
    just(Token::Use)
        // Optional leading `::` marks a rooted import.
        .ignore_then(just(Token::ScopeRes).or_not().map(|x| x.is_some()))
        .then(
            choice((
                // Single identifier segment, normalized to a 1-element group.
                select_ref! {Token::Ident(i) => i.to_string() }.map(|v| vec![v]),
                // `{a, b, ...}` group segment.
                select_ref! { Token::Ident(i) => i.to_string() }
                    .separated_by(just(Token::ControlChar(',')))
                    .at_least(1)
                    .collect::<Vec<_>>()
                    .delimited_by(just(Token::ControlChar('{')), just(Token::ControlChar('}'))),
            ))
            .map_with(|x, e| (x, e.span()))
            .separated_by(just(Token::ScopeRes))
            .at_least(1)
            .collect::<Vec<_>>(),
        )
        .then_ignore(just(Token::ControlChar(';')))
        .map(|(is_glob, v)| {
            let mut base_path = Vec::new();
            // Every segment but the last must be a single identifier.
            for item in v.iter().take(v.len() - 1) {
                if item.0.len() != 1 {
                    let msg = "Only last part may specify multiple imports";
                    failure_with_occurence(msg, item.1, [(msg, item.1)]);
                }
                base_path.push(item.0[0].clone());
            }
            // Expand the final segment (group or single) into full paths.
            let mut out = Vec::new();
            for end in v.last().unwrap().0.iter() {
                let mut cloned_path = base_path.clone();
                cloned_path.push(end.clone());
                out.push(UseStatement::Regular(is_glob, cloned_path));
            }
            out
        })
}
/// Parses a Go import: `use go "package/path" [as alias];`.
fn go_use_parser<'src, I>()
-> impl Parser<'src, I, UseStatement, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
{
    (just(Token::Use).then(just(Token::Go)))
        .ignore_then(select_ref! { Token::StringLiteral(s) => s.to_owned() })
        .then(
            just(Token::As)
                .ignore_then(select_ref! { Token::Ident(i) => i.to_owned() })
                .or_not(),
        )
        .then_ignore(just(Token::ControlChar(';')))
        .map(|(package_name, alias)| UseStatement::Go(package_name, alias))
}
/// Parses either form of `use` statement. A Go import yields exactly one
/// statement; a regular import may expand a `{...}` group into several.
pub fn use_statement_parser<'src, I>()
-> impl Parser<'src, I, Vec<UseStatement>, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
{
    let go_import = go_use_parser().map(|stmt| vec![stmt]);
    choice((go_import, regular_use_parser()))
}
#[cfg(test)]
mod tests {
    use crate::parse::{lexer::lex_parser, make_input, value_parser::empty_range};
    use super::*;
    // Round-trips valid `use` statements through lexer + parser and checks
    // the expanded AST; then checks that malformed statements fail to parse.
    // NOTE(review): lex_parser is given "" as file_contents while lexing
    // `src` — presumably only used for diagnostics; confirm.
    #[test]
    fn test_use_statement_parser() {
        let src_and_expected_ast = vec![
            (
                "use std::abc;",
                vec![UseStatement::Regular(
                    false,
                    vec!["std".to_string(), "abc".to_string()],
                )],
            ),
            (
                "use ::std::abc;",
                vec![UseStatement::Regular(
                    true,
                    vec!["std".to_string(), "abc".to_string()],
                )],
            ),
            (
                "use ::std::{abc, xyz};",
                vec![
                    UseStatement::Regular(true, vec!["std".to_string(), "abc".to_string()]),
                    UseStatement::Regular(true, vec!["std".to_string(), "xyz".to_string()]),
                ],
            ),
            (
                "use ::{assert};",
                vec![UseStatement::Regular(true, vec!["assert".to_string()])],
            ),
        ];
        for (src, expected_ast) in src_and_expected_ast {
            println!("lexing {src}");
            let lexer_parse_result = lex_parser("test", "").parse(src);
            assert_eq!(lexer_parse_result.has_errors(), false);
            assert_eq!(lexer_parse_result.has_output(), true);
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("parsing use statement {src}");
            let use_statement_parse_result = use_statement_parser()
                .parse(make_input(empty_range(), tokens.as_slice()))
                .into_result()
                .unwrap();
            assert_eq!(use_statement_parse_result, expected_ast);
        }
        // Statements that must lex fine but fail the use-statement parser.
        let invalid_use_statements = vec![
            "use x::;",
            "use y::{};",
            "use std::{}",
            "use ::;",
            "use :std:;",
            "use :std::{};",
            "use go x;",
            "use go;",
            "use go \"fmt\" as;",
            "use go fmt as x",
            "use go::x;",
            "use go::x;",
            "use go as;",
        ];
        for invalid_use_statement in invalid_use_statements {
            println!("lexing {invalid_use_statement}");
            let lexer_parse_result = lex_parser("test", "").parse(invalid_use_statement);
            assert_eq!(lexer_parse_result.has_errors(), false);
            assert_eq!(lexer_parse_result.has_output(), true);
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("typedef_parsing {invalid_use_statement}");
            let typedef_parse_result =
                use_statement_parser().parse(make_input(empty_range(), tokens.as_slice()));
            assert_eq!(typedef_parse_result.has_errors(), true);
            assert_eq!(typedef_parse_result.has_output(), false);
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/jsx_component_parser.rs | src/parse/jsx_component_parser.rs | use chumsky::{input::BorrowInput, prelude::*};
use tree_sitter::{Node, Parser as TSParser};
use crate::parse::{
SS, Spanned,
type_parser::{Duck, TypeExpr, type_expression_parser},
};
use super::lexer::Token;
/// A `component Name(props: T) jsx { ... }` declaration: name, props type
/// (defaults to an empty duck type), and the raw JavaScript body.
#[derive(Debug, Clone, PartialEq)]
pub struct JsxComponent {
    pub name: String,
    pub props_type: Spanned<TypeExpr>,
    pub javascript_source: Spanned<String>,
}
/// Names of client components a component depends on.
/// NOTE(review): populated elsewhere; only the field is visible here.
#[derive(Debug, Clone, Default)]
pub struct JsxComponentDependencies {
    pub client_components: Vec<String>,
}
/// Classification of source regions found by `JsxComponent::find_units`.
#[derive(Debug, Clone)]
pub enum JsxSourceUnit {
    Jsx,
    OpeningJsx,
    ClosingJsx,
    Expression,
    Ident,
}
/// A single positional text edit. Variant order matters: the derived `Ord`
/// ranks `Insert` before `Delete`, which `do_edits` relies on for
/// tie-breaking edits at the same position.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Edit {
    /// Insert the string at the edit's byte position.
    Insert(String),
    /// Delete this many bytes starting at the edit's byte position.
    Delete(usize),
}
/// Applies a batch of `(byte_position, edit)` pairs to `to_edit` in place.
///
/// Edits are sorted by position (ties: `Insert` before `Delete`, per the
/// derived `Ord`) and then applied back-to-front, so earlier byte offsets
/// remain valid while later regions are rewritten.
///
/// Panics (like the underlying `String` APIs) if a position/length is out
/// of bounds or not on a `char` boundary.
pub fn do_edits(to_edit: &mut String, edits: &mut [(usize, Edit)]) {
    edits.sort_by(|a, b| a.0.cmp(&b.0).then(a.1.cmp(&b.1)));
    for (pos, edit) in edits.iter().rev() {
        let pos = *pos;
        match edit {
            Edit::Insert(s) => to_edit.insert_str(pos, s.as_str()),
            // replace_range with "" removes pos..pos+amount in place; clearer
            // than draining and discarding the iterator.
            Edit::Delete(amount) => to_edit.replace_range(pos..(pos + *amount), ""),
        }
    }
}
impl JsxComponent {
    /// Parses this component's JavaScript source with tree-sitter and
    /// returns the ranges of interesting regions (identifiers, JSX
    /// fragments/elements, `{...}` JSX expressions), classified as
    /// `JsxSourceUnit`s, in traversal order.
    pub fn find_units(&self) -> Vec<(tree_sitter::Range, JsxSourceUnit)> {
        let mut parser = TSParser::new();
        parser
            .set_language(&tree_sitter_javascript::LANGUAGE.into())
            .expect("Couldn't set js grammar");
        let src = parser
            .parse(self.javascript_source.0.as_bytes(), None)
            .unwrap();
        let root_node = src.root_node();
        // Depth-first walk; `already_in_jsx` suppresses reporting nested
        // jsx_* nodes so only the outermost JSX region yields `Jsx`.
        fn trav(
            node: &Node,
            text: &[u8],
            already_in_jsx: bool,
            out: &mut Vec<(tree_sitter::Range, JsxSourceUnit)>,
        ) {
            if node.grammar_name() == "identifier" {
                out.push((node.range(), JsxSourceUnit::Ident));
            } else if node.grammar_name() == "jsx_opening_element"
                && node.utf8_text(text).is_ok_and(|x| x == "<>")
            {
                out.push((node.range(), JsxSourceUnit::OpeningJsx));
            } else if node.grammar_name() == "jsx_closing_element"
                && node.utf8_text(text).is_ok_and(|x| x == "</>")
            {
                out.push((node.range(), JsxSourceUnit::ClosingJsx));
            } else if node.grammar_name() == "jsx_expression" {
                out.push((node.range(), JsxSourceUnit::Expression));
                // Inside `{...}` we leave JSX context again, so reset flag.
                for i in 0..node.child_count() {
                    trav(node.child(i).as_ref().unwrap(), text, false, out);
                }
                return;
            }
            if node.grammar_name().starts_with("jsx_") {
                if !already_in_jsx {
                    out.push((node.range(), JsxSourceUnit::Jsx));
                }
                for i in 0..node.child_count() {
                    trav(node.child(i).as_ref().unwrap(), text, true, out);
                }
            } else {
                for i in 0..node.child_count() {
                    trav(node.child(i).as_ref().unwrap(), text, already_in_jsx, out);
                }
            }
        }
        let mut out = Vec::new();
        trav(
            &root_node,
            self.javascript_source.0.as_bytes(),
            false,
            &mut out,
        );
        out
    }
}
/// Parses a component declaration:
/// `component Name(props: T) jsx { ... }`.
/// The `props: T` part is optional; when absent the props type defaults to
/// an empty duck type spanned at the component name.
pub fn jsx_component_parser<'src, I>()
-> impl Parser<'src, I, JsxComponent, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
{
    // component Name {
    //   %javascript source
    // }
    just(Token::Component)
        .ignore_then(
            select_ref! { Token::Ident(identifier) => identifier.clone() }
                .map_with(|ident, e| (ident, e.span())),
        )
        .then(
            // `(props: T)` — the literal parameter name must be `props`.
            just(Token::Ident("props".to_string()))
                .ignore_then(just(Token::ControlChar(':')))
                .ignore_then(type_expression_parser())
                .or_not()
                .delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')'))),
        )
        .then(
            // The lexer has already captured the `jsx { ... }` body verbatim.
            select_ref! { Token::InlineJsx(jsx_source) => jsx_source.clone() }
                .map_with(|x, e| (x, e.span())),
        )
        .map(
            |(((ident, ident_span), props_type), jsx_source)| JsxComponent {
                name: ident.clone(),
                props_type: props_type
                    .unwrap_or((TypeExpr::Duck(Duck { fields: Vec::new() }), ident_span)),
                javascript_source: jsx_source,
            },
        )
}
#[cfg(test)]
mod tests {
    use crate::parse::{
        lexer::lex_parser,
        make_input,
        value_parser::{empty_range, type_expr_into_empty_range},
    };
    use super::*;
    // Checks a valid component declaration parses to the expected AST
    // (spans normalized away), then checks malformed inputs are rejected.
    // NOTE(review): the invalid-input list below looks copy-pasted from the
    // use-statement tests — they fail only because they aren't components.
    #[test]
    fn test_component_parser() {
        let src_and_expected_ast = vec![(
            "component T() jsx {useState()}",
            JsxComponent {
                name: "T".to_string(),
                props_type: TypeExpr::Duck(Duck { fields: Vec::new() }).into_empty_span(),
                javascript_source: ("useState()".to_string(), empty_range()),
            },
        )];
        for (src, expected_ast) in src_and_expected_ast {
            println!("lexing {src}");
            let lexer_parse_result = lex_parser("test", "").parse(src);
            assert_eq!(lexer_parse_result.has_errors(), false);
            assert_eq!(lexer_parse_result.has_output(), true);
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("parsing component statement {src}");
            let component_parse_result =
                jsx_component_parser().parse(make_input(empty_range(), tokens.as_slice()));
            assert_eq!(component_parse_result.has_errors(), false);
            assert_eq!(component_parse_result.has_output(), true);
            let Some(ast) = component_parse_result.into_output() else {
                unreachable!()
            };
            // Normalize spans so the comparison is purely structural.
            let mut ast = ast;
            ast.javascript_source.1 = empty_range();
            type_expr_into_empty_range(&mut ast.props_type);
            assert_eq!(ast, expected_ast);
        }
        let invalid_component_statements = vec![
            "use x::;",
            "use y::{};",
            "use std::{}",
            "use ::;",
            "use :std:;",
            "use :std::{};",
            "use go x;",
            "use go;",
            "use go \"fmt\" as;",
            "use go fmt as x",
            "use go::x;",
            "use go::x;",
            "use go as;",
        ];
        for invalid_component_statement in invalid_component_statements {
            println!("lexing {invalid_component_statement}");
            let lexer_parse_result = lex_parser("test", "").parse(invalid_component_statement);
            assert_eq!(lexer_parse_result.has_errors(), false);
            assert_eq!(lexer_parse_result.has_output(), true);
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("component parser try invalid {invalid_component_statement}");
            let component_parse_result =
                jsx_component_parser().parse(make_input(empty_range(), tokens.as_slice()));
            assert_eq!(component_parse_result.has_errors(), true);
            assert_eq!(component_parse_result.has_output(), false);
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/struct_parser.rs | src/parse/struct_parser.rs | use std::collections::{HashMap, HashSet};
use chumsky::Parser;
use chumsky::input::BorrowInput;
use chumsky::prelude::*;
use crate::parse::{
Field, SS, Spanned, failure_with_occurence,
function_parser::{FunctionDefintion, function_definition_parser},
generics_parser::{Generic, generics_parser},
type_parser::type_expression_parser,
};
use super::lexer::Token;
/// A named duck (structural) type definition: name, fields, and generics.
#[derive(Debug, Clone, PartialEq)]
pub struct NamedDuckDefinition {
    pub name: String,
    pub fields: Vec<Field>,
    pub generics: Vec<Spanned<Generic>>,
}
/// Interfaces that can be auto-derived for a struct.
/// NOTE(review): `EmitJs` is not accepted by the `auto(...)` attribute in
/// `struct_definition_parser` — presumably set internally; confirm.
#[derive(Copy, Debug, Clone, PartialEq, Eq, Hash)]
pub enum DerivableInterface {
    Eq,
    ToString,
    Ord,
    Clone,
    ToJson,
    Hash,
    FromJson,
    EmitJs,
}
/// A parsed `struct` definition, including its `impl` block's instance
/// methods (`methods`), the names of `mut` methods, generics, doc comments,
/// and the set of auto-derived interfaces.
#[derive(Debug, Clone, PartialEq)]
pub struct StructDefinition {
    pub name: String,
    pub fields: Vec<Field>,
    pub methods: Vec<FunctionDefintion>,
    pub mut_methods: HashSet<String>,
    pub generics: Vec<Spanned<Generic>>,
    pub doc_comments: Vec<Spanned<String>>,
    pub derived: HashSet<DerivableInterface>,
}
/// Parses a full `struct` definition:
/// optional doc comments, an optional `[auto(...)]` attribute list, the
/// name, optional generics, the field block, and an optional `impl { ... }`
/// block whose functions may be prefixed with `mut` or `static`.
///
/// Returns the `StructDefinition` plus its `static` methods separately.
/// Duplicate field/method names and invalid/duplicated `auto(...)` entries
/// are reported via `failure_with_occurence`.
pub fn struct_definition_parser<'src, M, I>(
    make_input: M,
) -> impl Parser<'src, I, (StructDefinition, Vec<FunctionDefintion>), extra::Err<Rich<'src, Token, SS>>>
+ Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
    M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
    // `name: Type` field.
    let field_parser = select_ref! { Token::Ident(identifier) => identifier.clone() }
        .then_ignore(just(Token::ControlChar(':')))
        .then(type_expression_parser())
        .map(|(identifier, type_expr)| Field::new(identifier, type_expr));
    // `impl { [mut|static] fn ... }` — absent impl block yields no methods.
    let impl_parser = just(Token::Impl)
        .ignore_then(just(Token::ControlChar('{')))
        .ignore_then(
            (just(Token::Mut)
                .or(just(Token::Static))
                .or_not()
                .then(function_definition_parser(make_input))
                .map_with(|x, e| (x, e.span())))
            .repeated()
            .at_least(0)
            .collect::<Vec<_>>(),
        )
        .then_ignore(just(Token::ControlChar('}')))
        .or_not()
        .map(|x| x.or_else(|| Some(vec![])).unwrap());
    #[non_exhaustive]
    #[derive(Debug, Clone, PartialEq)]
    enum StructAttribute {
        Auto { impls: Vec<Spanned<String>> },
    }
    // `[auto(A, B, ...)]` attribute list preceding the struct keyword.
    let with_parser = (just(Token::Ident("auto".to_string()))
        .ignore_then(
            select_ref! { Token::Ident(i) => i.to_string() }
                .map_with(|x, e| (x, e.span()))
                .separated_by(just(Token::ControlChar(',')))
                .collect::<Vec<_>>()
                .delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')'))),
        )
        .map(|x| StructAttribute::Auto { impls: x }))
    .separated_by(just(Token::ControlChar(',')))
    .at_least(1)
    .collect::<Vec<_>>()
    .delimited_by(just(Token::ControlChar('[')), just(Token::ControlChar(']')));
    let doc_comments_parser = select_ref! { Token::DocComment(comment) => comment.to_string() }
        .map_with(|comment, ctx| (comment, ctx.span()))
        .repeated()
        .collect()
        .or_not();
    doc_comments_parser
        .then(with_parser.or_not())
        .then_ignore(just(Token::Struct))
        .then(select_ref! { Token::Ident(identifier) => identifier.to_string() })
        .then(generics_parser().or_not())
        .then_ignore(just(Token::ControlChar('{')))
        .then(
            field_parser
                .map_with(|x, e| (x, e.span()))
                .separated_by(just(Token::ControlChar(',')))
                .allow_trailing()
                .collect::<Vec<_>>(),
        )
        .then_ignore(just(Token::ControlChar('}')))
        .then(impl_parser)
        .map(
            |(((((doc_comments, attributes), identifier), generics), fields), methods)| {
                // Duplicate-name checks: fields and non-static methods share
                // one namespace; static methods are checked separately.
                // NOTE(review): the three loops below are near-duplicates and
                // could share a helper.
                let mut names_with_spans = HashMap::new();
                for f in &fields {
                    if let Some(decl_span) = names_with_spans.get(&f.0.name) {
                        let msg = format!("{} declared multiple times", f.0.name);
                        let declared_here_again_msg = format!("You declared {} here", f.0.name);
                        let other_msg = format!("But you already declared {} here", f.0.name);
                        failure_with_occurence(
                            msg.clone(),
                            f.1,
                            [(declared_here_again_msg, f.1), (other_msg, *decl_span)],
                        );
                    }
                    names_with_spans.insert(f.0.name.clone(), f.1);
                }
                for m in &methods {
                    if m.0.0.as_ref().is_some_and(|v| matches!(v, Token::Static)) {
                        continue;
                    }
                    if let Some(decl_span) = names_with_spans.get(&m.0.1.name) {
                        let msg = format!("{} declared multiple times", m.0.1.name);
                        let declared_here_again_msg = format!("You declared {} here", m.0.1.name);
                        let other_msg = format!("But you already declared {} here", m.0.1.name);
                        failure_with_occurence(
                            msg.clone(),
                            m.1,
                            [(declared_here_again_msg, m.1), (other_msg, *decl_span)],
                        );
                    }
                    names_with_spans.insert(m.0.1.name.clone(), m.1);
                }
                {
                    let mut names_with_spans = HashMap::new();
                    for m in &methods {
                        if !m.0.0.as_ref().is_some_and(|v| matches!(v, Token::Static)) {
                            continue;
                        }
                        if let Some(decl_span) = names_with_spans.get(&m.0.1.name) {
                            let msg = format!("{} declared multiple times", m.0.1.name);
                            let declared_here_again_msg =
                                format!("You declared {} here", m.0.1.name);
                            let other_msg = format!("But you already declared {} here", m.0.1.name);
                            failure_with_occurence(
                                msg.clone(),
                                m.1,
                                [(declared_here_again_msg, m.1), (other_msg, *decl_span)],
                            );
                        }
                        names_with_spans.insert(m.0.1.name.clone(), m.1);
                    }
                }
                // Split methods into instance methods, static methods, and
                // the set of `mut` method names.
                let (mut_methods_names, methods, static_methods) = methods.into_iter().fold(
                    (HashSet::new(), Vec::new(), Vec::new()),
                    |(mut mut_method_names, mut methods, mut static_methods),
                     ((modifier, elem), _)| {
                        if modifier.as_ref().is_some_and(|v| matches!(v, Token::Mut)) {
                            mut_method_names.insert(elem.name.clone());
                        }
                        if modifier
                            .as_ref()
                            .is_some_and(|v| matches!(v, Token::Static))
                        {
                            static_methods.push(elem);
                        } else {
                            methods.push(elem);
                        }
                        (mut_method_names, methods, static_methods)
                    },
                );
                let fields: Vec<Field> = fields.into_iter().map(|(f, _)| f).collect();
                // Resolve `auto(...)` names to DerivableInterface values,
                // rejecting unknown and duplicated entries.
                let mut derived = HashMap::new();
                if let Some(attributes) = attributes {
                    for attribute in attributes {
                        #[allow(irrefutable_let_patterns)]
                        if let StructAttribute::Auto { impls } = attribute {
                            for (i, span) in impls {
                                let a = match i.as_str() {
                                    "Eq" => DerivableInterface::Eq,
                                    "ToString" => DerivableInterface::ToString,
                                    "Ord" => DerivableInterface::Ord,
                                    "Clone" => DerivableInterface::Clone,
                                    "Hash" => DerivableInterface::Hash,
                                    "ToJson" => DerivableInterface::ToJson,
                                    "FromJson" => DerivableInterface::FromJson,
                                    _ => {
                                        let msg = &format!("Invalid with declaration {i}");
                                        failure_with_occurence(msg, span, [(msg, span)]);
                                    }
                                };
                                if let Some(decl_span) = derived.get(&a) {
                                    let msg = format!("{i} already declared");
                                    let other_msg = format!("{i} already declared here");
                                    failure_with_occurence(
                                        msg.clone(),
                                        span,
                                        [(msg, span), (other_msg, *decl_span)],
                                    );
                                }
                                derived.insert(a, span);
                            }
                        }
                    }
                }
                let derived = derived.into_iter().fold(HashSet::new(), |mut acc, (e, _)| {
                    acc.insert(e);
                    acc
                });
                (
                    StructDefinition {
                        name: identifier,
                        fields,
                        methods,
                        mut_methods: mut_methods_names,
                        generics: generics.unwrap_or_default(),
                        doc_comments: doc_comments.unwrap_or_else(Vec::new),
                        derived,
                    },
                    static_methods,
                )
            },
        )
}
#[cfg(test)]
pub mod tests {
use crate::parse::{
generics_parser::Generic, lexer::lex_parser, make_input, type_parser::TypeExpr,
value_parser::empty_range,
};
use chumsky::Parser;
use super::*;
fn strip_spans(spanned_type_expr: Spanned<TypeExpr>) -> Spanned<TypeExpr> {
let (expr, _span) = spanned_type_expr;
let stripped_expr = match expr {
TypeExpr::Duck(d) => TypeExpr::Duck(crate::parse::type_parser::Duck {
fields: d
.fields
.into_iter()
.map(|field| Field {
name: field.name,
type_expr: strip_spans(field.type_expr),
})
.collect(),
}),
TypeExpr::Tuple(t) => TypeExpr::Tuple(t.into_iter().map(strip_spans).collect()),
TypeExpr::Fun(params, return_type, is_mut) => TypeExpr::Fun(
params
.into_iter()
.map(|(name, param_type_expr)| (name, strip_spans(param_type_expr)))
.collect(),
Box::new(strip_spans(*return_type)),
is_mut,
),
TypeExpr::Or(variants) => TypeExpr::Or(variants.into_iter().map(strip_spans).collect()),
TypeExpr::TypeName(is_global, type_name, generics) => TypeExpr::TypeName(
is_global,
type_name,
generics
.into_iter()
.map(|generic| strip_spans(generic))
.collect::<Vec<_>>(),
),
TypeExpr::RawTypeName(is_global, raw_type_name, generics) => TypeExpr::RawTypeName(
is_global,
raw_type_name,
generics
.into_iter()
.map(|generic| strip_spans(generic))
.collect::<Vec<_>>(),
),
TypeExpr::Array(type_expr) => {
TypeExpr::Array(Box::new(strip_spans(type_expr.as_ref().clone())))
}
other => other,
};
(stripped_expr, empty_range())
}
fn strip_struct_definition_spans(mut def: StructDefinition) -> StructDefinition {
for comment in &mut def.doc_comments {
comment.1 = empty_range();
}
for field in &mut def.fields {
field.type_expr = strip_spans(field.type_expr.clone());
}
for generic in &mut def.generics {
generic.1 = empty_range();
}
def
}
fn assert_struct_definition(input_str: &str, expected_def: StructDefinition) {
println!("lexing and parsing struct definition: \"{}\"", input_str);
let lexer_parse_result = lex_parser("test", "").parse(input_str);
assert!(
!lexer_parse_result.has_errors(),
"lexing errors for \"{}\": {:?}",
input_str,
lexer_parse_result
.errors()
.map(|err| err.to_string())
.collect::<Vec<_>>()
);
let tokens = lexer_parse_result.output().unwrap();
let parse_result =
struct_definition_parser(make_input).parse(make_input(empty_range(), &tokens));
assert!(
!parse_result.has_errors(),
"parsing errors for \"{}\": {:?}",
input_str,
parse_result
.errors()
.map(|err| err.to_string())
.collect::<Vec<_>>()
);
let parsed_def = parse_result.output().unwrap().clone().0;
let stripped_parsed = strip_struct_definition_spans(parsed_def);
assert_eq!(
stripped_parsed.name, expected_def.name,
"Struct name mismatch for \"{}\"",
input_str
);
assert_eq!(
stripped_parsed.methods, expected_def.methods,
"Struct methods mismatch for \"{}\"",
input_str
);
assert_eq!(
stripped_parsed.generics, expected_def.generics,
"Struct generics mismatch for \"{}\"",
input_str
);
assert_eq!(
stripped_parsed.fields.len(),
expected_def.fields.len(),
"Field count mismatch for \"{}\"",
input_str
);
let fields_match = stripped_parsed
.fields
.iter()
.zip(expected_def.fields.iter())
.all(|(p, e)| p.name == e.name && p.type_expr.0 == e.type_expr.0);
assert!(
fields_match,
"Fields do not match for \"{}\".\nParsed: {:?}\nExpected: {:?}",
input_str, stripped_parsed.fields, expected_def.fields
);
}
#[test]
fn test_struct_definition_parser() {
assert_struct_definition(
"struct Point { x: Int, y: Int }",
StructDefinition {
name: "Point".to_string(),
fields: vec![
Field::new("x".to_string(), TypeExpr::Int.into_empty_span()),
Field::new("y".to_string(), TypeExpr::Int.into_empty_span()),
],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![],
doc_comments: vec![],
derived: Default::default(),
},
);
assert_struct_definition(
"struct Empty {}",
StructDefinition {
name: "Empty".to_string(),
fields: vec![],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![],
doc_comments: vec![],
derived: Default::default(),
},
);
assert_struct_definition(
"struct User { id: Int, name: String, }",
StructDefinition {
name: "User".to_string(),
fields: vec![
Field::new("id".to_string(), TypeExpr::Int.into_empty_span()),
Field::new("name".to_string(), TypeExpr::String(None).into_empty_span()),
],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![],
doc_comments: vec![],
derived: Default::default(),
},
);
assert_struct_definition(
"struct Option<T> { value: T }",
StructDefinition {
name: "Option".to_string(),
fields: vec![Field::new(
"value".to_string(),
TypeExpr::RawTypeName(false, vec!["T".to_string()], vec![]).into_empty_span(),
)],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![(
Generic {
name: "T".to_string(),
constraint: None,
},
empty_range(),
)],
doc_comments: vec![],
derived: Default::default(),
},
);
assert_struct_definition(
"[auto(Eq)] struct S {}",
StructDefinition {
name: "S".to_string(),
derived: {
let mut s = HashSet::new();
s.insert(DerivableInterface::Eq);
s
},
fields: vec![],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![],
doc_comments: vec![],
},
);
assert_struct_definition(
"struct Map<K, V> { entries: Entry<K, V>[] }",
StructDefinition {
name: "Map".to_string(),
derived: Default::default(),
fields: vec![Field::new(
"entries".to_string(),
TypeExpr::Array(Box::new(
TypeExpr::RawTypeName(
false,
vec!["Entry".to_string()],
vec![
TypeExpr::RawTypeName(false, vec!["K".to_string()], vec![])
.into_empty_span(),
TypeExpr::RawTypeName(false, vec!["V".to_string()], vec![])
.into_empty_span(),
],
)
.into_empty_span(),
))
.into_empty_span(),
)],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![
(
Generic {
name: "K".to_string(),
constraint: None,
},
empty_range(),
),
(
Generic {
name: "V".to_string(),
constraint: None,
},
empty_range(),
),
],
doc_comments: vec![],
},
);
assert_struct_definition(
"/// hello\nstruct Map<K, V> { entries: Entry<K, V>[] }",
StructDefinition {
name: "Map".to_string(),
derived: Default::default(),
fields: vec![Field::new(
"entries".to_string(),
TypeExpr::Array(Box::new(
TypeExpr::RawTypeName(
false,
vec!["Entry".to_string()],
vec![
TypeExpr::RawTypeName(false, vec!["K".to_string()], vec![])
.into_empty_span(),
TypeExpr::RawTypeName(false, vec!["V".to_string()], vec![])
.into_empty_span(),
],
)
.into_empty_span(),
))
.into_empty_span(),
)],
methods: vec![],
mut_methods: HashSet::new(),
generics: vec![
(
Generic {
name: "K".to_string(),
constraint: None,
},
empty_range(),
),
(
Generic {
name: "V".to_string(),
constraint: None,
},
empty_range(),
),
],
doc_comments: vec![("hello".to_string(), empty_range())],
},
);
let invalid_structs = vec![
"type MissingBody = ;",
"type MissingSemi = { x: Int }",
"type BadField = { x: Int, y };",
"type BadComma = { x: Int,, y: Bool };",
"type X = String;",
];
for invalid in invalid_structs {
println!("testing invalid struct: \"{}\"", invalid);
let lexer_parse_result = lex_parser("test", "").parse(invalid);
let tokens = lexer_parse_result.output().unwrap();
let parse_result =
struct_definition_parser(make_input).parse(make_input(empty_range(), &tokens));
assert!(parse_result.has_errors());
}
}
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/type_parser.rs | src/parse/type_parser.rs | use std::fmt::{Display, Formatter, Result};
use chumsky::Parser;
use chumsky::input::BorrowInput;
use chumsky::prelude::*;
use crate::{
parse::{
Field, SS, Spanned,
generics_parser::{Generic, generics_parser},
value_parser::{TypeParam, empty_range},
},
semantics::{ident_mangler::mangle, type_resolve::TypeEnv},
};
use super::lexer::Token;
#[derive(Debug, Clone, PartialEq)]
pub struct TypeDefinition {
pub name: String,
pub type_expression: Spanned<TypeExpr>,
pub generics: Vec<Spanned<Generic>>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct Duck {
pub fields: Vec<Field>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct Struct {
pub fields: Vec<Field>,
}
#[derive(Debug, Clone, PartialEq)]
pub enum TypeExpr {
Statement,
Never,
Html,
TemplParam(String),
Any,
Byte,
Struct {
name: String,
type_params: Vec<Spanned<TypeExpr>>,
},
Go(String),
Duck(Duck),
NamedDuck {
name: String,
type_params: Vec<Spanned<TypeExpr>>,
},
Tuple(Vec<Spanned<TypeExpr>>),
RawTypeName(bool, Vec<String>, Vec<Spanned<TypeParam>>),
TypeName(bool, String, Vec<Spanned<TypeParam>>),
Tag(String),
String(Option<String>),
Int,
UInt,
Bool(Option<bool>),
Char,
Float,
Or(Vec<Spanned<TypeExpr>>),
And(Vec<Spanned<TypeExpr>>),
Fun(
Vec<(Option<String>, Spanned<TypeExpr>)>, // params
Box<Spanned<TypeExpr>>, // return type
bool, // is mut
),
Array(Box<Spanned<TypeExpr>>),
TypeOf(String),
KeyOf(Box<Spanned<TypeExpr>>),
Ref(Box<Spanned<TypeExpr>>),
RefMut(Box<Spanned<TypeExpr>>),
}
impl TypeExpr {
pub fn ord_result() -> Self {
TypeExpr::Or(vec![
TypeExpr::Tag("greater".to_string()).into_empty_span(),
TypeExpr::Tag("smaller".to_string()).into_empty_span(),
TypeExpr::Tag("equal".to_string()).into_empty_span(),
])
}
pub fn call_ord(&self, param1: &str, param2: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
let param2 = &format!("({param2})");
match self {
TypeExpr::Int => format!("Int_Ord({param1}, &{param2})"),
TypeExpr::Byte => format!("Byte_Ord({param1}, &{param2})"),
TypeExpr::UInt => format!("UInt_Ord({param1}, &{param2})"),
TypeExpr::Float => format!("Float_Ord({param1}, &{param2})"),
TypeExpr::Bool(..) => format!("Bool_Ord({param1}, &{param2})"),
TypeExpr::Char => format!("Char_Ord({param1}, &{param2})"),
TypeExpr::String(..) => format!("String_Ord({param1}, &{param2})"),
TypeExpr::Tuple(..) => format!("{param1}.ord(&{param2})"),
TypeExpr::Array(..) => format!(
"{}_Ord({param1}, &{param2})",
self.as_clean_go_type_name(type_env)
),
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Struct { .. } => format!("{param1}.ord(&{param2})"),
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let derefs_str = (0..derefs).fold(String::with_capacity(derefs), |mut acc, _| {
acc.push('*');
acc
});
inner.0.call_ord(
&format!("{derefs_str}{param1}"),
&format!("{derefs_str}{param2}"),
type_env,
)
}
_ => panic!("Compiler Bug: cannot call ord method on {self:?}"),
}
}
pub fn call_eq(&self, param1: &str, param2: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
let param2 = &format!("({param2})");
match self {
TypeExpr::Int
| TypeExpr::Byte
| TypeExpr::Float
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Tag(..)
| TypeExpr::UInt
| TypeExpr::String(..) => format!("{param1} == {param2}"),
TypeExpr::Tuple(..) => format!("{param1}.eq(&{param2})"),
TypeExpr::Array(..) => format!(
"{}_Eq({param1}, {param2})",
self.as_clean_go_type_name(type_env)
),
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Struct { .. } => format!("{param1}.eq(&{param2})"),
TypeExpr::Or(t) => {
let mut go_code = format!(
r#"
var p1 any = {param1}
var p2 any = {param2}
"#
);
for t in t {
let conc_type = t.0.as_go_type_annotation(type_env);
go_code.push('\n');
go_code.push_str(&format!(
r#"
switch p1.(type) {{
case {conc_type}:
switch p2.(type) {{
case {conc_type}:
return true
}}
}}
"#
));
}
go_code.push_str("\nreturn false");
format!("func() bool {{ {go_code} }}()")
}
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let derefs_str = (0..derefs).fold(String::with_capacity(derefs), |mut acc, _| {
acc.push('*');
acc
});
inner.0.call_eq(
&format!("{derefs_str}{param1}"),
&format!("{derefs_str}{param2}"),
type_env,
)
}
_ => panic!("Compilre Bug: cannot call eq method on {self:?}"),
}
}
pub fn call_hash(&self, param1: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
match self {
TypeExpr::Int => format!("Int_Hash({param1})"),
TypeExpr::Byte => format!("Byte_Hash({param1})"),
TypeExpr::UInt => format!("UInt_Hash({param1})"),
TypeExpr::Float => format!("Float_Hash({param1})"),
TypeExpr::Bool(..) => format!("Bool_Hash({param1})"),
TypeExpr::String(..) => format!("String_Hash({param1})"),
TypeExpr::Char => format!("Char_Hash({param1})"),
TypeExpr::Tuple(..) => format!("{param1}.hash()"),
TypeExpr::Array(..) => {
format!("{}_Hash({param1})", self.as_clean_go_type_name(type_env))
}
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Struct { .. } => format!("{param1}.hash()"),
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let derefs_str = (0..derefs).fold(String::with_capacity(derefs), |mut acc, _| {
acc.push('*');
acc
});
inner
.0
.call_hash(&format!("{derefs_str}{param1}"), type_env)
}
_ => panic!("Compiler Bug: cannot call hash method on {self:?}"),
}
}
pub fn call_clone(&self, param1: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
match self {
TypeExpr::Int
| TypeExpr::Byte
| TypeExpr::UInt
| TypeExpr::Float
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Tag(..)
| TypeExpr::String(..) => param1.to_string(),
TypeExpr::Tuple(..) => format!("{param1}.clone()"),
TypeExpr::Array(..) => {
format!("{}_Clone({param1})", self.as_clean_go_type_name(type_env))
}
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Struct { .. } => format!("{param1}.clone()"),
TypeExpr::Or(t) => {
let mut go_code = format!(
r#"
var p1 any = {param1}
"#
);
for t in t {
let conc_type = t.0.as_go_type_annotation(type_env);
go_code.push('\n');
go_code.push_str(&format!(
r#"
switch p1.(type) {{
case {conc_type}:
tmp := p1.({conc_type})
_ = tmp
return {}
}}
"#,
t.0.call_clone("tmp", type_env)
));
}
go_code.push_str("var ret_guard *any\nreturn *ret_guard");
format!("func() any {{ {go_code} }}()")
}
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let (derefs_str, _ref_str) = (0..derefs).fold(
(String::with_capacity(derefs), String::with_capacity(derefs)),
|(mut acc1, mut acc2), _| {
acc1.push('*');
acc2.push('&');
(acc1, acc2)
},
);
let inner_call = inner
.0
.call_clone(&format!("{derefs_str}{param1}"), type_env);
let mut out_go = String::new();
out_go.push_str(&format!("final_res := {inner_call}"));
for _ in 0..derefs {
out_go.push('\n');
out_go.push('{');
out_go.push('\n');
out_go.push_str("final_res := &final_res");
}
out_go.push('\n');
out_go.push_str("return final_res");
out_go.push('\n');
for _ in 0..derefs {
out_go.push('}');
out_go.push('\n');
}
let self_type_anno = self.as_go_type_annotation(type_env);
format!("func() {self_type_anno} {{ {out_go} }}()")
}
_ => panic!("Compiler Bug: cannot call clone method on {self:?}"),
}
}
pub fn call_copy(&self, param1: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
match self {
TypeExpr::Struct { .. } => {
format!("{}_Copy({param1})", self.as_clean_go_type_name(type_env))
}
TypeExpr::Array(..) => {
format!("{}_Copy({param1})", self.as_clean_go_type_name(type_env))
}
TypeExpr::Tuple(..) => {
format!("{param1}.copy()")
}
TypeExpr::Or(t) => {
let mut go_code = format!(
r#"
var p1 any = {param1}
"#
);
for t in t {
let conc_type = t.0.as_go_type_annotation(type_env);
go_code.push('\n');
go_code.push_str(&format!(
r#"
switch p1.(type) {{
case {conc_type}:
tmp := p1.({conc_type})
_ = tmp
return {}
}}
"#,
t.0.call_copy("tmp", type_env)
));
}
go_code.push_str("var ret_guard *any\nreturn *ret_guard");
format!("func() any {{ {go_code} }}()")
}
_ => {
format!("IDENTITY({param1})")
}
}
}
pub fn call_from_json(&self, param1: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
match self {
TypeExpr::String(..)
| TypeExpr::Int
| TypeExpr::UInt
| TypeExpr::Byte
| TypeExpr::Float
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Tuple(..)
| TypeExpr::Struct { .. }
| TypeExpr::Tag(..)
| TypeExpr::Duck(..)
| TypeExpr::Array(..) => {
format!(
"{}_FromJson({param1})",
self.as_clean_go_type_name(type_env)
)
}
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Or(t) => {
let mut go_code = format!(
r#"
//var s string = {param1}
"#
);
for t in t {
go_code.push('\n');
go_code.push_str(&format!(
r#"
{{
var b {}
b, err := {}
if err == nil {{
return b, nil
}}
}}
"#,
t.0.as_go_type_annotation(type_env),
t.0.call_from_json(param1, type_env)
));
}
go_code.push_str("\nreturn 0, errors.New(\"union parsing failed\")\n");
format!("func() (any, error) {{ {go_code} }}()")
}
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let derefs_str = (0..derefs).fold(String::with_capacity(derefs), |mut acc, _| {
acc.push('*');
acc
});
let mut go_res = format!(
"final_res := {}",
inner
.0
.call_from_json(&format!("{derefs_str}{param1}"), type_env)
);
for _ in 0..derefs {
go_res.push_str("\n{");
go_res.push_str("final_res := &final_res");
}
go_res.push_str("return final_res");
for _ in 0..derefs {
go_res.push_str("\n}");
}
let return_type =
format!("{derefs_str}{}", inner.0.as_go_type_annotation(type_env));
format!("func() {return_type} {{\n{go_res}\n}}()")
}
_ => panic!("Compiler Bug: cannot call from_json method on {self:?}"),
}
}
pub fn call_to_json(&self, param1: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
match self {
TypeExpr::String(..) => format!("fmt.Sprintf(\"\\\"%s\\\"\", {param1})"),
TypeExpr::Char => format!("fmt.Sprintf(\"\\\"%c\\\"\", {param1})"),
TypeExpr::Int => format!("strconv.Itoa({param1})"),
TypeExpr::Byte => format!("strconv.Itoa(int({param1}))"),
TypeExpr::UInt => format!("fmt.Sprintf(\"%d\", {param1})"),
TypeExpr::Float => format!("fmt.Sprintf(\"%f\", {param1})"),
TypeExpr::Bool(..) => format!("fmt.Sprintf(\"%t\", {param1})"),
TypeExpr::Tuple(..) => format!("{param1}.to_json()"),
TypeExpr::Array(..) | TypeExpr::Duck(..) => {
format!("{}_ToJson({param1})", self.as_clean_go_type_name(type_env))
}
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Struct { .. } => format!("{param1}.to_json()"),
TypeExpr::Tag(t) => format!(r#"fmt.Sprintf("\"{t}\"")"#),
TypeExpr::Or(t) => {
let mut go_code = format!(
r#"
var p1 any = {param1}
"#
);
for t in t {
let conc_type = t.0.as_go_type_annotation(type_env);
go_code.push('\n');
go_code.push_str(&format!(
r#"
switch p1.(type) {{
case {conc_type}:
tmp := p1.({conc_type})
_ = tmp
return {}
}}
"#,
t.0.call_to_json("tmp", type_env)
));
}
go_code.push_str("var ret_guard *string\nreturn *ret_guard");
format!("func() string {{ {go_code} }}()")
}
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let derefs_str = (0..derefs).fold(String::with_capacity(derefs), |mut acc, _| {
acc.push('*');
acc
});
inner
.0
.call_to_json(&format!("{derefs_str}{param1}"), type_env)
}
_ => panic!("Compiler Bug: cannot call to_json method on {self:?}"),
}
}
pub fn call_to_string(&self, param1: &str, type_env: &mut TypeEnv) -> String {
let param1 = &format!("({param1})");
match self {
TypeExpr::String(..) => param1.to_string(),
TypeExpr::Int => format!("strconv.Itoa({param1})"),
TypeExpr::Byte => format!("strconv.Itoa(int({param1}))"),
TypeExpr::UInt => format!("fmt.Sprintf(\"%d\", {param1})"),
TypeExpr::Float => format!("fmt.Sprintf(\"%f\", {param1})"),
TypeExpr::Bool(..) => format!("fmt.Sprintf(\"%t\", {param1})"),
TypeExpr::Char => format!("fmt.Sprintf(\"%c\", {param1})"),
TypeExpr::Tuple(..) => format!("{param1}.to_string()"),
TypeExpr::Array(..) | TypeExpr::Duck(..) => format!(
"{}_ToString({param1})",
self.as_clean_go_type_name(type_env)
),
// TypeExpr::Duck(..) => format!("{}_Eq({param1}, {param2})", self.as_clean_go_type_name(type_env)),
TypeExpr::Struct { .. } => format!("{param1}.to_string()"),
TypeExpr::Tag(t) => format!(r#"fmt.Sprintf("{t}")"#),
TypeExpr::Or(t) => {
let mut go_code = format!(
r#"
var p1 any = {param1}
"#
);
for t in t {
let conc_type = t.0.as_go_type_annotation(type_env);
go_code.push('\n');
go_code.push_str(&format!(
r#"
switch p1.(type) {{
case {conc_type}:
tmp := p1.({conc_type})
_ = tmp
return {}
}}
"#,
t.0.call_to_string("tmp", type_env)
));
}
go_code.push_str("var ret_guard *string\nreturn *ret_guard");
format!("func() string {{ {go_code} }}()")
}
TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
let mut derefs = 1;
let mut inner = inner.as_ref().clone();
while let TypeExpr::Ref(new_inner) | TypeExpr::RefMut(new_inner) = inner.0 {
derefs += 1;
inner = *new_inner;
}
let derefs_str = (0..derefs).fold(String::with_capacity(derefs), |mut acc, _| {
acc.push('*');
acc
});
inner
.0
.call_to_string(&format!("{derefs_str}{param1}"), type_env)
}
_ => panic!("Compiler Bug: cannot call to_string method on {self:?}"),
}
}
pub fn implements_from_json(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Ref(t) | TypeExpr::RefMut(t) => t.0.implements_from_json(type_env),
TypeExpr::Array(t) => t.0.implements_from_json(type_env),
TypeExpr::Duck(Duck { fields }) => fields
.iter()
.all(|f| f.type_expr.0.implements_from_json(type_env)),
TypeExpr::Tuple(t) => t.iter().all(|t| t.0.implements_from_json(type_env)),
TypeExpr::Or(t) => t.iter().all(|t| t.0.implements_from_json(type_env)),
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.derived
.contains(&crate::parse::struct_parser::DerivableInterface::FromJson)
}
TypeExpr::Int
| TypeExpr::String(..)
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Byte
| TypeExpr::Float
| TypeExpr::UInt
| TypeExpr::Tag(..) => true,
_ => false,
}
}
pub fn implements_to_json(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Ref(t) | TypeExpr::RefMut(t) => t.0.implements_to_json(type_env),
TypeExpr::Array(t) => t.0.implements_to_json(type_env),
TypeExpr::Duck(Duck { fields }) => fields
.iter()
.all(|f| f.type_expr.0.implements_to_json(type_env)),
TypeExpr::Tuple(t) => t.iter().all(|t| t.0.implements_to_json(type_env)),
TypeExpr::Or(t) => t.iter().all(|t| t.0.implements_to_json(type_env)),
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.derived
.contains(&crate::parse::struct_parser::DerivableInterface::ToJson)
|| (def.methods.iter().any(|f| {
f.name.as_str() == "to_json"
&& f.params.is_empty()
&& matches!(f.return_type.0, TypeExpr::String(..))
}) && !def.mut_methods.contains("to_json"))
}
TypeExpr::Int
| TypeExpr::String(..)
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Float
| TypeExpr::UInt
| TypeExpr::Byte
| TypeExpr::Tag(..) => true,
_ => false,
}
}
pub fn implements_to_string(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Ref(t) | TypeExpr::RefMut(t) => t.0.implements_to_string(type_env),
TypeExpr::Array(t) => t.0.implements_to_string(type_env),
TypeExpr::Duck(Duck { fields }) => fields
.iter()
.all(|f| f.type_expr.0.implements_to_string(type_env)),
TypeExpr::Tuple(t) => t.iter().all(|t| t.0.implements_to_string(type_env)),
TypeExpr::Or(t) => t.iter().all(|t| t.0.implements_to_string(type_env)),
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.derived
.contains(&crate::parse::struct_parser::DerivableInterface::ToString)
|| (def.methods.iter().any(|f| {
f.name.as_str() == "to_string"
&& f.params.is_empty()
&& matches!(f.return_type.0, TypeExpr::String(..))
}) && !def.mut_methods.contains("to_string"))
}
TypeExpr::Int
| TypeExpr::String(..)
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Byte
| TypeExpr::Float
| TypeExpr::UInt
| TypeExpr::Tag(..) => true,
_ => false,
}
}
pub fn implements_hash(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Ref(t) | TypeExpr::RefMut(t) => t.0.implements_hash(type_env),
TypeExpr::Array(t) => t.0.implements_hash(type_env),
TypeExpr::Duck(Duck { fields: _ }) => {
false
// && fields
// .iter()
// .all(|f| f.type_expr.0.implements_hash(type_env))
}
TypeExpr::Tuple(t) => t.iter().all(|t| t.0.implements_hash(type_env)),
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.derived
.contains(&crate::parse::struct_parser::DerivableInterface::Hash)
|| (def.methods.iter().any(|f| {
f.name.as_str() == "hash"
&& f.params.is_empty()
&& f.return_type.0 == TypeExpr::Int
}) && !def.mut_methods.contains("hash"))
}
TypeExpr::Int
| TypeExpr::String(..)
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Byte
| TypeExpr::UInt
| TypeExpr::Float => true,
_ => false,
}
}
pub fn implements_clone(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Ref(t) | TypeExpr::RefMut(t) => t.0.implements_clone(type_env),
TypeExpr::Array(t) => t.0.implements_clone(type_env),
TypeExpr::Duck(Duck { fields: _ }) => {
false
// && fields
// .iter()
// .all(|f| f.type_expr.0.implements_clone(type_env))
}
TypeExpr::Tuple(t) | TypeExpr::Or(t) => {
t.iter().all(|t| t.0.implements_clone(type_env))
}
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.derived
.contains(&crate::parse::struct_parser::DerivableInterface::Clone)
|| (def.methods.iter().any(|f| {
f.name.as_str() == "clone"
&& f.params.is_empty()
&& &f.return_type.0 == self
}) && !def.mut_methods.contains("clone"))
}
TypeExpr::Int
| TypeExpr::String(..)
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::Float
| TypeExpr::UInt
| TypeExpr::Byte
| TypeExpr::Tag(..) => true,
_ => false,
}
}
pub fn implements_ord(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Ref(t) | TypeExpr::RefMut(t) => t.0.implements_ord(type_env),
TypeExpr::Array(t) => t.0.implements_ord(type_env),
TypeExpr::Duck(Duck { fields: _ }) => {
false
// false
// && fields
// .iter()
// .all(|f| f.type_expr.0.implements_ord(type_env))
}
TypeExpr::Tuple(t) => t.iter().all(|t| t.0.implements_ord(type_env)),
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.derived
.contains(&crate::parse::struct_parser::DerivableInterface::Ord)
|| (def.methods.iter().any(|f| {
f.name.as_str() == "ord"
&& f.params.len() == 1
&& f.params[0].1.0.clone().into_empty_span().0
== TypeExpr::Ref(self.clone().into_empty_span().into())
&& f.return_type.0.clone().into_empty_span().0 == TypeExpr::ord_result()
}) && !def.mut_methods.contains("ord"))
}
TypeExpr::Int
| TypeExpr::String(..)
| TypeExpr::Byte
| TypeExpr::Bool(..)
| TypeExpr::Char
| TypeExpr::UInt
| TypeExpr::Float => true,
_ => false,
}
}
pub fn call_iter(&self, type_env: &mut TypeEnv, param1: &str) -> String {
match self {
TypeExpr::Array(..) => {
format!("{}_Iter({param1})", self.as_clean_go_type_name(type_env))
}
TypeExpr::Struct { .. } => {
format!("{param1}.iter()")
}
_ => panic!("cannot call iter() on {self:?}"),
}
}
pub fn implements_into_iter_mut(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.methods.iter().any(|f| {
f.name.as_str() == "iter_mut" && {
if let TypeExpr::Struct { name, type_params } = &f.return_type.0 {
if name.as_str() == mangle(&["std", "col", "Iter"]) {
if type_params.len() == 1 {
type_params[0].0.clone().into_empty_span().0
== TypeExpr::RefMut(self.clone().into_empty_span().into())
.into_empty_span()
.0
} else {
false
}
} else {
false
}
} else {
false
}
}
}) && def.mut_methods.contains("iter_mut")
}
TypeExpr::Array(..) => true,
_ => false,
}
}
pub fn implements_into_iter(&self, type_env: &mut TypeEnv) -> bool {
match self {
TypeExpr::Struct { name, type_params } => {
let def =
type_env.get_struct_def_with_type_params_mut(name, type_params, empty_range());
def.methods.iter().any(|f| {
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/extensions_def_parser.rs | src/parse/extensions_def_parser.rs | use chumsky::{input::BorrowInput, prelude::*};
use crate::parse::{
SS, Spanned,
function_parser::{FunctionDefintion, function_definition_parser},
};
use super::{
lexer::Token,
type_parser::{TypeExpr, type_expression_parser},
};
pub type Param = (String, Spanned<TypeExpr>);
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct ExtensionsDef {
pub target_type_expr: Spanned<TypeExpr>,
pub function_definitions: Vec<Spanned<FunctionDefintion>>,
pub doc_comments: Vec<Spanned<String>>,
pub span: SS,
}
pub fn extensions_def_parser<'src, I, M>(
make_input: M,
) -> impl Parser<'src, I, ExtensionsDef, extra::Err<Rich<'src, Token, SS>>> + Clone
where
I: BorrowInput<'src, Token = Token, Span = SS>,
M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
let doc_comments_parser = select_ref! { Token::DocComment(comment) => comment.to_string() }
.map_with(|comment, ctx| (comment, ctx.span()))
.repeated()
.collect()
.or_not();
doc_comments_parser
.then_ignore(just(Token::Extend))
.then(type_expression_parser())
.then_ignore(just(Token::With))
.then_ignore(just(Token::Impl))
.then(
function_definition_parser(make_input)
.map_with(|fn_def, ctx| (fn_def, ctx.span()))
.repeated()
.collect::<Vec<_>>()
.delimited_by(just(Token::ControlChar('{')), just(Token::ControlChar('}'))),
)
.map_with(
|((doc_comments, target_type_expr), function_definitions), ctx| ExtensionsDef {
target_type_expr,
function_definitions,
span: ctx.span(),
doc_comments: doc_comments.unwrap_or_else(Vec::new),
},
)
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use crate::parse::{lexer::lex_parser, make_input, value_parser::empty_range};
    #[test]
    fn test_extensions_definition_parsing() {
        // Sources that must lex cleanly and be accepted by the parser.
        let accepted = ["extend Int with impl { fn to_str() -> String { return \"h\" } }"];
        for src in accepted {
            println!("lexing {src}");
            let lexed = lex_parser("test", "").parse(src);
            assert!(!lexed.has_errors());
            assert!(lexed.has_output());
            let tokens = lexed.into_output().expect("lexer produced no output");
            println!("extensions definition parsing {src}");
            let parsed =
                extensions_def_parser(make_input).parse(make_input(empty_range(), &tokens));
            assert!(!parsed.has_errors());
            assert!(parsed.has_output());
        }
        // Sources that must lex cleanly but be rejected by the parser.
        // (None yet — placeholder kept so rejection cases can be added.)
        let rejected: Vec<&str> = vec![];
        for src in rejected {
            println!("lexing {src}");
            let lexed = lex_parser("test", "").parse(src);
            assert!(!lexed.has_errors());
            assert!(lexed.has_output());
            let tokens = lexed.into_output().expect("lexer produced no output");
            println!("typedef_parsing {src}");
            let parsed = extensions_def_parser(make_input)
                .parse(make_input(empty_range(), tokens.as_slice()));
            assert!(parsed.has_errors());
            assert!(!parsed.has_output());
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/generics_parser.rs | src/parse/generics_parser.rs | use chumsky::{input::BorrowInput, prelude::*};
use crate::parse::{
SS, Spanned, failure_with_occurence,
lexer::Token,
type_parser::{TypeExpr, type_expression_parser},
};
/// A single generic parameter, e.g. `T` or `T: { name: String }`.
#[derive(Debug, Clone, PartialEq)]
pub struct Generic {
    /// The parameter's identifier (e.g. `T`).
    pub name: String,
    /// Optional constraint given after `:`; `None` when unconstrained.
    pub constraint: Option<Spanned<TypeExpr>>,
}
/// Parses a generic parameter list:
/// `'<' Ident (':' TypeExpr)? (',' Ident (':' TypeExpr)?)* ','? '>'`
///
/// At least one parameter is required and a trailing comma is allowed.
pub fn generics_parser<'src, I>()
-> impl Parser<'src, I, Vec<Spanned<Generic>>, extra::Err<Rich<'src, Token, SS>>> + Clone + 'src
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
{
    // '<' <identifier> '>'
    just(Token::ControlChar('<'))
        .ignore_then(
            select_ref! { Token::Ident(identifier) => identifier.clone() }
                .then(
                    // Optional `: <type>` constraint.
                    just(Token::ControlChar(':'))
                        .ignore_then(
                            type_expression_parser()
                                .map(|type_expr| {
                                    let span = type_expr.1;
                                    match &type_expr.0 {
                                        TypeExpr::Duck(_) => type_expr,
                                        // NOTE(review): the `if false` guard makes this arm
                                        // dead code, so the "constraints must be ducks"
                                        // diagnostic below never fires and non-duck
                                        // constraints pass through unchecked. Presumably
                                        // disabled deliberately — confirm, then either
                                        // re-enable the check or delete the arm.
                                        other if false => {
                                            failure_with_occurence(
                                                "Invalid Syntax",
                                                span,
                                                vec![
                                                    (
                                                        format!(
                                                            "Type constraints are defined using ducks. You've passed a {other}"
                                                        ),
                                                        span,
                                                    ),
                                                ],
                                            );
                                        }
                                        _ => type_expr,
                                    }
                                })
                        )
                        .or_not()
                )
                .map(|(identifier, constraint)| Generic {
                    name: identifier.clone(),
                    constraint
                })
                .map_with(|generic, ctx| (generic, ctx.span()))
                .separated_by(just(Token::ControlChar(',')))
                .allow_trailing()
                .at_least(1)
                .collect::<Vec<Spanned<Generic>>>(),
        )
        .then_ignore(just(Token::ControlChar('>')))
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use crate::{
        parse::{
            Field,
            generics_parser::generics_parser,
            lexer::lex_parser,
            make_input,
            type_parser::Duck,
            value_parser::{empty_range, type_expr_into_empty_range},
        },
        semantics::type_resolve::sort_fields_type_expr,
    };

    /// Builds an unconstrained generic parameter named `name`.
    fn unconstrained(name: &str) -> Generic {
        Generic {
            name: name.to_string(),
            constraint: None,
        }
    }

    /// Builds a duck-type constraint from `fields`, carrying an empty span.
    fn duck(fields: Vec<Field>) -> Option<Spanned<TypeExpr>> {
        Some((TypeExpr::Duck(Duck { fields }), empty_range()))
    }

    /// Builds a duck field with an empty span on its type expression.
    fn field(name: &str, type_expr: TypeExpr) -> Field {
        Field {
            name: name.to_string(),
            type_expr: (type_expr, empty_range()),
        }
    }

    #[test]
    fn test_simple_generics_parser() {
        let valid_generic_definitions = vec![
            "<TYPENAME>",
            "<TYPENAME1, TYPENAME2>",
            "<TYPENAME1, TYPENAME2, TYPENAME3, TYPENAME4, TYPENAME5, TYPENAME6, TYPENAME7, TYPENAME8, TYPENAME9>",
            "<ABCDEFGHIJKLMNOPQRSTUVWQXYZ>",
            "<XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX>",
            "<XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX, XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX>",
            "<XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX, XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX>",
            "<WITH_TRAILING_COMMA,>",
            "<WITH_SPACE_BETWEEN, AND_TRAILING_COMMA_WITH_EVEN_MORE_SPACE, >",
            "<WITH_SPACE_BETWEEN,
            AND_A_LINE_BREAK_AND_EVEN_MORE_SPACE, >",
        ];
        for src in valid_generic_definitions {
            println!("lexing {src}");
            let lexed = lex_parser("test", "").parse(src);
            assert!(!lexed.has_errors());
            assert!(lexed.has_output());
            let tokens = lexed.into_output().expect("lexer produced no output");
            println!("typedef_parsing {src}");
            let parsed = generics_parser().parse(make_input(empty_range(), &tokens));
            assert!(!parsed.has_errors());
            assert!(parsed.has_output());
        }
        // Empty lists and non-identifier parameters must be rejected.
        for src in ["<>", "<{}>", "<*()>"] {
            println!("lexing {src}");
            let lexed = lex_parser("test", "").parse(src);
            assert!(!lexed.has_errors());
            assert!(lexed.has_output());
            let tokens = lexed.into_output().expect("lexer produced no output");
            println!("typedef_parsing {src}");
            let parsed = generics_parser().parse(make_input(empty_range(), tokens.as_slice()));
            assert!(parsed.has_errors());
            assert!(!parsed.has_output());
        }
    }

    #[test]
    fn test_detailed_generics_parser() {
        // One unconstrained parameter per letter A..=Z.
        let alphabet: Vec<Generic> = ('A'..='Z')
            .map(|c| unconstrained(&c.to_string()))
            .collect();
        let test_cases: Vec<(&str, Vec<Generic>)> = vec![
            ("<TYPENAME>", vec![unconstrained("TYPENAME")]),
            (
                "<TYPENAME, TYPENAMETWO>",
                vec![unconstrained("TYPENAME"), unconstrained("TYPENAMETWO")],
            ),
            (
                "<TYPENAME, TYPENAMETWO, TYPENAMETHREE>",
                vec![
                    unconstrained("TYPENAME"),
                    unconstrained("TYPENAMETWO"),
                    unconstrained("TYPENAMETHREE"),
                ],
            ),
            (
                "<ABCDEFGHIJKLMNOPQRSTUVWXYZ>",
                vec![unconstrained("ABCDEFGHIJKLMNOPQRSTUVWXYZ")],
            ),
            (
                "<A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z>",
                alphabet,
            ),
        ];
        for (i, (src, expected_generics)) in test_cases.into_iter().enumerate() {
            let lex_result = lex_parser("test", "").parse(src).into_result().expect(&src);
            let parse_result = generics_parser().parse(make_input(empty_range(), &lex_result));
            assert_eq!(
                parse_result.has_errors(),
                false,
                "{i}: {} {:?} {:?}",
                src,
                lex_result,
                parse_result
            );
            assert_eq!(parse_result.has_output(), true, "{i}: {}", src);
            let output = parse_result.into_result().expect(&src);
            let actual_generics: Vec<Generic> =
                output.iter().map(|(generic, _)| generic.clone()).collect();
            assert_eq!(actual_generics, expected_generics, "{i}: {}", src);
        }
    }

    #[test]
    fn test_constrained_generics() {
        let test_cases = vec![
            (
                "<TYPENAME: { username: String }>",
                vec![Generic {
                    name: "TYPENAME".to_string(),
                    constraint: duck(vec![field("username", TypeExpr::String(None))]),
                }],
            ),
            (
                "<TYPENAME, TYPENAMETWO: { username: String, b: Int }>",
                vec![
                    unconstrained("TYPENAME"),
                    Generic {
                        name: "TYPENAMETWO".to_string(),
                        constraint: duck(vec![
                            field("username", TypeExpr::String(None)),
                            field("b", TypeExpr::Int),
                        ]),
                    },
                ],
            ),
            (
                "<TYPENAME, TYPENAMETWO, TYPENAMETHREE: { username: { x: String }}>",
                vec![
                    unconstrained("TYPENAME"),
                    unconstrained("TYPENAMETWO"),
                    Generic {
                        name: "TYPENAMETHREE".to_string(),
                        constraint: duck(vec![field(
                            "username",
                            TypeExpr::Duck(Duck {
                                fields: vec![field("x", TypeExpr::String(None))],
                            }),
                        )]),
                    },
                ],
            ),
        ];
        for (i, (src, mut expected_generics)) in test_cases.into_iter().enumerate() {
            let lex_result = lex_parser("test", "").parse(src).into_result().expect(&src);
            let parse_result = generics_parser().parse(make_input(empty_range(), &lex_result));
            assert_eq!(
                parse_result.has_errors(),
                false,
                "{i}: {} {:?} {:?}",
                src,
                lex_result,
                parse_result
            );
            assert_eq!(parse_result.has_output(), true, "{i}: {}", src);
            let output = parse_result.into_result().expect(&src);
            // Blank out spans on the parsed constraints so only structure is compared.
            let mut actual_generics: Vec<Generic> = Vec::with_capacity(output.len());
            for (generic, _) in &output {
                let mut generic = generic.clone();
                if let Some((type_expr, span)) = generic.constraint.as_mut() {
                    *span = empty_range();
                    let TypeExpr::Duck(duck_ty) = type_expr else {
                        unreachable!("this should be anything else than a duck")
                    };
                    for f in duck_ty.fields.iter_mut() {
                        type_expr_into_empty_range(&mut f.type_expr);
                    }
                }
                actual_generics.push(generic);
            }
            // The parser sorts duck fields; sort the expectation the same way.
            for generic in expected_generics.iter_mut() {
                if let Some((type_expr, _)) = &mut generic.constraint {
                    sort_fields_type_expr(type_expr);
                }
            }
            assert_eq!(actual_generics, expected_generics, "{i}: {}", src);
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/source_file_parser.rs | src/parse/source_file_parser.rs | use crate::parse::{type_parser::type_expression_parser, value_parser::value_expr_parser};
use std::{collections::HashMap, path::PathBuf};
use chumsky::{input::BorrowInput, prelude::*};
use tree_sitter::{Node, Parser as TSParser};
use crate::{
parse::{
Context, SS, Spanned,
duckx_component_parser::{DuckxComponent, duckx_component_parser},
extensions_def_parser::{ExtensionsDef, extensions_def_parser},
function_parser::{FunctionDefintion, LambdaFunctionExpr, function_definition_parser},
jsx_component_parser::{JsxComponent, jsx_component_parser},
lexer::{Token, lex_parser},
make_input, parse_failure,
schema_def_parser::{self, SchemaDefinition},
struct_parser::{StructDefinition, struct_definition_parser},
test_parser::{TestCase, test_parser},
type_parser::{Duck, TypeDefinition, TypeExpr, type_definition_parser},
use_statement_parser::{UseStatement, use_statement_parser},
value_parser::{ValFmtStringContents, ValHtmlStringContents, ValueExpr},
},
semantics::ident_mangler::{
MangleEnv, mangle, mangle_duckx_component, mangle_jsx_component, mangle_type_expression,
mangle_value_expr, unmangle,
},
};
/// Parsed representation of one `.duck` source file / module, holding every
/// kind of top-level item plus its (recursively parsed) sub-modules.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct SourceFile {
    pub function_definitions: Vec<FunctionDefintion>,
    pub type_definitions: Vec<TypeDefinition>,
    pub struct_definitions: Vec<StructDefinition>,
    pub use_statements: Vec<UseStatement>,
    pub extensions_defs: Vec<ExtensionsDef>,
    /// Child modules as `(module_name, parsed_file)` pairs.
    pub sub_modules: Vec<(String, SourceFile)>,
    // NOTE(review): field name is a typo for `jsx_components`; kept unchanged
    // because code across the crate refers to it by this spelling.
    pub jsx_compontents: Vec<JsxComponent>,
    pub duckx_components: Vec<DuckxComponent>,
    pub test_cases: Vec<TestCase>,
    pub schema_defs: Vec<SchemaDefinition>,
    pub global_var_decls: Vec<GlobalVariableDeclaration>,
}
/// A top-level variable declaration with an explicit type and an initializer.
#[derive(Clone, Debug, PartialEq)]
pub struct GlobalVariableDeclaration {
    /// True when the variable is declared mutable.
    pub is_mut: bool,
    pub name: String,
    pub type_expr: Spanned<TypeExpr>,
    pub initializer: Spanned<ValueExpr>,
}
/// One top-level item as produced by the source-file parser, before the items
/// are collected into a `SourceFile`.
#[derive(Debug, Clone)]
pub enum SourceUnit {
    Func(FunctionDefintion),
    Schema(SchemaDefinition),
    Type(TypeDefinition),
    Extensions(ExtensionsDef),
    /// A JSX component definition.
    Component(JsxComponent),
    /// A duckx component definition.
    Template(DuckxComponent),
    /// A struct definition together with its method definitions.
    Struct((StructDefinition, Vec<FunctionDefintion>)),
    Use(Vec<UseStatement>),
    /// A named sub-module and its parsed contents.
    Module(String, SourceFile),
    Test(Spanned<TestCase>),
    GlobalVariableDecl(GlobalVariableDeclaration),
}
impl SourceFile {
    /// Appends `s` to this file's use statements unless it is already present.
    pub fn push_use(&mut self, s: &UseStatement) {
        if !self.use_statements.contains(s) {
            self.use_statements.push(s.to_owned());
        }
    }
    /// Flattens the whole module tree into a single `SourceFile`.
    ///
    /// First pass (`flatten0`): every top-level item of every sub-module is
    /// hoisted into the result with its name mangled by its module path, and
    /// all references inside bodies/types are rewritten accordingly. Second
    /// pass: `global_prefix` is prepended to all surviving top-level names.
    /// When `with_std` is set, `std` is treated as a glob import.
    pub fn flatten(&self, global_prefix: &Vec<String>, with_std: bool) -> SourceFile {
        // Recursive worker: flattens `s`, where `prefix` is the module path
        // from the root down to `s`.
        fn flatten0(
            s: &SourceFile,
            global_prefix: &Vec<String>,
            prefix: &Vec<String>,
            with_std: bool,
        ) -> SourceFile {
            // Seed the mangling environment with everything visible at this
            // module's scope: sub-modules, components, imports, and the names
            // and types defined directly in this file.
            let mut mangle_env = MangleEnv {
                sub_mods: s.sub_modules.iter().map(|x| x.0.clone()).collect(),
                global_prefix: global_prefix.clone(),
                jsx_components: s.jsx_compontents.iter().map(|x| x.name.clone()).collect(),
                duckx_components: s.duckx_components.iter().map(|x| x.name.clone()).collect(),
                imports: {
                    let mut imports = HashMap::new();
                    if with_std {
                        imports.insert("std".into(), (true, vec![]));
                    }
                    for use_statement in &s.use_statements {
                        if let UseStatement::Regular(glob, segments) = use_statement {
                            // Map the imported symbol (last segment) to its
                            // module path and glob flag.
                            let pre = segments[..segments.len() - 1].to_vec();
                            let last = segments.last().cloned();
                            if let Some(symbol) = last {
                                imports.insert(symbol.clone(), (*glob, pre.clone()));
                            }
                        }
                    }
                    imports
                },
                names: vec![
                    s.function_definitions
                        .iter()
                        .map(|x| x.name.clone())
                        .chain(s.global_var_decls.iter().map(|x| x.name.clone()))
                        .collect::<Vec<_>>(),
                ],
                types: vec![
                    s.type_definitions
                        .iter()
                        .map(|x| x.name.clone())
                        .chain(s.struct_definitions.iter().map(|x| x.name.clone()))
                        .collect::<Vec<_>>(),
                ],
            };
            let mut result = SourceFile::default();
            // Recursively flatten each sub-module and hoist its items into
            // `result`, registering their names so references resolve.
            for (name, sub_module) in &s.sub_modules {
                let mut p = Vec::new();
                p.extend_from_slice(prefix);
                p.push(name.to_owned());
                let src = flatten0(sub_module, global_prefix, &p, with_std);
                // NOTE(review): `name[prefix.len()..]` slices the mangled name
                // string by the *segment count* of `prefix` — this assumes a
                // specific encoding in `mangle`; confirm against its impl.
                for function_definition in src.function_definitions {
                    mangle_env.insert_ident(function_definition.name[prefix.len()..].to_string());
                    result.function_definitions.push(function_definition);
                }
                for global_var_decl in src.global_var_decls {
                    mangle_env.insert_ident(global_var_decl.name[prefix.len()..].to_string());
                    result.global_var_decls.push(global_var_decl);
                }
                for type_definition in src.type_definitions {
                    mangle_env.insert_type(type_definition.name[prefix.len()..].to_string());
                    result.type_definitions.push(type_definition);
                }
                for struct_definition in src.struct_definitions {
                    mangle_env.insert_type(struct_definition.name[prefix.len()..].to_string());
                    result.struct_definitions.push(struct_definition);
                }
                for schema_def in src.schema_defs {
                    mangle_env.insert_type(schema_def.name[prefix.len()..].to_string());
                    result.schema_defs.push(schema_def);
                }
                for jsx_component in src.jsx_compontents {
                    mangle_env.insert_ident(jsx_component.name[prefix.len()..].to_string());
                    result.jsx_compontents.push(jsx_component);
                }
                for duck_component in src.duckx_components {
                    mangle_env.insert_ident(duck_component.name[prefix.len()..].to_string());
                    result.duckx_components.push(duck_component);
                }
                for test_case in src.test_cases {
                    result.test_cases.push(test_case);
                }
                for extensions_def in src.extensions_defs {
                    result.extensions_defs.push(extensions_def.clone())
                }
                // Only Go imports survive flattening; regular imports were
                // resolved into the mangle environment above.
                for use_statement in &src.use_statements {
                    if matches!(use_statement, UseStatement::Go(..)) {
                        result.push_use(use_statement);
                    }
                }
            }
            for use_statement in &s.use_statements {
                if matches!(use_statement, UseStatement::Go(..)) {
                    result.push_use(use_statement);
                }
            }
            // Mangle this module's own items with the current path prefix and
            // rewrite all names referenced inside their types and bodies.
            for global in &s.global_var_decls {
                let mut global = global.clone();
                let mut p = Vec::new();
                p.extend_from_slice(prefix);
                p.push(global.name.clone());
                global.name = mangle(&p);
                mangle_type_expression(&mut global.type_expr.0, prefix, &mut mangle_env);
                mangle_value_expr(
                    &mut global.initializer.0,
                    global_prefix,
                    prefix,
                    &mut mangle_env,
                );
                result.global_var_decls.push(global);
            }
            for func in &s.function_definitions {
                let mut func = func.clone();
                let mut p = Vec::new();
                p.extend_from_slice(prefix);
                p.push(func.name.clone());
                func.name = mangle(&p);
                mangle_type_expression(&mut func.return_type.0, prefix, &mut mangle_env);
                // Parameters open a fresh identifier scope so they shadow
                // top-level names inside the body.
                mangle_env.push_idents();
                for (name, type_expr) in &mut func.params {
                    mangle_type_expression(&mut type_expr.0, prefix, &mut mangle_env);
                    mangle_env.insert_ident(name.clone());
                }
                mangle_value_expr(
                    &mut func.value_expr.0,
                    global_prefix,
                    prefix,
                    &mut mangle_env,
                );
                mangle_env.pop_idents();
                result.function_definitions.push(func);
            }
            // Extension methods keep their own names (dispatch is by target
            // type), but their signatures and bodies still get mangled.
            for extensions_def in &s.extensions_defs {
                let mut extensions_result = extensions_def.clone();
                extensions_result.function_definitions = vec![];
                for func in &extensions_def.function_definitions {
                    let mut func = func.clone();
                    mangle_type_expression(&mut func.0.return_type.0, prefix, &mut mangle_env);
                    mangle_env.push_idents();
                    for (name, type_expr) in &mut func.0.params {
                        mangle_type_expression(&mut type_expr.0, prefix, &mut mangle_env);
                        mangle_env.insert_ident(name.clone());
                    }
                    mangle_value_expr(
                        &mut func.0.value_expr.0,
                        global_prefix,
                        prefix,
                        &mut mangle_env,
                    );
                    mangle_env.pop_idents();
                    extensions_result.function_definitions.push(func)
                }
                result.extensions_defs.push(extensions_result)
            }
            for type_definition in &s.type_definitions {
                let mut ty = type_definition.clone();
                let mut p = Vec::new();
                p.extend_from_slice(prefix);
                p.push(ty.name.clone());
                ty.name = mangle(&p);
                mangle_type_expression(&mut ty.type_expression.0, prefix, &mut mangle_env);
                result.type_definitions.push(ty);
            }
            // todo(@Apfelfrosch): implement flatten for struct definitions
            // can this be deleted?
            for struct_def in &s.struct_definitions {
                let mut struct_def = struct_def.clone();
                let mut new_name = Vec::new();
                new_name.extend_from_slice(prefix);
                new_name.push(struct_def.name.clone());
                struct_def.name = mangle(&new_name);
                for field in &mut struct_def.fields {
                    mangle_type_expression(&mut field.type_expr.0, prefix, &mut mangle_env);
                }
                for func in &mut struct_def.methods {
                    mangle_type_expression(&mut func.return_type.0, prefix, &mut mangle_env);
                    mangle_env.push_idents();
                    for (name, type_expr) in &mut func.params {
                        mangle_type_expression(&mut type_expr.0, prefix, &mut mangle_env);
                        mangle_env.insert_ident(name.clone());
                    }
                    mangle_value_expr(
                        &mut func.value_expr.0,
                        global_prefix,
                        prefix,
                        &mut mangle_env,
                    );
                    mangle_env.pop_idents();
                }
                result.struct_definitions.push(struct_def);
            }
            for schema_def in &s.schema_defs {
                let mut schema_def = schema_def.clone();
                // NOTE(review): `new_name` is built but unused because the
                // mangle below is commented out — schema names are currently
                // NOT path-mangled. Confirm whether that is intentional.
                let mut new_name = Vec::new();
                new_name.extend_from_slice(prefix);
                new_name.push(schema_def.name.clone());
                // schema_def.name = mangle(&new_name);
                for schema_field in &mut schema_def.fields {
                    mangle_type_expression(&mut schema_field.type_expr.0, prefix, &mut mangle_env);
                    if let Some(branch) = &mut schema_field.if_branch {
                        mangle_value_expr(
                            &mut branch.0.condition.0,
                            global_prefix,
                            prefix,
                            &mut mangle_env,
                        );
                        if let Some(value_expr) = &mut branch.0.value_expr {
                            mangle_value_expr(
                                &mut value_expr.0,
                                global_prefix,
                                prefix,
                                &mut mangle_env,
                            );
                        }
                    }
                    if let Some(value_expr) = &mut schema_field.else_branch_value_expr {
                        mangle_value_expr(
                            &mut value_expr.0,
                            global_prefix,
                            prefix,
                            &mut mangle_env,
                        );
                    }
                }
                result.schema_defs.push(schema_def);
            }
            for component in &s.jsx_compontents {
                // todo: mangle components in jsx
                let mut component = component.clone();
                let mut p = Vec::new();
                p.extend_from_slice(prefix);
                p.push(component.name.clone());
                component.name = mangle(&p);
                mangle_jsx_component(&mut component, global_prefix, prefix, &mut mangle_env);
                result.jsx_compontents.push(component.clone());
            }
            for c in &s.duckx_components {
                // todo: mangle components in jsx
                let mut c = c.clone();
                let mut p = Vec::new();
                p.extend_from_slice(prefix);
                p.push(c.name.clone());
                c.name = mangle(&p);
                mangle_duckx_component(&mut c, global_prefix, prefix, &mut mangle_env);
                result.duckx_components.push(c.clone());
            }
            for test_case in &s.test_cases {
                let mut test_case = test_case.clone();
                mangle_value_expr(
                    &mut test_case.body.0,
                    global_prefix,
                    prefix,
                    &mut mangle_env,
                );
                result.test_cases.push(test_case)
            }
            result
        }
        // First pass: collapse the module tree.
        let mut flattened_source_file = flatten0(self, global_prefix, &vec![], with_std);
        // Second pass: prepend `global_prefix` to every remaining top-level
        // name (and every reference to one).
        let mut mangle_env = MangleEnv {
            sub_mods: Vec::new(),
            global_prefix: global_prefix.clone(),
            jsx_components: flattened_source_file
                .jsx_compontents
                .iter()
                .map(|x| x.name.clone())
                .collect(),
            duckx_components: flattened_source_file
                .duckx_components
                .iter()
                .map(|x| x.name.clone())
                .collect(),
            imports: HashMap::new(),
            names: vec![
                flattened_source_file
                    .function_definitions
                    .iter()
                    .map(|x| x.name.clone())
                    .chain(
                        flattened_source_file
                            .global_var_decls
                            .iter()
                            .map(|x| x.name.clone()),
                    )
                    .collect::<Vec<_>>(),
            ],
            types: vec![
                flattened_source_file
                    .type_definitions
                    .iter()
                    .map(|x| x.name.clone())
                    .chain(
                        flattened_source_file
                            .struct_definitions
                            .iter()
                            .map(|x| x.name.clone()),
                    )
                    .collect::<Vec<_>>(),
            ],
        };
        for function_definition in &mut flattened_source_file.function_definitions {
            // Re-mangle the name as global_prefix + original path segments.
            let mut c = global_prefix.clone();
            c.extend(unmangle(&function_definition.name));
            function_definition.name = mangle(&c);
            for type_expr in [&mut function_definition.return_type.0]
                .into_iter()
                .chain(function_definition.params.iter_mut().map(|x| &mut x.1.0))
            {
                append_global_prefix_type_expr(type_expr, &mut mangle_env);
            }
            append_global_prefix_value_expr(&mut function_definition.value_expr.0, &mut mangle_env);
        }
        for global in &mut flattened_source_file.global_var_decls {
            let mut p = global_prefix.clone();
            p.extend(unmangle(&global.name));
            global.name = mangle(&p);
            append_global_prefix_type_expr(&mut global.type_expr.0, &mut mangle_env);
            append_global_prefix_value_expr(&mut global.initializer.0, &mut mangle_env);
        }
        for type_definition in &mut flattened_source_file.type_definitions {
            let mut p = global_prefix.clone();
            p.extend(unmangle(&type_definition.name));
            type_definition.name = mangle(&p);
            append_global_prefix_type_expr(&mut type_definition.type_expression.0, &mut mangle_env);
        }
        for struct_definition in &mut flattened_source_file.struct_definitions {
            let mut c = global_prefix.clone();
            c.extend(unmangle(&struct_definition.name));
            struct_definition.name = mangle(&c);
            for field in &mut struct_definition.fields {
                append_global_prefix_type_expr(&mut field.type_expr.0, &mut mangle_env);
            }
            for method in &mut struct_definition.methods {
                for type_expr in [&mut method.return_type.0]
                    .into_iter()
                    .chain(method.params.iter_mut().map(|x| &mut x.1.0))
                {
                    append_global_prefix_type_expr(type_expr, &mut mangle_env);
                }
                append_global_prefix_value_expr(&mut method.value_expr.0, &mut mangle_env);
            }
        }
        for component in &mut flattened_source_file.duckx_components {
            let mut p = global_prefix.clone();
            append_global_prefix_type_expr(&mut component.props_type.0, &mut mangle_env);
            append_global_prefix_value_expr(&mut component.value_expr.0, &mut mangle_env);
            p.extend(unmangle(&component.name));
            component.name = mangle(&p);
        }
        for jsx_component in &mut flattened_source_file.jsx_compontents {
            let mut c = global_prefix.clone();
            c.extend(unmangle(&jsx_component.name));
            jsx_component.name = mangle(&c);
        }
        for test_case in &mut flattened_source_file.test_cases {
            append_global_prefix_value_expr(&mut test_case.body.0, &mut mangle_env);
        }
        for ext_def in &mut flattened_source_file.extensions_defs {
            for def in &mut ext_def.function_definitions {
                for t in [&mut def.0.return_type]
                    .into_iter()
                    .map(|(x, _)| x)
                    .chain(def.0.params.iter_mut().map(|(_, y)| &mut y.0))
                {
                    append_global_prefix_type_expr(t, &mut mangle_env);
                }
                append_global_prefix_value_expr(&mut def.0.value_expr.0, &mut mangle_env);
            }
        }
        flattened_source_file
    }
}
/// Rewrites every top-level type name inside `type_expr` so it carries the
/// global module prefix, recursing through all composite type forms.
fn append_global_prefix_type_expr(type_expr: &mut TypeExpr, mangle_env: &mut MangleEnv) {
    match type_expr {
        TypeExpr::TypeName(_, name, _) => {
            // Only names defined at the top level of this compilation unit
            // receive the prefix; everything else stays untouched.
            if mangle_env.is_top_level_type(name) {
                let mut qualified = Vec::new();
                qualified.extend_from_slice(&mangle_env.global_prefix);
                qualified.extend(unmangle(name));
                *name = mangle(&qualified);
            }
        }
        TypeExpr::RawTypeName(..) => panic!("raw type name shouldnt be here"),
        TypeExpr::Duck(Duck { fields }) => {
            for field in fields.iter_mut() {
                append_global_prefix_type_expr(&mut field.type_expr.0, mangle_env);
            }
        }
        TypeExpr::Tuple(members) => {
            for member in members.iter_mut() {
                append_global_prefix_type_expr(&mut member.0, mangle_env);
            }
        }
        TypeExpr::Fun(params, return_type, _) => {
            for (_, param_type) in params.iter_mut() {
                append_global_prefix_type_expr(&mut param_type.0, mangle_env);
            }
            append_global_prefix_type_expr(&mut return_type.0, mangle_env);
        }
        TypeExpr::Or(variants) => {
            for variant in variants.iter_mut() {
                append_global_prefix_type_expr(&mut variant.0, mangle_env);
            }
        }
        // Single-child wrappers recurse straight into the inner type.
        TypeExpr::Array(inner) | TypeExpr::Ref(inner) | TypeExpr::RefMut(inner) => {
            append_global_prefix_type_expr(&mut inner.0, mangle_env);
        }
        // Primitive / leaf types carry no names to rewrite.
        _ => {}
    }
}
fn append_global_prefix_value_expr(value_expr: &mut ValueExpr, mangle_env: &mut MangleEnv) {
match value_expr {
ValueExpr::BitAnd { lhs, rhs }
| ValueExpr::BitOr { lhs, rhs }
| ValueExpr::BitXor { lhs, rhs }
| ValueExpr::ShiftLeft {
target: lhs,
amount: rhs,
}
| ValueExpr::ShiftRight {
target: lhs,
amount: rhs,
} => {
append_global_prefix_value_expr(&mut lhs.0, mangle_env);
append_global_prefix_value_expr(&mut rhs.0, mangle_env);
}
ValueExpr::RawStruct { .. } => panic!("raw struct shouldn't be here"),
ValueExpr::Async(d) | ValueExpr::Defer(d) | ValueExpr::BitNot(d) => {
append_global_prefix_value_expr(&mut d.0, mangle_env)
}
ValueExpr::As(v, t) => {
append_global_prefix_type_expr(&mut t.0, mangle_env);
append_global_prefix_value_expr(&mut v.0, mangle_env);
}
ValueExpr::For {
ident: _,
target,
block,
} => {
append_global_prefix_value_expr(&mut target.0, mangle_env);
append_global_prefix_value_expr(&mut block.0, mangle_env);
}
ValueExpr::Negate(v) | ValueExpr::Deref(v) | ValueExpr::Ref(v) | ValueExpr::RefMut(v) => {
append_global_prefix_value_expr(&mut v.0, mangle_env)
}
ValueExpr::HtmlString(contents) => {
for c in contents {
if let ValHtmlStringContents::Expr(e) = c {
append_global_prefix_value_expr(&mut e.0, mangle_env);
}
}
}
ValueExpr::Int(..)
| ValueExpr::String(..)
| ValueExpr::Bool(..)
| ValueExpr::Float(..)
| ValueExpr::Tag(..)
| ValueExpr::Return(None)
| ValueExpr::Char(..) => {}
ValueExpr::Continue => {}
ValueExpr::Break => {}
ValueExpr::ArrayAccess(target, idx) => {
append_global_prefix_value_expr(&mut target.0, mangle_env);
append_global_prefix_value_expr(&mut idx.0, mangle_env);
}
ValueExpr::Match {
value_expr,
arms,
else_arm,
span: _,
} => {
append_global_prefix_value_expr(&mut value_expr.0, mangle_env);
for arm in arms {
append_global_prefix_type_expr(&mut arm.type_case.0, mangle_env);
mangle_env.push_idents();
if let Some(identifier) = &arm.identifier_binding {
mangle_env.insert_ident(identifier.clone());
}
append_global_prefix_value_expr(&mut arm.value_expr.0, mangle_env);
mangle_env.pop_idents();
}
if let Some(arm) = else_arm {
append_global_prefix_type_expr(&mut arm.type_case.0, mangle_env);
mangle_env.push_idents();
if let Some(identifier) = &arm.identifier_binding {
mangle_env.insert_ident(identifier.clone());
}
append_global_prefix_value_expr(&mut arm.value_expr.0, mangle_env);
mangle_env.pop_idents();
}
}
ValueExpr::FormattedString(contents) => {
for c in contents {
if let ValFmtStringContents::Expr(e) = c {
append_global_prefix_value_expr(&mut e.0, mangle_env);
}
}
}
ValueExpr::Array(exprs, _ty) => {
for expr in exprs {
append_global_prefix_value_expr(&mut expr.0, mangle_env);
}
}
ValueExpr::InlineGo(t, ty) => {
if let Some(ty) = ty {
append_global_prefix_type_expr(&mut ty.0, mangle_env);
}
let mut parser = TSParser::new();
parser
.set_language(&tree_sitter_go::LANGUAGE.into())
.expect("Couldn't set go grammar");
let src = parser.parse(t.as_bytes(), None).unwrap();
let root_node = src.root_node();
fn trav(
s: &Node,
t: &[u8],
e: &mut MangleEnv,
out: &mut Vec<(tree_sitter::Range, String)>,
) {
fn extract_all_ident(t: &[u8], n: &Node) -> Vec<(tree_sitter::Range, String)> {
if n.grammar_name() == "selector_expression" {
return vec![(n.range(), n.utf8_text(t).unwrap().to_string())];
}
if n.grammar_name() == "identifier" {
return vec![(n.range(), n.utf8_text(t).unwrap().to_string())];
}
let mut res = Vec::new();
for i in 0..n.child_count() {
let x = extract_all_ident(t, &n.child(i).unwrap().clone());
res.extend(x);
}
res
}
let declared_var_ident = match s.grammar_name() {
"short_var_declaration" => {
Some(s.child(0).unwrap().utf8_text(t).unwrap().to_string())
}
"var_declaration" => Some(
s.child(1)
.unwrap()
.child(0)
.unwrap()
.utf8_text(t)
.unwrap()
.to_string(),
),
_ => None,
};
if s.grammar_name() == "expression_statement" {
let i = extract_all_ident(t, s);
out.extend(i);
}
// TODO: respect additional identifer scopes like blocks and lambdas
if let Some(i) = declared_var_ident {
e.insert_ident(i);
}
for i in 0..s.child_count() {
trav(&s.child(i).unwrap(), t, e, out);
}
}
let mut o = Vec::new();
trav(&root_node, t.as_bytes(), mangle_env, &mut o);
let mut translation = 0;
for (range, ident) in o {
if mangle_env.is_top_level_ident(&ident) {
let mut v = Vec::new();
v.extend_from_slice(&mangle_env.global_prefix);
v.push(ident.clone());
let mangled = mangle(&v);
let size_diff = mangled.len() - ident.len();
t.drain((range.start_byte + translation)..(range.end_byte + translation));
t.insert_str(range.start_byte + translation, &mangled);
translation += size_diff;
}
}
}
ValueExpr::Lambda(lambda_expr) => {
let LambdaFunctionExpr {
is_mut: _,
params,
return_type,
value_expr,
} = &mut **lambda_expr;
for (_, param_type) in params {
if let Some(param_type) = param_type.as_mut() {
append_global_prefix_type_expr(&mut param_type.0, mangle_env);
}
}
if let Some(return_type) = return_type {
append_global_prefix_type_expr(&mut return_type.0, mangle_env);
}
mangle_env.push_idents();
append_global_prefix_value_expr(&mut value_expr.0, mangle_env);
mangle_env.pop_idents();
}
ValueExpr::FunctionCall {
target,
params,
type_params,
..
} => {
// TODO: type params
append_global_prefix_value_expr(&mut target.0, mangle_env);
params
.iter_mut()
.for_each(|param| append_global_prefix_value_expr(&mut param.0, mangle_env));
for param in type_params {
append_global_prefix_type_expr(&mut param.0, mangle_env);
}
}
ValueExpr::RawVariable(..) => panic!("raw variable shouldn't be here"),
ValueExpr::Variable(_, name, _, _, _) => {
if mangle_env.is_top_level_ident(name) {
let mut v = Vec::new();
v.extend_from_slice(&mangle_env.global_prefix);
v.push(name.clone());
*name = mangle(&v);
}
}
ValueExpr::If {
condition,
then,
r#else,
} => {
append_global_prefix_value_expr(&mut condition.0, mangle_env);
mangle_env.push_idents();
append_global_prefix_value_expr(&mut then.0, mangle_env);
mangle_env.pop_idents();
if let Some(r#else) = r#else {
mangle_env.push_idents();
append_global_prefix_value_expr(&mut r#else.0, mangle_env);
}
}
ValueExpr::While { condition, body } => {
append_global_prefix_value_expr(&mut condition.0, mangle_env);
mangle_env.push_idents();
append_global_prefix_value_expr(&mut body.0, mangle_env);
mangle_env.pop_idents();
}
ValueExpr::Tuple(value_exprs) => value_exprs
.iter_mut()
.for_each(|value_expr| append_global_prefix_value_expr(&mut value_expr.0, mangle_env)),
ValueExpr::Block(value_exprs) => {
mangle_env.push_idents();
value_exprs.iter_mut().for_each(|value_expr| {
append_global_prefix_value_expr(&mut value_expr.0, mangle_env)
});
mangle_env.pop_idents();
}
ValueExpr::Duck(items) => items.iter_mut().for_each(|(_, value_expr)| {
append_global_prefix_value_expr(&mut value_expr.0, mangle_env)
}),
ValueExpr::Struct {
name,
fields,
type_params,
} => {
if mangle_env.is_top_level_type(name) {
let mut v = Vec::new();
v.extend_from_slice(&mangle_env.global_prefix);
v.push(name.clone());
*name = mangle(&v);
}
fields.iter_mut().for_each(|(_, value_expr)| {
append_global_prefix_value_expr(&mut value_expr.0, mangle_env)
});
for (g, _) in type_params {
append_global_prefix_type_expr(g, mangle_env);
}
}
ValueExpr::FieldAccess { target_obj, .. } => {
append_global_prefix_value_expr(&mut target_obj.0, mangle_env);
}
ValueExpr::Return(Some(value_expr)) => {
append_global_prefix_value_expr(&mut value_expr.0, mangle_env)
}
ValueExpr::VarAssign(assignment) => {
append_global_prefix_value_expr(&mut assignment.0.target.0, mangle_env);
append_global_prefix_value_expr(&mut assignment.0.value_expr.0, mangle_env);
}
ValueExpr::VarDecl(declaration) => {
let declaration = &mut declaration.0;
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/test_parser.rs | src/parse/test_parser.rs | use chumsky::Parser;
use chumsky::input::BorrowInput;
use chumsky::prelude::*;
use crate::parse::value_parser::value_expr_parser;
use crate::parse::{SS, Spanned, failure_with_occurence, value_parser::ValueExpr};
use super::lexer::Token;
/// A parsed `test "name" { ... }` declaration: a display name plus the
/// block expression that forms the test body.
#[derive(Debug, Clone, PartialEq)]
pub struct TestCase {
    /// Human-readable test name taken from the string literal.
    pub name: String,
    /// The test body; guaranteed by the parser to be a `ValueExpr::Block`.
    pub body: Spanned<ValueExpr>,
}
/// Parses a `test "name" { ... }` declaration into a [`TestCase`].
///
/// The body must parse as a block expression; any other expression form
/// aborts compilation with a rendered diagnostic (via
/// `failure_with_occurence`, which never returns).
pub fn test_parser<'src, I, M>(
    make_input: M,
) -> impl Parser<'src, I, Spanned<TestCase>, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
    M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
    just(Token::Test)
        .ignore_then(select_ref! { Token::StringLiteral(str) => str.clone() })
        .then(value_expr_parser(make_input))
        .map_with(|(name, mut body), ctx| {
            // Enforce the block-body invariant documented on `TestCase`.
            body = match body {
                x @ (ValueExpr::Block(_), _) => x,
                _ => {
                    let msg = "Test body needs to be a block";
                    // Diverges (`-> !`), so this arm never produces a value.
                    failure_with_occurence(msg, body.1, [(msg, body.1)]);
                }
            };
            (TestCase { name, body }, ctx.span())
        })
}
#[cfg(test)]
pub mod tests {
    use crate::parse::{
        lexer::lex_parser,
        make_input,
        value_parser::{empty_range, value_expr_into_empty_range},
    };
    use chumsky::Parser;

    use super::*;

    /// Normalizes every span inside a parsed test case to `empty_range()`
    /// so cases can be compared structurally, ignoring source locations.
    fn strip_spans(spanned_type_expr: Spanned<TestCase>) -> Spanned<TestCase> {
        let (mut expr, _) = spanned_type_expr;
        expr.body.1 = empty_range();
        value_expr_into_empty_range(&mut expr.body);
        (expr, empty_range())
    }

    /// Lexes and parses `input_str` as a `test` declaration, then asserts
    /// the span-stripped result equals `expected_expr`.
    fn assert_test_case(input_str: &str, expected_expr: TestCase) {
        println!("lexing and parsing: \"{}\"", input_str);
        let lexer_parse_result = lex_parser("test", "").parse(input_str);
        assert!(
            !lexer_parse_result.has_errors(),
            "lexing errors for \"{}\": {:?}",
            input_str,
            lexer_parse_result
                .errors()
                .map(|err| err.to_string())
                .collect::<Vec<_>>()
        );
        assert!(
            lexer_parse_result.has_output(),
            "lexer produced no output for \"{}\"",
            input_str
        );
        let Some(tokens) = lexer_parse_result.into_output() else {
            unreachable!();
        };
        let parse_result = test_parser(make_input).parse(make_input(empty_range(), &tokens));
        assert!(
            !parse_result.has_errors(),
            "parsing errors for \"{}\": {:?}",
            input_str,
            parse_result
                .errors()
                .map(|err| err.to_string())
                .collect::<Vec<_>>(),
        );
        assert!(
            parse_result.has_output(),
            "parser produced no output for \"{}\"",
            input_str
        );
        let parsed = parse_result.into_output().unwrap();
        // One strip pass fully normalizes all spans; the previous second
        // call was redundant.
        let stripped_parsed = strip_spans(parsed);
        assert_eq!(
            stripped_parsed.0, expected_expr,
            "mismatch for \"{}\"",
            input_str
        );
    }

    #[test]
    fn test_assert_type() {
        assert_test_case(
            "test \"lol\" { return }",
            TestCase {
                name: "lol".to_string(),
                body: ValueExpr::Return(Some(ValueExpr::Tuple(vec![]).into_empty_span().into()))
                    .into_empty_span_and_block(),
            },
        );
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/mod.rs | src/parse/mod.rs | use std::fmt;
use ariadne::{Color, Label, Report, ReportKind, sources};
use chumsky::{
error::Rich,
input::{BorrowInput, Input},
span::SimpleSpan,
};
use crate::parse::{lexer::Token, type_parser::TypeExpr};
pub mod duckx_component_parser;
pub mod extensions_def_parser;
pub mod function_parser;
pub mod generics_parser;
pub mod jsx_component_parser;
pub mod lexer;
pub mod schema_def_parser;
pub mod source_file_parser;
pub mod struct_parser;
pub mod test_parser;
pub mod type_parser;
pub mod use_statement_parser;
pub mod value_parser;
/// Static source metadata carried inside every span so diagnostics can be
/// rendered later without threading the file contents around separately.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Context {
    /// Name of the file the span indexes into (leaked to `'static`).
    pub file_name: &'static str,
    /// Full contents of that file.
    pub file_contents: &'static str,
}

/// Span type used throughout the parser: byte offsets plus the [`Context`]
/// of the file they index into.
pub type SS = SimpleSpan<usize, Context>;
/// A value paired with the span it was parsed from.
pub type Spanned<T> = (T, SS);
/// A mutable borrow paired with its span.
pub type SpannedMutRef<'a, T> = (&'a mut T, SS);
/// A named field paired with its declared type, as used by ducks, structs
/// and schemas.
#[derive(Debug, Clone)]
pub struct Field {
    pub name: String,
    pub type_expr: Spanned<TypeExpr>,
}

impl PartialEq for Field {
    /// NOTE(review): equality compares the name only and deliberately(?)
    /// ignores `type_expr` — confirm callers rely on name-based lookup
    /// rather than full structural equality.
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name
    }
}

impl Field {
    /// Creates a field from its name and spanned type expression.
    pub fn new(name: String, type_expr: Spanned<TypeExpr>) -> Self {
        // Idiomatic tail expression instead of an explicit `return …;`.
        Self { name, type_expr }
    }
}
/// Adapts a slice of spanned tokens into a chumsky [`BorrowInput`], using
/// `eoi` as the span reported at end-of-input.
pub fn make_input<'src>(
    eoi: SS,
    toks: &'src [Spanned<Token>],
) -> impl BorrowInput<'src, Token = Token, Span = SS> {
    toks.map(eoi, |(t, s)| (t, s))
}
/// Renders an ariadne error report for `msg` at `occured_at`, attaching a
/// yellow label per entry in `labels`, then panics with `msg`.
///
/// Labels may live in files other than `occured_at`'s; their sources are
/// registered as well so cross-file labels render correctly.
pub fn failure_with_occurence(
    msg: impl AsRef<str>,
    occured_at: SS,
    labels: impl IntoIterator<Item = (impl AsRef<str>, SS)>,
) -> ! {
    // The file the error occurred in is always a source.
    let mut srcs = vec![(
        occured_at.context.file_name,
        occured_at.context.file_contents,
    )];
    let labels = labels.into_iter().collect::<Vec<_>>();
    for label in &labels {
        let ctx = label.1.context;
        // NOTE(review): a foreign file referenced by several labels gets
        // pushed once per label; ariadne's `sources` cache keys by file
        // name so duplicates appear harmless — confirm.
        if ctx.file_name != occured_at.context.file_name {
            srcs.push((label.1.context.file_name, label.1.context.file_contents));
        }
    }
    Report::build(
        ReportKind::Error,
        (occured_at.context.file_name, occured_at.into_range()),
    )
    // Spans are byte offsets, so index by byte rather than char.
    .with_config(ariadne::Config::new().with_index_type(ariadne::IndexType::Byte))
    .with_message(msg.as_ref())
    .with_labels(labels.into_iter().map(|label2| {
        Label::new((label2.1.context.file_name, label2.1.into_range()))
            .with_message(label2.0.as_ref())
            .with_color(Color::Yellow)
    }))
    .finish()
    .eprint(sources(srcs))
    .unwrap();
    // Abort compilation after the report has been printed.
    panic!("{}", msg.as_ref())
}
/// Renders an ariadne error report for a single file: one red primary
/// label plus any number of yellow secondary labels, then panics with
/// `msg`.
pub fn failure(
    file_name: &'static str,
    msg: String,
    label: (String, SS),
    extra_labels: impl IntoIterator<Item = (String, SS)>,
    src: &str,
) -> ! {
    Report::build(ReportKind::Error, (file_name, label.1.into_range()))
        // Spans are byte offsets, so index by byte rather than char.
        .with_config(ariadne::Config::new().with_index_type(ariadne::IndexType::Byte))
        .with_message(&msg)
        .with_label(
            Label::new((file_name, label.1.into_range()))
                .with_message(label.0)
                .with_color(Color::Red),
        )
        .with_labels(extra_labels.into_iter().map(|label2| {
            Label::new((file_name, label2.1.into_range()))
                .with_message(label2.0)
                .with_color(Color::Yellow)
        }))
        .finish()
        .eprint(sources([(file_name, src)]))
        .unwrap();
    // Abort compilation after the report has been printed.
    panic!("{msg}")
}
/// Converts a chumsky [`Rich`] parse error into a rendered diagnostic via
/// [`failure`] and aborts. The primary label shows the offending token (or
/// "end of input"); each parser context becomes a secondary label.
pub fn parse_failure(file_name: &str, err: &Rich<impl fmt::Display, SS>, src: &str) -> ! {
    failure(
        // `failure` requires a `'static` name; leaking is acceptable here
        // because the process panics immediately afterwards.
        file_name.to_string().leak(),
        err.reason().to_string(),
        (
            err.found()
                .map(|c| c.to_string())
                .unwrap_or_else(|| "end of input".to_string()),
            *err.span(),
        ),
        err.contexts()
            .map(|(l, s)| (format!("while parsing this {l}"), *s)),
        src,
    )
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/schema_def_parser.rs | src/parse/schema_def_parser.rs | use chumsky::{input::BorrowInput, prelude::*};
use crate::parse::{SS, Spanned};
use super::{
lexer::Token,
type_parser::{TypeExpr, type_expression_parser},
value_parser::{ValueExpr, value_expr_parser},
};
/// The `if <cond> [<value>]` guard attached to a schema field.
#[derive(Debug, Clone, PartialEq)]
pub struct IfBranch {
    /// Guard condition expression.
    pub condition: Spanned<ValueExpr>,
    /// Optional expression evaluated when the condition holds — presumably
    /// a validation/transform body; confirm against the type checker.
    pub value_expr: Option<Spanned<ValueExpr>>,
}
/// One `name: Type [if ...] [else ...]` entry inside a schema definition.
#[derive(Debug, Clone, PartialEq)]
pub struct SchemaField {
    pub name: String,
    pub type_expr: Spanned<TypeExpr>,
    /// Optional `if` guard for this field.
    pub if_branch: Option<Spanned<IfBranch>>,
    /// Optional `else` fallback expression.
    pub else_branch_value_expr: Option<Spanned<ValueExpr>>,
    /// Span of the whole field entry.
    pub span: SS,
}
/// A parsed `schema Name = { ... }` definition.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct SchemaDefinition {
    pub name: String,
    /// Fields, sorted by name by the parser.
    pub fields: Vec<SchemaField>,
    /// Leading doc comments, each with its span.
    pub comments: Vec<Spanned<String>>,
    pub span: SS,
    /// Initialized to `None` by the parser; presumably filled in by a
    /// later compilation phase — confirm.
    pub out_type: Option<Spanned<TypeExpr>>,
    /// Initialized to `None` by the parser; presumably filled in by a
    /// later compilation phase — confirm.
    pub schema_fn_type: Option<Spanned<TypeExpr>>,
}
/// Parses a schema definition of the form
/// `schema Name = { field: Type [if cond [expr]] [else expr], ... }`,
/// optionally preceded by doc comments. Requires at least one field, and
/// sorts the fields by name before building the [`SchemaDefinition`].
pub fn schema_definition_parser<'src, I, M>(
    make_input: M,
) -> impl Parser<'src, I, SchemaDefinition, extra::Err<Rich<'src, Token, SS>>> + Clone
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
    M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
    // Zero or more leading doc comments, each kept with its span.
    let doc_comments_parser = select_ref! { Token::DocComment(comment) => comment.to_string() }
        .map_with(|comment, ctx| (comment, ctx.span()))
        .repeated()
        .collect()
        .or_not();
    // `if <cond> [<expr>]` — optional guard for a field.
    let if_branch_parser = just(Token::If)
        .ignore_then(value_expr_parser(make_input.clone()))
        .then(value_expr_parser(make_input.clone()).or_not())
        .map_with(|(condition, maybe_value_expr), ctx| {
            (
                IfBranch {
                    condition,
                    value_expr: maybe_value_expr,
                },
                ctx.span(),
            )
        });
    // `else <expr>` — optional fallback for a field.
    let else_branch_parser = just(Token::Else)
        .ignore_then(value_expr_parser(make_input))
        .map(|value_expr| value_expr);
    // `name: Type [if ...] [else ...]`
    let field_parser = select_ref! { Token::Ident(identifier) => identifier.to_string() }
        .then_ignore(just(Token::ControlChar(':')))
        .then(type_expression_parser())
        .then(if_branch_parser.or_not())
        .then(else_branch_parser.or_not())
        .map_with(
            |(((identifier, type_expr), if_branch), else_branch), ctx| SchemaField {
                name: identifier,
                type_expr,
                if_branch,
                else_branch_value_expr: else_branch,
                span: ctx.span(),
            },
        );
    // Comma-separated field list; trailing comma allowed, minimum one field.
    let fields_parser = field_parser
        .separated_by(just(Token::ControlChar(',')))
        .allow_trailing()
        .at_least(1)
        .collect::<Vec<SchemaField>>();
    doc_comments_parser
        .then_ignore(just(Token::Schema))
        .then(select_ref! { Token::Ident(identifier) => identifier.to_string() })
        .then_ignore(just(Token::ControlChar('=')))
        .then_ignore(just(Token::ControlChar('{')))
        .then(fields_parser)
        .then_ignore(just(Token::ControlChar('}')))
        .map_with(|((doc_comments, identifier), mut fields), ctx| {
            // todo: do a check if all fields if's value_exprs have a block for the value expr
            // value_expr = match value_expr {
            //     (ValueExpr::Duck(x), loc) if x.is_empty() => (ValueExpr::Block(vec![]), loc),
            //     x @ (ValueExpr::Block(_), _) => x,
            //     _ => panic!("Function must be block"),
            // Canonical field order — presumably so later structural
            // comparisons are order-insensitive; confirm.
            fields.sort_by_key(|x| x.name.clone());
            SchemaDefinition {
                name: identifier,
                fields,
                span: ctx.span(),
                comments: doc_comments.unwrap_or_else(Vec::new),
                // Filled in by a later phase (see struct docs).
                out_type: None,
                schema_fn_type: None,
            }
        })
}
#[cfg(test)]
pub mod tests {
    use crate::parse::{lexer::lex_parser, make_input, value_parser::empty_range};

    use super::*;

    /// Round-trips a set of schema definitions through the lexer and the
    /// schema parser, asserting that every stage succeeds for the valid
    /// inputs and fails for the invalid ones.
    #[test]
    fn test_schema_parser() {
        let valid_schema_defs = vec![
            "schema X = { name: String }",
            "schema Y = { name: String else .not }",
            "schema Z = { name: String if !name.empty() }",
            "schema Z = { name: String if !name.empty() else .not }",
            "schema Z = { name: String if !name.empty() { 5 } else .not }",
        ];
        for valid_schema_def in valid_schema_defs {
            println!("lexing {valid_schema_def}");
            let lexer_parse_result = lex_parser("test", "").parse(valid_schema_def);
            // assert!(!x) / assert!(x) instead of assert_eq!(x, false/true)
            // (clippy::bool_assert_comparison).
            assert!(!lexer_parse_result.has_errors());
            assert!(lexer_parse_result.has_output());
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("schema parsing {valid_schema_def}");
            let typedef_parse_result =
                schema_definition_parser(make_input).parse(make_input(empty_range(), &tokens));
            assert!(!typedef_parse_result.has_errors());
            assert!(typedef_parse_result.has_output());
        }

        // Placeholder for known-invalid definitions; the element type is
        // annotated so the empty literal still infers.
        let invalid_schema_defs: Vec<&str> = vec![];
        for invalid_schema_def in invalid_schema_defs {
            println!("lexing {invalid_schema_def}");
            let lexer_parse_result = lex_parser("test", "").parse(invalid_schema_def);
            assert!(!lexer_parse_result.has_errors());
            assert!(lexer_parse_result.has_output());
            let Some(tokens) = lexer_parse_result.into_output() else {
                unreachable!()
            };
            println!("schema parsing {invalid_schema_def}");
            let typedef_parse_result = schema_definition_parser(make_input)
                .parse(make_input(empty_range(), tokens.as_slice()));
            assert!(typedef_parse_result.has_errors());
            assert!(!typedef_parse_result.has_output());
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/value_parser.rs | src/parse/value_parser.rs | use crate::{
Tag,
parse::{
Context, SS, Spanned, failure_with_occurence,
function_parser::LambdaFunctionExpr,
lexer::{FmtStringContents, HtmlStringContents},
source_file_parser::SourceFile,
type_parser::type_expression_parser,
},
parse_failure,
semantics::type_resolve::TypeEnv,
};
use super::{lexer::Token, type_parser::TypeExpr};
use chumsky::{input::BorrowInput, prelude::*, span::Span};
/// Type arguments at call/struct-construction sites are ordinary type
/// expressions.
pub type TypeParam = TypeExpr;

/// One arm of a `match` expression: a type pattern, optional `@ binding`,
/// optional `if` condition, and the arm's body.
#[derive(Debug, Clone, PartialEq)]
pub struct MatchArm {
    /// Type pattern this arm matches against.
    pub type_case: Spanned<TypeExpr>,
    /// Always `None` at parse time — presumably filled by type resolution;
    /// confirm.
    pub base: Option<Spanned<TypeExpr>>,
    /// Identifier bound via `@ name`, if present.
    pub identifier_binding: Option<String>,
    /// Guard expression following `if`, if present.
    pub condition: Option<Spanned<ValueExpr>>,
    /// The arm's body expression.
    pub value_expr: Spanned<ValueExpr>,
    pub span: SS,
}
/// One segment of a parsed format string: literal text or an interpolated
/// expression.
#[derive(Debug, Clone, PartialEq)]
pub enum ValFmtStringContents {
    String(String),
    Expr(Spanned<ValueExpr>),
}
/// One segment of a parsed HTML string: literal markup or an interpolated
/// expression.
#[derive(Debug, Clone, PartialEq)]
pub enum ValHtmlStringContents {
    String(String),
    Expr(Spanned<ValueExpr>),
}
/// Content inside a duckx component: either an HTML string (itself a list
/// of segments) or a plain expression.
#[derive(Debug, Clone, PartialEq)]
pub enum DuckxContents {
    HtmlString(Vec<ValHtmlStringContents>),
    Expr(Spanned<ValueExpr>),
}
/// A `let`/`const` variable declaration.
#[derive(Debug, Clone, PartialEq)]
pub struct Declaration {
    pub name: String,
    /// Explicit type annotation, if written.
    pub type_expr: Option<Spanned<TypeExpr>>,
    /// Initializer expression, if written.
    pub initializer: Option<Spanned<ValueExpr>>,
    /// `true` when declared with `const`, `false` for `let`.
    pub is_const: bool,
}
/// An assignment `target = value_expr`.
#[derive(Debug, Clone, PartialEq)]
pub struct Assignment {
    /// Assignable place expression (variable, field, index, ...).
    pub target: Spanned<ValueExpr>,
    pub value_expr: Spanned<ValueExpr>,
}
/// The AST node for every value-level expression in the language.
///
/// Child expressions are boxed `Spanned<ValueExpr>`s; type arguments use
/// [`TypeParam`]. `Raw*` variants are pre-resolution forms that later
/// phases replace (see the panic in the mangling pass on `RawVariable`).
#[derive(Debug, Clone, PartialEq)]
pub enum ValueExpr {
    Defer(Box<Spanned<ValueExpr>>),
    Async(Box<Spanned<ValueExpr>>),
    For {
        // (name, is_immutable, resolved element type) — the parser sets the
        // bool from the absence of `mut` and leaves the type as None.
        ident: (String, bool, Option<TypeExpr>),
        target: Box<Spanned<ValueExpr>>,
        block: Box<Spanned<ValueExpr>>,
    },
    FunctionCall {
        target: Box<Spanned<ValueExpr>>,
        params: Vec<Spanned<ValueExpr>>,
        type_params: Vec<Spanned<TypeParam>>,
    },
    // Literals.
    Int(u64, Option<Spanned<TypeExpr>>),
    String(String, bool),
    Bool(bool),
    Float(f64),
    Char(char),
    // Unresolved, possibly scope-qualified path (`a::b::c`); replaced
    // before the mangling pass runs.
    RawVariable(bool, Vec<String>),
    Variable(
        bool, // is_global
        String, // name
        Option<TypeExpr>, // type
        Option<bool>, // constness
        bool, // needs_copy (for emit)
    ),
    If {
        condition: Box<Spanned<ValueExpr>>,
        then: Box<Spanned<ValueExpr>>,
        r#else: Option<Box<Spanned<ValueExpr>>>,
    },
    While {
        condition: Box<Spanned<ValueExpr>>,
        body: Box<Spanned<ValueExpr>>,
    },
    Tuple(Vec<Spanned<ValueExpr>>),
    Block(Vec<Spanned<ValueExpr>>),
    Break,
    Continue,
    // Anonymous structural value: named fields without a nominal type.
    Duck(Vec<(String, Spanned<ValueExpr>)>),
    HtmlString(Vec<ValHtmlStringContents>),
    Tag(String),
    As(Box<Spanned<ValueExpr>>, Spanned<TypeExpr>),
    // Struct literal with an unresolved path name; resolved to `Struct`.
    RawStruct {
        is_global: bool,
        name: Vec<String>,
        fields: Vec<(String, Spanned<ValueExpr>)>,
        type_params: Vec<Spanned<TypeParam>>,
    },
    Struct {
        name: String,
        fields: Vec<(String, Spanned<ValueExpr>)>,
        type_params: Vec<Spanned<TypeParam>>,
    },
    FieldAccess {
        target_obj: Box<Spanned<ValueExpr>>,
        field_name: String,
    },
    Array(Vec<Spanned<ValueExpr>>, Option<Spanned<TypeExpr>>),
    Return(Option<Box<Spanned<ValueExpr>>>),
    VarAssign(Box<Spanned<Assignment>>),
    VarDecl(Box<Spanned<Declaration>>),
    // Arithmetic.
    Add(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    Sub(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    Mul(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    Div(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    Mod(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    BoolNegate(Box<Spanned<ValueExpr>>),
    Negate(Box<Spanned<ValueExpr>>),
    // Comparisons and boolean logic.
    Equals(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    NotEquals(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    LessThan(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    LessThanOrEquals(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    GreaterThan(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    GreaterThanOrEquals(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    And(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    Or(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    // Embedded Go code with an optional declared result type.
    InlineGo(String, Option<Spanned<TypeExpr>>),
    Lambda(Box<LambdaFunctionExpr>),
    ArrayAccess(Box<Spanned<ValueExpr>>, Box<Spanned<ValueExpr>>),
    Match {
        value_expr: Box<Spanned<ValueExpr>>,
        arms: Vec<MatchArm>,
        else_arm: Option<Box<MatchArm>>,
        span: SS,
    },
    FormattedString(Vec<ValFmtStringContents>),
    // References and dereference.
    Ref(Box<Spanned<ValueExpr>>),
    RefMut(Box<Spanned<ValueExpr>>),
    Deref(Box<Spanned<ValueExpr>>),
    // Bitwise operations.
    ShiftLeft {
        target: Box<Spanned<ValueExpr>>,
        amount: Box<Spanned<ValueExpr>>,
    },
    ShiftRight {
        target: Box<Spanned<ValueExpr>>,
        amount: Box<Spanned<ValueExpr>>,
    },
    BitAnd {
        lhs: Box<Spanned<ValueExpr>>,
        rhs: Box<Spanned<ValueExpr>>,
    },
    BitOr {
        lhs: Box<Spanned<ValueExpr>>,
        rhs: Box<Spanned<ValueExpr>>,
    },
    BitXor {
        lhs: Box<Spanned<ValueExpr>>,
        rhs: Box<Spanned<ValueExpr>>,
    },
    BitNot(Box<Spanned<ValueExpr>>),
}
/// Wraps a spanned expression into a single-element block.
pub trait IntoBlock {
    fn into_block(self) -> Spanned<ValueExpr>;
}

/// Wraps a spanned expression into a `Return`.
pub trait IntoReturn {
    fn into_return(self) -> Spanned<ValueExpr>;
}
impl IntoReturn for Spanned<ValueExpr> {
    /// Wraps this expression in a `Return`, reusing its span.
    fn into_return(self) -> Spanned<ValueExpr> {
        let span = self.1;
        let boxed = Box::new(self);
        (ValueExpr::Return(Some(boxed)), span)
    }
}
impl IntoBlock for Spanned<ValueExpr> {
    /// Wraps this expression in a one-element `Block`, reusing its span.
    fn into_block(self) -> Spanned<ValueExpr> {
        let span = self.1;
        let stmts = vec![self];
        (ValueExpr::Block(stmts), span)
    }
}
/// Result of the divergence analysis in [`value_expr_returns_never`]:
/// how far out of the current scope an expression escapes.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum NeverReturnAnalysisResult {
    Global, // entire function / Program
    Local,  // loop scope
    /// Diverges only on some paths (e.g. inside a loop body that may not run).
    Partial,
    /// Does not diverge.
    None,
}
/// Analyzes whether evaluating `v` escapes the current scope: `return`
/// escapes the whole function (`Global`), `break`/`continue` escape the
/// enclosing loop (`Local`), loop bodies that may diverge yield `Partial`,
/// and everything else yields `None`.
///
/// Panics (after rendering a diagnostic) when `break`/`continue` appear
/// outside a loop (`is_in_loop == false`).
pub fn value_expr_returns_never(
    v: &Spanned<ValueExpr>,
    is_in_loop: bool,
) -> NeverReturnAnalysisResult {
    let span = v.1;
    use NeverReturnAnalysisResult::*;
    match &v.0 {
        ValueExpr::Return(..) => Global,
        ValueExpr::Continue | ValueExpr::Break => {
            if !is_in_loop {
                let msg = "Can only use this in a loop";
                failure_with_occurence(msg, span, [(msg, span)]);
            }
            Local
        }
        ValueExpr::While { condition, body } => {
            // A diverging condition always runs, so it dominates the loop.
            let condition_res = value_expr_returns_never(condition, is_in_loop);
            if condition_res != None {
                return condition_res;
            }
            // The body may execute zero times, so divergence there is only
            // partial.
            if value_expr_returns_never(body, true) != None {
                Partial
            } else {
                None
            }
        }
        ValueExpr::For {
            ident: _,
            target,
            block,
        } => {
            // The iteration target is always evaluated.
            let target_res = value_expr_returns_never(target, is_in_loop);
            if target_res != None {
                return target_res;
            }
            // NOTE(review): unlike `While`, the body's result is propagated
            // as-is rather than being demoted to `Partial` — confirm this
            // asymmetry is intended. Behavior preserved here; only the
            // needless trailing `return` was removed.
            value_expr_returns_never(block, true)
        }
        _ => None,
    }
}
impl ValueExpr {
    /// Pairs this expression with an empty (synthetic) span.
    pub fn into_empty_span(self) -> Spanned<ValueExpr> {
        (self, empty_range())
    }
    /// Synthetic span, then wrapped in a one-element block.
    pub fn into_empty_span_and_block(self) -> Spanned<ValueExpr> {
        self.into_empty_span().into_block()
    }
    /// Synthetic span, wrapped in a block, wrapped in a `Return`.
    pub fn into_empty_span_and_block_and_return(self) -> Spanned<ValueExpr> {
        self.into_empty_span().into_block().into_return()
    }
    /// Whether this expression must be terminated by `;` when used as a
    /// non-final statement in a block (see `block_expr_parser`). Block-like
    /// forms (`if`, `while`, `for`, `match`, blocks, inline Go) do not need
    /// one; everything else does.
    pub fn needs_semicolon(&self) -> bool {
        match self {
            ValueExpr::Negate(..) => true,
            ValueExpr::RawStruct { .. } => true,
            ValueExpr::Async(..) => true,
            ValueExpr::Defer(..) => true,
            ValueExpr::As(..) => true,
            ValueExpr::For { .. } => false,
            ValueExpr::Deref(..) | ValueExpr::Ref(..) | ValueExpr::RefMut(..) => true,
            ValueExpr::HtmlString(..) => true,
            ValueExpr::If {
                condition: _,
                then: _,
                r#else: _,
            } => false,
            ValueExpr::While {
                condition: _,
                body: _,
            } => false,
            ValueExpr::Block(_) => false,
            ValueExpr::InlineGo(..) => false,
            ValueExpr::Match { .. } => false,
            // Every remaining variant is statement-like and requires `;`.
            ValueExpr::Add(..)
            | ValueExpr::Mul(..)
            | ValueExpr::Tag(..)
            | ValueExpr::Duck(..)
            | ValueExpr::Int(..)
            | ValueExpr::String(..)
            | ValueExpr::Float(..)
            | ValueExpr::Bool(..)
            | ValueExpr::Char(..)
            | ValueExpr::FieldAccess { .. }
            | ValueExpr::Array(..)
            | ValueExpr::ArrayAccess(..)
            | ValueExpr::Variable(..)
            | ValueExpr::RawVariable(..)
            | ValueExpr::Tuple(..)
            | ValueExpr::Break
            | ValueExpr::Continue
            | ValueExpr::Return(..)
            | ValueExpr::Struct { .. }
            | ValueExpr::VarDecl(..)
            | ValueExpr::VarAssign(..)
            | ValueExpr::BoolNegate(..)
            | ValueExpr::Equals(..)
            | ValueExpr::NotEquals(..)
            | ValueExpr::LessThan(..)
            | ValueExpr::LessThanOrEquals(..)
            | ValueExpr::GreaterThan(..)
            | ValueExpr::GreaterThanOrEquals(..)
            | ValueExpr::And(..)
            | ValueExpr::Or(..)
            | ValueExpr::Lambda(..)
            | ValueExpr::FormattedString(..)
            | ValueExpr::Sub(..)
            | ValueExpr::Div(..)
            | ValueExpr::Mod(..)
            | ValueExpr::BitAnd { .. }
            | ValueExpr::BitOr { .. }
            | ValueExpr::BitXor { .. }
            | ValueExpr::ShiftLeft { .. }
            | ValueExpr::ShiftRight { .. }
            | ValueExpr::BitNot(..)
            | ValueExpr::FunctionCall { .. } => true,
        }
    }
}
/// Parses a `{ expr; expr ... }` block.
///
/// Every non-final expression that requires a semicolon (per
/// [`ValueExpr::needs_semicolon`]) must have one; otherwise parsing aborts
/// with a rendered diagnostic. If the final expression carries a trailing
/// semicolon, an empty tuple is appended so the block evaluates to unit
/// rather than to that expression's value.
pub fn block_expr_parser<'src, I, M>(
    _make_input: M,
    value_expr_parser: impl Parser<'src, I, Spanned<ValueExpr>, extra::Err<Rich<'src, Token, SS>>>
    + Clone
    + 'src,
) -> impl Parser<'src, I, Spanned<ValueExpr>, extra::Err<Rich<'src, Token, SS>>> + Clone + 'src
where
    I: BorrowInput<'src, Token = Token, Span = SS>,
    M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
    value_expr_parser
        .clone()
        // Keep the optional `;` alongside each expression for the checks below.
        .then(just(Token::ControlChar(';')).or_not())
        .repeated()
        .collect::<Vec<_>>()
        .delimited_by(just(Token::ControlChar('{')), just(Token::ControlChar('}')))
        .map_with(|mut exprs, e| {
            if exprs.len() >= 2 {
                // All but the last statement must be properly terminated.
                for (expr, has_semi) in &exprs[..exprs.len() - 1] {
                    if expr.0.needs_semicolon() && has_semi.is_none() {
                        failure_with_occurence(
                            "This expression needs a semicolon",
                            expr.1,
                            [(
                                "This expression needs a semicolon at the end".to_string(),
                                expr.1,
                            )],
                        );
                    }
                }
            }
            // `{ expr; }` yields unit: append an empty tuple as the value.
            if !exprs.is_empty() && exprs.last().unwrap().1.is_some() {
                exprs.push(((empty_tuple(), exprs.last().unwrap().0.1), None));
            }
            (
                ValueExpr::Block(exprs.into_iter().map(|(expr, _)| expr).collect()),
                e.span(),
            )
        })
}
pub fn value_expr_parser<'src, I, M>(
make_input: M,
) -> impl Parser<'src, I, Spanned<ValueExpr>, extra::Err<Rich<'src, Token, SS>>> + Clone + 'src
where
I: BorrowInput<'src, Token = Token, Span = SS>,
M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
let make_input = Box::leak(Box::new(make_input));
recursive(
|value_expr_parser: Recursive<
dyn Parser<'src, I, Spanned<ValueExpr>, extra::Err<Rich<'src, Token, SS>>> + 'src,
>| {
let block_expression = block_expr_parser(make_input.clone(), value_expr_parser.clone());
let scope_res_ident = just(Token::ScopeRes)
.or_not()
.then(
select_ref! { Token::Ident(ident) => ident.to_string() }
.separated_by(just(Token::ScopeRes))
.at_least(1)
.collect::<Vec<_>>(),
)
.map(|(is_global, path)| ValueExpr::RawVariable(is_global.is_some(), path))
.map_with(|x, e| (x, e.span()));
let lambda_parser = {
let params_parser =
select_ref! { Token::Ident(identifier) => identifier.to_string() }
.then(
(just(Token::ControlChar(':')))
.ignore_then(type_expression_parser())
.or_not(),
)
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.collect::<Vec<_>>()
.boxed();
let return_type_parser = just(Token::ThinArrow)
.ignore_then(type_expression_parser())
.boxed();
just(Token::Mut)
.or_not()
.then(
just(Token::Function)
.ignore_then(just(Token::ControlChar('(')))
.ignore_then(params_parser)
.then_ignore(just(Token::ControlChar(')')))
.then(return_type_parser.or_not())
.then(block_expression.clone()),
)
.map(|(is_mut, ((params, return_type), value_expr))| {
ValueExpr::Lambda(
LambdaFunctionExpr {
is_mut: is_mut.is_some(),
params,
return_type,
value_expr: (
ValueExpr::Return(Some(value_expr.clone().into())),
value_expr.1,
),
}
.into(),
)
})
.boxed()
};
let params = value_expr_parser
.clone()
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.collect::<Vec<_>>()
.delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')')))
.boxed();
let match_arm_condition = just(Token::If)
.ignore_then(value_expr_parser.clone())
.or_not();
let match_arm_identifier_binding = just(Token::ControlChar('@'))
.ignore_then(select_ref! { Token::Ident(ident) => ident.to_string() })
.then(match_arm_condition)
.or_not();
let match_arm = type_expression_parser()
.then(match_arm_identifier_binding.clone())
.then_ignore(just(Token::ThickArrow))
.then(value_expr_parser.clone())
.map_with(|((type_expr, identifier), value_expr), ctx| MatchArm {
type_case: type_expr,
base: None,
identifier_binding: identifier.clone().map(|x| x.0),
condition: identifier.map(|x| x.1).unwrap_or_else(|| None),
value_expr,
span: ctx.span(),
});
let else_arm = just(Token::Else)
.then(match_arm_identifier_binding)
.then_ignore(just(Token::ThickArrow))
.then(value_expr_parser.clone())
.then_ignore(just(Token::ControlChar(',')).or_not())
// todo: add span of else
.map_with(|((_, identifier), value_expr), ctx| MatchArm {
// todo: check if typeexpr::any is correct for the else arm in pattern matching
type_case: (TypeExpr::Any, value_expr.1),
base: None,
identifier_binding: identifier.clone().map(|x| x.0),
condition: identifier.map(|x| x.1).unwrap_or_else(|| None),
value_expr,
span: ctx.span(),
});
let r#match = just(Token::Match)
.ignore_then(value_expr_parser.clone())
.then(
match_arm
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.collect::<Vec<_>>()
.then(else_arm.map(Box::new).or_not())
.delimited_by(just(Token::ControlChar('{')), just(Token::ControlChar('}'))),
)
.map_with(|(value_expr, (arms, else_arm)), ctx| ValueExpr::Match {
value_expr: Box::new(value_expr),
arms,
else_arm,
span: ctx.span(),
})
.map_with(|x, e| (x, e.span()))
.boxed();
let tuple = lambda_parser
.clone()
.or((just(Token::ControlChar('('))
.ignore_then(just(Token::ControlChar(')')))
.to(ValueExpr::Tuple(vec![])))
.or(value_expr_parser
.clone()
.separated_by(just(Token::ControlChar(',')))
.at_least(1)
.allow_trailing()
.collect::<Vec<_>>()
.delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')')))
.map(ValueExpr::Tuple)))
.map_with(|x, e| (x, e.span()))
.boxed();
let initializer = just(Token::ControlChar('='))
.ignore_then(value_expr_parser.clone())
.boxed();
let declare_type = just(Token::ControlChar(':'))
.ignore_then(type_expression_parser())
.or_not()
.boxed();
let declaration = choice((just(Token::Let), just(Token::Const)))
.then(
select_ref! { Token::Ident(identifier) => identifier.to_string() }
.map_with(|x, e| (x, e.span())),
)
.then(declare_type)
.then(initializer.or_not())
.map_with(
|(((let_or_const, (ident, _)), type_expr), initializer), e| {
(
ValueExpr::VarDecl(
(
Declaration {
name: ident,
type_expr,
initializer: initializer.clone(),
is_const: matches!(let_or_const, Token::Const),
},
initializer.as_ref().map(|obj| obj.1).unwrap_or(e.span()),
)
.into(),
),
e.span(),
)
},
)
.boxed();
let struct_type_params_parser = just(Token::ControlChar('<'))
.ignore_then(
type_expression_parser()
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.at_least(1)
.collect::<Vec<_>>(),
)
.then_ignore(just(Token::ControlChar('>')));
let struct_expression = just(Token::ScopeRes)
.or_not()
.map(|x| x.is_some())
.then(
select_ref! { Token::Ident(identifier) => identifier.clone() }
.separated_by(just(Token::ScopeRes))
.at_least(1)
.collect::<Vec<_>>(),
)
.then(struct_type_params_parser.or_not())
.then(
select_ref! { Token::Ident(ident) => ident.to_owned() }
.then_ignore(just(Token::ControlChar(':')))
.then(value_expr_parser.clone())
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.collect::<Vec<_>>()
.delimited_by(just(Token::ControlChar('{')), just(Token::ControlChar('}'))),
)
.map(
|(((is_global, identifier), generics), values)| ValueExpr::RawStruct {
is_global,
name: identifier,
fields: values,
type_params: generics.unwrap_or_default(),
},
)
.map_with(|x, e| (x, e.span()))
.boxed();
let for_parser = just(Token::For)
.ignore_then(
just(Token::Mut)
.or_not()
.then(select_ref! { Token::Ident(ident) => ident.to_owned() })
.then_ignore(just(Token::In))
.then(value_expr_parser.clone())
.delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')'))),
)
.then(block_expression.clone())
.map(|(((is_mut, ident), target), block)| ValueExpr::For {
ident: (ident, is_mut.is_none(), None),
target: Box::new(target),
block: Box::new(block),
})
.map_with(|x, e| (x, e.span()));
let duck_expression = select_ref! { Token::Ident(ident) => ident.to_owned() }
.then_ignore(just(Token::ControlChar(':')))
.then(value_expr_parser.clone())
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.collect::<Vec<_>>()
.delimited_by(just(Token::ControlChar('{')), just(Token::ControlChar('}')))
.filter(|x| !x.is_empty())
.map(|mut x| {
x.sort_by_key(|(name, _)| name.clone());
ValueExpr::Duck(x)
})
.map_with(|x, e| (x, e.span()))
.boxed();
let if_condition = value_expr_parser
.clone()
.delimited_by(just(Token::ControlChar('(')), just(Token::ControlChar(')')))
.boxed();
let if_body = block_expression.clone();
let if_with_condition_and_body = just(Token::If)
.ignore_then(if_condition.clone())
.then(if_body.clone())
.boxed();
let while_condition = if_condition.clone();
let while_body = block_expression.clone();
let while_with_condition_and_body = just(Token::While)
.ignore_then(while_condition.clone())
.then(while_body.clone())
.boxed();
let int = select_ref! { Token::IntLiteral(i) => *i }
.map(|x| ValueExpr::Int(x, None))
.map_with(|x, e| (x, e.span()))
.boxed();
let bool_val = select_ref! { Token::BoolLiteral(b) => *b }
.map(ValueExpr::Bool)
.map_with(|x, e| (x, e.span()))
.boxed();
let string_val = select_ref! { Token::StringLiteral(s) => s.to_owned() }
.map(|s| ValueExpr::String(s, true))
.map_with(|x, e| (x, e.span()));
let if_expr = if_with_condition_and_body
.clone()
.then(
just(Token::Else)
.ignore_then(if_with_condition_and_body.clone())
.repeated()
.collect::<Vec<(Spanned<ValueExpr>, Spanned<ValueExpr>)>>(),
)
.then(just(Token::Else).ignore_then(if_body.clone()).or_not())
.map(|(((condition, then), else_ifs), r#else)| ValueExpr::If {
condition: Box::new(condition),
then: Box::new(then),
r#else: else_ifs.into_iter().rfold(
r#else.map(Box::new),
|acc, (cond, then)| {
let span = then.1;
Some(Box::new((
ValueExpr::If {
condition: Box::new(cond),
then: Box::new(then),
r#else: acc,
},
span,
)))
},
),
})
.map_with(|x, e| (x, e.span()))
.boxed();
let char_expr = select_ref! { Token::CharLiteral(c) => *c }
.map(ValueExpr::Char)
.map_with(|x, e| (x, e.span()))
.boxed();
let float_expr = select_ref! { Token::IntLiteral(num) => *num }
.then_ignore(just(Token::ControlChar('.')))
.then(select_ref! { Token::IntLiteral(num) => *num })
.map(|(pre, frac)| {
ValueExpr::Float(format!("{pre}.{frac}").parse::<f64>().unwrap())
})
.map_with(|x, e| (x, e.span()))
.boxed();
let tag_identifier = choice((
select_ref! { Token::Ident(ident) => ident.to_string() },
just(Token::ControlChar('.')).map(|_| "DOT".to_string()),
))
.boxed();
let tag_expr = just(Token::ControlChar('.'))
.ignore_then(tag_identifier)
.map(|identifier| ValueExpr::Tag(identifier.clone()))
.map_with(|x, e| (x, e.span()))
.boxed();
#[derive(Debug, PartialEq, Clone)]
enum AtomPostParseUnit {
FuncCall(Vec<Spanned<ValueExpr>>, Option<Vec<Spanned<TypeParam>>>),
ArrayAccess(Spanned<ValueExpr>),
FieldAccess(String),
}
let fmt_string =
select_ref! { Token::FormatStringLiteral(elements) => elements.clone() }
.map({
let value_expr_parser = value_expr_parser.clone();
let make_input = make_input.clone();
move |contents| {
let contents = contents.leak();
let mut res = Vec::new();
for c in contents {
match c {
FmtStringContents::String(s) => {
res.push(ValFmtStringContents::String(s.to_owned()))
}
FmtStringContents::Tokens(s) => {
let span = SS {
start: s[0].1.start,
end: s.last().unwrap().1.end,
context: s[0].1.context,
};
if !s.is_empty() {
let (expr, expr_errors) = value_expr_parser
.parse(make_input(span, s.as_slice()))
.into_output_errors();
expr_errors.into_iter().for_each(|e| {
parse_failure(
span.context.file_name,
&Rich::<&str, SS>::custom(
SS {
start: e.span().start,
end: e.span().end,
context: Context {
file_name: span.context.file_name,
file_contents: span
.context
.file_contents,
},
},
format!(
"{}{} {}",
Tag::Parser,
Tag::Err,
e.reason()
),
),
span.context.file_contents,
);
});
res.push(ValFmtStringContents::Expr(expr.unwrap()));
}
}
}
}
ValueExpr::FormattedString(res)
}
})
.map_with(|x, e| (x, e.span()));
let html_string = select_ref! {
Token::HtmlString(s) => s.clone()
}
.map({
let value_expr_parser = value_expr_parser.clone();
let make_input = make_input.clone();
move |contents| {
let mut out_contents = Vec::new();
for c in contents {
match c {
HtmlStringContents::String(s) => {
out_contents.push(ValHtmlStringContents::String(s))
}
HtmlStringContents::Tokens(t) => {
// t.insert(0, (Token::ControlChar('{'), empty_range()));
// t.push((Token::ControlChar('}'), empty_range()));
let cl = t.clone();
let expr = value_expr_parser
.parse(make_input(empty_range(), t.leak()))
.into_result()
.unwrap_or_else(|_| panic!("invalid code {cl:?}"));
out_contents.push(ValHtmlStringContents::Expr(expr));
}
}
}
ValueExpr::HtmlString(out_contents)
}
})
.map_with(|x, e| (x, e.span()));
let duckx = select_ref! {
Token::InlineDuckx(contents) => contents.clone()
}
.map({
let value_expr_parser = value_expr_parser.clone();
let make_input = make_input.clone();
move |x| {
let cl = x.clone();
value_expr_parser
.parse(make_input(empty_range(), x.leak()))
.into_result()
.unwrap_or_else(|e| panic!("invavlid code {cl:?} {e:?}"))
}
});
// .map_with(|x, e| (x, e.span()));
let array = value_expr_parser
.clone()
.separated_by(just(Token::ControlChar(',')))
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/parse/function_parser.rs | src/parse/function_parser.rs | use chumsky::{input::BorrowInput, prelude::*};
use crate::{
parse::{
SS, Spanned, failure_with_occurence,
generics_parser::{Generic, generics_parser},
value_parser::empty_range,
},
semantics::type_resolve::FunHeader,
};
use super::{
lexer::Token,
type_parser::{TypeExpr, type_expression_parser},
value_parser::{ValueExpr, block_expr_parser, value_expr_parser},
};
pub type Param = (String, Spanned<TypeExpr>);
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct FunctionDefintion {
pub name: String,
pub return_type: Spanned<TypeExpr>,
pub params: Vec<Param>,
pub value_expr: Spanned<ValueExpr>,
pub generics: Vec<Spanned<Generic>>,
pub span: SS,
pub comments: Vec<Spanned<String>>,
}
impl FunctionDefintion {
pub fn to_header(&self) -> FunHeader {
let return_type = self.return_type.clone();
FunHeader {
params: self.params.iter().map(|x| x.1.clone()).collect(),
return_type,
}
}
pub fn type_expr(&self) -> Spanned<TypeExpr> {
// todo: retrieve correct span for function defintions typeexpr
let return_type = self.return_type.clone();
return (
TypeExpr::Fun(
self.params
.iter()
.map(|(name, type_expr)| (Some(name.to_owned()), type_expr.to_owned()))
.collect::<Vec<_>>(),
return_type.into(),
false,
),
self.value_expr.1,
);
}
}
impl Default for FunctionDefintion {
fn default() -> Self {
FunctionDefintion {
name: Default::default(),
return_type: TypeExpr::Tuple(vec![]).into_empty_span(),
params: Default::default(),
value_expr: ValueExpr::Return(Some(ValueExpr::Block(vec![]).into_empty_span().into()))
.into_empty_span(),
generics: vec![],
span: empty_range(),
comments: Vec::new(),
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct LambdaFunctionExpr {
pub is_mut: bool,
pub params: Vec<(String, Option<Spanned<TypeExpr>>)>,
pub return_type: Option<Spanned<TypeExpr>>,
pub value_expr: Spanned<ValueExpr>,
}
pub fn function_definition_parser<'src, I, M>(
make_input: M,
) -> impl Parser<'src, I, FunctionDefintion, extra::Err<Rich<'src, Token, SS>>> + Clone
where
I: BorrowInput<'src, Token = Token, Span = SS>,
M: Fn(SS, &'src [Spanned<Token>]) -> I + Clone + 'static,
{
let doc_comments_parser = select_ref! { Token::DocComment(comment) => comment.to_string() }
.map_with(|comment, ctx| (comment, ctx.span()))
.repeated()
.collect()
.or_not();
let param_parser = select_ref! { Token::Ident(identifier) => identifier.to_string() }
.then_ignore(just(Token::ControlChar(':')))
.then(type_expression_parser())
.map(|(identifier, type_expr)| (identifier, type_expr) as Param);
let params_parser = param_parser
.separated_by(just(Token::ControlChar(',')))
.allow_trailing()
.collect::<Vec<Param>>();
let return_type_parser = just(Token::ThinArrow).ignore_then(type_expression_parser());
doc_comments_parser
.then_ignore(just(Token::Function))
.then(select_ref! { Token::Ident(identifier) => identifier.to_string() })
.then(generics_parser().or_not())
.then_ignore(just(Token::ControlChar('(')))
.then(params_parser)
.then_ignore(just(Token::ControlChar(')')))
.then(return_type_parser.or_not())
.then(block_expr_parser(
make_input.clone(),
value_expr_parser(make_input.clone()),
))
.map_with(
|(((((doc_comments, identifier), generics), params), return_type), mut value_expr),
ctx| {
value_expr = match value_expr {
x @ (ValueExpr::Block(_), _) => x,
_ => {
let msg = "Function must be a block expression";
failure_with_occurence(msg, value_expr.1, [(msg, value_expr.1)]);
}
};
FunctionDefintion {
name: identifier,
return_type: return_type.unwrap_or((TypeExpr::Tuple(vec![]), ctx.span())),
params,
value_expr: (
ValueExpr::Return(Some(Box::new(value_expr.clone()))),
value_expr.1,
),
generics: generics.unwrap_or_default(),
span: ctx.span(),
comments: doc_comments.unwrap_or_else(Vec::new),
}
},
)
}
#[cfg(test)]
pub mod tests {
use crate::parse::{
lexer::lex_parser,
make_input,
value_parser::{empty_range, type_expr_into_empty_range, value_expr_into_empty_range},
};
use super::*;
#[test]
fn test_function_parser() {
let valid_function_definitions = vec![
"fn x(){}",
"fn x(x: String){}",
"fn x(x: { hallo: String, x: { y: {} }}){}",
"fn x() -> String {}",
"fn x() -> {x: String} {}",
"fn x() -> {x: String} { 5; }",
"fn x() -> {x: String} { 5; }",
"fn x<TYPE>() -> {x: String} { 5; }",
"fn x<TYPE, TYPE2>() -> {x: String} { 5; }",
"fn x<TYPE, TYPE2, TYPE3>() -> {x: String} { 5; }",
];
for valid_function_definition in valid_function_definitions {
println!("lexing {valid_function_definition}");
let lexer_parse_result = lex_parser("test", "").parse(valid_function_definition);
assert_eq!(lexer_parse_result.has_errors(), false);
assert_eq!(lexer_parse_result.has_output(), true);
let Some(tokens) = lexer_parse_result.into_output() else {
unreachable!()
};
println!("typedef_parsing {valid_function_definition}");
let typedef_parse_result =
function_definition_parser(make_input).parse(make_input(empty_range(), &tokens));
assert_eq!(typedef_parse_result.has_errors(), false);
assert_eq!(typedef_parse_result.has_output(), true);
}
let invalid_function_definitions = vec![];
for invalid_function_definition in invalid_function_definitions {
println!("lexing {invalid_function_definition}");
let lexer_parse_result = lex_parser("test", "").parse(invalid_function_definition);
assert_eq!(lexer_parse_result.has_errors(), false);
assert_eq!(lexer_parse_result.has_output(), true);
let Some(tokens) = lexer_parse_result.into_output() else {
unreachable!()
};
println!("typedef_parsing {invalid_function_definition}");
let typedef_parse_result = function_definition_parser(make_input)
.parse(make_input(empty_range(), tokens.as_slice()));
assert_eq!(typedef_parse_result.has_errors(), true);
assert_eq!(typedef_parse_result.has_output(), false);
}
}
#[test]
fn test_detailed_function_definitions() {
let test_cases = vec![
(
"fn y<TYPENAME>() {}",
FunctionDefintion {
name: "y".to_string(),
params: vec![],
return_type: TypeExpr::Tuple(vec![]).into_empty_span(),
generics: vec![(
Generic {
name: "TYPENAME".to_string(),
constraint: None,
},
empty_range(),
)],
value_expr: ValueExpr::Return(Some(
ValueExpr::Block(vec![]).into_empty_span().into(),
))
.into_empty_span(),
span: empty_range(),
comments: Vec::new(),
},
),
(
"fn y<TYPENAME, TYPENAME2>() {}",
FunctionDefintion {
name: "y".to_string(),
params: vec![],
return_type: TypeExpr::Tuple(vec![]).into_empty_span(),
generics: vec![
(
Generic {
name: "TYPENAME".to_string(),
constraint: None,
},
empty_range(),
),
(
Generic {
name: "TYPENAME2".to_string(),
constraint: None,
},
empty_range(),
),
],
value_expr: ValueExpr::Return(Some(
ValueExpr::Block(vec![]).into_empty_span().into(),
))
.into_empty_span(),
span: empty_range(),
comments: Vec::new(),
},
),
(
"fn y<TYPENAME, TYPENAME2, TYPENAME3>() {}",
FunctionDefintion {
name: "y".to_string(),
params: vec![],
return_type: TypeExpr::Tuple(vec![]).into_empty_span(),
generics: vec![
(
Generic {
name: "TYPENAME".to_string(),
constraint: None,
},
empty_range(),
),
(
Generic {
name: "TYPENAME2".to_string(),
constraint: None,
},
empty_range(),
),
(
Generic {
name: "TYPENAME3".to_string(),
constraint: None,
},
empty_range(),
),
],
value_expr: ValueExpr::Return(Some(
ValueExpr::Block(vec![]).into_empty_span().into(),
))
.into_empty_span(),
span: empty_range(),
comments: Vec::new(),
},
),
];
for (i, (src, expected_fns)) in test_cases.into_iter().enumerate() {
let lex_result = lex_parser("test", "").parse(src).into_result().expect(&src);
let parse_result = function_definition_parser(make_input)
.parse(make_input(empty_range(), &lex_result));
assert_eq!(
parse_result.has_errors(),
false,
"{i}: {} {:?} {:?}",
src,
lex_result,
parse_result
);
assert_eq!(parse_result.has_output(), true, "{i}: {}", src);
let mut output = parse_result.into_result().expect(&src);
output.generics.iter_mut().for_each(|generic| {
*generic = (generic.0.clone(), empty_range());
});
type_expr_into_empty_range(&mut output.return_type);
value_expr_into_empty_range(&mut output.value_expr);
output.span = empty_range();
assert_eq!(output, expected_fns, "{i}: {}", src);
}
}
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/semantics/typechecker.rs | src/semantics/typechecker.rs | use std::collections::HashMap;
use std::panic::Location;
use std::process;
use chumsky::container::Seq;
use colored::Colorize;
use crate::parse::struct_parser::{NamedDuckDefinition, StructDefinition};
use crate::parse::type_parser::{Duck, TypeExpr};
use crate::parse::value_parser::{empty_range, type_expr_into_empty_range};
use crate::parse::{Field, SS, failure_with_occurence};
use crate::parse::{
Spanned, failure,
value_parser::{ValFmtStringContents, ValueExpr},
};
use crate::semantics::ident_mangler::{MANGLE_SEP, mangle};
use crate::semantics::type_resolve::{TypeEnv, is_const_var, merge_all_or_type_expr};
impl TypeExpr {
pub fn as_clean_user_faced_type_name(&self) -> String {
return format!("{self}");
}
pub fn is_component_compatible(&self) -> bool {
match self {
TypeExpr::Duck(Duck { fields }) => fields
.iter()
.all(|x| x.type_expr.0.is_component_compatible()),
TypeExpr::Array(ty) => ty.0.is_component_compatible(),
TypeExpr::String(..)
| TypeExpr::Bool(..)
| TypeExpr::Float
| TypeExpr::Int
| TypeExpr::UInt => true,
_ => false,
}
}
pub fn from_value_expr_dereferenced(
value_expr: &Spanned<ValueExpr>,
type_env: &mut TypeEnv,
) -> Spanned<TypeExpr> {
let (res, _, _) =
TypeExpr::from_value_expr_dereferenced_with_count_and_mut(value_expr, type_env);
res
}
pub fn from_value_expr_dereferenced_with_count(
value_expr: &Spanned<ValueExpr>,
type_env: &mut TypeEnv,
) -> (Spanned<TypeExpr>, usize) {
let (res, counter, _) =
TypeExpr::from_value_expr_dereferenced_with_count_and_mut(value_expr, type_env);
(res, counter)
}
pub fn from_value_expr_dereferenced_with_count_and_mut(
value_expr: &Spanned<ValueExpr>,
type_env: &mut TypeEnv,
) -> (Spanned<TypeExpr>, usize, bool) {
let mut res = TypeExpr::from_value_expr(value_expr, type_env);
let mut counter = 0;
let mut is_mut = true;
loop {
if let TypeExpr::Ref(v) = res.0 {
res.0 = v.0;
counter += 1;
is_mut = false;
} else if let TypeExpr::RefMut(v) = res.0 {
res.0 = v.0;
counter += 1;
is_mut = true;
} else {
break;
}
}
(res, counter, is_mut)
}
#[track_caller]
pub fn from_value_expr(
value_expr: &Spanned<ValueExpr>,
type_env: &mut TypeEnv,
) -> Spanned<TypeExpr> {
let complete_span = &value_expr.1;
let value_expr = &value_expr.0;
return (
match value_expr {
ValueExpr::BitAnd { lhs: inner, rhs: _ }
| ValueExpr::BitOr { lhs: inner, rhs: _ }
| ValueExpr::BitXor { lhs: inner, rhs: _ }
| ValueExpr::ShiftLeft {
target: inner,
amount: _,
}
| ValueExpr::ShiftRight {
target: inner,
amount: _,
}
| ValueExpr::BitNot(inner) => {
let inner_type = TypeExpr::from_value_expr(inner, type_env);
inner_type.0
}
ValueExpr::RawStruct { .. } => panic!("raw struct should not be here"),
ValueExpr::Negate(v) => TypeExpr::from_value_expr(v.as_ref(), type_env).0,
ValueExpr::Async(e) => {
let inner = TypeExpr::from_value_expr(e, type_env);
let ValueExpr::FunctionCall {
target,
params,
type_params: _,
} = &e.0
else {
panic!("can only async func call")
};
if [target.as_ref()]
.into_iter()
.chain(params.iter())
.any(|v| TypeExpr::from_value_expr(v, type_env).0.is_never())
{
TypeExpr::Never
} else {
TypeExpr::Struct {
name: mangle(&["std", "sync", "Channel"]),
type_params: vec![(inner.0, *complete_span)],
}
}
}
ValueExpr::Defer(call) => {
let ValueExpr::FunctionCall {
target,
params,
type_params: _,
} = &call.0
else {
panic!("can only defer func call")
};
if [target.as_ref()]
.into_iter()
.chain(params.iter())
.any(|v| TypeExpr::from_value_expr(v, type_env).0.is_never())
{
TypeExpr::Never
} else {
TypeExpr::Statement
}
}
ValueExpr::As(v, t) => {
let v_type = TypeExpr::from_value_expr(v.as_ref(), type_env);
if v_type.0.is_never() {
TypeExpr::Never
} else {
check_type_compatability(t, &(v_type.0, v.1), type_env);
t.0.clone()
}
}
ValueExpr::For { .. } => TypeExpr::Statement,
ValueExpr::Ref(v) => {
let v_type = TypeExpr::from_value_expr(v, type_env);
if v_type.0.is_never() {
TypeExpr::Never
} else {
TypeExpr::Ref((v_type.0, v.1).into())
}
}
ValueExpr::RefMut(v) => {
let v_type = TypeExpr::from_value_expr(v, type_env);
if v_type.0.is_never() {
TypeExpr::Never
} else {
TypeExpr::RefMut((v_type.0, v.1).into())
}
}
ValueExpr::Deref(v) => {
let ty_expr = TypeExpr::from_value_expr(v, type_env);
if ty_expr.0.is_never() {
TypeExpr::Never
} else if !matches!(ty_expr.0, TypeExpr::Ref(..) | TypeExpr::RefMut(..)) {
failure_with_occurence(
"Can only dereference a reference",
*complete_span,
[("This is not a reference".to_string(), v.1)],
);
} else {
let (TypeExpr::Ref(t) | TypeExpr::RefMut(t)) = ty_expr.0 else {
unreachable!()
};
t.0
}
}
ValueExpr::HtmlString(..) => TypeExpr::Html, // TODO: CHECK FOR NEVER
ValueExpr::Tag(identifier) => TypeExpr::Tag(identifier.clone()),
ValueExpr::RawVariable(_x, p) => panic!("{}", p.join(" ").leak()),
ValueExpr::FormattedString(contents) => {
for c in contents {
if let ValFmtStringContents::Expr(e) = c {
let type_expr = TypeExpr::from_value_expr(e, type_env);
if type_expr.0.is_never() {
return (TypeExpr::Never, type_expr.1);
}
if !type_expr.0.is_string() {
let hints = [
(
"interpolated values inside a f-string must evaluate to a string".to_string(),
e.1,
),
(
format!(
"this is of type {}{}",
type_expr.0.as_clean_user_faced_type_name().yellow(),
if type_expr.0.implements_to_string(type_env) {
format!(
", which implements {}. Add the method-call after the value",
"to_string".yellow(),
)
} else {
String::new()
}
),
e.1,
),
];
failure_with_occurence("Incompatible Types", e.1, hints);
}
require(
type_expr.0.is_string(),
format!("Needs to be string, is {type_expr:?}"),
);
}
}
TypeExpr::String(None)
}
ValueExpr::ArrayAccess(target, idx) => {
let target_type = TypeExpr::from_value_expr_dereferenced(target, type_env);
let idx_type = TypeExpr::from_value_expr(idx, type_env);
if target_type.0.is_never() || idx_type.0.is_never() {
TypeExpr::Never
} else {
require(
target_type.0.is_array() || target_type.0.ref_is_array(),
"Needs to be array".into(),
);
require(idx_type.0.is_int(), "Needs to be int".into());
let TypeExpr::Array(array_type) = target_type.0 else {
panic!("{target_type:?}")
};
array_type.0.clone()
}
}
ValueExpr::Array(_, ty) => TypeExpr::Array(ty.as_ref().cloned().unwrap().into()),
ValueExpr::Lambda(lambda_expr) => TypeExpr::Fun(
lambda_expr
.params
.iter()
.map(|(name, type_expr)| {
(Some(name.clone()), type_expr.as_ref().cloned().unwrap())
})
.collect(),
Box::new(
lambda_expr
.return_type
.clone()
.unwrap_or(TypeExpr::unit_with_span(*complete_span)),
),
lambda_expr.is_mut,
),
ValueExpr::InlineGo(_, ty) => ty.as_ref().cloned().unwrap_or(TypeExpr::unit()).0,
ValueExpr::Int(_, t) => {
t.as_ref().cloned().map(|(x, _)| x).unwrap_or(TypeExpr::Int)
}
ValueExpr::Bool(..) => TypeExpr::Bool(None),
ValueExpr::Char(..) => TypeExpr::Char,
ValueExpr::Float(..) => TypeExpr::Float,
ValueExpr::String(..) => TypeExpr::String(None),
ValueExpr::Break => TypeExpr::Never,
ValueExpr::Continue => TypeExpr::Never,
ValueExpr::Return(..) => TypeExpr::Never,
ValueExpr::VarAssign(assignment) => {
let target_type = TypeExpr::from_value_expr(&assignment.0.target, type_env);
let value_type = TypeExpr::from_value_expr(&assignment.0.value_expr, type_env);
if target_type.0.is_never() || value_type.0.is_never() {
TypeExpr::Never
} else {
TypeExpr::Statement
}
}
ValueExpr::VarDecl(decl) => {
let decl = decl.as_ref();
if let Some(initializer) = decl.0.initializer.as_ref() {
let init_type = TypeExpr::from_value_expr(initializer, type_env);
if init_type.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
check_type_compatability(
decl.0.type_expr.as_ref().expect(
"compiler error: i expect the implicit types to be resolved by now",
),
&(init_type.0, initializer.1),
type_env,
);
}
TypeExpr::Statement
}
ValueExpr::Struct {
name,
fields,
type_params,
} => {
let _struct_def = type_env.get_struct_def_with_type_params_mut(
name.as_str(),
type_params,
*complete_span,
);
for field in fields {
let ty = TypeExpr::from_value_expr(&field.1, type_env);
if ty.0.is_never() {
return (TypeExpr::Never, field.1.1);
}
}
TypeExpr::Struct {
name: name.to_string(),
type_params: type_params.clone(),
}
}
ValueExpr::Tuple(fields) => {
let mut is_never = false;
let types = fields
.iter()
.map(|value_expr| {
// todo: check if we really want to unconst tuple values
// maybe we need a way to tell that it should be consted here. e.g.
// `(5,"hallo")`
(
match TypeExpr::from_value_expr(value_expr, type_env).0 {
TypeExpr::Never => {
is_never = true;
TypeExpr::Never
}
x => x,
},
value_expr.1,
)
})
.collect::<Vec<Spanned<TypeExpr>>>();
if is_never {
TypeExpr::Never
} else {
TypeExpr::Tuple(types)
}
}
ValueExpr::Duck(fields) => {
let mut f = Vec::new();
for (name, (value_expr, span)) in fields {
let spanned_type = (
TypeExpr::from_value_expr(&(value_expr.clone(), *span), type_env),
*span,
);
if spanned_type.0.0.is_never() {
return (TypeExpr::Never, spanned_type.1);
}
f.push(Field {
name: name.clone(),
type_expr: spanned_type.0,
});
}
TypeExpr::Duck(Duck { fields: f })
}
ValueExpr::Add(left, right) => {
let left_type_expr: Spanned<TypeExpr> =
TypeExpr::from_value_expr(left, type_env);
if left_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let right_type_expr: Spanned<TypeExpr> =
TypeExpr::from_value_expr(right, type_env);
if right_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
require(
left_type_expr.0.is_number(),
format!(
"Addition '+' is only allowed for numbers. You've used {} + {}.",
left_type_expr.0.as_go_type_annotation(type_env),
right_type_expr.0.as_go_type_annotation(type_env)
),
);
check_type_compatability(
&(left_type_expr.0.clone(), left.as_ref().1),
&(right_type_expr.0, right.as_ref().1),
type_env,
);
left_type_expr.0.unconst()
}
ValueExpr::Sub(left, right) => {
let left_type_expr: Spanned<TypeExpr> =
TypeExpr::from_value_expr(left, type_env);
if left_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let right_type_expr: Spanned<TypeExpr> =
TypeExpr::from_value_expr(right, type_env);
if right_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
require(
left_type_expr.0.is_number(),
format!(
"Subtraction '-' is only allowed for numbers. You've used {} - {}.",
left_type_expr.0.as_go_type_annotation(type_env),
right_type_expr.0.as_go_type_annotation(type_env)
),
);
check_type_compatability(
&(left_type_expr.0.clone(), left.as_ref().1),
&(right_type_expr.0, right.as_ref().1),
type_env,
);
left_type_expr.0.unconst()
}
ValueExpr::Mod(left, right) => {
let left_type_expr = TypeExpr::from_value_expr(left, type_env);
if left_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let right_type_expr = TypeExpr::from_value_expr(right, type_env);
if right_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
require(
left_type_expr.0.is_number(),
format!(
"Modulo '-' is only allowed for numbers. You've used {} % {}.",
left_type_expr.0.as_go_type_annotation(type_env),
right_type_expr.0.as_go_type_annotation(type_env)
),
);
check_type_compatability(
&(left_type_expr.0.clone(), left.as_ref().1),
&(right_type_expr.0, right.as_ref().1),
type_env,
);
left_type_expr.0.unconst()
}
ValueExpr::Div(left, right) => {
let left_type_expr = TypeExpr::from_value_expr(left, type_env);
if left_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let right_type_expr = TypeExpr::from_value_expr(right, type_env);
if right_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
require(
left_type_expr.0.is_number(),
format!(
"Division '/' is only allowed for numbers. You've used {} / {}.",
left_type_expr.0.as_go_type_annotation(type_env),
right_type_expr.0.as_go_type_annotation(type_env)
),
);
check_type_compatability(
&(left_type_expr.0.clone(), left.as_ref().1),
&(right_type_expr.0, right.as_ref().1),
type_env,
);
left_type_expr.0.unconst()
}
ValueExpr::Equals(lhs, rhs)
| ValueExpr::NotEquals(lhs, rhs)
| ValueExpr::LessThan(lhs, rhs)
| ValueExpr::LessThanOrEquals(lhs, rhs)
| ValueExpr::GreaterThan(lhs, rhs)
| ValueExpr::GreaterThanOrEquals(lhs, rhs)
| ValueExpr::And(lhs, rhs)
| ValueExpr::Or(lhs, rhs) => {
let left_type_expr = TypeExpr::from_value_expr(lhs, type_env);
if left_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let right_type_expr = TypeExpr::from_value_expr(rhs, type_env);
if right_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
check_type_compatability(
&(left_type_expr.0.clone(), lhs.1),
&(right_type_expr.0, rhs.1),
type_env,
);
TypeExpr::Bool(None)
}
ValueExpr::Mul(left, right) => {
let left_type_expr = TypeExpr::from_value_expr(left, type_env);
if left_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let right_type_expr = TypeExpr::from_value_expr(right, type_env);
if right_type_expr.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
require(
left_type_expr.0.is_number(),
format!(
"Multiplication '*' is only allowed for numbers. You've used {} + {}.",
left_type_expr.0.as_go_type_annotation(type_env),
right_type_expr.0.as_go_type_annotation(type_env)
),
);
check_type_compatability(
&(left_type_expr.0.clone(), left.1),
&(right_type_expr.0, right.1),
type_env,
);
left_type_expr.0.unconst()
}
ValueExpr::FunctionCall {
target,
params,
type_params,
} => {
let target_type = TypeExpr::from_value_expr(target, type_env);
if target_type.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
let mut in_param_types = Vec::new();
for param in params {
let param_type = (TypeExpr::from_value_expr(param, type_env), param.1);
if param_type.0.0.is_never() {
return (TypeExpr::Never, *complete_span);
}
in_param_types.push(param_type);
}
// todo: type_params
if !type_params.is_empty() {
match &target.0 {
ValueExpr::Variable(_, var_name, ..) => {
let new_fn_name = [var_name.to_string()]
.into_iter()
.chain(
type_params
.iter()
.map(|(t, _)| t.as_clean_go_type_name(type_env)),
)
.collect::<Vec<_>>()
.join(MANGLE_SEP);
let fn_def = type_env
.generic_fns_generated
.iter()
.find(|fn_def| fn_def.name.as_str() == new_fn_name.clone());
let fn_def = fn_def.expect("this should exist");
let params = fn_def.params.clone();
for (index, param) in params.iter().enumerate() {
let given_type =
in_param_types.get(index).expect("todo: len doesnt match");
// TODO: check if we should clone the typeenv
check_type_compatability_full(
¶m.1.clone(),
&given_type.0,
&mut type_env.clone(),
false,
);
}
return fn_def.return_type.clone();
}
ValueExpr::FieldAccess {
target_obj,
field_name,
} => {
let t =
TypeExpr::from_value_expr_dereferenced(target_obj, type_env);
let TypeExpr::Struct {
name: struct_name,
type_params: struct_type_params,
} = t.0
else {
panic!("{t:?}")
};
let replaced_generics = type_env
.get_struct_def_with_type_params_mut(
&struct_name,
&struct_type_params,
*complete_span,
)
.name
.clone();
let (_new_fn_name, global_generic_generation_id) = {
let new_method_name = [field_name.clone()]
.into_iter()
.chain(
type_params
.iter()
.map(|(t, _)| t.as_clean_go_type_name(type_env)),
)
.collect::<Vec<_>>();
let mut gen_id = new_method_name.clone();
gen_id.insert(0, replaced_generics.clone());
(new_method_name.join(MANGLE_SEP), gen_id.join(MANGLE_SEP))
};
let header =
type_env.get_method_header(&global_generic_generation_id);
let params = header.params;
for (index, param) in params.iter().enumerate() {
let given_type =
in_param_types.get(index).expect("todo: len doesnt match");
check_type_compatability_full(
param,
&given_type.0,
type_env,
false,
);
}
return header.return_type.clone();
}
_ => {}
};
}
let mut target_type = TypeExpr::from_value_expr(target.as_ref(), type_env);
if let TypeExpr::Fun(param_types, return_type, _) = &mut target_type.0 {
param_types
.iter_mut()
.enumerate()
.for_each(|(index, param_type)| {
if matches!(param_type.1.0, TypeExpr::Any) {
return;
}
let Some(in_param_type) = in_param_types.get(index) else {
failure_with_occurence(
"Missing Parameter in Function Call",
target.1,
[
(
format!(
"This function requires a {}",
param_type.1.0.as_clean_user_faced_type_name(),
),
param_type.1.1,
),
(
format!(
"You need to pass a {} to this function",
param_type.1.0.as_clean_user_faced_type_name(),
),
target.1,
),
],
)
};
if let Some(param_name) = ¶m_type.0
&& param_name == "self"
{
check_type_compatability_full(
¶m_type.1,
&in_param_type.0,
type_env,
is_const_var(¶ms[index].0),
);
} else {
check_type_compatability_full(
¶m_type.1,
&in_param_type.0,
type_env,
is_const_var(¶ms[index].0),
);
}
// variant any replace
if let TypeExpr::Array(boxed) =
&in_param_types.get(index).unwrap().0.0
&& let TypeExpr::Or(_) = boxed.as_ref().0
{
param_type.1.0 =
TypeExpr::Array(Box::new((TypeExpr::Any, empty_range())))
}
});
return return_type.as_ref().clone();
}
failure(
target.as_ref().1.context.file_name,
"Tried to invoke a non-function value".to_string(),
(
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/semantics/mod.rs | src/semantics/mod.rs | pub mod ident_mangler;
pub mod type_resolve;
pub mod typechecker;
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/semantics/ident_mangler.rs | src/semantics/ident_mangler.rs | use std::collections::HashMap;
use tree_sitter::{Node, Parser};
use crate::parse::{
duckx_component_parser::DuckxComponent,
function_parser::LambdaFunctionExpr,
jsx_component_parser::{Edit, JsxComponent, JsxSourceUnit, do_edits},
type_parser::{Duck, TypeExpr},
value_parser::{ValFmtStringContents, ValHtmlStringContents, ValueExpr},
};
#[derive(Debug, Clone, PartialEq)]
pub struct MangleEnv {
pub imports: HashMap<String, (bool, Vec<String>)>,
pub sub_mods: Vec<String>,
pub global_prefix: Vec<String>,
pub names: Vec<Vec<String>>,
pub types: Vec<Vec<String>>,
pub jsx_components: Vec<String>,
pub duckx_components: Vec<String>,
}
pub const MANGLE_SEP: &str = "_____";
pub fn mangle(p: &[impl AsRef<str>]) -> String {
p.iter()
.map(|x| x.as_ref().to_string())
.collect::<Vec<_>>()
.join(MANGLE_SEP)
}
pub fn unmangle(s: &str) -> Vec<String> {
s.split(MANGLE_SEP).map(String::from).collect()
}
impl MangleEnv {
pub fn mangle_component(&self, prefix: &[String], comp: &str) -> Option<Vec<String>> {
let is_global = comp.starts_with("::");
let ident = if is_global && comp.len() >= 3 {
&comp[2..]
} else {
comp
}
.split("::")
.map(String::from)
.collect::<Vec<_>>();
let prefix = if is_global { &[] } else { prefix };
if !is_global
&& let Some((is_glob, import_path)) = self.resolve_import(ident.first()?.clone())
{
let mut res = Vec::new();
if !is_glob {
res.extend_from_slice(prefix);
} else {
// res.extend_from_slice(&self.global_prefix);
}
res.extend(import_path);
res.extend(ident);
return Some(res);
}
if self.jsx_components.contains(ident.first().unwrap()) {
let mut x = Vec::new();
if is_global {
// x.extend_from_slice(&self.global_prefix);
} else {
x.extend_from_slice(prefix);
}
x.extend_from_slice(&ident);
return Some(x);
}
None
}
pub fn is_imported_name(&self, x: &String) -> bool {
self.is_top_level_ident(x) && self.imports.contains_key(x)
}
pub fn is_imported_type(&self, x: &String) -> bool {
self.is_top_level_type(x) && self.imports.contains_key(x)
}
pub fn local_defined(&self, n: &String) -> bool {
self.names.last().filter(|x| x.contains(n)).is_some()
}
pub fn resolve_import(&self, mut sym: String) -> Option<(bool, Vec<String>)> {
let mut result = None;
if self.sub_mods.contains(&sym) {
return Some((false, vec![]));
}
while let Some((is_glob, import_path)) = self.imports.get(&sym) {
result = result
.map(|(g, p)| {
(g || *is_glob, {
let mut import_path = import_path.to_owned();
import_path.extend(p);
import_path
})
})
.or(Some((*is_glob, import_path.to_owned())));
if import_path.is_empty() {
break;
}
sym = import_path.first().unwrap().clone();
}
result
}
    /// Mangles a type path `ident` against the current module `prefix`.
    ///
    /// Import resolution is tried first (unless the path is global); failing
    /// that, the path must name a top-level type of this file. Returns `None`
    /// when neither applies.
    pub fn mangle_type(
        &self,
        is_global: bool,
        prefix: &[String],
        ident: &[String],
    ) -> Option<Vec<String>> {
        // Global paths ignore the current module's prefix.
        let prefix = if is_global { &[] } else { prefix };
        if !is_global
            && let Some((is_glob, import_path)) = self.resolve_import(ident.first()?.clone())
        {
            let mut res = Vec::new();
            if !is_glob {
                res.extend_from_slice(prefix);
            } else {
                // res.extend_from_slice(&self.global_prefix);
            }
            res.extend(import_path);
            res.extend_from_slice(ident);
            return Some(res);
        }
        if self.is_top_level_type(ident.first()?) {
            let mut x = Vec::new();
            if is_global {
                // x.extend_from_slice(&self.global_prefix);
            } else {
                x.extend_from_slice(prefix);
            }
            x.extend_from_slice(ident);
            return Some(x);
        }
        None
    }
    /// Mangles a value-identifier path; locally-declared names are left
    /// untouched (`None`).
    ///
    /// NOTE(review): unlike `mangle_type`/`mangle_component`, the glob-import
    /// branch here *does* prepend `global_prefix` — confirm the asymmetry is
    /// intentional.
    pub fn mangle_ident(
        &self,
        is_global: bool,
        prefix: &[String],
        ident: &[String],
    ) -> Option<Vec<String>> {
        // Locals shadow imports and top-level items: never mangle them.
        if self.local_defined(ident.first()?) {
            return None;
        }
        let prefix = if is_global { &[] } else { prefix };
        if let Some((is_glob, import_path)) = self.resolve_import(ident.first()?.clone()) {
            let mut res = Vec::new();
            if !is_glob {
                res.extend_from_slice(prefix);
            } else {
                res.extend_from_slice(&self.global_prefix);
            }
            res.extend(import_path);
            res.extend_from_slice(ident);
            return Some(res);
        }
        if self.is_top_level_ident(ident.first()?) || self.is_top_level_type(ident.first()?) {
            let mut x = Vec::new();
            x.extend_from_slice(prefix);
            x.extend_from_slice(ident);
            return Some(x);
        }
        None
    }
pub fn is_top_level_type(&self, ident: &String) -> bool {
for i in 1..self.types.len() {
if self.types[i].contains(ident) {
return false;
}
}
self.types
.first()
.map(|x| x.contains(ident))
.unwrap_or(false)
}
pub fn is_top_level_ident(&self, ident: &String) -> bool {
for i in 1..self.names.len() {
if self.names[i].contains(ident) {
return false;
}
}
self.names
.first()
.map(|x| x.contains(ident))
.unwrap_or(false)
}
pub fn insert_ident(&mut self, ident: String) {
let n = self.names.last_mut().unwrap();
if !n.contains(&ident) {
n.push(ident);
}
}
pub fn insert_type(&mut self, type_name: String) {
let t = self.types.last_mut().unwrap();
if !t.contains(&type_name) {
t.push(type_name);
}
}
pub fn push_idents(&mut self) {
self.names.push(vec![]);
}
pub fn pop_idents(&mut self) {
if self.names.len() == 1 {
panic!("Cant pop last env");
}
self.names.pop();
}
pub fn push_types(&mut self) {
self.types.push(vec![]);
}
pub fn pop_types(&mut self) {
if self.types.len() == 1 {
panic!("Cant pop last env");
}
self.types.pop();
}
}
/// Recursively rewrites type references inside `type_expr` into their mangled
/// form, replacing `RawTypeName` nodes with resolved `TypeName` nodes.
///
/// Panics on an already-resolved `TypeName` — the mangler must run before any
/// pass that produces them.
pub fn mangle_type_expression(
    type_expr: &mut TypeExpr,
    prefix: &Vec<String>,
    mangle_env: &mut MangleEnv,
) {
    match type_expr {
        TypeExpr::TypeName(..) => panic!("type name shouldn't be here"),
        TypeExpr::Ref(t) | TypeExpr::RefMut(t) => {
            mangle_type_expression(&mut t.0, prefix, mangle_env)
        }
        TypeExpr::RawTypeName(is_global, path, type_params) => {
            // TODO: type params
            // Resolve the path, then prepend the crate-global prefix.
            if let Some(mangled) = mangle_env.mangle_type(*is_global, prefix, path) {
                *path = mangle_env.global_prefix.clone();
                path.extend(mangled);
            }
            for (type_param, _) in type_params.iter_mut() {
                mangle_type_expression(type_param, prefix, mangle_env);
            }
            // Replace the raw node with its fully-resolved counterpart.
            *type_expr = TypeExpr::TypeName(true, mangle(path), type_params.clone());
        }
        TypeExpr::Duck(Duck { fields }) => {
            for f in fields {
                mangle_type_expression(&mut f.type_expr.0, prefix, mangle_env);
            }
        }
        TypeExpr::Tuple(fields) => {
            for f in fields {
                mangle_type_expression(&mut f.0, prefix, mangle_env);
            }
        }
        TypeExpr::Fun(params, return_type, _) => {
            for (_, param_type) in params {
                mangle_type_expression(&mut param_type.0, prefix, mangle_env);
            }
            mangle_type_expression(&mut return_type.0, prefix, mangle_env);
        }
        TypeExpr::Or(s) => {
            for t in s {
                mangle_type_expression(&mut t.0, prefix, mangle_env);
            }
        }
        TypeExpr::Array(t) => {
            mangle_type_expression(&mut t.0, prefix, mangle_env);
        }
        // Primitive / leaf types carry no paths to rewrite.
        _ => {}
    }
}
/// Mangles a duckx component: first its props type, then every identifier in
/// its body expression.
pub fn mangle_duckx_component(
    comp: &mut DuckxComponent,
    global_prefix: &Vec<String>,
    prefix: &Vec<String>,
    mangle_env: &mut MangleEnv,
) {
    let (props, _) = &mut comp.props_type;
    mangle_type_expression(props, prefix, mangle_env);
    let (body, _) = &mut comp.value_expr;
    mangle_value_expr(body, global_prefix, prefix, mangle_env);
}
/// Rewrites component identifiers inside a JSX component's JavaScript source
/// to their mangled names, splicing them in as `${...}` template expressions.
pub fn mangle_jsx_component(
    comp: &mut JsxComponent,
    _global_prefix: &[String],
    prefix: &[String],
    mangle_env: &mut MangleEnv,
) {
    let units = comp.find_units();
    let mut edits = Vec::new();
    for (range, unit) in units.iter() {
        if let JsxSourceUnit::Ident = unit {
            let old_ident = &comp.javascript_source.0[range.start_byte..range.end_byte];
            //todo(@Apfelfrosch): use our scope resolution syntax
            // `--` is the JS-safe spelling of the `::` path separator.
            let ident_str = mangle_env.mangle_component(prefix, &old_ident.replace("--", "::"));
            if let Some(ident_str) = ident_str {
                // Delete the original identifier, then insert the mangled
                // replacement at the same byte offset.
                edits.push((range.start_byte, Edit::Delete(old_ident.len())));
                edits.push((
                    range.start_byte,
                    Edit::Insert(format!("${{{}}}", mangle(&ident_str))),
                ));
            }
        }
    }
    do_edits(&mut comp.javascript_source.0, &mut edits);
}
/// Recursively rewrites every identifier and type reference inside
/// `value_expr` into its mangled, fully-qualified form.
///
/// * `global_prefix` — module path of the crate root.
/// * `prefix` — module path of the file currently being mangled.
/// * `mangle_env` — scope stack, import table and top-level symbol sets.
///
/// `Raw*` placeholder nodes (`RawStruct`, `RawVariable`) are replaced by their
/// resolved counterparts (`Struct`, `Variable`) along the way.
///
/// Bugfixes vs. the previous version:
/// * the `If` else-branch pushed an identifier scope that was never popped,
///   leaking one scope per `if/else` expression;
/// * the InlineGo byte-offset bookkeeping underflowed `usize` whenever a
///   mangled identifier was shorter than the original.
pub fn mangle_value_expr(
    value_expr: &mut ValueExpr,
    global_prefix: &Vec<String>,
    prefix: &Vec<String>,
    mangle_env: &mut MangleEnv,
) {
    match value_expr {
        ValueExpr::BitAnd { lhs, rhs }
        | ValueExpr::BitOr { lhs, rhs }
        | ValueExpr::BitXor { lhs, rhs } => {
            mangle_value_expr(&mut lhs.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut rhs.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::BitNot(d) => mangle_value_expr(&mut d.0, global_prefix, prefix, mangle_env),
        ValueExpr::ShiftLeft { target, amount } | ValueExpr::ShiftRight { target, amount } => {
            mangle_value_expr(&mut target.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut amount.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::RawStruct {
            is_global,
            name,
            fields,
            type_params,
        } => {
            // Resolve the struct path, then replace the raw node below.
            if let Some(mangled) = mangle_env.mangle_type(*is_global, prefix, name) {
                let mut m = mangle_env.global_prefix.clone();
                m.extend(mangled);
                *name = m;
            }
            fields.iter_mut().for_each(|(_, value_expr)| {
                mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env)
            });
            for (g, _) in type_params.iter_mut() {
                mangle_type_expression(g, prefix, mangle_env);
            }
            *value_expr = ValueExpr::Struct {
                name: mangle(name),
                fields: fields.clone(),
                type_params: type_params.clone(),
            };
        }
        ValueExpr::Negate(d) | ValueExpr::Async(d) | ValueExpr::Defer(d) => {
            mangle_value_expr(&mut d.0, global_prefix, prefix, mangle_env)
        }
        ValueExpr::As(v, t) => {
            mangle_value_expr(&mut v.0, global_prefix, prefix, mangle_env);
            mangle_type_expression(&mut t.0, prefix, mangle_env);
        }
        ValueExpr::For {
            ident: _,
            target,
            block,
        } => {
            mangle_value_expr(&mut target.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut block.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::Deref(t) | ValueExpr::Ref(t) | ValueExpr::RefMut(t) => {
            mangle_value_expr(&mut t.0, global_prefix, prefix, mangle_env)
        }
        ValueExpr::HtmlString(contents) => {
            for c in contents {
                if let ValHtmlStringContents::Expr(e) = c {
                    mangle_value_expr(&mut e.0, global_prefix, prefix, mangle_env);
                }
            }
        }
        // Literals and nullary expressions carry no identifiers.
        ValueExpr::Int(..)
        | ValueExpr::String(..)
        | ValueExpr::Bool(..)
        | ValueExpr::Float(..)
        | ValueExpr::Return(None)
        | ValueExpr::Tag(..)
        | ValueExpr::Char(..) => {}
        ValueExpr::Continue => {}
        ValueExpr::Break => {}
        ValueExpr::ArrayAccess(target, idx) => {
            mangle_value_expr(&mut target.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut idx.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::Match {
            value_expr,
            arms,
            else_arm,
            span: _,
        } => {
            mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env);
            // Each arm's binding lives in its own scope.
            for arm in arms {
                mangle_type_expression(&mut arm.type_case.0, prefix, mangle_env);
                mangle_env.push_idents();
                if let Some(identifier) = &arm.identifier_binding {
                    mangle_env.insert_ident(identifier.clone());
                }
                mangle_value_expr(&mut arm.value_expr.0, global_prefix, prefix, mangle_env);
                mangle_env.pop_idents();
            }
            if let Some(arm) = else_arm {
                mangle_type_expression(&mut arm.type_case.0, prefix, mangle_env);
                mangle_env.push_idents();
                if let Some(identifier) = &arm.identifier_binding {
                    mangle_env.insert_ident(identifier.clone());
                }
                mangle_value_expr(&mut arm.value_expr.0, global_prefix, prefix, mangle_env);
                mangle_env.pop_idents();
            }
        }
        ValueExpr::FormattedString(contents) => {
            for c in contents {
                if let ValFmtStringContents::Expr(e) = c {
                    mangle_value_expr(&mut e.0, global_prefix, prefix, mangle_env);
                }
            }
        }
        ValueExpr::Array(exprs, _ty) => {
            for expr in exprs {
                mangle_value_expr(&mut expr.0, global_prefix, prefix, mangle_env);
            }
        }
        ValueExpr::InlineGo(t, ty) => {
            if let Some(ty) = ty {
                mangle_type_expression(&mut ty.0, prefix, mangle_env);
            }
            // Parse the embedded Go snippet so identifiers can be rewritten.
            let mut parser = Parser::new();
            parser
                .set_language(&tree_sitter_go::LANGUAGE.into())
                .expect("Couldn't set go grammar");
            let src = parser.parse(t.as_bytes(), None).unwrap();
            let root_node = src.root_node();
            // Walks the Go AST collecting identifiers that appear in
            // expression statements, while registering locally-declared
            // variables so they are never mangled.
            fn trav(
                s: &Node,
                t: &[u8],
                e: &mut MangleEnv,
                out: &mut Vec<(tree_sitter::Range, String)>,
            ) {
                fn extract_all_ident(t: &[u8], n: &Node) -> Vec<(tree_sitter::Range, String)> {
                    if n.grammar_name() == "selector_expression" {
                        return vec![(n.range(), n.utf8_text(t).unwrap().to_string())];
                    }
                    if n.grammar_name() == "identifier" {
                        return vec![(n.range(), n.utf8_text(t).unwrap().to_string())];
                    }
                    let mut res = Vec::new();
                    for i in 0..n.child_count() {
                        let x = extract_all_ident(t, &n.child(i).unwrap().clone());
                        res.extend(x);
                    }
                    res
                }
                let declared_var_ident = match s.grammar_name() {
                    "short_var_declaration" => {
                        Some(s.child(0).unwrap().utf8_text(t).unwrap().to_string())
                    }
                    "var_declaration" => Some(
                        s.child(1)
                            .unwrap()
                            .child(0)
                            .unwrap()
                            .utf8_text(t)
                            .unwrap()
                            .to_string(),
                    ),
                    _ => None,
                };
                if s.grammar_name() == "expression_statement" {
                    let i = extract_all_ident(t, s);
                    out.extend(i);
                }
                // TODO: respect additional identifer scopes like blocks and lambdas
                if let Some(i) = declared_var_ident {
                    e.insert_ident(i);
                }
                for i in 0..s.child_count() {
                    trav(&s.child(i).unwrap(), t, e, out);
                }
            }
            let mut o = Vec::new();
            trav(&root_node, t.as_bytes(), mangle_env, &mut o);
            // Signed running offset: a mangled name may be shorter *or*
            // longer than the original, so unsigned arithmetic would
            // underflow here.
            let mut translation: isize = 0;
            for (range, ident) in o {
                let mangled_ident =
                    mangle_env.mangle_ident(false, prefix, std::slice::from_ref(&ident));
                if let Some(mangled_ident) = mangled_ident {
                    let mangled = mangle(&mangled_ident);
                    let start = (range.start_byte as isize + translation) as usize;
                    let end = (range.end_byte as isize + translation) as usize;
                    t.drain(start..end);
                    t.insert_str(start, &mangled);
                    translation += mangled.len() as isize - ident.len() as isize;
                }
            }
        }
        ValueExpr::Lambda(lambda_expr) => {
            let LambdaFunctionExpr {
                is_mut: _,
                params,
                return_type,
                value_expr,
            } = &mut **lambda_expr;
            for (_, param_type) in params {
                if let Some(param_type) = param_type {
                    mangle_type_expression(&mut param_type.0, prefix, mangle_env);
                }
            }
            if let Some(return_type) = return_type {
                mangle_type_expression(&mut return_type.0, prefix, mangle_env);
            }
            mangle_env.push_idents();
            mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env);
            mangle_env.pop_idents();
        }
        ValueExpr::FunctionCall {
            target,
            params,
            type_params,
            ..
        } => {
            // TODO: type params
            mangle_value_expr(&mut target.0, global_prefix, prefix, mangle_env);
            params.iter_mut().for_each(|param| {
                mangle_value_expr(&mut param.0, global_prefix, prefix, mangle_env)
            });
            for param in type_params {
                mangle_type_expression(&mut param.0, prefix, mangle_env);
            }
        }
        ValueExpr::RawVariable(is_global, path) => {
            if let Some(mangled) = mangle_env.mangle_ident(*is_global, prefix, path) {
                *path = mangled;
            }
            *value_expr = ValueExpr::Variable(true, mangle(path), None, None, true);
        }
        ValueExpr::Variable(..) => panic!("variable shouldn't be here. {value_expr:?}"),
        ValueExpr::If {
            condition,
            then,
            r#else,
        } => {
            mangle_value_expr(&mut condition.0, global_prefix, prefix, mangle_env);
            mangle_env.push_idents();
            mangle_value_expr(&mut then.0, global_prefix, prefix, mangle_env);
            mangle_env.pop_idents();
            if let Some(r#else) = r#else {
                mangle_env.push_idents();
                mangle_value_expr(&mut r#else.0, global_prefix, prefix, mangle_env);
                // Bugfix: the else-branch scope was previously never popped.
                mangle_env.pop_idents();
            }
        }
        ValueExpr::While { condition, body } => {
            mangle_value_expr(&mut condition.0, global_prefix, prefix, mangle_env);
            mangle_env.push_idents();
            mangle_value_expr(&mut body.0, global_prefix, prefix, mangle_env);
            mangle_env.pop_idents();
        }
        ValueExpr::Tuple(value_exprs) => value_exprs.iter_mut().for_each(|value_expr| {
            mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env)
        }),
        ValueExpr::Block(value_exprs) => {
            mangle_env.push_idents();
            value_exprs.iter_mut().for_each(|value_expr| {
                mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env)
            });
            mangle_env.pop_idents();
        }
        ValueExpr::Duck(items) => items.iter_mut().for_each(|(_, value_expr)| {
            mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env)
        }),
        ValueExpr::Struct {
            name,
            fields,
            type_params,
        } => {
            if let Some(mangled) = mangle_env.mangle_type(false, prefix, std::slice::from_ref(name))
            {
                let mut m = mangle_env.global_prefix.clone();
                m.extend(mangled);
                *name = mangle(&m);
            }
            fields.iter_mut().for_each(|(_, value_expr)| {
                mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env)
            });
            for (g, _) in type_params {
                mangle_type_expression(g, prefix, mangle_env);
            }
        }
        ValueExpr::FieldAccess { target_obj, .. } => {
            mangle_value_expr(&mut target_obj.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::Return(Some(value_expr)) => {
            mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env)
        }
        ValueExpr::VarAssign(assignment) => {
            mangle_value_expr(
                &mut assignment.0.target.0,
                global_prefix,
                prefix,
                mangle_env,
            );
            mangle_value_expr(
                &mut assignment.0.value_expr.0,
                global_prefix,
                prefix,
                mangle_env,
            );
        }
        ValueExpr::VarDecl(declaration) => {
            let declaration = &mut declaration.0;
            if let Some(type_expr) = &mut declaration.type_expr {
                mangle_type_expression(&mut type_expr.0, prefix, mangle_env);
            }
            // Register the local before mangling its initializer so later
            // uses in this scope are not mangled.
            mangle_env.insert_ident(declaration.name.clone());
            if let Some(initializer) = declaration.initializer.as_mut() {
                mangle_value_expr(&mut initializer.0, global_prefix, prefix, mangle_env);
            }
        }
        ValueExpr::Add(lhs, rhs) => {
            mangle_value_expr(&mut lhs.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut rhs.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::Mul(lhs, rhs)
        | ValueExpr::Sub(lhs, rhs)
        | ValueExpr::Mod(lhs, rhs)
        | ValueExpr::Div(lhs, rhs) => {
            mangle_value_expr(&mut lhs.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut rhs.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::Equals(lhs, rhs)
        | ValueExpr::NotEquals(lhs, rhs)
        | ValueExpr::LessThan(lhs, rhs)
        | ValueExpr::LessThanOrEquals(lhs, rhs)
        | ValueExpr::GreaterThan(lhs, rhs)
        | ValueExpr::GreaterThanOrEquals(lhs, rhs)
        | ValueExpr::And(lhs, rhs)
        | ValueExpr::Or(lhs, rhs) => {
            mangle_value_expr(&mut lhs.0, global_prefix, prefix, mangle_env);
            mangle_value_expr(&mut rhs.0, global_prefix, prefix, mangle_env);
        }
        ValueExpr::BoolNegate(value_expr) => {
            mangle_value_expr(&mut value_expr.0, global_prefix, prefix, mangle_env);
        }
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/semantics/type_resolve.rs | src/semantics/type_resolve.rs | use colored::Colorize;
use std::{
cell::{Cell, RefCell},
collections::{HashMap, HashSet},
rc::Rc,
sync::mpsc::Sender,
};
use chumsky::container::Container;
use indexmap::IndexMap;
use crate::{
parse::{
Field, SS, Spanned, SpannedMutRef,
duckx_component_parser::DuckxComponent,
extensions_def_parser::ExtensionsDef,
failure_with_occurence,
function_parser::{FunctionDefintion, LambdaFunctionExpr},
generics_parser::Generic,
jsx_component_parser::{
Edit, JsxComponent, JsxComponentDependencies, JsxSourceUnit, do_edits,
},
schema_def_parser::SchemaDefinition,
source_file_parser::SourceFile,
struct_parser::{NamedDuckDefinition, StructDefinition},
test_parser::TestCase,
type_parser::{Duck, TypeDefinition, TypeExpr},
value_parser::{
Assignment, Declaration, IntoBlock, IntoReturn, MatchArm, ValFmtStringContents,
ValHtmlStringContents, ValueExpr, empty_range,
},
},
semantics::{
ident_mangler::{MANGLE_SEP, mangle, unmangle},
typechecker::{check_type_compatability, check_type_compatability_full},
},
tags::Tag,
};
/// Type-resolves a duckx component body with `props` bound in a fresh scope.
fn typeresolve_duckx_component(c: &mut DuckxComponent, type_env: &mut TypeEnv) {
    type_env.push_identifier_types();
    let props_type = c.props_type.0.clone();
    type_env.insert_identifier_type("props".to_string(), props_type, false, false);
    typeresolve_value_expr((&mut c.value_expr.0, c.value_expr.1), type_env);
    type_env.pop_identifier_types();
}
/// Type-resolves all function definitions of an `extensions` block.
///
/// Each extension method is registered under its target-type-specific access
/// name in `type_env.extension_functions` before its body is resolved, so the
/// methods can reference each other. A fresh identifier scope binding `self`
/// to the target type is pushed for the duration of the block.
fn typeresolve_extensions_def(extensions_def: &mut ExtensionsDef, type_env: &mut TypeEnv) {
    type_env.push_identifier_types();
    type_env.insert_identifier_type(
        "self".to_string(),
        extensions_def.target_type_expr.0.clone(),
        false,
        false,
    );
    let type_expr = extensions_def.target_type_expr.clone();
    for extension_method in &mut extensions_def.function_definitions {
        let extension_function_name = type_expr
            .0
            .build_extension_access_function_name(&extension_method.0.name.clone(), type_env);
        // Skip methods that were already registered by an earlier pass.
        if type_env
            .extension_functions
            .iter()
            .any(|(existing, _)| *existing == extension_function_name)
        {
            continue;
        }
        for (_, p) in &mut extension_method.0.params {
            resolve_all_aliases_type_expr(p, type_env);
            process_keyof_in_type_expr(&mut p.0, type_env);
        }
        resolve_all_aliases_type_expr(&mut extension_method.0.return_type, type_env);
        process_keyof_in_type_expr(&mut extension_method.0.return_type.0, type_env);
        let underlying_fn_type = extension_method.0.type_expr();
        // The access function takes `self` first and yields the underlying
        // function type (reused instead of recomputing it).
        let access_fn_type = TypeExpr::Fun(
            vec![(Some("self".to_string()), type_expr.clone())],
            Box::new(underlying_fn_type.clone()),
            // todo: mutable extension fns?
            false,
        );
        type_env
            .extension_functions
            .insert(extension_function_name, (underlying_fn_type, access_fn_type));
        typeresolve_function_definition(&mut extension_method.0, type_env);
    }
    // Bugfix: this scope (holding `self`) was previously never popped,
    // leaking one identifier scope per extensions block.
    type_env.pop_identifier_types();
}
/// Type-resolves a test case body inside its own identifier scope.
fn typeresolve_test_case(test_case: &mut TestCase, type_env: &mut TypeEnv) {
    type_env.push_identifier_types();
    let body = (&mut test_case.body.0, test_case.body.1);
    typeresolve_value_expr(body, type_env);
    type_env.pop_identifier_types();
}
/// Normalizes a JSX component's JavaScript source (rewriting JSX regions into
/// `html\`...\`` template literals) and records which other client components
/// it references.
fn typeresolve_jsx_component(c: &mut JsxComponent, type_env: &mut TypeEnv) {
    let units = c.find_units();
    let mut edits = Vec::new();
    for (range, unit) in units.iter() {
        match unit {
            JsxSourceUnit::Jsx => {
                // Wrap the whole JSX region in an html tagged template.
                edits.push((range.start_byte, Edit::Insert("html`".to_string())));
                edits.push((range.end_byte, Edit::Insert("`".to_string())));
            }
            JsxSourceUnit::OpeningJsx => edits.push((range.start_byte, Edit::Delete(2))),
            JsxSourceUnit::ClosingJsx => edits.push((range.start_byte, Edit::Delete(3))),
            JsxSourceUnit::Expression => {
                // Ensure embedded expressions use template interpolation
                // (`${...}`) unless a `$` is already present.
                if range.start_byte > 0
                    && &c.javascript_source.0[range.start_byte - 1..(range.start_byte)] != "$"
                {
                    edits.push((range.start_byte, Edit::Insert("$".to_string())))
                }
            }
            JsxSourceUnit::Ident => {
                // here we could implement rpc calls
                // Track referenced components so their code is shipped too.
                let ident = &c.javascript_source.0[range.start_byte..range.end_byte];
                if let Some(found_comp) = type_env.get_component(ident) {
                    let found_name = found_comp.name.clone();
                    type_env
                        .get_component_dependencies(c.name.clone())
                        .client_components
                        .push(found_name);
                }
            }
        }
    }
    do_edits(&mut c.javascript_source.0, &mut edits);
}
/// A function's signature as seen by the typechecker: parameter types plus
/// return type, without a body.
#[derive(Debug, Clone)]
pub struct FunHeader {
    pub params: Vec<Spanned<TypeExpr>>,
    pub return_type: Spanned<TypeExpr>,
}
/// Mutable state of the type-resolution pass: scope stacks, known
/// definitions, and caches for monomorphised generic instantiations.
#[derive(Debug, Clone)]
pub struct TypeEnv<'a> {
    // Scope stacks (innermost scope last).
    pub identifier_types: Vec<HashMap<String, (TypeExpr, bool, bool)>>, // (type_expr, is_const, is_schema)
    pub type_aliases: Vec<HashMap<String, TypeExpr>>,
    pub extension_functions: HashMap<String, (Spanned<TypeExpr>, TypeExpr)>, // key = extension function name, (actual_fn_type, access_fn_type)
    pub function_headers: HashMap<String, FunHeader>,
    // Top-level definitions collected from the source file.
    pub function_definitions: Vec<FunctionDefintion>,
    pub jsx_components: Vec<JsxComponent>,
    pub duckx_components: Vec<DuckxComponent>,
    pub jsx_component_dependencies: HashMap<String, JsxComponentDependencies>,
    pub struct_definitions: Vec<StructDefinition>,
    pub schema_defs: Vec<SchemaDefinition>,
    pub named_duck_definitions: Vec<NamedDuckDefinition>,
    pub type_definitions: Vec<TypeDefinition>,
    // Caches of monomorphised generic items, keyed/found by mangled name.
    pub generic_fns_generated: Vec<FunctionDefintion>,
    pub generic_structs_generated: Vec<StructDefinition>,
    pub generic_ducks_generated: Vec<NamedDuckDefinition>,
    pub generic_methods_generated: HashMap<String, Vec<FunctionDefintion>>,
    // Bookkeeping to avoid re-resolving / re-generating the same item.
    pub resolved_methods: HashMap<String, HashSet<String>>,
    pub prevent_generic_generation: HashSet<String>,
    // Channel feeding class-name candidates to the tailwind emitter, if any.
    pub tailwind_sender: Option<&'a Sender<String>>,
    pub is_recursive_type_alias: HashSet<String>,
    pub total_structs_resolved: HashSet<String>,
    pub all_go_imports: &'static HashSet<String>,
}
impl Default for TypeEnv<'_> {
    /// Builds an environment with one (file-level) scope on every stack and a
    /// single built-in generic function, `parse_json<T>(json_str) -> T | .err`.
    fn default() -> Self {
        Self {
            identifier_types: vec![HashMap::new()],
            type_aliases: vec![HashMap::new()],
            extension_functions: HashMap::new(),
            jsx_components: Vec::new(),
            duckx_components: Vec::new(),
            jsx_component_dependencies: HashMap::new(),
            function_headers: HashMap::new(),
            // Built-in `parse_json`: the body is a placeholder InlineGo; the
            // real implementation is emitted by the Go backend.
            function_definitions: vec![FunctionDefintion {
                name: "parse_json".to_string(),
                return_type: TypeExpr::Or(vec![
                    TypeExpr::TemplParam("T".to_string()).into_empty_span(),
                    TypeExpr::Tag("err".to_string()).into_empty_span(),
                ])
                .into_empty_span(),
                params: vec![(
                    "json_str".to_string(),
                    TypeExpr::String(None).into_empty_span(),
                )],
                value_expr: ValueExpr::InlineGo(
                    String::new(),
                    Some(TypeExpr::Never.into_empty_span()),
                )
                .into_empty_span()
                .into_block()
                .into_return(),
                generics: vec![(
                    Generic {
                        name: "T".to_string(),
                        constraint: None,
                    },
                    empty_range(),
                )],
                span: empty_range(),
                comments: vec![],
            }],
            struct_definitions: Vec::new(),
            schema_defs: Vec::new(),
            named_duck_definitions: Vec::new(),
            type_definitions: Vec::new(),
            resolved_methods: Default::default(),
            total_structs_resolved: Default::default(),
            generic_fns_generated: Vec::new(),
            generic_structs_generated: Vec::new(),
            generic_ducks_generated: Vec::new(),
            generic_methods_generated: HashMap::new(),
            prevent_generic_generation: HashSet::new(),
            tailwind_sender: None,
            is_recursive_type_alias: HashSet::new(),
            // Deliberately leaked: the field requires a `'static` set and the
            // environment lives for the whole compilation.
            all_go_imports: Box::leak(Box::new(HashSet::new())),
        }
    }
}
/// A structural type discovered while scanning for ducks/tuples/tags/arrays
/// that need generated representations.
#[derive(Clone, Debug)]
pub enum NeedsSearchResult {
    Duck { fields: Vec<Field> },
    Tuple { fields: Vec<Spanned<TypeExpr>> },
    Tag { name: String },
    Array { type_expr: Spanned<TypeExpr> },
}
/// Aggregated result of a type scan: every type expression encountered plus
/// the parameter names that were used.
#[derive(Clone, Debug)]
pub struct TypesSummary {
    pub types_used: Vec<TypeExpr>,
    pub param_names_used: Vec<String>,
}
/// Builds the cache key for a generic struct instantiation:
/// the struct name followed by `_`-separated Go type names of its arguments.
pub fn build_struct_generic_id(
    struct_name: &str,
    type_params: &[Spanned<TypeExpr>],
    type_env: &mut TypeEnv,
) -> Box<str> {
    type_params
        .iter()
        .fold(struct_name.to_string(), |mut id, spanned| {
            id.push('_');
            id.push_str(&spanned.0.as_clean_go_type_name(type_env));
            id
        })
        .into_boxed_str()
}
impl TypeEnv<'_> {
    /// Records `method_name` as resolved for `type_name`.
    ///
    /// Returns `true` when the method was *not* resolved before (i.e. this
    /// call inserted it), mirroring `HashSet::insert`. NOTE(review): the
    /// previous comment claimed the opposite; callers relying on the old
    /// wording should be double-checked.
    pub fn mark_resolved(&mut self, type_name: &str, method_name: &str) -> bool {
        self.resolved_methods
            .entry(type_name.to_string())
            .or_default()
            .insert(method_name.to_string())
    }
pub fn is_resolved(&self, type_name: &str, method_name: &str) -> bool {
self.resolved_methods
.get(type_name)
.unwrap_or(&Default::default())
.contains(method_name)
}
    /// Returns the (possibly generic-instantiated) named-duck definition for
    /// `name` with the given `type_params`, monomorphising and caching it on
    /// first use.
    ///
    /// Aborts with a compile error at `span` when no duck named `name`
    /// exists; panics when a non-generic duck lookup fails.
    pub fn get_duck_def_with_type_params_mut<'a>(
        &'a mut self,
        name: &str,
        type_params: &[Spanned<TypeExpr>],
        span: SS,
    ) -> &'a mut NamedDuckDefinition {
        let generic_id = build_struct_generic_id(name, type_params, self);
        // Fast path: this instantiation was generated before.
        if !type_params.is_empty()
            && let Some((idx, _def)) = self
                .generic_ducks_generated
                .iter()
                .enumerate()
                .find(|(_, stored_def)| stored_def.name.as_str() == generic_id.as_ref())
        {
            return &mut self.generic_ducks_generated[idx];
        }
        if type_params.is_empty() {
            return self
                .named_duck_definitions
                .iter_mut()
                .find(|d| d.name == name)
                .unwrap();
        }
        let cloned_def = self
            .named_duck_definitions
            .iter()
            .chain(self.generic_ducks_generated.iter())
            .find(|user_struct_definition| user_struct_definition.name.as_str() == name)
            .cloned();
        if let Some(mut cloned_def) = cloned_def {
            // Pair each declared generic with its argument, checking
            // constraints along the way.
            let generic_arguments = cloned_def
                .generics
                .iter()
                .map(|(x, _)| x)
                .zip(type_params.iter())
                .fold(IndexMap::new(), |mut acc, (def, arg)| {
                    if let Some(c) = def.constraint.as_ref() {
                        check_type_compatability(c, arg, self);
                    }
                    acc.insert(def.name.clone(), arg.0.clone());
                    acc
                });
            let new_duck_name = [cloned_def.name.clone()]
                .into_iter()
                .chain(
                    generic_arguments
                        .iter()
                        .map(|(_, x)| x.as_clean_go_type_name(self)),
                )
                .collect::<Vec<_>>()
                .join(MANGLE_SEP);
            cloned_def.generics = vec![];
            // Only generate each instantiation once.
            if self
                .prevent_generic_generation
                .insert(new_duck_name.clone())
            {
                replace_generics_in_named_duck_def(&mut cloned_def, &generic_arguments, self);
                cloned_def.name = new_duck_name.clone();
                for f in &mut cloned_def.fields {
                    resolve_all_aliases_type_expr(&mut f.type_expr, self);
                }
                self.generic_ducks_generated.push(cloned_def.clone());
            }
            self.generic_ducks_generated
                .iter_mut()
                .find(|f| f.name.as_str() == new_duck_name.as_str())
                .unwrap()
        } else {
            failure_with_occurence(
                format!("This duck does not exist {name}"),
                span,
                [(format!("duck {name} does not exist"), span)],
            );
        }
    }
pub fn get_struct_def_with_type_params_mut<'a>(
&'a mut self,
name: &str,
type_params: &[Spanned<TypeExpr>],
span: SS,
) -> &'a mut StructDefinition {
let generic_id = build_struct_generic_id(name, type_params, self);
if !type_params.is_empty()
&& let Some((idx, _def)) = self
.generic_structs_generated
.iter()
.enumerate()
.find(|(_, stored_def)| stored_def.name.as_str() == generic_id.as_ref())
{
return &mut self.generic_structs_generated[idx];
}
if type_params.is_empty() {
return self
.struct_definitions
.iter_mut()
.find(|d| d.name == name)
.unwrap_or_else(|| {
failure_with_occurence(
"Unknown Struct",
span,
[(format!("Struct {name} does not exist"), span)],
);
});
}
let cloned_def = self
.struct_definitions
.iter()
.chain(self.generic_structs_generated.iter())
.find(|user_struct_definition| user_struct_definition.name.as_str() == name)
.cloned();
if let Some(mut cloned_def) = cloned_def {
let generic_arguments = cloned_def
.generics
.iter()
.map(|(x, _)| x)
.zip(type_params.iter())
.fold(IndexMap::new(), |mut acc, (def, arg)| {
if let Some(c) = def.constraint.as_ref() {
check_type_compatability(c, arg, self);
}
acc.insert(def.name.clone(), arg.0.clone());
acc
});
let new_struct_name = [cloned_def.name.clone()]
.into_iter()
.chain(
generic_arguments
.iter()
.map(|(_, x)| x.as_clean_go_type_name(self)),
)
.collect::<Vec<_>>()
.join(MANGLE_SEP);
cloned_def.generics = vec![];
if self
.prevent_generic_generation
.insert(new_struct_name.clone())
{
replace_generics_in_struct_definition(&mut cloned_def, &generic_arguments, self);
cloned_def.name = new_struct_name.clone();
self.generic_structs_generated.push(cloned_def.clone());
cloned_def.name = name.to_string();
typeresolve_struct_def(&mut cloned_def, type_params.to_vec(), self, false);
self.generic_structs_generated
.retain(|f| f.name.as_str() != new_struct_name.as_str());
cloned_def.name = new_struct_name.clone();
self.generic_structs_generated.push(cloned_def);
}
self.generic_structs_generated
.iter_mut()
.find(|f| f.name.as_str() == new_struct_name.as_str())
.unwrap()
} else {
failure_with_occurence(
"Unkown Struct",
span,
[(format!("Struct {name} does not exist"), span)],
);
}
}
pub fn check_for_tailwind(&self, s: &str) {
if let Some(sender) = self.tailwind_sender.as_ref() {
sender.send(s.to_string()).expect("tailwind channel closed");
}
}
pub fn has_component(&self, name: &str) -> bool {
self.jsx_components.iter().any(|x| x.name.as_str() == name)
}
pub fn get_component_dependencies(&mut self, name: String) -> &mut JsxComponentDependencies {
self.jsx_component_dependencies.entry(name).or_default()
}
pub fn get_duckx_component(&self, name: &str) -> Option<&DuckxComponent> {
self.duckx_components.iter().find(|x| x.name == name)
}
    /// Transitive closure of client components that `name` depends on,
    /// including `name` itself when it is a registered component.
    ///
    /// NOTE(review): the recursion has no visited-set — mutually dependent
    /// components would recurse forever; confirm cycles cannot occur.
    pub fn get_full_component_dependencies(&mut self, name: &str) -> HashSet<String> {
        let mut out = self
            .jsx_component_dependencies
            .entry(name.to_string())
            .or_default()
            .client_components
            .clone()
            .into_iter()
            .flat_map(|dep| {
                // Recurse into each direct dependency and keep it as well.
                let mut v = self.get_full_component_dependencies(&dep);
                v.push(dep.clone());
                v.into_iter()
            })
            .collect::<HashSet<_>>();
        if self.get_component(name).is_some() {
            out.insert(name.to_string());
        }
        out
    }
pub fn get_component<'a>(&'a self, name: &str) -> Option<&'a JsxComponent> {
self.jsx_components.iter().find(|x| x.name.as_str() == name)
}
pub fn has_method_header(&self, name: &str) -> bool {
self.function_headers.contains_key(name)
}
pub fn get_method_header(&self, name: &str) -> FunHeader {
self.function_headers
.get(name)
.cloned()
.unwrap_or_else(|| panic!("{:?}\nSearched for {name}", self.function_headers))
}
pub fn get_struct_def_opt<'a>(&'a self, name: &str) -> Option<&'a StructDefinition> {
self.struct_definitions
.iter()
.chain(self.generic_structs_generated.iter())
.find(|x| x.name.as_str() == name)
}
pub fn get_schema_def_opt<'a>(&'a self, name: &str) -> Option<&'a SchemaDefinition> {
self.schema_defs.iter().find(|x| x.name.as_str() == name)
}
pub fn get_duck_def_opt<'a>(&'a self, name: &str) -> Option<&'a NamedDuckDefinition> {
self.named_duck_definitions
.iter()
.chain(self.generic_ducks_generated.iter())
.find(|x| x.name.as_str() == name)
}
pub fn get_schema_def<'a>(&'a self, name: &str) -> &'a SchemaDefinition {
self.get_schema_def_opt(name)
.unwrap_or_else(|| panic!("Could not find struct {name}"))
}
pub fn get_schema_def_mut<'a>(&'a mut self, name: &str) -> &'a mut SchemaDefinition {
self.schema_defs
.iter_mut()
.find(|x| x.name.as_str() == name)
.unwrap_or_else(|| panic!("Could not find struct {name}"))
}
pub fn get_struct_def<'a>(&'a self, name: &str) -> &'a StructDefinition {
self.get_struct_def_opt(name)
.unwrap_or_else(|| panic!("Could not find struct {name}"))
}
pub fn get_struct_def_mut<'a>(&'a mut self, name: &str) -> &'a mut StructDefinition {
self.struct_definitions
.iter_mut()
.chain(self.generic_structs_generated.iter_mut())
.find(|x| x.name.as_str() == name)
.unwrap_or_else(|| panic!("Could not find struct {name}"))
}
pub fn get_generic_methods(&mut self, type_name: String) -> &mut Vec<FunctionDefintion> {
self.generic_methods_generated.entry(type_name).or_default()
}
pub fn has_generic_method(&self, type_name: &str, method_name: &str) -> bool {
self.generic_methods_generated
.get(type_name)
.is_some_and(|x| x.iter().any(|x| x.name == method_name))
}
pub fn push_type_aliases(&mut self) {
self.type_aliases.push(HashMap::new());
}
pub fn pop_type_aliases(&mut self) {
self.type_aliases.pop();
}
pub fn push_identifier_types(&mut self) {
self.identifier_types.push(HashMap::new());
}
pub fn pop_identifier_types(&mut self) {
self.identifier_types.pop();
}
pub fn insert_identifier_type(
&mut self,
identifier: String,
type_expr: TypeExpr,
is_const: bool,
is_schema: bool,
) {
self.identifier_types
.last_mut()
.expect("At least one env should exist. :(")
.insert(identifier, (type_expr, is_const, is_schema));
}
pub fn get_identifier_type(&self, identifier: &str) -> Option<TypeExpr> {
self.get_identifier_type_and_const(identifier)
.map(|(x, _, _)| x)
}
pub fn get_identifier_type_in_typeof(&self, identifier: &str) -> Option<TypeExpr> {
self.get_identifier_type_and_const(identifier)
.map(|(x, _, schema)| {
if schema {
if let TypeExpr::Fun(_, return_duck, _) = x
&& let TypeExpr::Duck(duck) = &return_duck.as_ref().0
{
let fields = &duck.fields;
let from_json_fun = fields
.iter()
.find(|field| field.name == "from_json")
.expect("compiler error: schema without from json in object");
dbg!(&from_json_fun);
if let TypeExpr::Fun(_, return_type, _) = &from_json_fun.type_expr.0 {
return return_type.as_ref().clone().0;
}
panic!("compiler error: schema without function")
} else {
panic!("compiler error: schema without function")
}
} else {
x
}
})
}
pub fn get_identifier_type_and_const(
&self,
identifier: &str,
) -> Option<(TypeExpr, bool, bool)> {
for i in self.identifier_types.iter().rev() {
let r = i.get(identifier).cloned();
if r.is_some() {
return r;
}
}
None
}
pub fn insert_type_alias(&mut self, alias: String, type_expr: TypeExpr) {
self.type_aliases
.last_mut()
.expect("At least one type aliases hashmap should exist. :(")
.insert(alias, type_expr);
}
pub fn get_type_alias(&self, alias: &str) -> Option<TypeExpr> {
for i in self.type_aliases.iter().rev() {
let r = i.get(alias).cloned();
if r.is_some() {
return r;
}
}
None
}
    /// Scans the whole program for composite types that need generated Go
    /// representations: ducks, tuples, tags, and arrays. Sources walked:
    /// duckx/jsx component props and bodies, all non-generic function-like
    /// items (free fns, extension fns, instantiated generics, resolved struct
    /// methods), extension-function target types, schema definitions, and the
    /// fields of structs and named ducks. The result is sorted and
    /// deduplicated by the generated Go type name.
    pub fn find_ducks_and_tuples(&mut self, src_file: &SourceFile) -> Vec<NeedsSearchResult> {
        let mut result = Vec::new();
        // Cloned up front so `self` can be mutably borrowed by the visitor
        // calls below while we filter methods against the resolved set.
        let cloned_resolve = self.resolved_methods.clone();
        for duckx_comp in &mut self.duckx_components.clone() {
            self.find_ducks_and_tuples_type_expr(&mut duckx_comp.props_type, &mut result);
            self.find_tuples_and_ducks_value_expr(&mut duckx_comp.value_expr, &mut result);
        }
        for jsx_comp in &mut self.jsx_components.clone() {
            self.find_ducks_and_tuples_type_expr(&mut jsx_comp.props_type, &mut result);
        }
        // Every non-generic function-like item. Struct methods are included
        // only when they appear in `resolved_methods` for their struct.
        for fun_def in self
            .function_definitions
            .clone()
            .iter_mut()
            .chain(
                src_file
                    .extensions_defs
                    .clone()
                    .iter_mut()
                    .flat_map(|x| x.function_definitions.iter_mut().map(|x| &mut x.0)),
            )
            .chain(self.generic_fns_generated.clone().iter_mut())
            .chain(
                self.generic_methods_generated
                    .clone()
                    .values_mut()
                    .flat_map(|v| v.iter_mut()),
            )
            .chain(
                self.struct_definitions
                    .clone()
                    .iter_mut()
                    .filter(|s| s.generics.is_empty())
                    .flat_map(|s| {
                        // NOTE(review): leaks the struct name to get a
                        // 'static key for the lookup closure — confirm this
                        // is intended (it leaks per call).
                        let leaked_name = s.name.clone().leak() as &'static str;
                        s.methods.iter_mut().filter(|m| {
                            cloned_resolve
                                .get(leaked_name)
                                .unwrap_or(&HashSet::default())
                                .contains(&m.name)
                        })
                    }),
            )
            .chain(
                self.generic_structs_generated
                    .clone()
                    .iter_mut()
                    .filter(|s| s.generics.is_empty())
                    .flat_map(|s| {
                        let leaked_name = s.name.clone().leak() as &'static str;
                        s.methods.iter_mut().filter(|m| {
                            cloned_resolve
                                .get(leaked_name)
                                .unwrap_or(&HashSet::default())
                                .contains(&m.name)
                        })
                    }),
            )
            .filter(|f| f.generics.is_empty())
        {
            // Visit parameter types and the return type...
            for t in fun_def
                .params
                .iter_mut()
                .map(|v| &mut v.1)
                .chain([&mut fun_def.return_type].into_iter())
            {
                self.find_ducks_and_tuples_type_expr(t, &mut result);
            }
            // ...then the function body.
            self.find_tuples_and_ducks_value_expr(&mut fun_def.value_expr, &mut result);
        }
        // todo: check we should probably not clone here
        let mut extension_functions = self.extension_functions.clone();
        for extension_fun in extension_functions.iter_mut() {
            self.find_ducks_and_tuples_type_expr(&mut extension_fun.1.0, &mut result);
        }
        let mut schema_defs = self.schema_defs.clone();
        for schema_def in &mut schema_defs {
            for schema_field in &mut schema_def.fields {
                self.find_ducks_and_tuples_type_expr(&mut schema_field.type_expr, &mut result);
            }
            if let Some(out_type) = &mut schema_def.out_type {
                self.find_ducks_and_tuples_type_expr(out_type, &mut result);
            }
            if let Some(function_type) = &mut schema_def.schema_fn_type {
                self.find_ducks_and_tuples_type_expr(function_type, &mut result);
            }
        }
        // Field types of non-generic (or already-instantiated) structs...
        for s in self
            .struct_definitions
            .clone()
            .iter_mut()
            .chain(self.generic_structs_generated.clone().iter_mut())
            .filter(|s| s.generics.is_empty())
        {
            for m in &mut s.fields {
                self.find_ducks_and_tuples_type_expr(&mut m.type_expr, &mut result);
            }
        }
        // ...and named ducks.
        for s in self
            .named_duck_definitions
            .clone()
            .iter_mut()
            .chain(self.generic_ducks_generated.clone().iter_mut())
            .filter(|s| s.generics.is_empty())
        {
            for m in &mut s.fields {
                self.find_ducks_and_tuples_type_expr(&mut m.type_expr, &mut result);
            }
        }
        // Sort then dedup by the generated Go type name so each composite
        // type is emitted exactly once.
        result.sort_by_key(|e| match e {
            NeedsSearchResult::Duck { fields } => TypeExpr::Duck(Duck {
                fields: fields.clone(),
            })
            .as_clean_go_type_name(self),
            NeedsSearchResult::Tuple { fields } => {
                TypeExpr::Tuple(fields.clone()).as_clean_go_type_name(self)
            }
            NeedsSearchResult::Tag { name } => {
                TypeExpr::Tag(name.clone()).as_clean_go_type_name(self)
            }
            NeedsSearchResult::Array { type_expr: t } => {
                TypeExpr::Array(t.clone().into()).as_clean_go_type_name(self)
            }
        });
        result.dedup_by_key(|e| match e {
            NeedsSearchResult::Duck { fields } => TypeExpr::Duck(Duck {
                fields: fields.clone(),
            })
            .as_clean_go_type_name(self),
            NeedsSearchResult::Tuple { fields } => {
                TypeExpr::Tuple(fields.clone()).as_clean_go_type_name(self)
            }
            NeedsSearchResult::Tag { name } => {
                TypeExpr::Tag(name.clone()).as_clean_go_type_name(self)
            }
            NeedsSearchResult::Array { type_expr: t } => {
                TypeExpr::Array(t.clone().into()).as_clean_go_type_name(self)
            }
        });
        result
    }
fn find_ducks_and_tuples_type_expr(
&mut self,
v: &mut Spanned<TypeExpr>,
out: &mut Vec<NeedsSearchResult>,
) {
let out = Rc::new(RefCell::new(out));
trav_type_expr(build_tuples_and_ducks_type_expr_trav_fn(out), v, self);
}
fn find_tuples_and_ducks_value_expr(
&mut self,
v: &mut Spanned<ValueExpr>,
out: &mut Vec<NeedsSearchResult>,
) {
let out = Rc::new(RefCell::new(out));
trav_value_expr(
build_tuples_and_ducks_type_expr_trav_fn(out.clone()),
build_tuples_and_ducks_value_expr_trav_fn(out.clone()),
v,
self,
);
}
}
/// Builds the type-expression visitor used during duck/tuple discovery.
/// It records ducks, tuples, tags, and arrays into `out`; for struct
/// references it calls into the env (presumably to force resolution of the
/// concrete instantiation — side effect only, nothing is recorded).
fn build_tuples_and_ducks_type_expr_trav_fn(
    out: Rc<RefCell<&mut Vec<NeedsSearchResult>>>,
) -> impl Fn(&mut Spanned<TypeExpr>, &mut TypeEnv<'_>) + Clone {
    move |spanned, env| {
        let mut record = |found: NeedsSearchResult| out.borrow_mut().push(found);
        match &spanned.0 {
            TypeExpr::Tuple(fields) => record(NeedsSearchResult::Tuple {
                fields: fields.clone(),
            }),
            TypeExpr::Duck(duck) => record(NeedsSearchResult::Duck {
                fields: duck.fields.clone(),
            }),
            TypeExpr::Tag(name) => record(NeedsSearchResult::Tag { name: name.clone() }),
            TypeExpr::Array(inner) => record(NeedsSearchResult::Array {
                type_expr: inner.as_ref().clone(),
            }),
            TypeExpr::Struct { name, type_params } => {
                env.get_struct_def_with_type_params_mut(name, type_params, spanned.1);
            }
            _ => {}
        }
    }
}
/// Builds the value-expression visitor: infers the type of duck/tuple
/// literals, and records those plus tag and array literals into `out`.
fn build_tuples_and_ducks_value_expr_trav_fn(
    out: Rc<RefCell<&mut Vec<NeedsSearchResult>>>,
) -> impl Fn(&mut Spanned<ValueExpr>, &mut TypeEnv<'_>) + Clone {
    move |value, env| match &value.0 {
        ValueExpr::Tag(name) => {
            out.borrow_mut()
                .push(NeedsSearchResult::Tag { name: name.clone() });
        }
        ValueExpr::Duck(..) => {
            // Type inference on the literal must yield a duck here.
            let inferred = TypeExpr::from_value_expr(value, env).0;
            let TypeExpr::Duck(Duck { fields }) = inferred else {
                panic!()
            };
            out.borrow_mut().push(NeedsSearchResult::Duck { fields });
        }
        ValueExpr::Tuple(..) => {
            let inferred = TypeExpr::from_value_expr(value, env).0;
            let TypeExpr::Tuple(fields) = inferred else {
                panic!()
            };
            out.borrow_mut().push(NeedsSearchResult::Tuple { fields });
        }
        ValueExpr::Array(.., ty) => {
            // The element type is expected to have been filled in earlier;
            // unwrap is a compiler invariant.
            out.borrow_mut().push(NeedsSearchResult::Array {
                type_expr: ty.as_ref().cloned().unwrap(),
            });
        }
        _ => {}
    }
}
pub fn trav_type_expr<F1>(f_t: F1, v: &mut Spanned<TypeExpr>, env: &mut TypeEnv)
where
F1: Fn(&mut Spanned<TypeExpr>, &mut TypeEnv) + Clone,
{
f_t(v, env);
match &mut v.0 {
TypeExpr::Byte => {}
TypeExpr::Never | TypeExpr::Statement => {}
TypeExpr::NamedDuck {
name: _,
type_params,
} => {
for t in type_params {
trav_type_expr(f_t.clone(), t, env);
}
}
TypeExpr::Duck(fields) => {
for f in &mut fields.fields {
trav_type_expr(f_t.clone(), &mut f.type_expr, env);
}
}
TypeExpr::Array(a) | TypeExpr::KeyOf(a) | TypeExpr::Ref(a) | TypeExpr::RefMut(a) => {
trav_type_expr(f_t.clone(), a, env)
}
TypeExpr::Bool(..)
| TypeExpr::Tag(..)
| TypeExpr::TypeOf(..)
| TypeExpr::Int
| TypeExpr::UInt
| TypeExpr::String(..)
| TypeExpr::Float
| TypeExpr::Char
| TypeExpr::Html
| TypeExpr::Go(..)
| TypeExpr::Any
| TypeExpr::TemplParam(..) => {}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/reports/mod.rs | src/reports/mod.rs | rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false | |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/go_fixup/mod.rs | src/go_fixup/mod.rs | pub mod remove_unused_imports;
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/go_fixup/remove_unused_imports.rs | src/go_fixup/remove_unused_imports.rs | use std::collections::{HashMap, HashSet, VecDeque};
use tree_sitter::{Node, Parser, Range};
use crate::multi_map::MultiMap;
/// The syntactic category of a top-level Go declaration.
#[derive(Debug, Clone)]
enum DeclKind {
    Function,
    /// A method; keyed elsewhere as `Receiver.name`.
    Method,
    /// An interface type; `methods` lists its declared method names.
    Interface { methods: HashSet<String> },
    Type,
    Var,
    Const,
}
/// A single top-level declaration found in the generated Go source.
#[derive(Debug, Clone)]
struct Declaration {
    _name: String, // kept only for debugging output
    kind: DeclKind,
    range: Range, // source span to delete if the declaration is dead
    is_exported: bool, // Go-exported: first letter uppercase
    dependencies: HashSet<String>, // identifiers this declaration references
}
pub fn cleanup_go_source(go_source: &str, remove_exported: bool) -> String {
// return go_source.to_string();
let mut parser = Parser::new();
parser
.set_language(&tree_sitter_go::LANGUAGE.into())
.expect("compiler error: couldn't load treesitter go parser");
let tree = parser.parse(go_source, None).unwrap();
let (declarations, _imports, _package_usages) =
analyze_source(tree.root_node(), go_source.as_bytes());
let live_set = calculate_live_set(&declarations, remove_exported);
let mut ranges_to_delete = Vec::new();
for (name, decl) in declarations.iter_flat() {
if !live_set.contains(name) && (!decl.is_exported || remove_exported) {
if matches!(decl.kind, DeclKind::Method) {
if let Some(dot_pos) = name.find('.') {
let receiver_type = &name[..dot_pos];
if live_set.contains(receiver_type) {
continue;
}
}
if live_set.contains(name) {
continue;
}
}
ranges_to_delete.push(decl.range);
}
}
let cleaned_source = drain_ranges(go_source, ranges_to_delete);
remove_unused_imports(&cleaned_source)
}
/// Removes import specs whose package name is never referenced. Imports that
/// resolve to `_` are removed outright; dot imports are kept whenever *any*
/// symbol usage exists (we cannot attribute individual symbols to them).
pub fn remove_unused_imports(go_source: &str) -> String {
    let mut parser = Parser::new();
    parser
        .set_language(&tree_sitter_go::LANGUAGE.into())
        .expect("compiler error: couldn't load treesitter go parser");
    let tree = parser.parse(go_source, None).unwrap();
    let import_ranges = find_import_ranges(&tree);
    let used_imports = find_used_imports(&tree, go_source);
    let mut ranges_to_delete = Vec::new();
    for import_range in import_ranges {
        let import_text = &go_source[import_range.start_byte..import_range.end_byte];
        let Some(package_name) = extract_package_name_from_import(import_text) else {
            continue;
        };
        if package_name == "_" {
            ranges_to_delete.push(import_range);
            continue;
        }
        if import_text.contains(". \"") {
            // Dot import. (Fix: replaced a mutable-flag dance that was just
            // `!used_imports.is_empty()` with the expression itself.)
            // NOTE(review): this keeps every dot import as soon as any
            // symbol at all is used — very coarse, confirm acceptable.
            if used_imports.is_empty() {
                ranges_to_delete.push(import_range);
            }
        } else if !used_imports.contains(&package_name) {
            ranges_to_delete.push(import_range);
        }
    }
    drain_ranges(go_source, ranges_to_delete)
}
fn find_import_ranges(tree: &tree_sitter::Tree) -> Vec<Range> {
let mut import_ranges = Vec::new();
let mut stack = vec![tree.root_node()];
while let Some(node) = stack.pop() {
if node.kind() != "import_declaration" {
for i in 0..node.child_count() {
if let Some(child) = node.child(i) {
stack.push(child);
}
}
continue;
}
let has_import_spec_list = node
.children(&mut node.walk())
.any(|child| child.kind() == "import_spec_list");
if !has_import_spec_list {
import_ranges.push(node.range());
continue;
}
for i in 0..node.child_count() {
let Some(child) = node.child(i) else { continue };
if child.kind() == "import_spec_list" {
for j in 0..child.child_count() {
let Some(spec) = child.child(j) else { continue };
if spec.kind() == "import_spec" {
import_ranges.push(spec.range());
}
}
} else if child.kind() == "import_spec" {
import_ranges.push(child.range());
}
}
}
import_ranges
}
/// Collects every identifier that is *locally bound* somewhere in the file:
/// var/const names, function and method names plus their parameters,
/// declared type names, `range` loop variables, and `:=` targets. Used so
/// that such names are not mistaken for package references.
fn find_local_variables(tree: &tree_sitter::Tree, go_source: &str) -> HashSet<String> {
    let mut local_variables = HashSet::new();
    let mut stack = vec![tree.root_node()];
    // Flattens a name node (a single identifier or an identifier_list) into
    // the identifier strings it contains.
    fn extract_identifiers_from_node(node: &Node, go_source: &str) -> Vec<String> {
        let mut identifiers = Vec::new();
        if node.kind() == "identifier" {
            if let Ok(name) = node.utf8_text(go_source.as_bytes()) {
                identifiers.push(name.to_string());
            }
            return identifiers;
        }
        for child in node.children(&mut node.walk()) {
            match child.kind() {
                "identifier" => {
                    if let Ok(name) = child.utf8_text(go_source.as_bytes()) {
                        identifiers.push(name.to_string());
                    }
                }
                "identifier_list" => {
                    identifiers.extend(extract_identifiers_from_node(&child, go_source));
                }
                _ => {}
            }
        }
        identifiers
    }
    while let Some(node) = stack.pop() {
        match node.kind() {
            "var_declaration" | "const_declaration" => {
                for child in node.children(&mut node.walk()) {
                    if matches!(child.kind(), "var_spec" | "const_spec")
                        && let Some(name_node) = child.child(0)
                    {
                        local_variables
                            .extend(extract_identifiers_from_node(&name_node, go_source));
                    }
                }
            }
            "function_declaration" | "method_declaration" => {
                // The function/method name itself...
                if let Some(name_node) = node.child_by_field_name("name") {
                    let name = name_node.utf8_text(go_source.as_bytes()).expect(
                        "compiler error: this string comes from rust so we expect that it's utf-8",
                    );
                    local_variables.insert(name.to_string());
                }
                // ...plus every parameter name.
                for child in node.children(&mut node.walk()) {
                    if child.kind() != "parameter_list" {
                        continue;
                    }
                    for param in child.children(&mut child.walk()) {
                        if param.kind() != "parameter_declaration" {
                            continue;
                        }
                        match param.child_by_field_name("name") {
                            Some(name_node) => local_variables
                                .extend(extract_identifiers_from_node(&name_node, go_source)),
                            None => local_variables
                                .extend(extract_identifiers_from_node(&param, go_source)),
                        }
                    }
                }
            }
            "type_declaration" => {
                for child in node.children(&mut node.walk()) {
                    if child.kind() != "type_spec" {
                        continue;
                    }
                    let Some(name_node) = child.child_by_field_name("name") else {
                        continue;
                    };
                    let Ok(name) = name_node.utf8_text(go_source.as_bytes()) else {
                        continue;
                    };
                    local_variables.insert(name.to_string());
                }
            }
            "range_clause" => {
                // `for k, v := range xs` binds k and v.
                for child in node.children(&mut node.walk()) {
                    if matches!(child.kind(), "expression_list" | "identifier_list") {
                        local_variables.extend(extract_identifiers_from_node(&child, go_source));
                        continue;
                    }
                    if child.kind() == "identifier" {
                        let name = child.utf8_text(go_source.as_bytes())
                            .expect("compiler error: this string comes from rust so we expect that it's utf-8");
                        local_variables.insert(name.to_string());
                    }
                }
            }
            "short_var_declaration" => {
                for child in node.children(&mut node.walk()) {
                    if child.kind() == "expression_list" {
                        local_variables.extend(extract_identifiers_from_node(&child, go_source));
                    }
                }
            }
            _ => {}
        }
        // Recurse into all children regardless of kind.
        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                stack.push(child);
            }
        }
    }
    local_variables
}
/// Returns true when `node` has a `selector_expression` anywhere among its
/// ancestors (i.e. it participates in some `x.y` chain).
/// Rewritten from tail recursion (with a non-idiomatic trailing `return`)
/// to a simple ancestor-walk loop.
fn is_part_of_selector_expression(node: &Node) -> bool {
    let mut ancestor = node.parent();
    while let Some(parent) = ancestor {
        if parent.kind() == "selector_expression" {
            return true;
        }
        ancestor = parent.parent();
    }
    false
}
/// Determines which package names are actually referenced: left-hand sides
/// of selector expressions and qualified types, bare identifiers that are
/// not locally bound (possible dot-import symbols), and — unconditionally —
/// the names bound by aliased and dot imports.
fn find_used_imports(tree: &tree_sitter::Tree, go_source: &str) -> HashSet<String> {
    let mut used_imports = HashSet::new();
    let local_variables = find_local_variables(tree, go_source);
    let mut stack = vec![tree.root_node()];
    // Extracts the identifier text of a package-naming node.
    fn extract_package_name_from_node(node: &Node, go_source: &str) -> Option<String> {
        return match node.kind() {
            "package_identifier" | "identifier" => {
                Some(go_source[node.start_byte()..node.end_byte()].to_string())
            }
            _ => None,
        };
    }
    while let Some(node) = stack.pop() {
        match node.kind() {
            "identifier" => {
                // A free-standing identifier that isn't a local and isn't
                // the `x` of an `x.y` chain may come from a dot import.
                let text = &go_source[node.start_byte()..node.end_byte()];
                if !local_variables.contains(text) && !is_part_of_selector_expression(&node) {
                    used_imports.insert(text.to_string());
                }
            }
            "qualified_type" => {
                if let Some(package_node) = node.child_by_field_name("package")
                    && let Some(package_name) =
                        extract_package_name_from_node(&package_node, go_source)
                    && !local_variables.contains(&package_name)
                {
                    used_imports.insert(package_name);
                }
            }
            "selector_expression" => {
                if let Some(package_node) = node.child(0)
                    && let Some(package_name) =
                        extract_package_name_from_node(&package_node, go_source)
                    && !local_variables.contains(&package_name)
                {
                    used_imports.insert(package_name);
                }
            }
            "call_expression" => {
                if let Some(function_node) = node.child(0)
                    && function_node.kind() == "selector_expression"
                    && let Some(package_node) = function_node.child(0)
                    && let Some(package_name) =
                        extract_package_name_from_node(&package_node, go_source)
                    && !local_variables.contains(&package_name)
                {
                    used_imports.insert(package_name);
                }
            }
            "type_identifier" => {
                if let Some(parent) = node.parent()
                    && parent.kind() == "selector_expression"
                    && let Some(package_node) = parent.child(0)
                    && let Some(package_name) =
                        extract_package_name_from_node(&package_node, go_source)
                    && !local_variables.contains(&package_name)
                {
                    used_imports.insert(package_name);
                }
            }
            _ => {}
        }
        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                stack.push(child);
            }
        }
    }
    // Second pass: names bound by aliased imports, and the path-derived
    // names of dot imports, always count as used.
    let mut import_stack = vec![tree.root_node()];
    while let Some(node) = import_stack.pop() {
        if node.kind() == "import_declaration" {
            for spec in node
                .children(&mut node.walk())
                .filter(|c| c.kind() == "import_spec")
            {
                if let Some(name_node) = spec.child(0) {
                    if name_node.kind() == "dot" {
                        if let Some(path_node) = spec.child(spec.child_count() - 1)
                            && path_node.kind() == "interpreted_string_literal"
                        {
                            let import_path = path_node
                                .utf8_text(go_source.as_bytes())
                                .unwrap()
                                .trim_matches('"');
                            let package_name = extract_package_name_from_path(import_path);
                            used_imports.insert(package_name);
                        }
                    } else if name_node.kind() == "identifier" {
                        let alias_name = name_node.utf8_text(go_source.as_bytes()).unwrap();
                        used_imports.insert(alias_name.to_string());
                    }
                }
            }
        }
        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                import_stack.push(child);
            }
        }
    }
    used_imports
}
fn extract_package_name_from_import(import_text: &str) -> Option<String> {
let trimmed = import_text.trim();
if trimmed.starts_with('_') {
if let Some(start) = trimmed.find('"')
&& let Some(end) = trimmed[start + 1..].find('"')
{
let package_path = &trimmed[start + 1..start + 1 + end];
return Some(extract_package_name_from_path(package_path));
}
return None;
}
if trimmed.starts_with('.') {
if let Some(start) = trimmed.find('"')
&& let Some(end) = trimmed[start + 1..].find('"')
{
let package_path = &trimmed[start + 1..start + 1 + end];
return Some(extract_package_name_from_path(package_path));
}
return None;
}
if trimmed.starts_with('"') && trimmed.ends_with('"') {
let package_path = &trimmed[1..trimmed.len() - 1];
return Some(extract_package_name_from_path(package_path));
}
if !trimmed.starts_with("import") {
return None;
}
let start = trimmed.find('"')?;
let end = trimmed[start + 1..].find('"')?;
let package_path = &trimmed[start + 1..start + 1 + end];
Some(extract_package_name_from_path(package_path))
}
/// Derives the Go package name from an import path: the last path segment,
/// skipping a trailing major-version segment (`v2`, `v10`, ...), and for
/// hyphenated segments only the part after the last `-` (e.g. `go-cmp`
/// imports as `cmp`).
fn extract_package_name_from_path(package_path: &str) -> String {
    let mut segments: Vec<&str> = package_path.split('/').collect();
    let Some(mut name) = segments.pop() else {
        return package_path.to_string();
    };
    // `.../pkg/v2` names the package `pkg`, not `v2`.
    let looks_like_version = name.len() > 1
        && name.starts_with('v')
        && name[1..].chars().all(|c| c.is_ascii_digit());
    if looks_like_version && !segments.is_empty() {
        name = segments.pop().unwrap();
    }
    match name.rfind('-') {
        Some(last_dash) => name[last_dash + 1..].to_string(),
        None => name.to_string(),
    }
}
#[allow(dead_code)]
fn find_used_types(src: &str, n: Node) -> HashSet<String> {
fn trav(src: &[u8], n: Node, out: &mut HashSet<String>) {
if n.kind() == "type_identifier" {
out.insert(n.utf8_text(src).unwrap().to_string());
}
for i in 0..n.child_count() {
trav(src, n.child(i).unwrap(), out);
}
}
let mut res = HashSet::new();
trav(src.as_bytes(), n, &mut res);
res
}
/// Walks the file's top-level nodes and produces the declaration table,
/// the import table, and the set of `pkg.member` package usages.
fn analyze_source(
    root_node: Node,
    source: &[u8],
) -> (
    MultiMap<String, Declaration>,
    HashMap<String, Range>,
    HashSet<String>,
) {
    let mut declarations = MultiMap::new();
    let mut imports = HashMap::new();
    let mut package_usages = HashSet::new();
    for child in root_node.children(&mut root_node.walk()) {
        parse_node(child, source, &mut declarations, &mut imports);
    }
    find_all_package_usages(root_node, source, &mut package_usages);
    (declarations, imports, package_usages)
}
/// Dispatches one node: imports and declaration nodes are recorded, and
/// anything else is recursed into.
fn parse_node(
    node: Node,
    source: &[u8],
    declarations: &mut MultiMap<String, Declaration>,
    imports: &mut HashMap<String, Range>,
) {
    let kind = node.kind();
    if kind == "import_declaration" {
        parse_imports(node, source, imports);
    } else if matches!(
        kind,
        "function_declaration"
            | "method_declaration"
            | "type_declaration"
            | "var_declaration"
            | "const_declaration"
    ) {
        go_parse_declaration(node, source, declarations);
    } else {
        for child in node.children(&mut node.walk()) {
            parse_node(child, source, declarations, imports);
        }
    }
}
/// Computes the set of declaration names reachable from `main`/`init`,
/// iteratively extended with: methods required by live interfaces, receiver
/// types of live methods, all methods of live types, and receivers of
/// methods whose bare names appear in the live set.
fn calculate_live_set(
    declarations: &MultiMap<String, Declaration>,
    _remove_exported: bool,
) -> HashSet<String> {
    let mut live_set = HashSet::new();
    let mut worklist: VecDeque<String> = VecDeque::new();
    // Roots: the Go entry points.
    for name in ["main", "init"] {
        if declarations.contains_key(name) && live_set.insert(name.to_string()) {
            worklist.push_back(name.to_string());
        }
    }
    perform_reachability_analysis(&mut live_set, &mut worklist, declarations);
    // Methods demanded by live interfaces keep their implementations alive.
    let required_methods = collect_required_methods(&live_set, declarations);
    if !required_methods.is_empty() {
        add_required_methods(
            &mut live_set,
            &mut worklist,
            declarations,
            &required_methods,
        );
        perform_reachability_analysis(&mut live_set, &mut worklist, declarations);
    }
    // A live `Recv.method` keeps `Recv` alive...
    let receiver_types = find_receiver_types(&live_set, declarations);
    for receiver_type in receiver_types {
        if live_set.insert(receiver_type.clone()) {
            worklist.push_back(receiver_type);
        }
    }
    // ...and a live type keeps all of its methods alive.
    let type_methods = find_type_methods(&live_set, declarations);
    for method_name in type_methods {
        if live_set.insert(method_name.clone()) {
            worklist.push_back(method_name);
        }
    }
    // NOTE(review): find_special_method_names currently returns the whole
    // live set, so any method whose bare name matches *any* live identifier
    // revives its receiver — very conservative; confirm intended.
    let method_names = find_special_method_names(&live_set);
    if !method_names.is_empty() {
        add_special_methods(&mut live_set, &mut worklist, declarations, &method_names);
    }
    if !worklist.is_empty() {
        perform_reachability_analysis(&mut live_set, &mut worklist, declarations);
    }
    live_set
}
/// Union of the method names declared by every live interface.
fn collect_required_methods(
    live_set: &HashSet<String>,
    declarations: &MultiMap<String, Declaration>,
) -> HashSet<String> {
    let mut required = HashSet::new();
    for name in live_set.iter() {
        for decl in declarations.get(name) {
            if let DeclKind::Interface { methods } = &decl.kind {
                required.extend(methods.iter().cloned());
            }
        }
    }
    required
}
/// Marks live every `Recv.method` whose method name is interface-required
/// and whose receiver type is already live.
fn add_required_methods(
    live_set: &mut HashSet<String>,
    worklist: &mut VecDeque<String>,
    declarations: &MultiMap<String, Declaration>,
    required_methods: &HashSet<String>,
) {
    for (name, decl) in declarations.iter_flat() {
        if !matches!(decl.kind, DeclKind::Method) {
            continue;
        }
        let Some(dot_pos) = name.find('.') else {
            continue;
        };
        let receiver_type = &name[..dot_pos];
        let method_name = &name[dot_pos + 1..];
        if required_methods.contains(method_name)
            && live_set.contains(receiver_type)
            && live_set.insert(name.clone())
        {
            worklist.push_back(name.clone());
        }
    }
}
/// Receiver type names of every live method (`Recv.method` contributes
/// `Recv`).
fn find_receiver_types(
    live_set: &HashSet<String>,
    declarations: &MultiMap<String, Declaration>,
) -> HashSet<String> {
    let mut receivers = HashSet::new();
    for (name, decl) in declarations.iter_flat() {
        let live_method = live_set.contains(name) && matches!(decl.kind, DeclKind::Method);
        if !live_method {
            continue;
        }
        if let Some(dot_pos) = name.find('.') {
            receivers.insert(name[..dot_pos].to_string());
        }
    }
    receivers
}
/// All `Recv.method` keys whose receiver is a live type or interface.
fn find_type_methods(
    live_set: &HashSet<String>,
    declarations: &MultiMap<String, Declaration>,
) -> Vec<String> {
    let mut type_methods = Vec::new();
    for (name, decl) in declarations.iter_flat() {
        let live_type = live_set.contains(name)
            && matches!(decl.kind, DeclKind::Type | DeclKind::Interface { .. });
        if !live_type {
            continue;
        }
        // Pull in every method whose receiver is this live type.
        for (method_name, method_decl) in declarations.iter_flat() {
            if !matches!(method_decl.kind, DeclKind::Method) {
                continue;
            }
            if let Some(dot_pos) = method_name.find('.') {
                let receiver_type = &method_name[..dot_pos];
                if receiver_type == name {
                    type_methods.push(method_name.clone());
                }
            }
        }
    }
    type_methods
}
/// Currently returns every live name verbatim: any method whose bare name
/// matches a live identifier is treated as potentially required.
fn find_special_method_names(live_set: &HashSet<String>) -> HashSet<String> {
    live_set.clone()
}
/// For every method `Recv.method` whose bare name is in `method_names`,
/// marks the receiver type `Recv` live.
fn add_special_methods(
    live_set: &mut HashSet<String>,
    worklist: &mut VecDeque<String>,
    declarations: &MultiMap<String, Declaration>,
    method_names: &HashSet<String>,
) {
    for (name, decl) in declarations.iter_flat() {
        if !matches!(decl.kind, DeclKind::Method) {
            continue;
        }
        let Some(dot_pos) = name.find('.') else {
            continue;
        };
        if method_names.contains(&name[dot_pos + 1..]) {
            let receiver = name[..dot_pos].to_string();
            if live_set.insert(receiver.clone()) {
                worklist.push_back(receiver);
            }
        }
    }
}
/// Standard worklist fixpoint: every dependency of a live declaration
/// becomes live, until nothing new is discovered.
fn perform_reachability_analysis(
    live_set: &mut HashSet<String>,
    worklist: &mut VecDeque<String>,
    declarations: &MultiMap<String, Declaration>,
) {
    while let Some(current) = worklist.pop_front() {
        for decl in declarations.get(&current) {
            for dependency in &decl.dependencies {
                if live_set.insert(dependency.clone()) {
                    worklist.push_back(dependency.clone());
                }
            }
        }
    }
}
/// Deletes the given byte ranges from `go_source` (deduplicated, then
/// applied back-to-front so earlier deletions don't shift pending offsets),
/// then trims trailing whitespace per line and collapses one level of
/// blank-line runs.
fn drain_ranges(go_source: &str, mut ranges: Vec<Range>) -> String {
    // Dedup needs a stable order first.
    ranges.sort_by_key(|r| (r.start_byte, r.end_byte));
    ranges.dedup_by_key(|r| (r.start_byte, r.end_byte));
    // Apply from the back so byte offsets of remaining ranges stay valid.
    ranges.sort_by_key(|r| std::cmp::Reverse(r.start_byte));
    let mut new_source = go_source.to_string();
    for range in ranges {
        // NOTE(review): byte-indexed `drain` — assumes every range falls on
        // UTF-8 char boundaries after earlier deletions; confirm overlapping
        // ranges can't occur here (they would panic or mis-delete).
        if range.start_byte < new_source.len() && range.end_byte <= new_source.len() {
            new_source.drain(range.start_byte..range.end_byte);
        }
    }
    new_source
        .lines()
        .map(|line| line.trim_end())
        .collect::<Vec<_>>()
        .join("\n")
        .replace("\n\n\n", "\n\n")
}
/// Records one declaration node into `declarations`. Grouped
/// var/const/type declarations are split into their individual specs;
/// methods are recorded twice — under the bare name and as
/// `Receiver.name`.
fn go_parse_declaration(
    node: Node,
    source: &[u8],
    declarations: &mut MultiMap<String, Declaration>,
) {
    let kind = match node.kind() {
        "function_declaration" => DeclKind::Function,
        "method_declaration" => DeclKind::Method,
        "type_declaration" => DeclKind::Type,
        "var_declaration" => DeclKind::Var,
        "const_declaration" => DeclKind::Const,
        _ => return,
    };
    // Grouped declarations contribute one entry per contained spec.
    let nodes_to_process = if matches!(
        node.kind(),
        "var_declaration" | "const_declaration" | "type_declaration"
    ) {
        find_spec_nodes(node)
    } else {
        vec![node]
    };
    for item_node in nodes_to_process {
        let Some(name_node) = item_node.child_by_field_name("name") else {
            continue;
        };
        let name = name_node.utf8_text(source).unwrap().to_string();
        let mut dependencies = HashSet::new();
        find_dependencies(item_node, source, &mut dependencies);
        // A declaration doesn't depend on itself.
        dependencies.remove(&name);
        if item_node.kind() == "method_declaration"
            && let Some(receiver_type) = find_receiver_type_name(&item_node, source)
        {
            // Methods also depend on (and are additionally keyed by) their
            // receiver type.
            dependencies.insert(receiver_type.clone());
            let method_name_with_receiver = format!("{receiver_type}.{name}");
            declarations.insert(
                method_name_with_receiver.clone(),
                Declaration {
                    _name: method_name_with_receiver,
                    kind: DeclKind::Method,
                    range: item_node.range(),
                    is_exported: name.chars().next().unwrap_or('a').is_uppercase(),
                    dependencies: dependencies.clone(),
                },
            );
        }
        let is_exported = name.chars().next().unwrap_or('a').is_uppercase();
        // Interfaces carry their declared method names so implementations
        // can be kept alive by the liveness pass.
        let final_kind = if let Some(type_body) = item_node.child_by_field_name("type") {
            if type_body.kind() == "interface_type" {
                let methods = extract_interface_methods(type_body, source);
                DeclKind::Interface { methods }
            } else {
                kind.clone()
            }
        } else {
            kind.clone()
        };
        // NOTE(review): for grouped declarations the *whole* group's range
        // is recorded per spec — removing one dead spec removes the group;
        // confirm this is intended.
        let range = if matches!(
            node.kind(),
            "type_declaration" | "var_declaration" | "const_declaration"
        ) {
            node.range()
        } else {
            item_node.range()
        };
        declarations.insert(
            name.clone(),
            Declaration {
                _name: name,
                kind: final_kind,
                range,
                is_exported,
                dependencies,
            },
        );
    }
}
/// Breadth-first search for the `*_spec` nodes corresponding to a
/// `*_declaration` node (e.g. `var_declaration` -> `var_spec`).
fn find_spec_nodes(node: Node) -> Vec<Node> {
    let wanted_kind = node.kind().replace("declaration", "spec");
    let mut specs = Vec::new();
    let mut queue: VecDeque<Node> = node.children(&mut node.walk()).collect();
    while let Some(current) = queue.pop_front() {
        if current.kind() == wanted_kind {
            specs.push(current);
        } else {
            queue.extend(current.children(&mut current.walk()));
        }
    }
    specs
}
/// Names of the methods declared in an `interface_type` body.
fn extract_interface_methods(type_body: Node, source: &[u8]) -> HashSet<String> {
    let mut methods = HashSet::new();
    if let Some(list) = type_body.child_by_field_name("methods") {
        for method_spec in list.children(&mut list.walk()) {
            if let Some(m_name) = method_spec.child_by_field_name("name") {
                methods.insert(m_name.utf8_text(source).unwrap().to_string());
            }
        }
    }
    methods
}
fn find_receiver_type_name(node: &Node, source: &[u8]) -> Option<String> {
let receiver_node = node.child_by_field_name("receiver")?;
let mut queue = VecDeque::from_iter(receiver_node.children(&mut receiver_node.walk()));
while let Some(current) = queue.pop_front() {
if current.kind() == "type_identifier" {
return Some(current.utf8_text(source).unwrap().to_string());
}
queue.extend(current.children(&mut current.walk()));
}
None
}
/// Recursively accumulates every identifier a declaration references:
/// plain/type/field/package identifiers plus those appearing in selector
/// expressions, type assertions and switches, composite literals, and
/// struct field declarations.
fn find_dependencies(node: Node, source: &[u8], deps: &mut HashSet<String>) {
    match node.kind() {
        "identifier" | "type_identifier" | "field_identifier" | "package_identifier" => {
            deps.insert(node.utf8_text(source).unwrap().to_string());
        }
        "selector_expression" => {
            // Both the operand (`x`) and the field (`y`) of `x.y` count.
            if let Some(operand) = node.child(0)
                && operand.kind() == "identifier"
            {
                deps.insert(operand.utf8_text(source).unwrap().to_string());
            }
            if let Some(field) = node.child(1)
                && field.kind() == "field_identifier"
            {
                deps.insert(field.utf8_text(source).unwrap().to_string());
            }
        }
        "type_assertion" => {
            if let Some(type_node) = node.child(1)
                && type_node.kind() == "type_identifier"
            {
                deps.insert(type_node.utf8_text(source).unwrap().to_string());
            }
        }
        "type_switch_expression" => {
            if let Some(expr) = node.child(0)
                && expr.kind() == "type_assertion"
                && let Some(type_node) = expr.child(1)
                && type_node.kind() == "type_identifier"
            {
                deps.insert(type_node.utf8_text(source).unwrap().to_string());
            }
        }
        "type_case" => {
            for child in node.children(&mut node.walk()) {
                if child.kind() == "type_identifier" {
                    deps.insert(child.utf8_text(source).unwrap().to_string());
                }
            }
        }
        "type_switch_statement" => {
            for child in node.children(&mut node.walk()) {
                if child.kind() == "type_identifier" {
                    deps.insert(child.utf8_text(source).unwrap().to_string());
                }
            }
        }
        "composite_literal" => {
            if let Some(type_node) = node.child(0)
                && type_node.kind() == "type_identifier"
            {
                deps.insert(type_node.utf8_text(source).unwrap().to_string());
            }
        }
        "field_declaration" => {
            for child in node.children(&mut node.walk()) {
                if child.kind() == "type_identifier" {
                    deps.insert(child.utf8_text(source).unwrap().to_string());
                }
            }
        }
        _ => {}
    }
    // Always recurse into children, regardless of the node kind above.
    for child in node.children(&mut node.walk()) {
        find_dependencies(child, source, deps);
    }
}
/// Recursively records the left-hand identifier of every `x.y` selector
/// expression — candidate package usages.
fn find_all_package_usages(node: Node, source: &[u8], usages: &mut HashSet<String>) {
    if node.kind() == "selector_expression" {
        if let Some(operand) = node.child(0)
            && operand.kind() == "identifier"
        {
            usages.insert(operand.utf8_text(source).unwrap().to_string());
        }
    }
    for child in node.children(&mut node.walk()) {
        find_all_package_usages(child, source, usages);
    }
}
/// Records each import spec's bound package name and its source range.
fn parse_imports(node: Node, source: &[u8], imports: &mut HashMap<String, Range>) {
    for spec in node
        .children(&mut node.walk())
        .filter(|c| c.kind() == "import_spec")
    {
        // The path literal is always the spec's last child.
        let Some(path_node) = spec.child(spec.child_count() - 1) else {
            continue;
        };
        if path_node.kind() != "interpreted_string_literal" {
            continue;
        }
        let import_path = path_node.utf8_text(source).unwrap().trim_matches('"');
        if let Some(name) = get_import_package_name(&spec, import_path, source) {
            imports.insert(name, spec.range());
        }
    }
}
/// Resolves the package name an import spec binds: an explicit alias, `_`
/// for blank imports, the path-derived name for dot imports, and otherwise
/// the name derived from the import path itself.
fn get_import_package_name(node: &Node, path: &str, source: &[u8]) -> Option<String> {
    match node.child(0) {
        Some(name_node) if name_node.kind() != "interpreted_string_literal" => {
            match name_node.kind() {
                "identifier" => Some(name_node.utf8_text(source).unwrap().to_string()),
                "blank_identifier" => Some("_".to_string()), // blank import
                "dot" => Some(extract_package_name_from_path(path)), // dot import
                _ => None,
            }
        }
        _ => Some(extract_package_name_from_path(path)),
    }
}
#[cfg(test)]
mod tests {
use super::*;
fn create_test_parser() -> Parser {
let mut parser = Parser::new();
parser
.set_language(&tree_sitter_go::LANGUAGE.into())
.unwrap();
parser
}
fn parse_go_code(code: &str) -> tree_sitter::Tree {
let mut parser = create_test_parser();
parser.parse(code, None).unwrap()
}
fn assert_cleanup_result(input: &str, expected: &str, remove_exported: bool) {
let result = cleanup_go_source(input, remove_exported);
let normalized_result = normalize_whitespace(&result);
let normalized_expected = normalize_whitespace(expected);
let res = normalized_expected == normalized_result;
if !res {
std::fs::write("a.txt", expected).unwrap();
std::fs::write("b.txt", &result).unwrap();
}
assert_eq!(
normalized_result, normalized_expected,
"input: {expected}\n\ne:{result}\n\n{}expected: {}\n\ngot: {}",
input, normalized_expected, normalized_result
);
}
fn normalize_whitespace(text: &str) -> String {
text.split_whitespace().collect::<Vec<&str>>().join(" ")
}
#[test]
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | true |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/cli/go_cli.rs | src/cli/go_cli.rs | use std::{
env,
ffi::{OsStr, OsString},
path::{Path, PathBuf},
process::{Command, Stdio},
};
use crate::tags::Tag;
/// Failure categories for invocations of the Go toolchain.
#[derive(Debug)]
pub enum GoCliErrKind {
    /// The `go` process could not be started (missing binary, permissions, ...).
    SpawnProcess,
    /// Waiting for the spawned `go` process to finish failed.
    WaitProcess,
    /// A `go build`/`go mod` step exited with a non-zero status.
    CompileFailed,
    /// `go fmt` exited with a non-zero status.
    FmtFailed,
}
/// Locates the Go binary to invoke.
///
/// Prefers the toolchain vendored at `~/.duck/go-compiler/bin/go` (home
/// resolved via `HOME`, falling back to `USERPROFILE`; `.exe` appended on
/// Windows). If that binary does not exist, falls back to `go` on PATH.
fn resolve_go_bin() -> OsString {
    let home = env::var_os("HOME").or_else(|| env::var_os("USERPROFILE"));
    if let Some(home) = home {
        let mut candidate = PathBuf::from(home);
        candidate.extend([".duck", "go-compiler", "bin", "go"]);
        #[cfg(target_os = "windows")]
        let candidate = candidate.with_extension("exe");
        if candidate.exists() {
            return candidate.into_os_string();
        }
    }
    OsString::from("go")
}
/// Runs `go fmt` on the given source file.
///
/// Returns a tagged error message together with a `GoCliErrKind` describing
/// whether spawning, waiting, or the formatting itself failed.
pub fn format(go_source_file: &Path) -> Result<(), (String, GoCliErrKind)> {
    let go_bin = resolve_go_bin();
    let mut child = Command::new(go_bin)
        .args([OsString::from("fmt"), go_source_file.as_os_str().to_owned()])
        .spawn()
        .map_err(|err| {
            (
                format!("{}{} couldn't spawn go process\n -> {err}", Tag::Go, Tag::Err),
                GoCliErrKind::SpawnProcess,
            )
        })?;
    let status = child.wait().map_err(|err| {
        (
            format!(
                "{}{} couldn't wait for go compile process\n -> {err}",
                Tag::Go,
                Tag::Err
            ),
            GoCliErrKind::WaitProcess,
        )
    })?;
    if status.success() {
        Ok(())
    } else {
        Err((
            format!("{}{} couldn't format the generated go code", Tag::Go, Tag::Err),
            GoCliErrKind::FmtFailed,
        ))
    }
}
/// Compiles a generated Go source file into a binary inside `dot_dir`.
///
/// Runs, in order and with `dot_dir` as the working directory:
/// `go mod init <name>` (failure ignored — the module may already exist),
/// `go mod tidy`, then `go build -o <name> <go_output_file>`.
/// On success returns the path of the produced binary (`dot_dir/<name>`).
pub fn build(
    dot_dir: &Path,
    designated_binary_name: &OsStr,
    go_output_file: &Path,
) -> Result<PathBuf, (String, GoCliErrKind)> {
    let mut dot_dir_buf = dot_dir.to_owned();
    let go_bin = resolve_go_bin();
    let working_directory = dot_dir_buf.clone();
    // Helper: run one go subcommand, mapping spawn/wait failures to errors and
    // (unless `ignore_fail`) treating a non-zero exit as a compile failure.
    let run_command =
        |cmd: &mut Command, ignore_fail: bool| -> Result<(), (String, GoCliErrKind)> {
            let spawned = cmd
                .current_dir(&working_directory)
                .spawn()
                .map_err(|err| {
                    (
                        format!(
                            "{}{} couldn't spawn go process\n -> {err}",
                            Tag::Go,
                            Tag::Err,
                        ),
                        GoCliErrKind::SpawnProcess,
                    )
                })?
                .wait()
                .map_err(|err| {
                    (
                        format!(
                            "{}{} couldn't wait for go compile process\n -> {err}",
                            Tag::Go,
                            Tag::Err,
                        ),
                        GoCliErrKind::WaitProcess,
                    )
                })?;
            if !ignore_fail && !spawned.success() {
                return Err((
                    format!(
                        "{}{} couldn't compile the generated go code",
                        Tag::Go,
                        Tag::Err,
                    ),
                    GoCliErrKind::CompileFailed,
                ));
            }
            Ok(())
        };
    // `go mod init` — output silenced since it is noisy and may legitimately
    // fail when the module already exists.
    let mut mod_init_cmd = Command::new(&go_bin);
    mod_init_cmd
        .args([
            OsString::from("mod"),
            OsString::from("init"),
            designated_binary_name.to_owned(),
        ])
        .stdout(Stdio::null())
        .stderr(Stdio::null());
    let mut tidy_cmd = Command::new(&go_bin);
    tidy_cmd.args([OsString::from("mod"), OsString::from("tidy")]);
    let mut build_command = Command::new(&go_bin);
    build_command.args([
        OsString::from("build"),
        OsString::from("-o"),
        designated_binary_name.to_owned(),
        go_output_file.as_os_str().to_owned(),
    ]);
    run_command(&mut mod_init_cmd, true)?;
    run_command(&mut tidy_cmd, false)?;
    run_command(&mut build_command, false)?;
    // The binary lands next to go.mod, named after the module.
    dot_dir_buf.push(designated_binary_name);
    Ok(dot_dir_buf)
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/cli/mod.rs | src/cli/mod.rs | pub mod git_cli;
pub mod go_cli;
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
duck-compiler/duckc | https://github.com/duck-compiler/duckc/blob/03febfa849f54a237380dfed2d91a52a2df0313b/src/cli/git_cli.rs | src/cli/git_cli.rs | use std::io::ErrorKind as IOErrKind;
use std::path::{Path, PathBuf};
use std::process::{Command, Output};
use crate::tags::Tag;
/// Failure categories for git subcommand invocations.
#[derive(Debug)]
pub enum GitCliErrKind {
    /// An underlying I/O failure (process spawn, path issues, ...).
    IOErr(IOErrKind),
    /// `git pull` could not complete.
    CannotPullRepo,
    /// `git clone` could not complete.
    CannotCloneRepo,
}
/// Ensures `target_dir` contains an up-to-date clone of `repo_url`.
///
/// If `target_dir` already holds a git repository (has a `.git` directory),
/// runs `git pull` inside it; otherwise clones the repository into it.
/// Returns a tagged error message and a [`GitCliErrKind`] on failure.
pub fn pull_repository(
    repo_url: &str,
    target_dir: &PathBuf,
) -> Result<(), (String, GitCliErrKind)> {
    if target_dir.exists() && target_dir.join(".git").is_dir() {
        // verbose
        println!(
            "Directory {target_dir:?} already exists and appears to be a Git repository. Performing 'git pull'..."
        );
        let output = Command::new("git")
            .arg("pull")
            .current_dir(target_dir)
            .output()
            .map_err(|err| {
                (
                    format!(
                        "{}{} couldn't get output of git pull command - {err}",
                        Tag::IO,
                        Tag::Err,
                    ),
                    GitCliErrKind::IOErr(err.kind()),
                )
            })?;
        return handle_git_output(output, "git pull");
    }
    if target_dir.exists() && !target_dir.is_dir() {
        return Err((
            format!("Target path {target_dir:?} exists but is not a directory."),
            GitCliErrKind::IOErr(IOErrKind::AlreadyExists),
        ));
    }
    // verbose
    println!(
        "Directory {target_dir:?} does not exist or is not a Git repository. Performing 'git clone'..."
    );
    // Clone is performed from the parent directory into the final path
    // component, so both must be derivable from `target_dir`.
    let parent_dir = target_dir.parent().unwrap_or_else(|| Path::new("."));
    let directory_name = target_dir.file_name().ok_or_else(|| {
        (
            format!(
                "{}{} Couldn't determine directory name from target path.",
                Tag::Git,
                Tag::Err,
            ),
            GitCliErrKind::IOErr(IOErrKind::InvalidInput),
        )
    })?;
    let output = Command::new("git")
        .arg("clone")
        .arg(repo_url)
        .arg(directory_name)
        .current_dir(parent_dir)
        .output()
        .map_err(|err| {
            (
                format!(
                    "{}{} Couldn't execute git command\n -> {}",
                    Tag::Git,
                    Tag::Err,
                    err
                ),
                // This is the clone path, so report the clone-specific kind
                // (was previously mis-reported as CannotPullRepo).
                GitCliErrKind::CannotCloneRepo,
            )
        })?;
    handle_git_output(output, "git clone")
}
/// Maps a finished git command's `Output` to our result type.
///
/// A zero exit status becomes `Ok(())`; anything else is turned into a tagged
/// message embedding the command name and the captured stderr text.
fn handle_git_output(output: Output, command_name: &str) -> Result<(), (String, GitCliErrKind)> {
    if output.status.success() {
        Ok(())
    } else {
        let stderr_text = String::from_utf8_lossy(&output.stderr);
        let message = format!(
            "{}{} Couldn't git {}\n -> {}",
            Tag::Git,
            Tag::Err,
            command_name,
            stderr_text
        );
        Err((message, GitCliErrKind::CannotPullRepo))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;
    /// Clones a small public repository into a fresh temp directory and
    /// verifies a `.git` directory appears.
    ///
    /// NOTE(review): this performs a real network clone, so it requires
    /// internet access to pass.
    #[test]
    fn test_pull_repository_clone_new() {
        let temp_dir = tempdir().expect("Failed to create temporary directory");
        let target_path = temp_dir.path().join("test-repo");
        println!("Attempting to clone into: {:?}", target_path);
        let outcome = pull_repository("https://github.com/Mvmo/sicklang", &target_path);
        assert!(outcome.is_ok(), "Failed to clone repository");
        assert!(
            target_path.join(".git").is_dir(),
            "Cloned directory should contain .git"
        );
        temp_dir
            .close()
            .expect("Failed to close temporary directory");
    }
}
| rust | MIT | 03febfa849f54a237380dfed2d91a52a2df0313b | 2026-01-04T20:22:21.418266Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/config.rs | src/config.rs | use serde::{Deserialize, Serialize};
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
/// Move a directory, handling cross-device moves by copying + deleting.
fn move_dir_all(src: &Path, dst: &Path) -> io::Result<()> {
    // Fast path: rename succeeds when src and dst live on the same filesystem.
    if let Err(rename_err) = fs::rename(src, dst) {
        // EXDEV (18) = cross-device link; any other error is a real failure.
        if rename_err.raw_os_error() != Some(18) {
            return Err(rename_err);
        }
        // Slow path: copy the whole tree across filesystems, then remove src.
        copy_dir_all(src, dst)?;
        fs::remove_dir_all(src)?;
    }
    Ok(())
}
/// Recursively copy a directory tree from `src` to `dst`.
///
/// Symlinks are recreated (not followed); regular files are copied byte-wise.
fn copy_dir_all(src: &Path, dst: &Path) -> io::Result<()> {
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let source = entry.path();
        let target = dst.join(entry.file_name());
        if source.is_dir() {
            // Recurse into subdirectories (symlinked dirs are followed here,
            // matching `Path::is_dir` semantics).
            copy_dir_all(&source, &target)?;
        } else if source.is_symlink() {
            // Preserve the symlink itself instead of copying its target.
            let link_target = fs::read_link(&source)?;
            let _ = fs::remove_file(&target); // Remove if exists
            std::os::unix::fs::symlink(link_target, &target)?;
        } else {
            fs::copy(&source, &target)?;
        }
    }
    Ok(())
}
/// Returns the user's home directory from `$HOME`, or "" if it is unset.
fn get_home() -> String {
    match std::env::var("HOME") {
        Ok(home) => home,
        Err(_) => String::new(),
    }
}
/// Default NaK data directory: `$HOME/NaK`.
fn default_data_path() -> String {
    let mut path = get_home();
    path.push_str("/NaK");
    path
}
// ============================================================================
// Main App Config - stored in ~/.config/nak/config.json
// ============================================================================
/// Main application configuration, persisted as JSON in
/// `~/.config/nak/config.json` (see `AppConfig::load`/`save`).
#[derive(Serialize, Deserialize, Clone)]
pub struct AppConfig {
    /// Name of the Proton build the user selected; None until one is chosen.
    pub selected_proton: Option<String>,
    /// Prefix currently activated as the nxm:// link target, if any.
    pub active_nxm_prefix: Option<String>,
    /// Whether to use Steam Linux Runtime (pressure-vessel) for launching
    #[serde(default = "default_true")]
    pub use_steam_runtime: bool,
    /// Whether the first-run setup has been completed
    #[serde(default)]
    pub first_run_completed: bool,
    /// Path to NaK data folder (Prefixes, ProtonGE, cache, etc.)
    /// Defaults to ~/NaK
    #[serde(default = "default_data_path")]
    pub data_path: String,
}
/// Serde default helper: makes `use_steam_runtime` default to `true`
/// when absent from an on-disk config.
fn default_true() -> bool {
    true
}
impl Default for AppConfig {
fn default() -> Self {
Self {
selected_proton: None,
active_nxm_prefix: None,
use_steam_runtime: true,
first_run_completed: false,
data_path: default_data_path(),
}
}
}
impl AppConfig {
    /// Config file path: ~/.config/nak/config.json
    fn get_config_path() -> PathBuf {
        PathBuf::from(format!("{}/.config/nak/config.json", get_home()))
    }
    /// Legacy config path for migration: ~/NaK/config.json
    fn get_legacy_path() -> PathBuf {
        PathBuf::from(format!("{}/NaK/config.json", get_home()))
    }
    /// Loads the config, preferring the new location, migrating from the
    /// legacy location if needed, and falling back to defaults when no
    /// readable/parsable config exists (parse errors are silently ignored).
    pub fn load() -> Self {
        let config_path = Self::get_config_path();
        let legacy_path = Self::get_legacy_path();
        // Try new location first
        if config_path.exists() {
            if let Ok(content) = fs::read_to_string(&config_path) {
                if let Ok(config) = serde_json::from_str(&content) {
                    return config;
                }
            }
        }
        // Try legacy location and migrate if found
        if legacy_path.exists() {
            if let Ok(content) = fs::read_to_string(&legacy_path) {
                if let Ok(mut config) = serde_json::from_str::<AppConfig>(&content) {
                    // Ensure data_path is set (old configs won't have it)
                    if config.data_path.is_empty() {
                        config.data_path = default_data_path();
                    }
                    // Save to new location
                    config.save();
                    // Remove old config only after the new one is written
                    let _ = fs::remove_file(&legacy_path);
                    return config;
                }
            }
        }
        Self::default()
    }
    /// Writes the config to the new location. Best-effort: directory
    /// creation, serialization, and write failures are all ignored.
    pub fn save(&self) {
        let path = Self::get_config_path();
        if let Some(parent) = path.parent() {
            let _ = fs::create_dir_all(parent);
        }
        if let Ok(json) = serde_json::to_string_pretty(self) {
            let _ = fs::write(path, json);
        }
    }
    /// Get the NaK data directory path
    pub fn get_data_path(&self) -> PathBuf {
        PathBuf::from(&self.data_path)
    }
    /// Get path to Prefixes directory
    pub fn get_prefixes_path(&self) -> PathBuf {
        self.get_data_path().join("Prefixes")
    }
    /// Get path to cache directory
    pub fn get_cache_path(&self) -> PathBuf {
        self.get_data_path().join("cache")
    }
}
// ============================================================================
// Cache Config - also stored in ~/.config/nak/
// ============================================================================
/// Cache behaviour toggles, persisted as JSON in
/// `~/.config/nak/cache_config.json`.
#[derive(Serialize, Deserialize, Clone)]
pub struct CacheConfig {
    /// Master switch for all caching.
    pub cache_enabled: bool,
    /// Cache downloaded dependencies.
    pub cache_dependencies: bool,
    /// Cache Mod Organizer 2 downloads.
    pub cache_mo2: bool,
    /// Cache Vortex downloads.
    pub cache_vortex: bool,
    #[serde(default)]
    pub cache_location: String, // Deprecated - now uses AppConfig::get_cache_path()
}
impl Default for CacheConfig {
fn default() -> Self {
Self {
cache_enabled: true,
cache_dependencies: true,
cache_mo2: true,
cache_vortex: true,
cache_location: String::new(),
}
}
}
impl CacheConfig {
    /// Cache config file path: ~/.config/nak/cache_config.json
    fn get_config_path() -> PathBuf {
        PathBuf::from(format!("{}/.config/nak/cache_config.json", get_home()))
    }
    /// Legacy cache config path for migration: ~/NaK/cache_config.json
    fn get_legacy_path() -> PathBuf {
        PathBuf::from(format!("{}/NaK/cache_config.json", get_home()))
    }
    /// Loads the cache config, migrating from the legacy location when
    /// present; falls back to defaults if nothing readable is found.
    pub fn load() -> Self {
        let config_path = Self::get_config_path();
        let legacy_path = Self::get_legacy_path();
        // Try new location first
        if config_path.exists() {
            if let Ok(content) = fs::read_to_string(&config_path) {
                if let Ok(config) = serde_json::from_str(&content) {
                    return config;
                }
            }
        }
        // Try legacy location and migrate
        if legacy_path.exists() {
            if let Ok(content) = fs::read_to_string(&legacy_path) {
                if let Ok(config) = serde_json::from_str::<CacheConfig>(&content) {
                    config.save();
                    let _ = fs::remove_file(&legacy_path);
                    return config;
                }
            }
        }
        Self::default()
    }
    /// Writes the cache config to disk. Best-effort: failures are ignored.
    pub fn save(&self) {
        let path = Self::get_config_path();
        if let Some(parent) = path.parent() {
            let _ = fs::create_dir_all(parent);
        }
        if let Ok(json) = serde_json::to_string_pretty(self) {
            let _ = fs::write(path, json);
        }
    }
    /// Clear the cache directory by removing and recreating it.
    pub fn clear_cache(&self, app_config: &AppConfig) -> Result<(), std::io::Error> {
        let cache_dir = app_config.get_cache_path();
        if cache_dir.exists() {
            fs::remove_dir_all(&cache_dir)?;
            fs::create_dir_all(&cache_dir)?;
        }
        Ok(())
    }
}
// ============================================================================
// Storage Manager - for storage info and data path migration
// ============================================================================
/// Namespace for storage inspection and data-migration helpers.
pub struct StorageManager;
impl StorageManager {
    /// Get storage info for a given data path (all sizes in GB).
    pub fn get_storage_info(data_path: &Path) -> StorageInfo {
        let exists = data_path.exists();
        let (free_space_gb, used_space_gb, cache_size_gb, proton_size_gb, prefixes_size_gb, other_size_gb) =
            if exists {
                let free = Self::get_free_space(data_path);
                let used = Self::get_directory_size(data_path);
                let cache_size = Self::get_directory_size(&data_path.join("cache"));
                let proton_size = Self::get_directory_size(&data_path.join("ProtonGE"));
                let prefixes_size = Self::get_directory_size(&data_path.join("Prefixes"));
                let known_sum = cache_size + proton_size + prefixes_size;
                // Clamp at 0 — `du` races can make the parts exceed the whole.
                let other_size = (used - known_sum).max(0.0);
                (free, used, cache_size, proton_size, prefixes_size, other_size)
            } else {
                (0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
            };
        StorageInfo {
            data_path: data_path.to_string_lossy().to_string(),
            exists,
            free_space_gb,
            used_space_gb,
            cache_size_gb,
            proton_size_gb,
            prefixes_size_gb,
            other_size_gb,
        }
    }
    /// Get free space on a path in GB.
    ///
    /// Shells out to `df -B1` and reads the 4th column ("Available") of the
    /// second output line; returns 0.0 on any failure.
    fn get_free_space(path: &Path) -> f64 {
        use std::process::Command;
        if let Ok(output) = Command::new("df").arg("-B1").arg(path).output() {
            if output.status.success() {
                let output_str = String::from_utf8_lossy(&output.stdout);
                if let Some(line) = output_str.lines().nth(1) {
                    let parts: Vec<&str> = line.split_whitespace().collect();
                    if parts.len() >= 4 {
                        if let Ok(bytes) = parts[3].parse::<u64>() {
                            return bytes as f64 / (1024.0 * 1024.0 * 1024.0);
                        }
                    }
                }
            }
        }
        0.0
    }
    /// Get directory size in GB using `du -sb`; returns 0.0 if the path is
    /// missing or `du` fails.
    fn get_directory_size(path: &Path) -> f64 {
        use std::process::Command;
        if !path.exists() {
            return 0.0;
        }
        if let Ok(output) = Command::new("du").arg("-sb").arg(path).output() {
            if output.status.success() {
                let output_str = String::from_utf8_lossy(&output.stdout);
                if let Some(size_str) = output_str.split_whitespace().next() {
                    if let Ok(bytes) = size_str.parse::<u64>() {
                        return bytes as f64 / (1024.0 * 1024.0 * 1024.0);
                    }
                }
            }
        }
        0.0
    }
    /// Validate a storage location: must be a creatable, writable directory
    /// with at least ~5GB free.
    pub fn validate_location(location: &Path) -> Result<(), String> {
        // Create parent if it doesn't exist
        if !location.exists() {
            if let Err(e) = fs::create_dir_all(location) {
                return Err(format!("Cannot create directory: {}", e));
            }
        }
        if !location.is_dir() {
            return Err(format!("Location is not a directory: {}", location.display()));
        }
        // Check write permission with a throwaway probe file
        let test_file = location.join(".nak_write_test");
        if fs::write(&test_file, "test").is_err() {
            return Err(format!("No write permission for: {}", location.display()));
        }
        let _ = fs::remove_file(&test_file);
        // Check space
        let free_gb = Self::get_free_space(location);
        if free_gb < 5.0 {
            return Err(format!(
                "Insufficient space: {:.2}GB available (minimum 5GB recommended)",
                free_gb
            ));
        }
        Ok(())
    }
    /// Move NaK data to a new location (`<new_location>/NaK`), update the
    /// persisted config, then repair symlinks and the NXM handler which
    /// embed absolute paths.
    pub fn move_data(config: &mut AppConfig, new_location: &Path) -> Result<String, String> {
        let target_nak = new_location.join("NaK");
        // Validate target
        Self::validate_location(new_location)?;
        // Check if target NaK folder already exists
        if target_nak.exists() {
            // Check if it's empty or only has hidden files (safe to overwrite)
            let is_empty = match fs::read_dir(&target_nak) {
                Ok(entries) => entries
                    .filter_map(|e| e.ok())
                    .filter(|e| {
                        // Filter out hidden files (starting with .)
                        !e.file_name().to_string_lossy().starts_with('.')
                    })
                    .count() == 0,
                Err(_) => false,
            };
            if is_empty {
                // Empty folder - safe to remove and proceed
                let _ = fs::remove_dir_all(&target_nak);
            } else {
                return Err(format!(
                    "Target location already has a NaK folder with data: {}\n\
                    Please remove it manually or choose a different location.",
                    target_nak.display()
                ));
            }
        }
        let current_path = config.get_data_path();
        // Move the data
        if current_path.exists() {
            // Fixed: the reference here was corrupted into an HTML entity
            // ("&curren;t_path") — restored to `&current_path`.
            move_dir_all(&current_path, &target_nak)
                .map_err(|e| format!("Failed to move NaK folder: {}", e))?;
        } else {
            // No existing data, just create the new directory
            fs::create_dir_all(&target_nak)
                .map_err(|e| format!("Failed to create NaK directory: {}", e))?;
        }
        // Update config
        config.data_path = target_nak.to_string_lossy().to_string();
        config.save();
        // Fix all symlinks that have absolute paths (they now point to old location)
        // This updates manager_link, convenience symlinks in mod manager folders, etc.
        if let Err(e) = crate::scripts::fix_symlinks_after_move() {
            eprintln!("Warning: Failed to fix symlinks after move: {}", e);
        }
        // Regenerate NXM handler - the .desktop file has absolute path to the script
        // which needs to be updated to the new location
        if let Err(e) = crate::nxm::NxmHandler::setup() {
            eprintln!("Warning: Failed to regenerate NXM handler: {}", e);
        }
        Ok(format!(
            "Successfully moved NaK data to {}",
            target_nak.display()
        ))
    }
}
/// Snapshot of disk usage for the NaK data directory; all `_gb` fields are
/// sizes in gigabytes as computed by `StorageManager::get_storage_info`.
#[derive(Clone, Default)]
pub struct StorageInfo {
    /// The data directory being described.
    pub data_path: String,
    /// Whether that directory exists on disk.
    pub exists: bool,
    pub free_space_gb: f64,
    pub used_space_gb: f64,
    pub cache_size_gb: f64,
    pub proton_size_gb: f64,
    pub prefixes_size_gb: f64,
    /// Usage not attributable to cache/ProtonGE/Prefixes.
    pub other_size_gb: f64,
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/app.rs | src/app.rs | //! Application state and initialization
use std::path::PathBuf;
use std::sync::atomic::AtomicBool;
use std::sync::{Arc, Mutex};
use std::thread;
use crate::config::{AppConfig, CacheConfig};
use crate::games::{DetectedGame, GameFinder};
use crate::nxm::NxmHandler;
use crate::utils::detect_steam_path_checked;
use crate::wine::{
check_command_available, ensure_cabextract, ensure_winetricks, fetch_cachyos_releases,
fetch_ge_releases,
};
use crate::wine::{GithubRelease, NakPrefix, PrefixManager, ProtonFinder, ProtonInfo};
// ============================================================================
// Types
// ============================================================================
/// Top-level UI pages the application can display.
#[derive(PartialEq, Clone, Copy)]
pub enum Page {
    FirstRunSetup,
    GettingStarted,
    ModManagers,
    GameFixer,
    Marketplace,
    ProtonTools,
    Settings,
    Updater,
}
/// Sub-views within the Mod Managers page.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum ModManagerView {
    Dashboard,
    PrefixManager,
    Mo2Manager,
    VortexManager,
}
/// Steps of the mod-manager installation wizard, in flow order.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum WizardStep {
    Selection,
    NameInput,
    PathInput,
    DpiSetup, // DPI scaling configuration (after install, before finished)
    Finished,
}
/// State carried through the mod-manager installation wizard.
#[derive(Clone, Debug)]
pub struct InstallWizard {
    /// Current wizard step.
    pub step: WizardStep,
    pub manager_type: String, // "MO2" or "Vortex"
    pub install_type: String, // "New" or "Existing"
    /// User-chosen name for the new prefix/instance.
    pub name: String,
    /// User-supplied install path (for "Existing" installs).
    pub path: String,
    /// Last validation failure to show in the UI, if any.
    pub validation_error: Option<String>,
    /// Proceed even when validation would normally block.
    pub force_install: bool,
    /// Error from the most recent failed install attempt, if any.
    pub last_install_error: Option<String>,
    pub use_slr: bool, // Whether to use Steam Linux Runtime for this installation
    // DPI Setup state
    pub selected_dpi: u32, // Selected DPI value (96, 120, 144, 192, or custom)
    pub custom_dpi_input: String, // Text input for custom DPI value
}
impl Default for InstallWizard {
    /// Wizard starts at the selection step, with SLR enabled and 96 DPI
    /// (100% scaling); all text fields empty and no errors recorded.
    fn default() -> Self {
        Self {
            step: WizardStep::Selection,
            manager_type: String::new(),
            install_type: String::new(),
            name: String::new(),
            path: String::new(),
            validation_error: None,
            force_install: false,
            last_install_error: None,
            use_slr: true, // Default to using SLR (user can toggle)
            selected_dpi: 96, // Default 100% scaling
            custom_dpi_input: String::new(),
        }
    }
}
// ============================================================================
// Application State
// ============================================================================
/// Top-level application state: navigation, wizard state, background-task
/// handles (wrapped in `Arc<Mutex<..>>`/`Arc<AtomicBool>` so worker threads
/// can report progress), persisted configuration, and cached scan results.
pub struct MyApp {
    // Navigation
    pub current_page: Page,
    pub mod_manager_view: ModManagerView,
    // Installation Wizard State
    pub install_wizard: InstallWizard,
    pub is_installing_manager: Arc<Mutex<bool>>,
    pub install_status: Arc<Mutex<String>>,
    // Removed old scattered input fields in favor of InstallWizard
    pub logs: Arc<Mutex<Vec<String>>>,
    pub install_progress: Arc<Mutex<f32>>,
    pub cancel_install: Arc<AtomicBool>,
    // Page State: Prefix Manager
    pub detected_prefixes: Vec<NakPrefix>,
    pub prefix_manager: PrefixManager,
    pub winetricks_path: Arc<Mutex<Option<PathBuf>>>,
    // Configuration (Persisted)
    pub config: AppConfig,
    pub cache_config: CacheConfig,
    // Page State: Proton Tools
    pub proton_versions: Vec<ProtonInfo>,
    // Proton Downloader State (unified)
    pub available_ge_versions: Arc<Mutex<Vec<GithubRelease>>>,
    pub available_cachyos_versions: Arc<Mutex<Vec<GithubRelease>>>,
    pub proton_search_query: String,
    pub proton_download_source: String, // "ge" or "cachyos"
    pub is_fetching_ge: Arc<Mutex<bool>>,
    pub is_fetching_cachyos: Arc<Mutex<bool>>,
    pub is_downloading: Arc<Mutex<bool>>,
    pub download_status: Arc<Mutex<String>>,
    pub download_progress: Arc<Mutex<f32>>,
    // Flags
    pub should_refresh_proton: bool,
    pub download_needs_refresh: Arc<AtomicBool>, // Signal from background downloads to refresh UI
    pub missing_deps: Arc<Mutex<Vec<String>>>,
    // Steam Detection
    pub steam_detected: bool,
    pub steam_path: Option<String>,
    // Settings page state
    pub migration_path_input: String,
    pub cached_storage_info: Option<crate::config::StorageInfo>,
    pub storage_info_last_update: std::time::Instant,
    // Confirmation dialog state
    pub pending_prefix_delete: Option<String>,
    pub pending_proton_delete: Option<(String, String)>, // (name, type: "ge" or "cachyos")
    // Game Modding Helper state
    pub detected_games: Vec<DetectedGame>,
    pub game_search_query: String,
    pub is_applying_game_fix: Arc<Mutex<bool>>,
    pub game_fix_status: Arc<Mutex<String>>,
    pub game_fix_logs: Arc<Mutex<Vec<String>>>,
    // Updater state
    pub update_info: Arc<Mutex<Option<crate::updater::UpdateInfo>>>,
    pub is_checking_update: Arc<Mutex<bool>>,
    pub is_installing_update: Arc<Mutex<bool>>,
    pub update_error: Arc<Mutex<Option<String>>>,
    pub update_installed: Arc<Mutex<bool>>,
    // DPI test process tracking
    pub dpi_test_processes: Arc<Mutex<Vec<u32>>>, // PIDs of running test apps
    // Prefix manager DPI custom input (shared across all prefixes)
    pub prefix_custom_dpi_input: String,
}
impl Default for MyApp {
    /// Builds the full application state: scans installed Proton builds,
    /// prefixes, and games; loads persisted config and ensures the data
    /// directory layout exists; then spawns background threads for release
    /// fetching, optional Steam Runtime download, the update check, and
    /// winetricks/cabextract/NXM-handler bootstrap.
    fn default() -> Self {
        // Initialize Proton Finder
        let finder = ProtonFinder::new();
        let protons = finder.find_all();
        // Initialize Prefix Manager
        let prefix_mgr = PrefixManager::new();
        let prefixes = prefix_mgr.scan_prefixes();
        // Initialize Game Finder
        let game_finder = GameFinder::new();
        let detected_games = game_finder.find_all_games();
        let winetricks_path_arc = Arc::new(Mutex::new(None));
        // Load Configuration
        let config = AppConfig::load();
        let cache_config = CacheConfig::load();
        // Ensure data directories exist (for new installs or after data path change)
        let data_path = config.get_data_path();
        let _ = std::fs::create_dir_all(&data_path);
        let _ = std::fs::create_dir_all(data_path.join("Prefixes"));
        let _ = std::fs::create_dir_all(data_path.join("ProtonGE"));
        let _ = std::fs::create_dir_all(data_path.join("tmp"));
        let _ = std::fs::create_dir_all(data_path.join("bin"));
        let _ = std::fs::create_dir_all(data_path.join("logs"));
        // Ensure active proton symlink exists (for updating users)
        // If user has a selected proton but no active symlink, create it
        let active_link = data_path.join("ProtonGE/active");
        if !active_link.exists() {
            if let Some(selected_name) = &config.selected_proton {
                // Find the selected proton in available versions
                if let Some(selected_proton) = protons.iter().find(|p| &p.name == selected_name) {
                    let _ = crate::wine::set_active_proton(selected_proton);
                }
            } else if let Some(first_proton) = protons.first() {
                // No proton selected but we have some available - select the first one
                let _ = crate::wine::set_active_proton(first_proton);
            }
        }
        // Detect Steam at startup (with logging)
        let steam_path = detect_steam_path_checked();
        let steam_detected = steam_path.is_some();
        // Check Dependencies (uses check_command_available which also checks $DATA_PATH/bin)
        // Note: cabextract will be auto-downloaded if missing, so we check it later
        // Note: 7z extraction is now handled natively in Rust (sevenz-rust crate)
        let mut missing = Vec::new();
        if !check_command_available("curl") && !check_command_available("wget") {
            missing.push("curl or wget".to_string());
        }
        let missing_deps_arc = Arc::new(Mutex::new(missing));
        // Determine starting page based on first-run status
        let starting_page = if config.first_run_completed {
            Page::GettingStarted
        } else {
            Page::FirstRunSetup
        };
        let app = Self {
            current_page: starting_page,
            mod_manager_view: ModManagerView::Dashboard,
            install_wizard: InstallWizard::default(),
            is_installing_manager: Arc::new(Mutex::new(false)),
            install_status: Arc::new(Mutex::new(String::new())),
            logs: Arc::new(Mutex::new(Vec::new())),
            install_progress: Arc::new(Mutex::new(0.0)),
            cancel_install: Arc::new(AtomicBool::new(false)),
            detected_prefixes: prefixes,
            prefix_manager: prefix_mgr,
            winetricks_path: winetricks_path_arc.clone(),
            config,
            cache_config,
            proton_versions: protons,
            available_ge_versions: Arc::new(Mutex::new(Vec::new())),
            available_cachyos_versions: Arc::new(Mutex::new(Vec::new())),
            proton_search_query: String::new(),
            proton_download_source: "ge".to_string(),
            // Both fetches start as "in progress"; the threads below clear them.
            is_fetching_ge: Arc::new(Mutex::new(true)),
            is_fetching_cachyos: Arc::new(Mutex::new(true)),
            is_downloading: Arc::new(Mutex::new(false)),
            download_status: Arc::new(Mutex::new(String::new())),
            download_progress: Arc::new(Mutex::new(0.0)),
            should_refresh_proton: false,
            download_needs_refresh: Arc::new(AtomicBool::new(false)),
            missing_deps: missing_deps_arc.clone(),
            steam_detected,
            steam_path,
            migration_path_input: String::new(),
            cached_storage_info: None,
            storage_info_last_update: std::time::Instant::now(),
            pending_prefix_delete: None,
            pending_proton_delete: None,
            detected_games,
            game_search_query: String::new(),
            is_applying_game_fix: Arc::new(Mutex::new(false)),
            game_fix_status: Arc::new(Mutex::new(String::new())),
            game_fix_logs: Arc::new(Mutex::new(Vec::new())),
            // Updater
            update_info: Arc::new(Mutex::new(None)),
            is_checking_update: Arc::new(Mutex::new(false)),
            is_installing_update: Arc::new(Mutex::new(false)),
            update_error: Arc::new(Mutex::new(None)),
            update_installed: Arc::new(Mutex::new(false)),
            // DPI test processes
            dpi_test_processes: Arc::new(Mutex::new(Vec::new())),
            // Prefix manager DPI input
            prefix_custom_dpi_input: String::new(),
        };
        // Auto-fetch on startup (GE Proton)
        let is_fetching = app.is_fetching_ge.clone();
        let versions = app.available_ge_versions.clone();
        thread::spawn(move || {
            match fetch_ge_releases() {
                Ok(releases) => {
                    *versions.lock().unwrap() = releases;
                }
                Err(e) => {
                    eprintln!("Failed to fetch GE releases: {}", e);
                }
            }
            *is_fetching.lock().unwrap() = false;
        });
        // Auto-fetch on startup (CachyOS Proton)
        let is_fetching_cachyos = app.is_fetching_cachyos.clone();
        let cachyos_versions = app.available_cachyos_versions.clone();
        thread::spawn(move || {
            match fetch_cachyos_releases() {
                Ok(releases) => {
                    *cachyos_versions.lock().unwrap() = releases;
                }
                Err(e) => {
                    eprintln!("Failed to fetch CachyOS releases: {}", e);
                }
            }
            *is_fetching_cachyos.lock().unwrap() = false;
        });
        // Auto-download Steam Runtime only if:
        // 1. First-run setup is complete (user has made their choice)
        // 2. User has opted to use SLR
        // 3. SLR is not already installed
        if app.config.first_run_completed
            && app.config.use_steam_runtime
            && !crate::wine::runtime::is_runtime_installed()
        {
            let status = app.download_status.clone();
            let progress = app.download_progress.clone();
            let is_downloading = app.is_downloading.clone();
            *is_downloading.lock().unwrap() = true;
            *status.lock().unwrap() = "Initializing Steam Runtime...".to_string();
            thread::spawn(move || {
                let cb_status = status.clone();
                let cb_progress = progress.clone();
                let cb_downloading = is_downloading.clone();
                // Create inner clones for the callback
                let cb_status_inner = cb_status.clone();
                let cb_progress_inner = cb_progress.clone();
                let callback = move |current: u64, total: u64| {
                    if total > 0 {
                        let p = current as f32 / total as f32;
                        *cb_progress_inner.lock().unwrap() = p;
                        *cb_status_inner.lock().unwrap() = format!("Downloading Runtime: {:.1}%", p * 100.0);
                    }
                };
                match crate::wine::runtime::download_runtime(callback) {
                    Ok(_) => {
                        *cb_status.lock().unwrap() = "Runtime Ready!".to_string();
                        *cb_progress.lock().unwrap() = 1.0;
                    }
                    Err(e) => {
                        *cb_status.lock().unwrap() = format!("Error downloading runtime: {}", e);
                    }
                }
                *cb_downloading.lock().unwrap() = false;
            });
        }
        // Auto-check for updates on startup
        let update_info_arc = app.update_info.clone();
        let is_checking_arc = app.is_checking_update.clone();
        thread::spawn(move || {
            match crate::updater::check_for_updates() {
                Ok(info) => {
                    *update_info_arc.lock().unwrap() = Some(info);
                }
                Err(e) => {
                    eprintln!("Failed to check for updates: {}", e);
                }
            }
            *is_checking_arc.lock().unwrap() = false;
        });
        // Ensure Winetricks and cabextract are downloaded
        let wt_path = winetricks_path_arc.clone();
        let missing_deps_for_thread = missing_deps_arc.clone();
        thread::spawn(move || {
            // Ensure cabextract (for SteamOS/immutable systems)
            match ensure_cabextract() {
                Ok(path) => println!("cabextract available at: {:?}", path),
                Err(e) => {
                    eprintln!("Failed to ensure cabextract: {}", e);
                    // Add to missing deps if download failed
                    missing_deps_for_thread
                        .lock()
                        .unwrap()
                        .push("cabextract".to_string());
                }
            }
            // Ensure winetricks
            match ensure_winetricks() {
                Ok(path) => *wt_path.lock().unwrap() = Some(path),
                Err(e) => eprintln!("Failed to download winetricks: {}", e),
            }
            // Ensure NXM Handler
            if let Err(e) = NxmHandler::setup() {
                eprintln!("Failed to setup NXM handler: {}", e);
            }
        });
        app
    }
}
impl MyApp {
    /// Rescans installed Proton versions and prefixes; if the previously
    /// selected Proton no longer exists (or none was selected), picks the
    /// first available one and persists the change.
    pub fn refresh_proton_versions(&mut self) {
        let finder = ProtonFinder::new();
        self.proton_versions = finder.find_all();
        // Check if selected proton is still valid
        let mut changed = false;
        if let Some(selected) = &self.config.selected_proton {
            let exists = self.proton_versions.iter().any(|p| &p.name == selected);
            if !exists {
                // Default to first available or None
                if let Some(first) = self.proton_versions.first() {
                    self.config.selected_proton = Some(first.name.clone());
                } else {
                    self.config.selected_proton = None;
                }
                changed = true;
            }
        } else if let Some(first) = self.proton_versions.first() {
            // If nothing was selected but we have versions, select the first one
            self.config.selected_proton = Some(first.name.clone());
            changed = true;
        }
        if changed {
            self.config.save();
        }
        // Also refresh prefixes
        self.detected_prefixes = self.prefix_manager.scan_prefixes();
    }
    /// Rescans the system for installed games.
    pub fn refresh_detected_games(&mut self) {
        let game_finder = GameFinder::new();
        self.detected_games = game_finder.find_all_games();
    }
    /// Trigger SLR download if not already installed/downloading
    pub fn start_slr_download(&self) {
        if crate::wine::runtime::is_runtime_installed() {
            return; // Already installed
        }
        if *self.is_downloading.lock().unwrap() {
            return; // Already downloading
        }
        let status = self.download_status.clone();
        let progress = self.download_progress.clone();
        let is_downloading = self.is_downloading.clone();
        *is_downloading.lock().unwrap() = true;
        *status.lock().unwrap() = "Starting Steam Runtime download...".to_string();
        thread::spawn(move || {
            let cb_status = status.clone();
            let cb_progress = progress.clone();
            let cb_downloading = is_downloading.clone();
            // Inner clones so the progress callback and the post-download
            // code can both write status without fighting over ownership.
            let cb_status_inner = cb_status.clone();
            let cb_progress_inner = cb_progress.clone();
            let callback = move |current: u64, total: u64| {
                if total > 0 {
                    let p = current as f32 / total as f32;
                    *cb_progress_inner.lock().unwrap() = p;
                    *cb_status_inner.lock().unwrap() = format!("Downloading Runtime: {:.1}%", p * 100.0);
                }
            };
            match crate::wine::runtime::download_runtime(callback) {
                Ok(_) => {
                    *cb_status.lock().unwrap() = "Runtime Ready!".to_string();
                    *cb_progress.lock().unwrap() = 1.0;
                }
                Err(e) => {
                    *cb_status.lock().unwrap() = format!("Error downloading runtime: {}", e);
                }
            }
            *cb_downloading.lock().unwrap() = false;
        });
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/nxm.rs | src/nxm.rs | use std::error::Error;
use std::fs;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use crate::config::AppConfig;
/// Zero-sized namespace for installing and managing the system-wide
/// `nxm://` URL handler that forwards Nexus Mods links to the active
/// mod manager instance.
pub struct NxmHandler;
impl NxmHandler {
    /// Install the global `nxm://` handler:
    /// 1. writes `nxm_handler.sh` into NaK's data directory,
    /// 2. writes a freedesktop `.desktop` entry into
    ///    `~/.local/share/applications`, and
    /// 3. registers it as the default `x-scheme-handler/nxm` via `xdg-mime`.
    ///
    /// # Errors
    /// Returns an error when `$HOME` is unset, a directory/file cannot be
    /// created, or `xdg-mime` cannot be executed.
    pub fn setup() -> Result<(), Box<dyn Error>> {
        let home = std::env::var("HOME")?;
        let config = AppConfig::load();
        let nak_dir = config.get_data_path();
        let script_path = nak_dir.join("nxm_handler.sh");
        let applications_dir = PathBuf::from(format!("{}/.local/share/applications", home));
        let desktop_path = applications_dir.join("nak-nxm-handler.desktop");
        // Ensure directories exist
        fs::create_dir_all(&nak_dir)?;
        fs::create_dir_all(&applications_dir)?;
        // Clean up old NXM handler variations left behind by earlier releases.
        let old_handlers = [
            "nxm-handler.desktop",
            "nak_nxm_handler.desktop",
            "NaK-nxm-handler.desktop",
            "nak-nxm.desktop",
        ];
        for old in &old_handlers {
            let old_path = applications_dir.join(old);
            if old_path.exists() {
                let _ = fs::remove_file(&old_path);
            }
        }
        // 1. Create the Handler Script
        // Uses relative path from script location to find active_nxm_game symlink
        // active_nxm_game points to prefix directory (e.g., $DATA_PATH/Prefixes/mo2_xxx)
        // which contains manager_link symlink to the install directory
        // Supports both MO2 (via nxmhandler.exe) and Vortex mod managers
        let script_content = r#"#!/bin/bash
# NaK Global NXM Handler
# Forwards nxm:// links to the active mod manager instance (MO2 or Vortex)
# Derive paths relative to script location (portable after NaK folder moves)
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ACTIVE_LINK="$SCRIPT_DIR/active_nxm_game"
if [ ! -L "$ACTIVE_LINK" ]; then
    zenity --error --text="No active mod manager instance selected in NaK!\n\nGo to NaK → Mod Managers and click 'Activate NXM' on your preferred prefix." --title="NaK Error"
    exit 1
fi
# Check if symlink target exists (prefix might have been deleted)
if [ ! -e "$ACTIVE_LINK" ]; then
    zenity --error --text="Active NXM prefix no longer exists!\n\nThe prefix was likely deleted. Go to NaK → Mod Managers and click 'Activate NXM' on another prefix." --title="NaK Error"
    exit 1
fi
# Get prefix directory and install directory via manager_link
PREFIX_DIR=$(readlink -f "$ACTIVE_LINK")
MANAGER_LINK="$PREFIX_DIR/manager_link"
if [ ! -L "$MANAGER_LINK" ]; then
    zenity --error --text="manager_link not found in prefix.\n\nPrefix: $PREFIX_DIR\n\nTry clicking 'Regenerate Scripts' in NaK for this prefix." --title="NaK Error"
    exit 1
fi
# Check if manager_link target exists (mod manager folder might have been deleted/moved)
if [ ! -e "$MANAGER_LINK" ]; then
    BROKEN_TARGET=$(readlink "$MANAGER_LINK")
    zenity --error --text="Mod manager folder no longer exists!\n\nExpected location: $BROKEN_TARGET\n\nEither restore the folder or delete this prefix in NaK and set up a new one." --title="NaK Error"
    exit 1
fi
INSTALL_DIR=$(readlink -f "$MANAGER_LINK")
# Detect mod manager type and find appropriate launcher
if [ -f "$INSTALL_DIR/nxmhandler.exe" ]; then
    # MO2 installation - use Handle NXM symlink or fall back to Launch MO2
    if [ -f "$INSTALL_DIR/Handle NXM" ]; then
        LAUNCHER="$INSTALL_DIR/Handle NXM"
    elif [ -f "$INSTALL_DIR/Launch MO2" ]; then
        LAUNCHER="$INSTALL_DIR/Launch MO2"
    else
        zenity --error --text="MO2 detected but no launch script found.\n\nLocation: $INSTALL_DIR\n\nTry clicking 'Regenerate Scripts' in NaK." --title="NaK Error"
        exit 1
    fi
elif [ -f "$INSTALL_DIR/Vortex.exe" ]; then
    # Vortex installation - use Handle NXM symlink or fall back to Launch Vortex
    if [ -f "$INSTALL_DIR/Handle NXM" ]; then
        LAUNCHER="$INSTALL_DIR/Handle NXM"
    elif [ -f "$INSTALL_DIR/Launch Vortex" ]; then
        LAUNCHER="$INSTALL_DIR/Launch Vortex"
    else
        zenity --error --text="Vortex detected but no launch script found.\n\nLocation: $INSTALL_DIR\n\nTry clicking 'Regenerate Scripts' in NaK." --title="NaK Error"
        exit 1
    fi
else
    zenity --error --text="Could not detect mod manager type.\n\nLocation: $INSTALL_DIR\n\nExpected to find nxmhandler.exe (MO2) or Vortex.exe" --title="NaK Error"
    exit 1
fi
# Run the launcher with the NXM link
"$LAUNCHER" "$1"
"#;
        let mut file = fs::File::create(&script_path)?;
        file.write_all(script_content.as_bytes())?;
        let mut perms = fs::metadata(&script_path)?.permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&script_path, perms)?;
        // 2. Create Desktop Entry
        let desktop_content = format!(
            r#"[Desktop Entry]
Type=Application
Name=NaK NXM Handler
Comment=Handle Nexus Mods links via NaK
Exec="{}" %u
Icon=utilities-terminal
Terminal=false
Categories=Game;Utility;
MimeType=x-scheme-handler/nxm;
"#,
            script_path.to_string_lossy()
        );
        let mut dfile = fs::File::create(&desktop_path)?;
        dfile.write_all(desktop_content.as_bytes())?;
        // 3. Register Mime Type (xdg-mime)
        // BUGFIX: this previously used `.spawn()?`, which never waits on the
        // child (leaving a zombie) and silently ignores its exit status.
        // `.status()` reaps the process and lets us surface a failure.
        let status = std::process::Command::new("xdg-mime")
            .arg("default")
            .arg("nak-nxm-handler.desktop")
            .arg("x-scheme-handler/nxm")
            .status()?;
        if !status.success() {
            eprintln!(
                "Warning: xdg-mime exited with status {:?}; nxm:// links may not be registered.",
                status.code()
            );
        }
        println!("NXM Handler registered.");
        Ok(())
    }
    /// Set active NXM instance - takes prefix base path
    /// (e.g., $DATA_PATH/Prefixes/mo2_xxx). Replaces the `active_nxm_game`
    /// symlink in NaK's data directory with one pointing at `prefix_base`.
    pub fn set_active_instance(prefix_base: &Path) -> Result<(), Box<dyn Error>> {
        let config = AppConfig::load();
        let link_path = config.get_data_path().join("active_nxm_game");
        // `symlink_metadata` succeeds for broken symlinks too (unlike
        // `exists()`, which follows the link), so it alone covers both the
        // "regular entry" and "dangling symlink" cases.
        if fs::symlink_metadata(&link_path).is_ok() {
            let _ = fs::remove_file(&link_path);
        }
        std::os::unix::fs::symlink(prefix_base, &link_path)?;
        println!("Set active NXM instance to {:?}", prefix_base);
        Ok(())
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/games.rs | src/games.rs | //! Game Finder - Detects Steam and Heroic games and their Wine prefixes
//!
//! This module scans for installed games and allows applying fixes
//! (dependencies and registry settings) to their Wine prefixes.
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use crate::config::AppConfig;
use crate::logging::{log_info, log_warning};
// ============================================================================
// Types
// ============================================================================
/// Which launcher a detected game was discovered through.
#[derive(Clone, Debug)]
pub enum GameSource {
    /// Steam library entry, identified by its numeric app id (as a string).
    Steam { app_id: String },
    Heroic { store: String }, // "gog", "legendary" (Epic), "nile" (Amazon)
}
/// A game found on disk, plus its Wine prefix when one exists.
#[derive(Clone, Debug)]
pub struct DetectedGame {
    // Display name (Steam manifest "name" / Heroic "title").
    pub name: String,
    // Launcher this entry came from.
    pub source: GameSource,
    // Directory the game files are installed in.
    pub install_path: PathBuf,
    // Wine prefix directory, when one was found.
    pub prefix_path: Option<PathBuf>,
    // Convenience flag mirroring `prefix_path.is_some()`.
    pub has_prefix: bool,
}
// ============================================================================
// Game Finder
// ============================================================================
/// Scans the filesystem for Steam and Heroic installations and the games
/// they manage.
pub struct GameFinder {
    // Root of the Steam installation, when detected (native/Flatpak/Snap).
    steam_path: Option<PathBuf>,
    // Heroic config directory, when detected (native/Flatpak).
    heroic_config_path: Option<PathBuf>,
}
impl Default for GameFinder {
    /// Equivalent to [`GameFinder::new`]: probes the standard Steam and
    /// Heroic locations.
    fn default() -> Self {
        Self::new()
    }
}
impl GameFinder {
    /// Locate Steam and Heroic installation directories by probing the
    /// well-known native/Flatpak/Snap locations under `$HOME`.
    #[must_use]
    pub fn new() -> Self {
        let home = std::env::var("HOME").unwrap_or_default();
        // Find Steam path (native, XDG, Flatpak, Snap)
        let steam_paths = [
            format!("{}/.steam/steam", home),
            format!("{}/.local/share/Steam", home),
            format!("{}/.var/app/com.valvesoftware.Steam/.steam/steam", home),
            format!("{}/snap/steam/common/.steam/steam", home),
        ];
        let steam_path = steam_paths
            .iter()
            .map(PathBuf::from)
            .find(|p| p.exists());
        // Find Heroic config path (native, Flatpak)
        let heroic_paths = [
            format!("{}/.config/heroic", home),
            format!("{}/.var/app/com.heroicgameslauncher.hgl/config/heroic", home),
        ];
        let heroic_config_path = heroic_paths
            .iter()
            .map(PathBuf::from)
            .find(|p| p.exists());
        Self {
            steam_path,
            heroic_config_path,
        }
    }
    /// Stable identifier used to deduplicate detected games: Steam games by
    /// app id, Heroic games by store + title. (Extracted: this `match` was
    /// previously duplicated verbatim in both loops of `find_all_games`.)
    fn dedupe_key(game: &DetectedGame) -> String {
        match &game.source {
            GameSource::Steam { app_id } => app_id.clone(),
            GameSource::Heroic { store } => format!("heroic_{}_{}", store, game.name),
        }
    }
    /// Find all detectable games from Steam and Heroic, deduplicated
    /// (keeping the first occurrence) and sorted case-insensitively by name.
    pub fn find_all_games(&self) -> Vec<DetectedGame> {
        // Gather candidates from both launchers; Steam first, matching the
        // original precedence for duplicates.
        let mut candidates = Vec::new();
        if let Some(ref steam_path) = self.steam_path {
            candidates.extend(self.find_steam_games(steam_path));
        }
        if let Some(ref heroic_path) = self.heroic_config_path {
            candidates.extend(self.find_heroic_games(heroic_path));
        }
        // Deduplicate by key (the same game can appear in multiple library
        // folders); `insert` returns false for keys already seen.
        let mut seen_ids: std::collections::HashSet<String> = std::collections::HashSet::new();
        let mut games: Vec<DetectedGame> = candidates
            .into_iter()
            .filter(|g| seen_ids.insert(Self::dedupe_key(g)))
            .collect();
        // Stable case-insensitive sort; the cached key variant lowercases
        // each name once instead of once per comparison.
        games.sort_by_cached_key(|g| g.name.to_lowercase());
        games
    }
    /// Find Steam games by scanning the `steamapps` directory of every
    /// library folder for `appmanifest_*.acf` files.
    fn find_steam_games(&self, steam_path: &Path) -> Vec<DetectedGame> {
        let mut games = Vec::new();
        // Get all library folders
        let library_folders = self.get_steam_library_folders(steam_path);
        for library_path in library_folders {
            let steamapps = library_path.join("steamapps");
            let common = steamapps.join("common");
            let compatdata = steamapps.join("compatdata");
            if !common.exists() {
                continue;
            }
            // Read app manifests to get app IDs and names
            if let Ok(entries) = fs::read_dir(&steamapps) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                        if name.starts_with("appmanifest_") && name.ends_with(".acf") {
                            if let Some(game) = self.parse_steam_manifest(&path, &common, &compatdata) {
                                games.push(game);
                            }
                        }
                    }
                }
            }
        }
        log_info(&format!("Found {} Steam games", games.len()));
        games
    }
    /// Get all Steam library folders from `libraryfolders.vdf` (always
    /// includes the main Steam directory itself).
    fn get_steam_library_folders(&self, steam_path: &Path) -> Vec<PathBuf> {
        let mut folders = vec![steam_path.to_path_buf()];
        let vdf_path = steam_path.join("steamapps/libraryfolders.vdf");
        if let Ok(content) = fs::read_to_string(&vdf_path) {
            // Simple VDF parsing - look for "path" entries
            for line in content.lines() {
                let trimmed = line.trim();
                if trimmed.starts_with("\"path\"") {
                    // Extract path value: "path" "/path/to/library"
                    if let Some(path_str) = trimmed.split('"').nth(3) {
                        let path = PathBuf::from(path_str);
                        if path.exists() && !folders.contains(&path) {
                            folders.push(path);
                        }
                    }
                }
            }
        }
        folders
    }
    /// Parse a Steam app manifest (.acf) file into a [`DetectedGame`].
    /// Returns `None` for tools (Proton, runtimes) and uninstalled entries.
    fn parse_steam_manifest(
        &self,
        manifest_path: &Path,
        common_path: &Path,
        compatdata_path: &Path,
    ) -> Option<DetectedGame> {
        let content = fs::read_to_string(manifest_path).ok()?;
        let mut app_id = None;
        let mut name = None;
        let mut install_dir = None;
        // Line-oriented VDF scan: values sit in the 4th quote-delimited field.
        for line in content.lines() {
            let trimmed = line.trim();
            if trimmed.starts_with("\"appid\"") {
                app_id = trimmed.split('"').nth(3).map(String::from);
            } else if trimmed.starts_with("\"name\"") {
                name = trimmed.split('"').nth(3).map(String::from);
            } else if trimmed.starts_with("\"installdir\"") {
                install_dir = trimmed.split('"').nth(3).map(String::from);
            }
        }
        let app_id = app_id?;
        let name = name?;
        let install_dir = install_dir?;
        // Skip Proton and Steam tools
        let skip_prefixes = ["Proton", "Steam Linux Runtime", "Steamworks"];
        if skip_prefixes.iter().any(|p| name.starts_with(p)) {
            return None;
        }
        let install_path = common_path.join(&install_dir);
        if !install_path.exists() {
            return None;
        }
        // Check for Wine prefix
        let prefix_path = compatdata_path.join(&app_id).join("pfx");
        let has_prefix = prefix_path.exists();
        Some(DetectedGame {
            name,
            source: GameSource::Steam { app_id },
            install_path,
            prefix_path: if has_prefix { Some(prefix_path) } else { None },
            has_prefix,
        })
    }
    /// Find Heroic games (GOG, Epic/Legendary).
    fn find_heroic_games(&self, heroic_path: &Path) -> Vec<DetectedGame> {
        let mut games = Vec::new();
        // Check GOG games
        games.extend(self.find_heroic_gog_games(heroic_path));
        // Check Epic/Legendary games
        games.extend(self.find_heroic_legendary_games(heroic_path));
        log_info(&format!("Found {} Heroic games", games.len()));
        games
    }
    /// Find GOG games from Heroic's `gog_store/installed.json`.
    fn find_heroic_gog_games(&self, heroic_path: &Path) -> Vec<DetectedGame> {
        let mut games = Vec::new();
        let gog_installed = heroic_path.join("gog_store/installed.json");
        if let Ok(content) = fs::read_to_string(&gog_installed) {
            if let Ok(json) = serde_json::from_str::<serde_json::Value>(&content) {
                if let Some(installed) = json.get("installed").and_then(|v| v.as_array()) {
                    for game in installed {
                        if let Some(game_info) = self.parse_heroic_game(game, heroic_path, "gog") {
                            games.push(game_info);
                        }
                    }
                }
            }
        }
        games
    }
    /// Find Epic/Legendary games from Heroic's
    /// `legendaryConfig/legendary/installed.json` (a map keyed by app name).
    fn find_heroic_legendary_games(&self, heroic_path: &Path) -> Vec<DetectedGame> {
        let mut games = Vec::new();
        let legendary_installed = heroic_path.join("legendaryConfig/legendary/installed.json");
        if let Ok(content) = fs::read_to_string(&legendary_installed) {
            if let Ok(json) = serde_json::from_str::<HashMap<String, serde_json::Value>>(&content) {
                for (_app_name, game) in json {
                    if let Some(game_info) = self.parse_heroic_game(&game, heroic_path, "legendary") {
                        games.push(game_info);
                    }
                }
            }
        }
        games
    }
    /// Parse one Heroic game entry (GOG and Legendary use slightly different
    /// field names, hence the `or_else` fallbacks).
    fn parse_heroic_game(
        &self,
        game: &serde_json::Value,
        heroic_path: &Path,
        store: &str,
    ) -> Option<DetectedGame> {
        let title = game.get("title")
            .or_else(|| game.get("name"))
            .and_then(|v| v.as_str())?;
        let install_path_str = game.get("install_path")
            .or_else(|| game.get("install_dir"))
            .and_then(|v| v.as_str())?;
        let install_path = PathBuf::from(install_path_str);
        if !install_path.exists() {
            return None;
        }
        // Find Wine prefix - Heroic stores them in various locations
        let app_name = game.get("appName")
            .or_else(|| game.get("app_name"))
            .and_then(|v| v.as_str())
            .unwrap_or(title);
        // Check common prefix locations
        let prefix_locations = [
            heroic_path.join(format!("Prefixes/{}", app_name)),
            heroic_path.join(format!("prefixes/{}", app_name)),
            heroic_path.join(format!("GamesConfig/{}/pfx", app_name)),
        ];
        let prefix_path = prefix_locations.into_iter().find(|p| p.exists());
        let has_prefix = prefix_path.is_some();
        Some(DetectedGame {
            name: title.to_string(),
            source: GameSource::Heroic { store: store.to_string() },
            install_path,
            prefix_path,
            has_prefix,
        })
    }
}
// ============================================================================
// Game Fixer - Apply fixes to game prefixes
// ============================================================================
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use crate::wine::{DependencyManager, ProtonInfo};
use crate::installers::WINE_SETTINGS_REG;
/// Zero-sized namespace for applying dependency and registry fixes to a
/// detected game's Wine prefix.
pub struct GameFixer;
impl GameFixer {
    /// Apply fixes to a game's Wine prefix
    ///
    /// Installs the requested winetricks verbs one at a time (so a single
    /// failure does not abort the rest) and optionally imports NaK's
    /// standard Wine registry settings. `cancel_flag` is polled before each
    /// dependency; when set, the function returns `Err("Cancelled")`.
    /// Errors out immediately when the game has no Wine prefix.
    pub fn apply_fixes(
        game: &DetectedGame,
        proton: &ProtonInfo,
        winetricks_path: &Path,
        deps_to_install: &[&str],
        apply_registry: bool,
        log_callback: impl Fn(String) + Send + Sync + 'static,
        cancel_flag: Arc<AtomicBool>,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let prefix_path = game.prefix_path.as_ref()
            .ok_or("Game has no Wine prefix")?;
        log_callback(format!("Applying fixes to: {}", game.name));
        log_callback(format!("Prefix: {}", prefix_path.display()));
        // Install dependencies
        if !deps_to_install.is_empty() {
            log_callback("Installing dependencies...".to_string());
            let dep_mgr = DependencyManager::new(winetricks_path.to_path_buf());
            for (i, dep) in deps_to_install.iter().enumerate() {
                if cancel_flag.load(std::sync::atomic::Ordering::Relaxed) {
                    return Err("Cancelled".into());
                }
                log_callback(format!("Installing {}/{}: {}...", i + 1, deps_to_install.len(), dep));
                let log_cb = |msg: String| {
                    // Inner callback - we can't easily forward here without more complexity
                    println!("{}", msg);
                };
                // Per-dependency failures are logged but deliberately
                // non-fatal, so the remaining verbs still get installed.
                if let Err(e) = dep_mgr.install_dependencies(
                    prefix_path,
                    proton,
                    &[dep],
                    log_cb,
                    cancel_flag.clone(),
                ) {
                    log_callback(format!("Warning: Failed to install {}: {}", dep, e));
                    log_warning(&format!("Failed to install {} for {}: {}", dep, game.name, e));
                }
            }
        }
        // Apply registry settings
        if apply_registry {
            log_callback("Applying registry settings...".to_string());
            Self::apply_registry_fixes(prefix_path, proton, &log_callback)?;
        }
        log_callback(format!("Fixes applied to {}!", game.name));
        Ok(())
    }
    /// Apply Wine registry fixes to a prefix
    ///
    /// Writes NaK's standard settings (WINE_SETTINGS_REG) to a temp .reg
    /// file and imports it with the selected Proton's `wine regedit`.
    /// A missing wine binary or a regedit failure is reported through
    /// `log_callback` but is NOT treated as an error (returns `Ok`).
    fn apply_registry_fixes(
        prefix_path: &Path,
        proton: &ProtonInfo,
        log_callback: &impl Fn(String),
    ) -> Result<(), Box<dyn std::error::Error>> {
        use std::io::Write;
        let config = AppConfig::load();
        let tmp_dir = config.get_data_path().join("tmp");
        fs::create_dir_all(&tmp_dir)?;
        let reg_file = tmp_dir.join("game_fix_settings.reg");
        let mut file = fs::File::create(&reg_file)?;
        file.write_all(WINE_SETTINGS_REG.as_bytes())?;
        // Get wine binary path
        let wine_bin = proton.path.join("files/bin/wine");
        if !wine_bin.exists() {
            // Best-effort: bail out quietly rather than failing the fix run.
            log_callback(format!("Warning: Wine binary not found at {:?}", wine_bin));
            return Ok(());
        }
        log_callback("Running wine regedit...".to_string());
        let status = std::process::Command::new(&wine_bin)
            .arg("regedit")
            .arg(&reg_file)
            .env("WINEPREFIX", prefix_path)
            // Pin the loader path to system libraries — presumably to keep
            // wine from picking up incompatible bundled libs; TODO confirm.
            .env(
                "LD_LIBRARY_PATH",
                "/usr/lib:/usr/lib/x86_64-linux-gnu:/lib:/lib/x86_64-linux-gnu",
            )
            .env("PROTON_NO_XALIA", "1")
            .status();
        match status {
            Ok(s) if s.success() => {
                log_callback("Registry settings applied successfully".to_string());
            }
            Ok(s) => {
                log_callback(format!("Warning: regedit exited with code {:?}", s.code()));
            }
            Err(e) => {
                log_callback(format!("Warning: Failed to run regedit: {}", e));
            }
        }
        // Cleanup
        let _ = fs::remove_file(&reg_file);
        Ok(())
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/scripts.rs | src/scripts.rs | //! Launch script generation for mod managers
//!
//! This module provides unified script generation for launching mod managers
//! and handling NXM links. Scripts are generated with optional SLR (Steam Linux Runtime)
//! container support.
use std::error::Error;
use std::fs;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use crate::config::AppConfig;
use crate::utils::detect_steam_path;
use crate::wine::runtime;
// ============================================================================
// Path Setup Constants (for portable scripts)
// ============================================================================
/// Path setup block for scripts using Steam Linux Runtime.
/// Everything is resolved relative to the script's own location so the NaK
/// data directory stays relocatable; `ENTRY_POINT` is the SLR (sniper)
/// container launcher used only by the containerized variant.
const PATH_SETUP_SLR: &str = r#"# Derive paths from script location (portable after NaK moves)
SCRIPT_DIR="$(cd "$(dirname "$(realpath "$0")")" && pwd)"
PREFIX="$SCRIPT_DIR/pfx"
COMPAT_DATA="$SCRIPT_DIR"
PROTON_GE="$SCRIPT_DIR/../../ProtonGE/active"
ENTRY_POINT="$SCRIPT_DIR/../../Runtime/SteamLinuxRuntime_sniper/_v2-entry-point"
"#;
/// Path setup block for scripts running directly (no SLR).
/// Same relative-path scheme as [`PATH_SETUP_SLR`] but without the
/// container entry point.
const PATH_SETUP_DIRECT: &str = r#"# Derive paths from script location (portable after NaK moves)
SCRIPT_DIR="$(cd "$(dirname "$(realpath "$0")")" && pwd)"
PREFIX="$SCRIPT_DIR/pfx"
COMPAT_DATA="$SCRIPT_DIR"
PROTON_GE="$SCRIPT_DIR/../../ProtonGE/active"
"#;
// ============================================================================
// Script Generator
// ============================================================================
/// Stateless generator for the launch/NXM/utility shell scripts that NaK
/// writes next to each managed prefix.
pub struct ScriptGenerator;
impl ScriptGenerator {
/// Check if we should use Steam Linux Runtime based on config
fn should_use_slr() -> bool {
AppConfig::load().use_steam_runtime
}
/// Detect if an existing script uses SLR by reading its content
pub fn script_uses_slr(script_path: &Path) -> Option<bool> {
let content = fs::read_to_string(script_path).ok()?;
// Check for SLR-specific markers
if content.contains("ENTRY_POINT=") && content.contains("--verb=waitforexitandrun") {
Some(true)
} else if content.contains("# Running WITHOUT Steam Linux Runtime") {
Some(false)
} else if content.contains("PROTON_GE=") {
// Has proton but no entry point = direct mode
Some(false)
} else {
None
}
}
/// Get common environment variables block for scripts
fn get_env_block(use_slr: bool) -> String {
if use_slr {
r#"# Set environment variables for the Container
export WINEPREFIX="$PREFIX"
export STEAM_COMPAT_DATA_PATH="$COMPAT_DATA"
export STEAM_COMPAT_CLIENT_INSTALL_PATH="$STEAM_PATH"
export PROTON_DIST_PATH="$PROTON_GE"
# Set GAMEID for protonfixes
export GAMEID="non-steam-game"
# DotNet Fixes
export DOTNET_ROOT=""
export DOTNET_MULTILEVEL_LOOKUP=0
export MO2_VFS_LOG_LEVEL=0
# DXVK Config - Disable Graphics Pipeline Library for compatibility
export DXVK_CONFIG="dxvk.enableGraphicsPipelineLibrary = False""#.to_string()
} else {
r#"# Set environment variables
export WINEPREFIX="$PREFIX"
export STEAM_COMPAT_DATA_PATH="$COMPAT_DATA"
export STEAM_COMPAT_CLIENT_INSTALL_PATH="$STEAM_PATH"
# Set GAMEID for protonfixes
export GAMEID="non-steam-game"
# DotNet Fixes
export DOTNET_ROOT=""
export DOTNET_MULTILEVEL_LOOKUP=0
export MO2_VFS_LOG_LEVEL=0
# DXVK Config - Disable Graphics Pipeline Library for compatibility
export DXVK_CONFIG="dxvk.enableGraphicsPipelineLibrary = False""#.to_string()
}
}
/// Get the entry point check block for SLR mode
fn get_slr_check_block() -> &'static str {
r#"
# Check environment
if [ ! -f "$ENTRY_POINT" ]; then
echo "ERROR: Steam Runtime entry point not found at $ENTRY_POINT"
exit 1
fi
"#
}
/// Get the auto game registry fix block for MO2 portable mode
/// NOTE: Disabled - auto registry fix was removed as it caused issues for users
/// with malformed registries or different setups. Users can still run the
/// standalone game_registry_fix.sh script manually if needed.
fn get_mo2_registry_fix_block(_use_slr: bool) -> String {
String::new()
}
/// Generate a launch command based on SLR setting
fn get_launch_command(use_slr: bool, exe_var: &str, extra_args: &str) -> String {
if use_slr {
format!(
r#""$ENTRY_POINT" --verb=waitforexitandrun -- "$PROTON_GE/proton" run "${}" {}"#,
exe_var, extra_args
)
} else {
format!(r#""$PROTON_GE/proton" run "${}" {}"#, exe_var, extra_args)
}
}
    /// Generate a manager launch script (MO2 or Vortex)
    /// If `use_slr_override` is Some, use that value instead of the global config
    /// Scripts use relative paths from their location so they work after NaK is moved
    ///
    /// Writes `start.sh` into `output_dir` and returns its path. Fails when
    /// SLR mode is requested but the Steam Linux Runtime is not installed.
    fn generate_launch_script_with_override(
        _prefix_path: &Path,
        exe: &Path,
        _proton_path: &Path, // No longer used - we use the active symlink
        output_dir: &Path,
        manager_name: &str,
        use_slr_override: Option<bool>,
    ) -> Result<PathBuf, Box<dyn Error>> {
        let steam_path = detect_steam_path();
        let use_slr = use_slr_override.unwrap_or_else(Self::should_use_slr);
        let runtime_entry = runtime::get_entry_point();
        // Anything that is not "MO2" is treated as Vortex.
        let exe_var_name = if manager_name == "MO2" { "MO2_EXE" } else { "VORTEX_EXE" };
        let comment_name = if manager_name == "MO2" { "MO2" } else { "Vortex" };
        // Only include auto registry fix for MO2 (portable mode)
        // (currently always empty — see get_mo2_registry_fix_block)
        let registry_fix_block = if manager_name == "MO2" {
            Self::get_mo2_registry_fix_block(use_slr)
        } else {
            String::new()
        };
        let script_content = if use_slr {
            // Just verify SLR exists during generation (the script uses relative path)
            let _entry_point = runtime_entry.ok_or(
                "Steam Linux Runtime (Sniper) not found! Please download it in Proton Picker or disable it in Settings."
            )?;
            format!(
                r#"#!/bin/bash
# NaK Generated Launch Script for {comment_name}
# Running inside Steam Linux Runtime (Sniper) Container
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{path_setup}
{exe_var_name}='{exe}'
STEAM_PATH='{steam_path}'
{slr_check}
{env_block}
{registry_fix}
echo "Launching {comment_name} (Containerized)..."
{launch_cmd}
"#,
                comment_name = comment_name,
                path_setup = PATH_SETUP_SLR,
                exe_var_name = exe_var_name,
                exe = exe.to_string_lossy(),
                steam_path = steam_path,
                slr_check = Self::get_slr_check_block(),
                env_block = Self::get_env_block(true),
                registry_fix = registry_fix_block,
                launch_cmd = Self::get_launch_command(true, exe_var_name, r#""$@""#)
            )
        } else {
            format!(
                r#"#!/bin/bash
# NaK Generated Launch Script for {comment_name}
# Running WITHOUT Steam Linux Runtime (Direct Proton)
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{path_setup}
{exe_var_name}='{exe}'
STEAM_PATH='{steam_path}'
{env_block}
{registry_fix}
echo "Launching {comment_name} (Direct Proton)..."
{launch_cmd}
"#,
                comment_name = comment_name,
                path_setup = PATH_SETUP_DIRECT,
                exe_var_name = exe_var_name,
                exe = exe.to_string_lossy(),
                steam_path = steam_path,
                env_block = Self::get_env_block(false),
                registry_fix = registry_fix_block,
                launch_cmd = Self::get_launch_command(false, exe_var_name, r#""$@""#)
            )
        };
        Self::write_script(output_dir, "start.sh", &script_content)
    }
/// Write a script file with proper permissions
fn write_script(output_dir: &Path, filename: &str, content: &str) -> Result<PathBuf, Box<dyn Error>> {
let script_path = output_dir.join(filename);
let mut file = fs::File::create(&script_path)?;
file.write_all(content.as_bytes())?;
let mut perms = fs::metadata(&script_path)?.permissions();
perms.set_mode(0o755);
fs::set_permissions(&script_path, perms)?;
Ok(script_path)
}
// ========================================================================
// Public API - Maintains backward compatibility
// ========================================================================
    /// Generate NXM handler launch script for MO2
    /// Uses relative paths so it works after NaK is moved
    ///
    /// Writes `nxm_handler.sh` into `script_output_dir`; the script forwards
    /// the nxm:// link argument to MO2's nxmhandler.exe through Proton.
    pub fn generate_mo2_nxm_script(
        _prefix_path: &Path,
        nxm_handler_exe: &Path,
        _proton_ge_path: &Path, // No longer used - we use the active symlink
        script_output_dir: &Path,
    ) -> Result<PathBuf, Box<dyn Error>> {
        let steam_path = detect_steam_path();
        let use_slr = Self::should_use_slr();
        let runtime_entry = runtime::get_entry_point();
        // Build either the containerized (SLR) or direct-Proton variant.
        let script_content = if use_slr {
            let _entry_point = runtime_entry.ok_or(
                "Steam Linux Runtime (Sniper) not found! Please download it in Proton Picker or disable it in Settings."
            )?;
            format!(
                r#"#!/bin/bash
# NaK Generated NXM Handler Script for MO2
# Running inside Steam Linux Runtime (Sniper) Container
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{path_setup}
NXM_HANDLER='{exe}'
STEAM_PATH='{steam_path}'
{slr_check}
{env_block}
echo "Handling NXM link via MO2 nxmhandler..."
{launch_cmd}
"#,
                path_setup = PATH_SETUP_SLR,
                exe = nxm_handler_exe.to_string_lossy(),
                steam_path = steam_path,
                slr_check = Self::get_slr_check_block(),
                env_block = Self::get_env_block(true),
                launch_cmd = Self::get_launch_command(true, "NXM_HANDLER", r#""$@""#)
            )
        } else {
            format!(
                r#"#!/bin/bash
# NaK Generated NXM Handler Script for MO2
# Running WITHOUT Steam Linux Runtime (Direct Proton)
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{path_setup}
NXM_HANDLER='{exe}'
STEAM_PATH='{steam_path}'
{env_block}
echo "Handling NXM link via MO2 nxmhandler (Direct Proton)..."
{launch_cmd}
"#,
                path_setup = PATH_SETUP_DIRECT,
                exe = nxm_handler_exe.to_string_lossy(),
                steam_path = steam_path,
                env_block = Self::get_env_block(false),
                launch_cmd = Self::get_launch_command(false, "NXM_HANDLER", r#""$@""#)
            )
        };
        Self::write_script(script_output_dir, "nxm_handler.sh", &script_content)
    }
    /// Generate NXM handler launch script for Vortex
    /// Vortex.exe itself handles NXM links when passed as an argument
    /// Uses relative paths so it works after NaK is moved
    ///
    /// Writes `nxm_handler.sh` into `script_output_dir`; note the `-d` flag
    /// passed before the link so Vortex downloads rather than launches.
    pub fn generate_vortex_nxm_script(
        _prefix_path: &Path,
        vortex_exe: &Path,
        _proton_ge_path: &Path, // No longer used - we use the active symlink
        script_output_dir: &Path,
    ) -> Result<PathBuf, Box<dyn Error>> {
        let steam_path = detect_steam_path();
        let use_slr = Self::should_use_slr();
        let runtime_entry = runtime::get_entry_point();
        // Build either the containerized (SLR) or direct-Proton variant.
        let script_content = if use_slr {
            let _entry_point = runtime_entry.ok_or(
                "Steam Linux Runtime (Sniper) not found! Please download it in Proton Picker or disable it in Settings."
            )?;
            format!(
                r#"#!/bin/bash
# NaK Generated NXM Handler Script for Vortex
# Running inside Steam Linux Runtime (Sniper) Container
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{path_setup}
VORTEX_EXE='{exe}'
STEAM_PATH='{steam_path}'
{slr_check}
{env_block}
echo "Handling NXM link via Vortex..."
{launch_cmd}
"#,
                path_setup = PATH_SETUP_SLR,
                exe = vortex_exe.to_string_lossy(),
                steam_path = steam_path,
                slr_check = Self::get_slr_check_block(),
                env_block = Self::get_env_block(true),
                launch_cmd = Self::get_launch_command(true, "VORTEX_EXE", r#"-d "$@""#)
            )
        } else {
            format!(
                r#"#!/bin/bash
# NaK Generated NXM Handler Script for Vortex
# Running WITHOUT Steam Linux Runtime (Direct Proton)
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{path_setup}
VORTEX_EXE='{exe}'
STEAM_PATH='{steam_path}'
{env_block}
echo "Handling NXM link via Vortex (Direct Proton)..."
{launch_cmd}
"#,
                path_setup = PATH_SETUP_DIRECT,
                exe = vortex_exe.to_string_lossy(),
                steam_path = steam_path,
                env_block = Self::get_env_block(false),
                launch_cmd = Self::get_launch_command(false, "VORTEX_EXE", r#"-d "$@""#)
            )
        };
        Self::write_script(script_output_dir, "nxm_handler.sh", &script_content)
    }
/// Generate launch script for MO2
pub fn generate_mo2_launch_script(
prefix_path: &Path,
mo2_exe: &Path,
proton_ge_path: &Path,
_install_dir: &Path,
script_output_dir: &Path,
) -> Result<PathBuf, Box<dyn Error>> {
Self::generate_launch_script_with_override(prefix_path, mo2_exe, proton_ge_path, script_output_dir, "MO2", None)
}
/// Generate launch script for MO2 with explicit SLR setting
pub fn generate_mo2_launch_script_with_slr(
prefix_path: &Path,
mo2_exe: &Path,
proton_ge_path: &Path,
_install_dir: &Path,
script_output_dir: &Path,
use_slr: bool,
) -> Result<PathBuf, Box<dyn Error>> {
Self::generate_launch_script_with_override(prefix_path, mo2_exe, proton_ge_path, script_output_dir, "MO2", Some(use_slr))
}
/// Generate launch script for Vortex
pub fn generate_vortex_launch_script(
prefix_path: &Path,
vortex_exe: &Path,
proton_ge_path: &Path,
_install_dir: &Path,
script_output_dir: &Path,
) -> Result<PathBuf, Box<dyn Error>> {
Self::generate_launch_script_with_override(prefix_path, vortex_exe, proton_ge_path, script_output_dir, "Vortex", None)
}
/// Generate launch script for Vortex with explicit SLR setting
pub fn generate_vortex_launch_script_with_slr(
prefix_path: &Path,
vortex_exe: &Path,
proton_ge_path: &Path,
_install_dir: &Path,
script_output_dir: &Path,
use_slr: bool,
) -> Result<PathBuf, Box<dyn Error>> {
Self::generate_launch_script_with_override(prefix_path, vortex_exe, proton_ge_path, script_output_dir, "Vortex", Some(use_slr))
}
    /// Generate kill prefix script
    /// Uses relative paths so it works after NaK is moved
    ///
    /// Writes a hidden `.kill_prefix.sh` that calls `wineserver -k` for this
    /// prefix — preferring the active Proton's wineserver and falling back to
    /// whatever `wineserver` is on PATH.
    pub fn generate_kill_prefix_script(
        _prefix_path: &Path,
        _proton_ge_path: &Path, // No longer used - we use the active symlink
        script_output_dir: &Path,
    ) -> Result<PathBuf, Box<dyn Error>> {
        let script_content = r#"#!/bin/bash
# NaK Kill Prefix Script
# Uses active Proton symlink - change Proton in NaK's Proton Picker
# Derive paths from script location (portable after NaK moves)
SCRIPT_DIR="$(cd "$(dirname "$(realpath "$0")")" && pwd)"
PREFIX="$SCRIPT_DIR/pfx"
PROTON_GE="$SCRIPT_DIR/../../ProtonGE/active"
WINESERVER="$PROTON_GE/files/bin/wineserver"
echo "Killing Wine processes for prefix: $PREFIX"
export WINEPREFIX="$PREFIX"
if [ -f "$WINESERVER" ]; then
    "$WINESERVER" -k
else
    wineserver -k
fi
echo "Done."
"#;
        Self::write_script(script_output_dir, ".kill_prefix.sh", script_content)
    }
/// Get terminal auto-launch block for interactive scripts
fn get_terminal_relaunch_block() -> &'static str {
r#"
# Auto-launch in terminal if double-clicked from file manager
if [ ! -t 0 ]; then
# Not running in a terminal, try to open one
for term in konsole gnome-terminal xfce4-terminal kitty alacritty xterm; do
if command -v "$term" &> /dev/null; then
case "$term" in
konsole)
exec "$term" --hold -e "$0" "$@"
;;
gnome-terminal)
exec "$term" -- "$0" "$@"
;;
xfce4-terminal)
exec "$term" --hold -e "$0" "$@"
;;
kitty)
exec "$term" --hold "$0" "$@"
;;
alacritty)
exec "$term" --hold -e "$0" "$@"
;;
xterm)
exec "$term" -hold -e "$0" "$@"
;;
esac
fi
done
echo "ERROR: No terminal emulator found. Please run this script from a terminal."
exit 1
fi
"#
}
/// Generate game registry fix script
/// Uses relative paths so it works after NaK is moved
///
/// The emitted script has two parts: a mode-specific header — Steam Linux
/// Runtime (containerized) or direct Proton — that defines a `run_proton`
/// shell function, followed by the shared interactive body
/// `REGISTRY_FIX_BODY`. Both `_prefix_path` and `_proton_ge_path` are
/// unused today and kept only for call-site compatibility.
///
/// # Errors
/// Returns an error when SLR mode is enabled but the Sniper runtime entry
/// point cannot be found, or when writing the script file fails.
pub fn generate_fix_game_registry_script(
    _prefix_path: &Path,
    _proton_ge_path: &Path, // No longer used - we use the active symlink
    instance_name: &str,
    script_output_dir: &Path,
) -> Result<PathBuf, Box<dyn Error>> {
    let steam_path = detect_steam_path();
    let use_slr = Self::should_use_slr();
    let runtime_entry = runtime::get_entry_point();
    let terminal_block = Self::get_terminal_relaunch_block();
    let header = if use_slr {
        // Fail early with an actionable message if SLR is requested but the
        // Sniper runtime is not installed; the script itself re-resolves the
        // entry point at run time via the {path_setup} block.
        let _entry_point = runtime_entry.ok_or(
            "Steam Linux Runtime (Sniper) not found! Please download it in Proton Picker or disable it in Settings."
        )?;
        format!(
            r#"#!/bin/bash
# NaK Game Registry Fixer
# Instance: {instance_name}
# Mode: Steam Linux Runtime (Containerized)
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{terminal_block}
{path_setup}
STEAM_PATH='{steam_path}'
USE_SLR=1
echo "=================================================="
echo "NaK Game Registry Fixer"
echo "Instance: {instance_name}"
echo "=================================================="
echo ""
{slr_check}
{env_block}
# Function to run proton command
run_proton() {{
"$ENTRY_POINT" --verb=waitforexitandrun -- "$PROTON_GE/proton" run "$@"
}}
"#,
            instance_name = instance_name,
            terminal_block = terminal_block,
            path_setup = PATH_SETUP_SLR,
            steam_path = steam_path,
            slr_check = Self::get_slr_check_block(),
            env_block = Self::get_env_block(true)
        )
    } else {
        format!(
            r#"#!/bin/bash
# NaK Game Registry Fixer
# Instance: {instance_name}
# Mode: Direct Proton (No Steam Linux Runtime)
# Uses active Proton symlink - change Proton in NaK's Proton Picker
{terminal_block}
{path_setup}
STEAM_PATH='{steam_path}'
USE_SLR=0
echo "=================================================="
echo "NaK Game Registry Fixer"
echo "Instance: {instance_name}"
echo "=================================================="
echo ""
{env_block}
# Function to run proton command
run_proton() {{
"$PROTON_GE/proton" run "$@"
}}
"#,
            instance_name = instance_name,
            terminal_block = terminal_block,
            path_setup = PATH_SETUP_DIRECT,
            steam_path = steam_path,
            env_block = Self::get_env_block(false)
        )
    };
    // Header (mode-specific) + shared interactive body.
    let full_script = format!("{}{}", header, REGISTRY_FIX_BODY);
    Self::write_script(script_output_dir, "game_registry_fix.sh", &full_script)
}
}
// ============================================================================
// Registry Fix Script Body (shared)
// ============================================================================
// Shared interactive body appended after a mode-specific header (which must
// define `run_proton`). Walks the user through selecting a Bethesda game,
// converting a Linux install path to a Wine `Z:\` path, and writing the
// game's "Installed Path" value into both the 32-bit and 64-bit
// (Wow6432Node) registry views of the prefix, then optionally verifies it.
// The body is a verbatim runtime string — do not edit its contents.
const REGISTRY_FIX_BODY: &str = r#"
# Game registry configurations
# Format: "Game Name|Registry Path|Value Name"
declare -a GAMES=(
"Enderal|Software\SureAI\Enderal|Install_Path"
"Enderal Special Edition|Software\SureAI\Enderal SE|installed path"
"Fallout 3|Software\Bethesda Softworks\Fallout3|Installed Path"
"Fallout 4|Software\Bethesda Softworks\Fallout4|Installed Path"
"Fallout 4 VR|Software\Bethesda Softworks\Fallout 4 VR|Installed Path"
"Fallout New Vegas|Software\Bethesda Softworks\FalloutNV|Installed Path"
"Morrowind|Software\Bethesda Softworks\Morrowind|Installed Path"
"Oblivion|Software\Bethesda Softworks\Oblivion|Installed Path"
"Skyrim|Software\Bethesda Softworks\Skyrim|Installed Path"
"Skyrim Special Edition|Software\Bethesda Softworks\Skyrim Special Edition|Installed Path"
"Skyrim VR|Software\Bethesda Softworks\Skyrim VR|Installed Path"
"Starfield|Software\Bethesda Softworks\Starfield|Installed Path"
)
echo "Which Bethesda game are you modding?"
echo ""
for i in "${!GAMES[@]}"; do
game_name=$(echo "${GAMES[$i]}" | cut -d'|' -f1)
echo " $((i+1)). $game_name"
done
echo ""
read -p "Enter number (1-${#GAMES[@]}): " choice
# Validate input
if ! [[ "$choice" =~ ^[0-9]+$ ]] || [ "$choice" -lt 1 ] || [ "$choice" -gt "${#GAMES[@]}" ]; then
echo "ERROR: Invalid selection"
exit 1
fi
# Get selected game info
selected_game="${GAMES[$((choice-1))]}"
GAME_NAME=$(echo "$selected_game" | cut -d'|' -f1)
REG_PATH=$(echo "$selected_game" | cut -d'|' -f2)
VALUE_NAME=$(echo "$selected_game" | cut -d'|' -f3)
echo ""
echo "Selected: $GAME_NAME"
echo ""
# Ask for game installation path (with retry loop)
while true; do
echo "Where is $GAME_NAME installed?"
echo "Enter the LINUX path (e.g., /home/user/.steam/steam/steamapps/common/Skyrim Special Edition)"
echo ""
read -r -p "Game path: " GAME_PATH_LINUX
# Validate path exists
if [ -d "$GAME_PATH_LINUX" ]; then
break
else
echo ""
echo "WARNING: Directory does not exist: $GAME_PATH_LINUX"
read -r -p "Try again? (y/n): " retry_choice
if [ "$retry_choice" != "y" ] && [ "$retry_choice" != "Y" ]; then
echo "Cancelled."
exit 1
fi
echo ""
fi
done
# Convert Linux path to Wine path (Z:\...)
# Replace / with \
WINE_PATH="Z:${GAME_PATH_LINUX//\//\\}"
echo ""
echo "=================================================="
echo "Registry Fix Details"
echo "=================================================="
echo "Game: $GAME_NAME"
echo "Linux Path: $GAME_PATH_LINUX"
echo "Wine Path: $WINE_PATH"
echo "Registry Key: HKLM\\${REG_PATH}"
echo "Value Name: $VALUE_NAME"
echo "=================================================="
echo ""
read -p "Apply this registry fix? (y/n): " confirm
if [ "$confirm" != "y" ] && [ "$confirm" != "Y" ]; then
echo "Cancelled."
exit 1
fi
echo ""
echo "Applying registry fix..."
echo ""
# Function to set registry value
set_registry() {
local reg_key="$1"
local reg_flag="$2"
echo "Setting: $reg_key ($reg_flag)"
run_proton reg add "HKLM\\$reg_key" /v "$VALUE_NAME" /d "$WINE_PATH" /f $reg_flag
if [ $? -eq 0 ]; then
echo " ✓ Success"
return 0
else
echo " ✗ Failed"
return 1
fi
}
# Apply registry fix to both 32-bit and 64-bit views
success_count=0
# 32-bit registry view
set_registry "$REG_PATH" "/reg:32"
[ $? -eq 0 ] && ((success_count++))
# 64-bit registry view (Wow6432Node)
# Use ! as sed delimiter to handle backslashes safely
WOW64_PATH=$(echo "$REG_PATH" | sed 's!^Software\\!SOFTWARE\\Wow6432Node\\!')
set_registry "$WOW64_PATH" "/reg:64"
[ $? -eq 0 ] && ((success_count++))
echo ""
echo "=================================================="
if [ $success_count -eq 2 ]; then
echo "✓ Registry fix applied successfully!"
echo ""
echo "The game installation path has been set in the registry."
elif [ $success_count -gt 0 ]; then
echo "⚠ Registry fix partially applied ($success_count/2 succeeded)"
else
echo "✗ Registry fix failed"
fi
echo "=================================================="
# Offer to verify the registry
echo ""
read -p "Would you like to verify the registry values? (y/n): " verify_choice
if [ "$verify_choice" == "y" ] || [ "$verify_choice" == "Y" ]; then
echo ""
echo "Verifying registry..."
echo ""
# 32-bit check
run_proton reg query "HKLM\\${REG_PATH}" /v "$VALUE_NAME" /reg:32 > /dev/null 2>&1
if [ $? -eq 0 ]; then
echo " ✓ 32-bit Key: FOUND"
else
echo " ✗ 32-bit Key: NOT FOUND"
fi
# 64-bit check
run_proton reg query "HKLM\\${WOW64_PATH}" /v "$VALUE_NAME" /reg:64 > /dev/null 2>&1
if [ $? -eq 0 ]; then
echo " ✓ 64-bit Key: FOUND"
else
echo " ✗ 64-bit Key: NOT FOUND"
fi
fi
echo ""
echo "Done!"
"#;
// ============================================================================
// Fix Symlinks After Move
// ============================================================================
/// Fix all symlinks after NaK data folder is moved to a new location
/// This updates:
/// - manager_link in each prefix (points to mod manager install dir)
/// - Convenience symlinks in mod manager folders (Launch MO2, Kill Prefix, etc.)
/// - active_nxm_game global symlink
///
/// Returns the number of prefixes whose links were repaired.
///
/// # Errors
/// Only propagates errors from listing the prefixes directory; per-prefix
/// failures are logged and skipped so one broken prefix doesn't abort the rest.
pub fn fix_symlinks_after_move() -> Result<usize, Box<dyn Error>> {
    use crate::logging::{log_error, log_info, log_warning};
    let config = AppConfig::load();
    let prefixes_dir = config.get_prefixes_path();
    let data_path = config.get_data_path();
    // Nothing to fix if no prefixes were ever created.
    if !prefixes_dir.exists() {
        return Ok(0);
    }
    let mut fixed_count = 0;
    for entry in fs::read_dir(&prefixes_dir)? {
        let entry = entry?;
        let prefix_dir = entry.path();
        if !prefix_dir.is_dir() {
            continue;
        }
        let prefix_name = prefix_dir
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("unknown");
        let pfx_path = prefix_dir.join("pfx");
        if !pfx_path.exists() {
            log_warning(&format!("Skipping {}: no pfx directory found", prefix_name));
            continue;
        }
        // Try to find install directory - first try manager_link (for external installs), then search
        let manager_link = prefix_dir.join("manager_link");
        // symlink_metadata() succeeds even for dangling symlinks, so broken
        // links are still detected here where exists() would return false.
        let install_dir = if manager_link.exists() || fs::symlink_metadata(&manager_link).is_ok() {
            // Try to read the link - if target exists (even external), use it
            match fs::read_link(&manager_link) {
                Ok(path) if path.exists() => Some(path),
                _ => {
                    // Link is broken, search for install dir inside prefix
                    find_mod_manager_install_dir(&pfx_path)
                }
            }
        } else {
            // No manager_link, search for install dir inside prefix
            find_mod_manager_install_dir(&pfx_path)
        };
        if let Some(install_dir) = install_dir {
            // Which manager lives here decides the convenience-link names below.
            let is_mo2 = install_dir.join("ModOrganizer.exe").exists();
            let is_vortex = install_dir.join("Vortex.exe").exists();
            // Recreate manager_link pointing at the (possibly moved) install dir.
            if manager_link.exists() || fs::symlink_metadata(&manager_link).is_ok() {
                let _ = fs::remove_file(&manager_link);
            }
            if let Err(e) = std::os::unix::fs::symlink(&install_dir, &manager_link) {
                log_error(&format!(
                    "Failed to update manager_link for {}: {}",
                    prefix_name, e
                ));
                continue;
            }
            // Update convenience symlinks in the mod manager folder
            let script_path = prefix_dir.join("start.sh");
            let kill_script = prefix_dir.join(".kill_prefix.sh");
            let reg_script = prefix_dir.join("game_registry_fix.sh");
            let nxm_script = prefix_dir.join("nxm_handler.sh");
            // Helper to create symlink with error logging
            let create_link = |target: &Path, link_name: &str| {
                let link_path = install_dir.join(link_name);
                // Remove existing (broken or not)
                if fs::symlink_metadata(&link_path).is_ok() {
                    if let Err(e) = fs::remove_file(&link_path) {
                        log_warning(&format!(
                            "Failed to remove old symlink {}: {}", link_path.display(), e
                        ));
                        return;
                    }
                }
                // Create new symlink
                if let Err(e) = std::os::unix::fs::symlink(target, &link_path) {
                    log_warning(&format!(
                        "Failed to create symlink {} -> {}: {}",
                        link_path.display(), target.display(), e
                    ));
                }
            };
            if is_mo2 {
                create_link(&script_path, "Launch MO2");
                create_link(&kill_script, "Kill MO2 Prefix");
                // NOTE(review): the checked-in text here was mojibake
                // ("®_script"); restored to `&reg_script`.
                create_link(&reg_script, "Fix Game Registry");
                if nxm_script.exists() {
                    create_link(&nxm_script, "Handle NXM");
                }
            } else if is_vortex {
                create_link(&script_path, "Launch Vortex");
                create_link(&kill_script, "Kill Vortex Prefix");
                create_link(&reg_script, "Fix Game Registry");
                if nxm_script.exists() {
                    create_link(&nxm_script, "Handle NXM");
                }
            }
            log_info(&format!("Fixed symlinks for: {}", prefix_name));
            fixed_count += 1;
        } else {
            log_warning(&format!(
                "Skipping {}: couldn't find mod manager install directory",
                prefix_name
            ));
        }
    }
    // Fix active_nxm_game symlink if it exists and is broken
    let active_nxm = data_path.join("active_nxm_game");
    if fs::symlink_metadata(&active_nxm).is_ok() {
        // Read the old target
        if let Ok(old_target) = fs::read_link(&active_nxm) {
            // Extract just the prefix name from the old path
            if let Some(prefix_name) = old_target.file_name() {
                let new_target = prefixes_dir.join(prefix_name);
                if new_target.exists() {
                    let _ = fs::remove_file(&active_nxm);
                    if let Err(e) = std::os::unix::fs::symlink(&new_target, &active_nxm) {
                        log_error(&format!("Failed to update active_nxm_game: {}", e));
                    } else {
                        log_info("Updated active_nxm_game symlink");
                    }
                }
            }
        }
    }
    log_info(&format!(
        "Symlink fix complete: {} prefix(es) updated",
        fixed_count
    ));
    Ok(fixed_count)
}
/// Locate the mod manager installation directory inside a Wine prefix.
///
/// Probes a fixed list of well-known folders under the prefix (most specific
/// first) and returns the first directory, searched up to 3 levels deep, that
/// directly contains `ModOrganizer.exe` or `Vortex.exe`.
fn find_mod_manager_install_dir(pfx_path: &Path) -> Option<PathBuf> {
    // Candidate roots relative to the prefix, in priority order.
    const CANDIDATES: [&str; 8] = [
        "drive_c/modding tools",
        "drive_c/Modding Tools",
        "drive_c/MO2",
        "drive_c/Mod Organizer 2",
        "drive_c/Vortex",
        "drive_c/Program Files",
        "drive_c/Program Files (x86)",
        "drive_c/users",
    ];
    CANDIDATES
        .iter()
        .map(|rel| pfx_path.join(rel))
        .filter(|root| root.exists())
        .find_map(|root| {
            // MO2 takes precedence over Vortex within each candidate root,
            // matching the original lookup order.
            search_for_exe(&root, "ModOrganizer.exe", 3)
                .or_else(|| search_for_exe(&root, "Vortex.exe", 3))
        })
}
/// Recursively look for `exe_name` under `dir`, descending at most
/// `max_depth` directory levels (a depth of 0 matches nothing).
///
/// Returns the directory that directly contains the executable — not the
/// path of the executable itself.
fn search_for_exe(dir: &Path, exe_name: &str, max_depth: usize) -> Option<PathBuf> {
    if max_depth == 0 {
        return None;
    }
    // Hit: the file lives directly in this directory.
    if dir.join(exe_name).exists() {
        return Some(dir.to_path_buf());
    }
    // Otherwise descend one level into every readable subdirectory; an
    // unreadable directory simply yields no match.
    fs::read_dir(dir)
        .ok()?
        .flatten()
        .map(|entry| entry.path())
        .filter(|path| path.is_dir())
        .find_map(|path| search_for_exe(&path, exe_name, max_depth - 1))
}
// ============================================================================
// Regenerate All Scripts
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | true |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/utils.rs | src/utils.rs | //! Shared utility functions used across the application
use std::error::Error;
use std::fs;
use std::path::Path;
use crate::logging::{log_info, log_warning};
/// Detect the Steam installation path.
///
/// Probes native, Flatpak, and Snap install locations in that order and
/// returns `None` (after logging a warning) when no Steam directory exists.
#[must_use]
pub fn detect_steam_path_checked() -> Option<String> {
    let home = std::env::var("HOME").unwrap_or_default();
    // Native installs take precedence over sandboxed ones.
    for path in [
        format!("{}/.steam/steam", home),
        format!("{}/.local/share/Steam", home),
    ] {
        if Path::new(&path).exists() {
            log_info(&format!("Steam detected at: {}", path));
            return Some(path);
        }
    }
    // Flatpak install location.
    let flatpak_path = format!("{}/.var/app/com.valvesoftware.Steam/.steam/steam", home);
    if Path::new(&flatpak_path).exists() {
        log_info(&format!("Steam detected (Flatpak) at: {}", flatpak_path));
        return Some(flatpak_path);
    }
    // Snap install location.
    let snap_path = format!("{}/snap/steam/common/.steam/steam", home);
    if Path::new(&snap_path).exists() {
        log_info(&format!("Steam detected (Snap) at: {}", snap_path));
        return Some(snap_path);
    }
    log_warning("Steam installation not detected! NaK requires Steam to be installed.");
    None
}
/// Detect the Steam installation path (always returns a path).
///
/// Falls back to the conventional `~/.steam/steam` location when
/// [`detect_steam_path_checked`] finds nothing.
#[must_use]
pub fn detect_steam_path() -> String {
    match detect_steam_path_checked() {
        Some(path) => path,
        None => {
            let home = std::env::var("HOME").unwrap_or_default();
            format!("{}/.steam/steam", home)
        }
    }
}
/// Download `url` to `path`, creating any missing parent directories first.
///
/// # Errors
/// Propagates HTTP errors from `ureq` (which treats 4xx/5xx as `Err`) and
/// any filesystem errors while creating or writing the destination file.
pub fn download_file(url: &str, path: &Path) -> Result<(), Box<dyn Error>> {
    // Make sure the destination directory exists before creating the file.
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Stream the response body straight into the file.
    let mut body = ureq::get(url).call()?.into_reader();
    let mut out = fs::File::create(path)?;
    std::io::copy(&mut body, &mut out)?;
    Ok(())
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/main.rs | src/main.rs | //! NaK - Linux Mod Manager Tool
//!
//! A tool to help manage modding tools (MO2, Vortex) on Linux via Proton/Wine.
use eframe::egui;
mod app;
mod config;
mod games;
mod installers;
mod logging;
mod nxm;
mod scripts;
mod ui;
mod updater;
mod utils;
mod wine;
use app::MyApp;
use logging::{init_logger, log_info};
/// Application entry point: set up logging, then hand control to eframe.
fn main() -> eframe::Result<()> {
    // Initialize NaK logging system (writes to ~/NaK/logs/)
    init_logger();
    log_info("NaK starting up...");
    // Fixed-minimum 720x720 window.
    let viewport = egui::ViewportBuilder::default()
        .with_inner_size([720.0, 720.0])
        .with_min_inner_size([720.0, 720.0])
        .with_title("NaK");
    let options = eframe::NativeOptions {
        viewport,
        ..Default::default()
    };
    eframe::run_native(
        "NaK",
        options,
        Box::new(|cc| {
            // Enable egui's image loaders before the app draws anything.
            egui_extras::install_image_loaders(&cc.egui_ctx);
            Ok(Box::new(MyApp::default()))
        }),
    )
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/logging.rs | src/logging.rs | //! NaK Logging System
//!
//! Provides structured logging with system information header
use chrono::Local;
use std::fs::{self, File, OpenOptions};
use std::io::{BufRead, BufReader, Write};
use std::process::Command;
use std::sync::{Arc, Mutex, OnceLock};
use crate::config::AppConfig;
static LOGGER: OnceLock<Arc<Mutex<NakLogger>>> = OnceLock::new();
// ============================================================================
// System Information Detection
// ============================================================================
/// Snapshot of host details written as the header of every log file.
/// All fields are pre-formatted display strings (detection failures are
/// recorded as "Unknown" or an empty string rather than an error).
#[derive(Debug, Clone)]
pub struct SystemInfo {
    pub app_version: String,    // NaK version baked in at compile time
    pub distro: String,         // distribution name (os-release / lsb_release)
    pub distro_version: String, // distribution version, may be empty
    pub kernel: String,         // `uname -r` output
    pub session_type: String,   // XDG_SESSION_TYPE (x11/wayland/...)
    pub desktop_env: String,    // desktop environment name
    pub cpu: String,            // CPU model name from /proc/cpuinfo
    pub memory_gb: String,      // total RAM, formatted like "15.5 GB"
    pub gpu: String,            // GPU device name from lspci
    pub glibc_version: String,  // glibc version from `ldd --version`
    pub disk_space_free: String, // free space on $HOME's filesystem
}
impl SystemInfo {
    /// Probe the host once and collect all fields; each detector is
    /// best-effort and falls back to a placeholder string on failure.
    pub fn detect() -> Self {
        Self {
            app_version: env!("CARGO_PKG_VERSION").to_string(),
            distro: detect_distro(),
            distro_version: detect_distro_version(),
            kernel: detect_kernel(),
            session_type: detect_session_type(),
            desktop_env: detect_desktop_env(),
            cpu: detect_cpu(),
            memory_gb: detect_memory(),
            gpu: detect_gpu(),
            glibc_version: detect_glibc(),
            disk_space_free: detect_disk_space(),
        }
    }

    /// Render the banner written at the top of each log file. The template
    /// is a verbatim runtime string — keep its layout unchanged.
    pub fn to_log_header(&self) -> String {
        format!(
            r#"================================================================================
NaK Log - {}
================================================================================
Application: NaK v{}
System Info:
Distro: {} {}
Kernel: {}
Session: {}
Desktop: {}
CPU: {}
Memory: {}
GPU: {}
GLIBC: {}
Disk Free: {}
================================================================================
"#,
            Local::now().format("%Y-%m-%d %H:%M:%S"),
            self.app_version,
            self.distro,
            self.distro_version,
            self.kernel,
            self.session_type,
            self.desktop_env,
            self.cpu,
            self.memory_gb,
            self.gpu,
            self.glibc_version,
            self.disk_space_free
        )
    }
}
/// Session type (`x11`, `wayland`, ...) from `XDG_SESSION_TYPE`,
/// or "Unknown" when the variable is unset.
fn detect_session_type() -> String {
    match std::env::var("XDG_SESSION_TYPE") {
        Ok(session) => session,
        Err(_) => "Unknown".to_string(),
    }
}
/// glibc version parsed from the first line of `ldd --version`
/// (e.g. "ldd (GNU libc) 2.35" -> "2.35"), or "Unknown" on any failure.
fn detect_glibc() -> String {
    let output = match Command::new("ldd").arg("--version").output() {
        Ok(out) if out.status.success() => out,
        _ => return "Unknown".to_string(),
    };
    let text = String::from_utf8_lossy(&output.stdout);
    match text.lines().next() {
        // Take whatever follows the last ')', which holds the version number.
        Some(first_line) => first_line
            .split(')')
            .next_back()
            .unwrap_or("Unknown")
            .trim()
            .to_string(),
        None => "Unknown".to_string(),
    }
}
/// Free disk space on the filesystem holding `$HOME`, formatted as
/// "<avail> (on <mount point>)", or "Unknown" when `df` output can't be
/// parsed.
fn detect_disk_space() -> String {
    // Check space on HOME
    let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
    if let Ok(output) = Command::new("df").arg("-h").arg(&home).output() {
        if output.status.success() {
            // `df -h PATH` prints a header plus one data row with six
            // columns: Filesystem, Size, Used, Avail, Use%, Mounted on.
            let out = String::from_utf8_lossy(&output.stdout);
            if let Some(line) = out.lines().nth(1) {
                let parts: Vec<&str> = line.split_whitespace().collect();
                // BUG FIX: the old guard checked `parts.len() >= 4` but then
                // indexed parts[5], which panics when df wraps a long device
                // name and the second line carries fewer than six fields.
                // Require all six columns before indexing.
                if parts.len() >= 6 {
                    return format!("{} (on {})", parts[3], parts[5]); // Avail + Mount point
                }
            }
        }
    }
    "Unknown".to_string()
}
/// Distribution name: `NAME=` from /etc/os-release, falling back to
/// `lsb_release -is`, else "Unknown".
fn detect_distro() -> String {
    // Primary source: the freedesktop os-release file.
    if let Ok(file) = File::open("/etc/os-release") {
        for line in BufReader::new(file).lines().map_while(Result::ok) {
            if line.starts_with("NAME=") {
                return line
                    .trim_start_matches("NAME=")
                    .trim_matches('"')
                    .to_string();
            }
        }
    }
    // Fallback to lsb_release
    if let Ok(output) = Command::new("lsb_release").arg("-is").output() {
        if output.status.success() {
            return String::from_utf8_lossy(&output.stdout).trim().to_string();
        }
    }
    "Unknown".to_string()
}
/// Distribution version: `VERSION_ID=` from /etc/os-release, falling back
/// to `lsb_release -rs`. Returns an empty string (not "Unknown") when both
/// sources fail — rolling-release distros often have no version at all.
fn detect_distro_version() -> String {
    // Primary source: the freedesktop os-release file.
    if let Ok(file) = File::open("/etc/os-release") {
        for line in BufReader::new(file).lines().map_while(Result::ok) {
            if line.starts_with("VERSION_ID=") {
                return line
                    .trim_start_matches("VERSION_ID=")
                    .trim_matches('"')
                    .to_string();
            }
        }
    }
    // Fallback to lsb_release
    if let Ok(output) = Command::new("lsb_release").arg("-rs").output() {
        if output.status.success() {
            return String::from_utf8_lossy(&output.stdout).trim().to_string();
        }
    }
    String::new()
}
/// Kernel release string from `uname -r`, or "Unknown" on failure.
fn detect_kernel() -> String {
    Command::new("uname")
        .arg("-r")
        .output()
        .ok()
        .filter(|out| out.status.success())
        .map(|out| String::from_utf8_lossy(&out.stdout).trim().to_string())
        .unwrap_or_else(|| "Unknown".to_string())
}
/// Desktop environment name from standard XDG variables, with KDE/GNOME
/// marker variables as a fallback; "Unknown" when nothing matches.
fn detect_desktop_env() -> String {
    // Generic variables that commonly carry the DE name, in priority order.
    for var in ["XDG_CURRENT_DESKTOP", "DESKTOP_SESSION", "XDG_SESSION_DESKTOP"] {
        if let Ok(de) = std::env::var(var) {
            return de;
        }
    }
    // DE-specific markers, checked only when the generic vars are absent.
    if std::env::var("KDE_FULL_SESSION").is_ok() {
        return "KDE".to_string();
    }
    if std::env::var("GNOME_DESKTOP_SESSION_ID").is_ok() {
        return "GNOME".to_string();
    }
    "Unknown".to_string()
}
/// CPU model name from the first "model name" row of /proc/cpuinfo,
/// or "Unknown" when the file can't be read or parsed.
fn detect_cpu() -> String {
    let Ok(file) = File::open("/proc/cpuinfo") else {
        return "Unknown".to_string();
    };
    BufReader::new(file)
        .lines()
        .map_while(Result::ok)
        .filter(|line| line.starts_with("model name"))
        // Row format: "model name : <name>" — take the text after the colon.
        .find_map(|line| line.split(':').nth(1).map(|name| name.trim().to_string()))
        .unwrap_or_else(|| "Unknown".to_string())
}
/// Total RAM from the "MemTotal:" row of /proc/meminfo, formatted like
/// "15.5 GB"; "Unknown" when the file or value is unavailable.
fn detect_memory() -> String {
    let Ok(file) = File::open("/proc/meminfo") else {
        return "Unknown".to_string();
    };
    for line in BufReader::new(file).lines().map_while(Result::ok) {
        if !line.starts_with("MemTotal:") {
            continue;
        }
        // Row format: "MemTotal:  16315380 kB" — the second field is in kB.
        let kb = line
            .split_whitespace()
            .nth(1)
            .and_then(|field| field.parse::<u64>().ok());
        if let Some(kb) = kb {
            return format!("{:.1} GB", kb as f64 / 1024.0 / 1024.0);
        }
    }
    "Unknown".to_string()
}
/// GPU device name from the first VGA/3D/Display line of `lspci`, shortened
/// to roughly 60 bytes; "Unknown" when lspci is unavailable or finds nothing.
fn detect_gpu() -> String {
    // Try lspci for GPU info
    if let Ok(output) = Command::new("lspci").output() {
        if output.status.success() {
            let output_str = String::from_utf8_lossy(&output.stdout);
            for line in output_str.lines() {
                if line.contains("VGA") || line.contains("3D") || line.contains("Display") {
                    // The device name is the text after the last colon.
                    if let Some(device) = line.split(':').next_back() {
                        let gpu = device.trim();
                        // Shorten if too long. BUG FIX: the old `&gpu[..57]`
                        // panics when byte 57 falls inside a multi-byte UTF-8
                        // character; back the cut up to a char boundary.
                        if gpu.len() > 60 {
                            let mut cut = 57;
                            while cut > 0 && !gpu.is_char_boundary(cut) {
                                cut -= 1;
                            }
                            return format!("{}...", &gpu[..cut]);
                        }
                        return gpu.to_string();
                    }
                }
            }
        }
    }
    "Unknown".to_string()
}
// ============================================================================
// Log Levels
// ============================================================================
/// Severity/category tag attached to every log line.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum LogLevel {
    Info,
    Action, // User actions (button clicks, etc.)
    Download,
    Install,
    Warning,
    Error,
}

impl LogLevel {
    /// Bracketed tag written in front of each message, e.g. `[INFO]`.
    pub fn prefix(&self) -> &'static str {
        match self {
            Self::Info => "[INFO]",
            Self::Action => "[ACTION]",
            Self::Download => "[DOWNLOAD]",
            Self::Install => "[INSTALL]",
            Self::Warning => "[WARNING]",
            Self::Error => "[ERROR]",
        }
    }
}
// ============================================================================
// NaK Logger
// ============================================================================
/// File-backed logger that also mirrors every line to stdout.
pub struct NakLogger {
    // None when the log file could not be created; logging then goes to
    // stdout only (see write_raw).
    log_file: Option<File>,
}
impl NakLogger {
pub fn new() -> Self {
let config = AppConfig::load();
let log_dir = config.get_data_path().join("logs");
let _ = fs::create_dir_all(&log_dir);
let timestamp = Local::now().format("%Y%m%d_%H%M%S");
let log_path = log_dir.join(format!("nak_{}.log", timestamp));
let log_file = OpenOptions::new()
.create(true)
.append(true)
.open(&log_path)
.ok();
let mut logger = Self { log_file };
// Write system info header
let sys_info = SystemInfo::detect();
let header = sys_info.to_log_header();
logger.write_raw(&header);
logger
}
fn write_raw(&mut self, msg: &str) {
// Write to file
if let Some(ref mut file) = self.log_file {
let _ = writeln!(file, "{}", msg);
let _ = file.flush();
}
// Also print to console
println!("{}", msg);
}
pub fn log(&mut self, level: LogLevel, message: &str) {
let timestamp = Local::now().format("%H:%M:%S");
let formatted = format!("[{}] {} {}", timestamp, level.prefix(), message);
self.write_raw(&formatted);
}
}
// ============================================================================
// Global Logger Access
// ============================================================================
/// Initialize the global logger (call once at startup)
///
/// Safe to call more than once: only the first call constructs the
/// `NakLogger` (and writes the system-info banner); later calls are no-ops.
pub fn init_logger() {
    LOGGER.get_or_init(|| Arc::new(Mutex::new(NakLogger::new())));
}
/// Get the global logger instance, lazily constructing it if
/// `init_logger` was never called.
fn logger() -> Arc<Mutex<NakLogger>> {
    let shared = LOGGER.get_or_init(|| Arc::new(Mutex::new(NakLogger::new())));
    // Cheap refcount bump — make the clone intent explicit.
    Arc::clone(shared)
}
// ============================================================================
// Convenience Logging Functions
// ============================================================================
pub fn log_info(message: &str) {
if let Ok(mut log) = logger().lock() {
log.log(LogLevel::Info, message);
}
}
pub fn log_action(message: &str) {
if let Ok(mut log) = logger().lock() {
log.log(LogLevel::Action, message);
}
}
pub fn log_download(message: &str) {
if let Ok(mut log) = logger().lock() {
log.log(LogLevel::Download, message);
}
}
pub fn log_install(message: &str) {
if let Ok(mut log) = logger().lock() {
log.log(LogLevel::Install, message);
}
}
pub fn log_warning(message: &str) {
if let Ok(mut log) = logger().lock() {
log.log(LogLevel::Warning, message);
}
}
pub fn log_error(message: &str) {
if let Ok(mut log) = logger().lock() {
log.log(LogLevel::Error, message);
}
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/updater.rs | src/updater.rs | //! In-place updater for NaK
//!
//! Checks GitHub releases and updates the application binary.
use std::error::Error;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use crate::logging::{log_download, log_error, log_info};
const GITHUB_REPO: &str = "SulfurNitride/NaK";
const CURRENT_VERSION: &str = env!("CARGO_PKG_VERSION");
/// Result of an update check against GitHub releases.
#[derive(Debug, Clone)]
pub struct UpdateInfo {
    pub current_version: String,      // version this binary was built as
    pub latest_version: String,       // newest release tag, 'v' prefix stripped
    pub download_url: Option<String>, // Linux asset URL, if one was matched
    pub release_notes: String,        // release body text (may be empty)
    pub is_update_available: bool,    // true when latest > current
}
// Minimal projections of the GitHub "latest release" JSON payload —
// only the fields the updater actually reads.
#[derive(serde::Deserialize)]
struct GitHubRelease {
    tag_name: String,     // e.g. "v4.2.0"
    body: Option<String>, // release notes (markdown), absent for bare tags
    assets: Vec<GitHubAsset>,
}
#[derive(serde::Deserialize)]
struct GitHubAsset {
    name: String,
    browser_download_url: String,
}
/// Check GitHub for the latest release
///
/// Queries the repository's `releases/latest` endpoint, compares the tag
/// against the compiled-in version, and picks the asset that looks like a
/// Linux binary.
///
/// # Errors
/// Propagates HTTP and JSON-decoding errors from `ureq`.
pub fn check_for_updates() -> Result<UpdateInfo, Box<dyn Error>> {
    let url = format!("https://api.github.com/repos/{}/releases/latest", GITHUB_REPO);
    let release: GitHubRelease = ureq::get(&url)
        .set("User-Agent", "NaK-Updater")
        .call()?
        .into_json()?;
    // GitHub tags are usually "vX.Y.Z"; strip the prefix for comparison.
    let latest_version = release.tag_name.trim_start_matches('v').to_string();
    let is_update_available = is_newer_version(&latest_version, CURRENT_VERSION);
    // Pick the first asset that looks like the Linux binary.
    let download_url = release
        .assets
        .iter()
        .find(|asset| {
            let name = asset.name.to_lowercase();
            // Look for Linux binary - adjust pattern based on your release naming
            name.contains("linux") || name == "nak" || name == "nak_rust"
        })
        .map(|asset| asset.browser_download_url.clone());
    Ok(UpdateInfo {
        current_version: CURRENT_VERSION.to_string(),
        latest_version,
        download_url,
        release_notes: release.body.unwrap_or_default(),
        is_update_available,
    })
}
/// Compare dotted version strings (simple semver-style comparison).
///
/// Returns true iff `latest` is strictly newer than `current`. Components
/// are compared numerically left to right; missing components count as 0
/// and non-numeric components are skipped.
fn is_newer_version(latest: &str, current: &str) -> bool {
    let nums = |v: &str| -> Vec<u32> {
        v.split('.')
            .filter_map(|part| part.parse::<u32>().ok())
            .collect()
    };
    let (lhs, rhs) = (nums(latest), nums(current));
    let width = lhs.len().max(rhs.len());
    for i in 0..width {
        let l = lhs.get(i).copied().unwrap_or(0);
        let c = rhs.get(i).copied().unwrap_or(0);
        match l.cmp(&c) {
            std::cmp::Ordering::Greater => return true,
            std::cmp::Ordering::Less => return false,
            std::cmp::Ordering::Equal => {}
        }
    }
    // All compared components equal: not newer.
    false
}
/// Download and install the update
///
/// Flow: download -> (optionally extract) -> chmod +x -> swap binaries.
/// The running executable is renamed to a backup first so a failed swap can
/// be rolled back; temp artifacts live next to the executable so the final
/// rename never crosses filesystems.
///
/// # Errors
/// Propagates download, extraction, and filesystem errors; on a failed final
/// rename the original binary is restored before returning the error.
pub fn install_update(download_url: &str) -> Result<(), Box<dyn Error>> {
    log_info(&format!("Downloading update from: {}", download_url));
    // Get the current executable path
    let current_exe = std::env::current_exe()?;
    let exe_dir = current_exe.parent()
        .ok_or("Failed to get executable directory")?;
    // Create temp paths
    let temp_download = exe_dir.join(".nak_update_download");
    let temp_extract = exe_dir.join(".nak_update_extract");
    let backup_path = exe_dir.join(".nak_backup");
    // Clean up any previous failed update attempts
    let _ = fs::remove_file(&temp_download);
    let _ = fs::remove_dir_all(&temp_extract);
    // Download the update
    log_download("Downloading NaK update...");
    let response = ureq::get(download_url)
        .set("User-Agent", "NaK-Updater")
        .call()?;
    let mut file = File::create(&temp_download)?;
    let mut reader = response.into_reader();
    std::io::copy(&mut reader, &mut file)?;
    file.flush()?;
    drop(file);
    log_info("Download complete, extracting...");
    // Determine if it's an archive and extract if needed
    let new_binary_path = if download_url.ends_with(".zip") {
        extract_zip(&temp_download, &temp_extract)?
    } else if download_url.ends_with(".tar.gz") || download_url.ends_with(".tgz") {
        extract_tar_gz(&temp_download, &temp_extract)?
    } else {
        // Assume it's a raw binary
        temp_download.clone()
    };
    // Make the new binary executable
    let mut perms = fs::metadata(&new_binary_path)?.permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&new_binary_path, perms)?;
    // Backup current executable
    if backup_path.exists() {
        fs::remove_file(&backup_path)?;
    }
    // FIX: the checked-in text was corrupted to "¤t_exe" (mojibake for
    // "&current_exe"); restored so the backup/swap/rollback compiles.
    fs::rename(&current_exe, &backup_path)?;
    // Move new binary into place
    if let Err(e) = fs::rename(&new_binary_path, &current_exe) {
        // Restore backup on failure
        log_error(&format!("Failed to install update: {}", e));
        let _ = fs::rename(&backup_path, &current_exe);
        return Err(e.into());
    }
    // Clean up
    let _ = fs::remove_file(&backup_path);
    let _ = fs::remove_file(&temp_download);
    let _ = fs::remove_dir_all(&temp_extract);
    log_info("Update installed successfully!");
    Ok(())
}
/// Extract a zip archive and return the path to the binary inside
///
/// # Errors
/// Propagates filesystem errors and archive-format errors from the `zip`
/// crate, or an error when no binary is found after extraction.
fn extract_zip(zip_path: &Path, extract_dir: &Path) -> Result<std::path::PathBuf, Box<dyn Error>> {
    fs::create_dir_all(extract_dir)?;
    let file = File::open(zip_path)?;
    let mut archive = zip::ZipArchive::new(file)?;
    // Extract all files
    // NOTE(review): `mangled_name()` sanitizes entry paths, which mitigates
    // (but, per the zip crate's docs, does not fully guarantee against)
    // zip-slip traversal — acceptable here since archives come from the
    // project's own releases; confirm if the source of archives changes.
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let outpath = extract_dir.join(file.mangled_name());
        if file.name().ends_with('/') {
            // Directory entry: just create it.
            fs::create_dir_all(&outpath)?;
        } else {
            // Ensure parent directories exist before writing the file.
            if let Some(p) = outpath.parent() {
                if !p.exists() {
                    fs::create_dir_all(p)?;
                }
            }
            let mut outfile = File::create(&outpath)?;
            std::io::copy(&mut file, &mut outfile)?;
        }
    }
    // Find the binary (look for nak_rust or nak)
    find_binary_in_dir(extract_dir)
}
/// Extract a tar.gz archive and return the path to the binary inside
///
/// # Errors
/// Propagates filesystem and decompression errors from `flate2`/`tar`,
/// or an error when no binary is found after extraction.
fn extract_tar_gz(tar_path: &Path, extract_dir: &Path) -> Result<std::path::PathBuf, Box<dyn Error>> {
    fs::create_dir_all(extract_dir)?;
    let file = File::open(tar_path)?;
    // Gzip-decompress on the fly while untarring.
    let decoder = flate2::read::GzDecoder::new(file);
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(extract_dir)?;
    // Find the binary
    find_binary_in_dir(extract_dir)
}
/// Find the NaK binary in an extracted directory
///
/// Walks the tree looking first for one of the known binary names, then for
/// any extensionless file whose first four bytes are the ELF magic number.
/// Returns the path of the binary itself (unlike `search_for_exe`, which
/// returns the containing directory).
fn find_binary_in_dir(dir: &Path) -> Result<std::path::PathBuf, Box<dyn Error>> {
    // Look for common binary names
    let binary_names = ["nak_rust", "nak", "NaK"];
    for entry in walkdir::WalkDir::new(dir).into_iter().filter_map(|e| e.ok()) {
        let path = entry.path();
        if path.is_file() {
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                // Check exact matches first
                if binary_names.contains(&name) {
                    return Ok(path.to_path_buf());
                }
                // Check if it's an executable (no extension, not a known non-binary)
                if !name.contains('.') && !name.starts_with('.') {
                    // Verify it's actually executable by checking for ELF header
                    // (0x7F 'E' 'L' 'F').
                    if let Ok(mut f) = File::open(path) {
                        let mut magic = [0u8; 4];
                        if f.read_exact(&mut magic).is_ok() && magic == [0x7f, b'E', b'L', b'F'] {
                            return Ok(path.to_path_buf());
                        }
                    }
                }
            }
        }
    }
    Err("Could not find binary in update archive".into())
}
/// Check if the current executable is in a writable location
pub fn can_self_update() -> bool {
    // Probe writability by creating and removing a throwaway file next to the
    // current executable; any failure means we cannot replace ourselves.
    std::env::current_exe()
        .ok()
        .and_then(|exe| exe.parent().map(|p| p.to_path_buf()))
        .map(|dir| {
            let probe = dir.join(".nak_write_test");
            let writable = fs::write(&probe, "test").is_ok();
            if writable {
                let _ = fs::remove_file(&probe);
            }
            writable
        })
        .unwrap_or(false)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sanity checks for semantic-version comparison: a strictly newer
    // major/minor/patch must be detected, while equal or older versions
    // must not be considered "newer".
    #[test]
    fn test_version_comparison() {
        assert!(is_newer_version("4.2.0", "4.1.2"));
        assert!(is_newer_version("4.1.3", "4.1.2"));
        assert!(is_newer_version("5.0.0", "4.9.9"));
        assert!(!is_newer_version("4.1.2", "4.1.2"));
        assert!(!is_newer_version("4.1.1", "4.1.2"));
        assert!(!is_newer_version("3.9.9", "4.0.0"));
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/installers/mo2.rs | src/installers/mo2.rs | //! Mod Organizer 2 installation
use std::error::Error;
use std::fs;
use std::path::PathBuf;
use super::{
brief_launch_and_kill, create_game_folders, fetch_latest_mo2_release, get_install_proton,
install_all_dependencies, TaskContext,
};
use crate::config::AppConfig;
use crate::logging::{log_download, log_error, log_install};
use crate::scripts::ScriptGenerator;
use crate::utils::download_file;
use crate::wine::{PrefixManager, ProtonInfo};
/// Install a fresh Mod Organizer 2 instance.
///
/// Creates a new Wine prefix, downloads and extracts the latest MO2 release,
/// installs all dependencies, briefly launches MO2 to initialize the prefix,
/// and generates launch/kill/registry-fix scripts plus convenience symlinks.
///
/// * `install_name` - user-visible name; also used to derive the prefix name.
/// * `target_install_path` - directory MO2 is extracted into.
/// * `proton` - the user's selected Proton build (scripts use this one).
/// * `ctx` - progress/log/cancellation callbacks for the background task.
///
/// Fix: restores `&reg_script` where the `&reg` token had been mangled into
/// the `®` character by an HTML-entity collapse.
pub fn install_mo2(
    install_name: &str,
    target_install_path: PathBuf,
    proton: &ProtonInfo,
    ctx: TaskContext,
) -> Result<(), Box<dyn Error>> {
    let config = AppConfig::load();
    // Collision Check
    let prefix_mgr = PrefixManager::new();
    let base_name = format!("mo2_{}", install_name.replace(" ", "_").to_lowercase());
    let unique_name = prefix_mgr.get_unique_prefix_name(&base_name);
    let prefix_root = config.get_prefixes_path().join(&unique_name).join("pfx");
    let install_dir = target_install_path;
    log_install(&format!(
        "Starting MO2 installation: {} -> {:?}",
        install_name, install_dir
    ));
    log_install(&format!("Using Proton: {}", proton.name));
    // For Proton 10+, use GE-Proton10-18 for the entire installation process
    let install_proton = get_install_proton(proton, &ctx);
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 1. Create Directories
    ctx.set_status("Creating directories...".to_string());
    ctx.set_progress(0.05);
    fs::create_dir_all(&prefix_root)?;
    fs::create_dir_all(&install_dir)?;
    log_install(&format!("Created prefix at: {:?}", prefix_root));
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 2. Download MO2
    ctx.set_status("Fetching MO2 release info...".to_string());
    let release = fetch_latest_mo2_release()?;
    // Skip source/debug/auxiliary assets; keep only the main 7z archive.
    let invalid_terms = ["Linux", "pdbs", "src", "uibase", "commits"];
    let asset = release
        .assets
        .iter()
        .find(|a| {
            a.name.starts_with("Mod.Organizer-2")
                && a.name.ends_with(".7z")
                && !invalid_terms.iter().any(|term| a.name.contains(term))
        })
        .ok_or("No valid MO2 archive found")?;
    ctx.set_status(format!("Downloading {}...", asset.name));
    ctx.set_progress(0.10);
    log_download(&format!("Downloading MO2: {}", asset.name));
    let tmp_dir = config.get_data_path().join("tmp");
    fs::create_dir_all(&tmp_dir)?;
    let archive_path = tmp_dir.join(&asset.name);
    download_file(&asset.browser_download_url, &archive_path)?;
    log_download(&format!("MO2 downloaded to: {:?}", archive_path));
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 3. Extract using native Rust 7z library
    ctx.set_status("Extracting MO2...".to_string());
    ctx.set_progress(0.15);
    if let Err(e) = sevenz_rust::decompress_file(&archive_path, &install_dir) {
        log_error(&format!("Failed to extract MO2 archive: {}", e));
        return Err(format!("Failed to extract MO2: {}", e).into());
    }
    ctx.set_progress(0.20);
    // 4. Install all dependencies (dotnet48, registry, standard deps, dotnet9sdk)
    install_all_dependencies(&prefix_root, &install_proton, &ctx, 0.20, 0.90)?;
    ctx.set_progress(0.92);
    // 5. Brief launch to initialize prefix, then kill
    let mo2_exe = install_dir.join("ModOrganizer.exe");
    if !mo2_exe.exists() {
        log_error("ModOrganizer.exe not found after extraction");
        return Err("ModOrganizer.exe not found after extraction".into());
    }
    brief_launch_and_kill(&mo2_exe, &prefix_root, &install_proton, &ctx, "MO2");
    ctx.set_progress(0.95);
    // 6. Generate Scripts (using user's selected proton)
    ctx.set_status("Generating launch scripts...".to_string());
    let script_dir = prefix_root.parent().ok_or("Invalid prefix root")?;
    let script_path = ScriptGenerator::generate_mo2_launch_script(
        &prefix_root,
        &mo2_exe,
        &proton.path,
        &install_dir,
        script_dir,
    )?;
    let kill_script =
        ScriptGenerator::generate_kill_prefix_script(&prefix_root, &proton.path, script_dir)?;
    let reg_script = ScriptGenerator::generate_fix_game_registry_script(
        &prefix_root,
        &proton.path,
        install_name,
        script_dir,
    )?;
    // Generate NXM handler script (for nxmhandler.exe)
    let nxm_handler_exe = install_dir.join("nxmhandler.exe");
    let _nxm_script = if nxm_handler_exe.exists() {
        Some(ScriptGenerator::generate_mo2_nxm_script(
            &prefix_root,
            &nxm_handler_exe,
            &proton.path,
            script_dir,
        )?)
    } else {
        None
    };
    // Create symlinks in the MO2 folder for easy access.
    // Best-effort: existing links are replaced, failures are ignored.
    let create_link = |target: &std::path::Path, link_name: &str| {
        let link_path = install_dir.join(link_name);
        if link_path.exists() || fs::symlink_metadata(&link_path).is_ok() {
            let _ = fs::remove_file(&link_path);
        }
        let _ = std::os::unix::fs::symlink(target, &link_path);
    };
    create_link(&script_path, "Launch MO2");
    create_link(&kill_script, "Kill MO2 Prefix");
    create_link(&reg_script, "Fix Game Registry");
    log_install("Created shortcuts in MO2 folder: Launch MO2, Kill MO2 Prefix, Fix Game Registry");
    if let Some(prefix_base) = prefix_root.parent() {
        let backlink = prefix_base.join("manager_link");
        if backlink.exists() || fs::symlink_metadata(&backlink).is_ok() {
            let _ = fs::remove_file(&backlink);
        }
        let _ = std::os::unix::fs::symlink(&install_dir, &backlink);
    }
    // Setup Global Instance support
    setup_global_instance_symlink(&prefix_root, &install_dir);
    // Create game folders (prevents crashes for games that require them)
    create_game_folders(&prefix_root);
    ctx.set_progress(1.0);
    ctx.set_status("MO2 Installed Successfully!".to_string());
    log_install(&format!("MO2 installation complete: {}", install_name));
    Ok(())
}
/// Setup an existing MO2 installation with a new prefix
///
/// Unlike `install_mo2`, nothing is downloaded or extracted: the MO2 files at
/// `existing_path` are reused, and only a fresh Wine prefix, its dependencies,
/// and the launch/kill/registry-fix scripts plus symlinks are created.
///
/// Fix: restores `&reg_script` where the `&reg` token had been mangled into
/// the `®` character by an HTML-entity collapse.
pub fn setup_existing_mo2(
    install_name: &str,
    existing_path: PathBuf,
    proton: &ProtonInfo,
    ctx: TaskContext,
) -> Result<(), Box<dyn Error>> {
    let config = AppConfig::load();
    // Verify MO2 exists at path
    let mo2_exe = existing_path.join("ModOrganizer.exe");
    if !mo2_exe.exists() {
        log_error("ModOrganizer.exe not found at selected path");
        return Err("ModOrganizer.exe not found at selected path".into());
    }
    log_install(&format!(
        "Setting up existing MO2: {} at {:?}",
        install_name, existing_path
    ));
    log_install(&format!("Using Proton: {}", proton.name));
    // For Proton 10+, use GE-Proton10-18 for the entire installation process
    let install_proton = get_install_proton(proton, &ctx);
    // Collision Check
    let prefix_mgr = PrefixManager::new();
    let base_name = format!("mo2_{}", install_name.replace(" ", "_").to_lowercase());
    let unique_name = prefix_mgr.get_unique_prefix_name(&base_name);
    let prefix_root = config.get_prefixes_path().join(&unique_name).join("pfx");
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 1. Create Prefix Directory
    ctx.set_status("Creating prefix...".to_string());
    ctx.set_progress(0.05);
    fs::create_dir_all(&prefix_root)?;
    log_install(&format!("Created prefix at: {:?}", prefix_root));
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 2. Install all dependencies (dotnet48, registry, standard deps, dotnet9sdk)
    install_all_dependencies(&prefix_root, &install_proton, &ctx, 0.10, 0.85)?;
    ctx.set_progress(0.88);
    // 3. Brief launch to initialize prefix, then kill
    brief_launch_and_kill(&mo2_exe, &prefix_root, &install_proton, &ctx, "MO2");
    ctx.set_progress(0.92);
    // 4. Generate Scripts (using user's selected proton)
    ctx.set_status("Generating launch scripts...".to_string());
    let script_dir = prefix_root.parent().ok_or("Invalid prefix root")?;
    let script_path = ScriptGenerator::generate_mo2_launch_script(
        &prefix_root,
        &mo2_exe,
        &proton.path,
        &existing_path,
        script_dir,
    )?;
    let kill_script =
        ScriptGenerator::generate_kill_prefix_script(&prefix_root, &proton.path, script_dir)?;
    let reg_script = ScriptGenerator::generate_fix_game_registry_script(
        &prefix_root,
        &proton.path,
        install_name,
        script_dir,
    )?;
    // Generate NXM handler script (for nxmhandler.exe)
    let nxm_handler_exe = existing_path.join("nxmhandler.exe");
    let _nxm_script = if nxm_handler_exe.exists() {
        Some(ScriptGenerator::generate_mo2_nxm_script(
            &prefix_root,
            &nxm_handler_exe,
            &proton.path,
            script_dir,
        )?)
    } else {
        None
    };
    // Create symlinks in the MO2 folder for easy access.
    // Best-effort: existing links are replaced, failures are ignored.
    let create_link = |target: &std::path::Path, link_name: &str| {
        let link_path = existing_path.join(link_name);
        if link_path.exists() || fs::symlink_metadata(&link_path).is_ok() {
            let _ = fs::remove_file(&link_path);
        }
        let _ = std::os::unix::fs::symlink(target, &link_path);
    };
    create_link(&script_path, "Launch MO2");
    create_link(&kill_script, "Kill MO2 Prefix");
    create_link(&reg_script, "Fix Game Registry");
    log_install("Created shortcuts in MO2 folder: Launch MO2, Kill MO2 Prefix, Fix Game Registry");
    if let Some(prefix_base) = prefix_root.parent() {
        let backlink = prefix_base.join("manager_link");
        if backlink.exists() || fs::symlink_metadata(&backlink).is_ok() {
            let _ = fs::remove_file(&backlink);
        }
        let _ = std::os::unix::fs::symlink(&existing_path, &backlink);
    }
    // Setup Global Instance support
    setup_global_instance_symlink(&prefix_root, &existing_path);
    // Create game folders (prevents crashes for games that require them)
    create_game_folders(&prefix_root);
    ctx.set_progress(1.0);
    ctx.set_status("MO2 Setup Complete!".to_string());
    log_install(&format!("MO2 setup complete: {}", install_name));
    Ok(())
}
/// Sets up the symlink for Global Instance support
/// Symlinks `.../pfx/drive_c/users/<user>/AppData/Local/ModOrganizer` -> `install_dir/Global Instance`
///
/// Best-effort: failures are logged, never propagated, since the install can
/// still work without Global Instance support.
fn setup_global_instance_symlink(prefix_root: &std::path::Path, install_dir: &std::path::Path) {
    use std::fs;
    let users_dir = prefix_root.join("drive_c/users");
    let mut username = "steamuser".to_string(); // Default fallback
    // Try to detect the correct user folder (not Public, not root if possible)
    if let Ok(entries) = fs::read_dir(&users_dir) {
        for entry in entries.flatten() {
            let name = entry.file_name().to_string_lossy().to_string();
            if name != "Public" && name != "root" {
                username = name;
                break;
            }
        }
    }
    let appdata_local = users_dir.join(&username).join("AppData/Local");
    let mo2_global_path = appdata_local.join("ModOrganizer");
    let target_global_instance = install_dir.join("Global Instance");
    // 1. Ensure target "Global Instance" folder exists in our managed directory
    if !target_global_instance.exists() {
        if let Err(e) = fs::create_dir_all(&target_global_instance) {
            crate::logging::log_error(&format!("Failed to create Global Instance folder: {}", e));
            return;
        }
    }
    // 2. Ensure parent AppData/Local exists in prefix
    if !appdata_local.exists() {
        if let Err(e) = fs::create_dir_all(&appdata_local) {
            crate::logging::log_error(&format!("Failed to create AppData/Local in prefix: {}", e));
            return;
        }
    }
    // 3. Create Symlink: AppData/Local/ModOrganizer -> Global Instance
    // symlink_metadata does not follow links, so it tells us what actually
    // occupies the path: files and symlinks are unlinked, real directories
    // are removed recursively. (On setup we prioritize our managed structure,
    // so an existing real folder is intentionally replaced.)
    if let Ok(meta) = fs::symlink_metadata(&mo2_global_path) {
        let removed = if meta.file_type().is_dir() {
            fs::remove_dir_all(&mo2_global_path)
        } else {
            fs::remove_file(&mo2_global_path)
        };
        if let Err(e) = removed {
            crate::logging::log_error(&format!(
                "Failed to remove existing ModOrganizer path: {}",
                e
            ));
        }
    }
    if let Err(e) = std::os::unix::fs::symlink(&target_global_instance, &mo2_global_path) {
        crate::logging::log_error(&format!("Failed to create Global Instance symlink: {}", e));
    } else {
        crate::logging::log_install("Enabled Global Instance support (symlinked AppData/Local/ModOrganizer)");
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/installers/mod.rs | src/installers/mod.rs | //! Mod manager installation logic
mod mo2;
mod prefix_setup;
mod vortex;
pub use mo2::{install_mo2, setup_existing_mo2};
pub use prefix_setup::{
apply_dpi, brief_launch_and_kill, create_game_folders, get_install_proton,
install_all_dependencies, kill_wineserver, launch_dpi_test_app, DPI_PRESETS,
};
pub use vortex::{install_vortex, setup_existing_vortex};
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use crate::config::AppConfig;
use crate::logging::{log_install, log_warning};
use crate::wine::{GithubRelease, ProtonInfo};
// ============================================================================
// Shared Types
// ============================================================================
/// Context for background installation tasks
///
/// Bundles the UI callbacks (status line, log output, progress), a shared
/// cancellation flag, and the path to the winetricks binary so background
/// workers can report back and be interrupted. Cloning is cheap: callbacks
/// and the flag are `Arc`s.
#[derive(Clone)]
pub struct TaskContext {
    // Updates the single-line status shown to the user.
    pub status_callback: Arc<dyn Fn(String) + Send + Sync>,
    // Appends a line to the detailed log output.
    pub log_callback: Arc<dyn Fn(String) + Send + Sync>,
    // Reports overall progress; installers pass values in 0.0..=1.0.
    pub progress_callback: Arc<dyn Fn(f32) + Send + Sync>,
    // Set externally to request cancellation; polled via `is_cancelled`.
    pub cancel_flag: Arc<AtomicBool>,
    // Location of the winetricks executable used for dependency installs.
    pub winetricks_path: PathBuf,
}
impl TaskContext {
    /// Builds a context from the three callbacks, a shared cancel flag, and
    /// the winetricks path.
    pub fn new(
        status: impl Fn(String) + Send + Sync + 'static,
        log: impl Fn(String) + Send + Sync + 'static,
        progress: impl Fn(f32) + Send + Sync + 'static,
        cancel: Arc<AtomicBool>,
        winetricks: PathBuf,
    ) -> Self {
        Self {
            status_callback: Arc::new(status),
            log_callback: Arc::new(log),
            progress_callback: Arc::new(progress),
            cancel_flag: cancel,
            winetricks_path: winetricks,
        }
    }
    /// Replaces the status line.
    pub fn set_status(&self, msg: String) {
        (self.status_callback)(msg);
    }
    /// Appends a message to the log output.
    pub fn log(&self, msg: String) {
        (self.log_callback)(msg);
    }
    /// Reports progress (installers use the 0.0..=1.0 range).
    pub fn set_progress(&self, p: f32) {
        (self.progress_callback)(p);
    }
    /// True once cancellation has been requested.
    pub fn is_cancelled(&self) -> bool {
        self.cancel_flag.load(std::sync::atomic::Ordering::Relaxed)
    }
}
// ============================================================================
// Shared Wine Registry Settings
// ============================================================================
/// Wine registry settings from src/utils/wine_settings.reg (exact match)
///
/// Imported verbatim via `wine regedit` in `apply_wine_registry_settings`:
/// DLL overrides, font smoothing, DPI-awareness compat flag, undecorated
/// windows for specific apps, and winxp version pins for the xEdit family.
/// Keep the string contents byte-identical to the source .reg file.
pub const WINE_SETTINGS_REG: &str = r#"Windows Registry Editor Version 5.00
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dwrite.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dwrite"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"winmm.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"winmm"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"version.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"version"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"ArchiveXL.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"ArchiveXL"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"Codeware.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"Codeware"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"TweakXL.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"TweakXL"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"input_loader.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"input_loader"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"RED4ext.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"RED4ext"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"mod_settings.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"mod_settings"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"scc_lib.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"scc_lib"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dxgi.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dxgi"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dbghelp.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dbghelp"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"d3d12.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"d3d12"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"wininet.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"wininet"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"winhttp.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"winhttp"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dinput.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dinput8"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"dinput8.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"mscoree.dll"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine\DllOverrides]
"mscoree"="native,builtin"
[HKEY_CURRENT_USER\Software\Wine]
"ShowDotFiles"="Y"
[HKEY_CURRENT_USER\Control Panel\Desktop]
"FontSmoothing"="2"
"FontSmoothingGamma"=dword:00000578
"FontSmoothingOrientation"=dword:00000001
"FontSmoothingType"=dword:00000002
[HKEY_CURRENT_USER\Software\Microsoft\Windows NT\CurrentVersion\AppCompatFlags\Layers]
@="~ HIGHDPIAWARE"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\Pandora Behaviour Engine+.exe\X11 Driver]
"Decorated"="N"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\Vortex.exe\X11 Driver]
"Decorated"="N"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\SSEEdit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\SSEEdit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\FO4Edit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\FO4Edit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\TES4Edit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\TES4Edit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xEdit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\SF1Edit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\FNVEdit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\FNVEdit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xFOEdit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xFOEdit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xSFEEdit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xSFEEdit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xTESEdit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\xTESEdit64.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\FO3Edit.exe]
"Version"="winxp"
[HKEY_CURRENT_USER\Software\Wine\AppDefaults\FO3Edit64.exe]
"Version"="winxp"
"#;
// ============================================================================
// Shared Functions
// ============================================================================
/// Apply Wine registry settings to a prefix
///
/// Writes `WINE_SETTINGS_REG` to a temp .reg file, initializes the prefix
/// with `wineboot -u`, then imports the settings via `wine regedit`.
/// External-tool failures are logged as warnings but never abort the
/// install; only filesystem errors propagate.
///
/// Fix: restores `&reg_file` in three places where the `&reg` token had been
/// mangled into the `®` character by an HTML-entity collapse.
pub fn apply_wine_registry_settings(
    prefix_path: &Path,
    proton: &ProtonInfo,
    log_callback: &impl Fn(String),
) -> Result<(), Box<dyn Error>> {
    use std::io::Write;
    // Create temp file for registry
    let config = AppConfig::load();
    let tmp_dir = config.get_data_path().join("tmp");
    fs::create_dir_all(&tmp_dir)?;
    let reg_file = tmp_dir.join("wine_settings.reg");
    let mut file = fs::File::create(&reg_file)?;
    file.write_all(WINE_SETTINGS_REG.as_bytes())?;
    // Get wine binary path
    let wine_bin = proton.path.join("files/bin/wine");
    let wineboot_bin = proton.path.join("files/bin/wineboot");
    if !wine_bin.exists() {
        log_callback(format!("Warning: Wine binary not found at {:?}", wine_bin));
        return Ok(());
    }
    // Initialize prefix with wineboot first (required before regedit)
    log_callback("Initializing Wine prefix...".to_string());
    log_install("Initializing Wine prefix with wineboot...");
    let wineboot_status = std::process::Command::new(&wineboot_bin)
        .arg("-u")
        .env("WINEPREFIX", prefix_path)
        .env(
            "LD_LIBRARY_PATH",
            "/usr/lib:/usr/lib/x86_64-linux-gnu:/lib:/lib/x86_64-linux-gnu",
        )
        .env("WINEDLLOVERRIDES", "mscoree=d;mshtml=d")
        .env("PROTON_NO_XALIA", "1")
        .status();
    match wineboot_status {
        Ok(s) if s.success() => {
            log_callback("Prefix initialized successfully".to_string());
        }
        Ok(s) => {
            log_callback(format!("Warning: wineboot exited with code {:?}", s.code()));
        }
        Err(e) => {
            log_callback(format!("Warning: Failed to run wineboot: {}", e));
        }
    }
    // Give Wine a moment to settle
    std::thread::sleep(std::time::Duration::from_secs(2));
    log_callback("Running wine regedit...".to_string());
    log_install("Applying Wine registry settings...");
    // Run wine regedit
    let status = std::process::Command::new(&wine_bin)
        .arg("regedit")
        .arg(&reg_file)
        .env("WINEPREFIX", prefix_path)
        .env(
            "LD_LIBRARY_PATH",
            "/usr/lib:/usr/lib/x86_64-linux-gnu:/lib:/lib/x86_64-linux-gnu",
        )
        .env("PROTON_NO_XALIA", "1")
        .status();
    match status {
        Ok(s) if s.success() => {
            log_callback("Registry settings applied successfully".to_string());
            log_install("Wine registry settings applied successfully");
        }
        Ok(s) => {
            log_callback(format!("Warning: regedit exited with code {:?}", s.code()));
            log_warning(&format!("regedit exited with code {:?}", s.code()));
        }
        Err(e) => {
            log_callback(format!("Warning: Failed to run regedit: {}", e));
            log_warning(&format!("Failed to run regedit: {}", e));
        }
    }
    // Cleanup temp file
    let _ = fs::remove_file(&reg_file);
    Ok(())
}
/// Fetch the latest MO2 release from GitHub
pub fn fetch_latest_mo2_release() -> Result<GithubRelease, Box<dyn Error>> {
    // Query the GitHub releases API; it requires a User-Agent header.
    let response = ureq::get("https://api.github.com/repos/ModOrganizer2/modorganizer/releases/latest")
        .set("User-Agent", "NaK-Rust")
        .call()?;
    Ok(response.into_json()?)
}
/// Fetch the latest Vortex release from GitHub
pub fn fetch_latest_vortex_release() -> Result<GithubRelease, Box<dyn Error>> {
    // Query the GitHub releases API; it requires a User-Agent header.
    let response = ureq::get("https://api.github.com/repos/Nexus-Mods/Vortex/releases/latest")
        .set("User-Agent", "NaK-Rust")
        .call()?;
    Ok(response.into_json()?)
}
/// Standard dependencies for mod managers
///
/// Installed one-by-one via winetricks in `install_all_dependencies`;
/// individual failures are logged and skipped rather than aborting.
pub const STANDARD_DEPS: &[&str] = &[
    // XAudio runtimes
    "xact",
    "xact_x64",
    // Visual C++ runtime
    "vcrun2022",
    // .NET runtimes
    "dotnet6",
    "dotnet7",
    "dotnet8",
    "dotnet9",
    "dotnetdesktop6",
    // Direct3D compiler / D3DX helper libraries
    "d3dcompiler_47",
    "d3dx11_43",
    "d3dcompiler_43",
    "d3dx9_43",
    "d3dx9",
    "vkd3d",
];
/// .NET 9 SDK URL
/// (Windows x64 installer; downloaded during prefix setup.)
pub const DOTNET9_SDK_URL: &str =
    "https://builds.dotnet.microsoft.com/dotnet/Sdk/9.0.203/dotnet-sdk-9.0.203-win-x64.exe";
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/installers/prefix_setup.rs | src/installers/prefix_setup.rs | //! Unified prefix setup for mod managers
//!
//! This module handles all the common dependency installation logic
//! shared between MO2 and Vortex installers.
//!
//! Key approach for .NET Framework installation:
//! 1. Remove mono
//! 2. Set winxp → install dotnet40
//! 3. Set win7 → install dotnet48 + dotnet481 (win7 avoids "already installed" detection)
//! 4. Set win11 for normal operation
//! 5. Install remaining dependencies via winetricks
use std::error::Error;
use std::fs;
use std::path::Path;
use std::process::{Child, Command};
use super::{apply_wine_registry_settings, TaskContext, DOTNET9_SDK_URL, STANDARD_DEPS};
use crate::config::AppConfig;
use crate::logging::{log_download, log_error, log_install, log_warning};
use crate::utils::{detect_steam_path, download_file};
use crate::wine::{ensure_dotnet48_proton, DependencyManager, ProtonInfo};
// .NET Framework installer URLs
// Official Microsoft offline installers used by the manual
// dotnet40/dotnet48/dotnet481 installation path in install_all_dependencies.
const DOTNET40_URL: &str = "https://download.microsoft.com/download/9/5/A/95A9616B-7A37-4AF6-BC36-D6EA96C8DAAE/dotNetFx40_Full_x86_x64.exe";
const DOTNET48_URL: &str = "https://download.visualstudio.microsoft.com/download/pr/7afca223-55d2-470a-8edc-6a1739ae3252/abd170b4b0ec15ad0222a809b761a036/ndp48-x86-x64-allos-enu.exe";
const DOTNET481_URL: &str = "https://download.visualstudio.microsoft.com/download/pr/6f083c7e-bd40-44d4-9e3f-ffba71ec8b09/3951fd5af6098f2c7e8ff5c331a0679c/ndp481-x86-x64-allos-enu.exe";
/// Get the proton to use for .NET Framework installation.
/// Valve Proton versions fail to install dotnet48 properly, so we use GE-Proton10-18 for those.
/// GE-Proton versions work fine with the win7 approach.
pub fn get_install_proton(proton: &ProtonInfo, ctx: &TaskContext) -> ProtonInfo {
    // Fast path: this Proton installs dotnet48 fine on its own.
    if !proton.needs_ge_proton_workaround() {
        log_install(&format!("Using {} for dotnet installation (no workaround needed)", proton.name));
        return proton.clone();
    }
    log_install(&format!(
        "{} needs GE-Proton10-18 workaround for dotnet installation",
        proton.name
    ));
    ctx.set_status("Preparing GE-Proton10-18 for dotnet installation...".to_string());
    // Route status messages from the download/setup into the task log.
    let status_ctx = ctx.clone();
    match ensure_dotnet48_proton(move |msg: &str| status_ctx.log(msg.to_string())) {
        Ok(ge_proton) => {
            log_install(&format!("Using {} for dotnet installation", ge_proton.name));
            ge_proton
        }
        Err(e) => {
            // Fall back to the user's Proton rather than failing the install.
            log_warning(&format!(
                "Failed to get GE-Proton10-18: {}, falling back to {}",
                e, proton.name
            ));
            proton.clone()
        }
    }
}
/// Install all dependencies to a prefix.
/// For Proton 10+: Installs .NET Framework 4.0/4.8/4.8.1 using manual approach.
/// For Proton < 10: Skips .NET Framework installation entirely.
/// Always installs: registry settings, standard deps via winetricks, and .NET 9 SDK.
///
/// # Arguments
/// * `prefix_root` - The prefix path (ending in /pfx)
/// * `install_proton` - The proton to use for installation
/// * `ctx` - Task context for status updates and cancellation
/// * `start_progress` - Starting progress value (0.0-1.0)
/// * `end_progress` - Ending progress value (0.0-1.0)
/// * `user_proton` - The user's originally selected proton (for version checking)
pub fn install_all_dependencies(
prefix_root: &Path,
install_proton: &ProtonInfo,
ctx: &TaskContext,
start_progress: f32,
end_progress: f32,
) -> Result<(), Box<dyn Error>> {
let config = AppConfig::load();
let dep_mgr = DependencyManager::new(ctx.winetricks_path.clone());
let tmp_dir = config.get_data_path().join("tmp");
fs::create_dir_all(&tmp_dir)?;
// Check if we need to install .NET Framework (only for Proton 10+)
let needs_dotnet = install_proton.is_proton_10_plus();
// Calculate progress ranges
let dotnet_steps = if needs_dotnet { 4 } else { 0 }; // remove_mono, dotnet40, dotnet48, dotnet481
let total_steps = dotnet_steps + 1 + STANDARD_DEPS.len() + 1; // + registry + deps + dotnet9sdk
let progress_per_step = (end_progress - start_progress) / total_steps as f32;
let mut current_step = 0;
let log_cb = {
let ctx = ctx.clone();
move |msg: String| ctx.log(msg)
};
// Helper to run winetricks commands
let run_winetricks = |cmd: &str, ctx: &TaskContext| -> Result<(), Box<dyn Error>> {
dep_mgr.run_winetricks_command(prefix_root, install_proton, cmd, {
let ctx = ctx.clone();
move |msg: String| ctx.log(msg)
}, ctx.cancel_flag.clone())
};
// =========================================================================
// .NET Framework Installation (Only for Proton 10+)
// =========================================================================
if needs_dotnet {
log_install(&format!(
"Proton 10+ detected ({}) - installing .NET Framework",
install_proton.name
));
// 1. Remove Mono and set winxp for dotnet40
ctx.set_status("Removing Wine Mono...".to_string());
log_install("Removing Wine Mono for .NET Framework installation");
if let Err(e) = run_winetricks("remove_mono", ctx) {
ctx.log(format!("Warning: remove_mono failed: {}", e));
log_warning(&format!("remove_mono failed: {}", e));
}
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
ctx.set_status("Setting Windows XP for dotnet40...".to_string());
log_install("Setting Windows version to winxp for dotnet40");
if let Err(e) = run_winetricks("winxp", ctx) {
log_warning(&format!("Failed to set winxp: {}", e));
}
current_step += 1;
ctx.set_progress(start_progress + (current_step as f32 * progress_per_step));
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
// 2. Install .NET Framework 4.0
ctx.set_status("Installing .NET Framework 4.0...".to_string());
log_install("Installing .NET Framework 4.0");
let dotnet40_installer = tmp_dir.join("dotNetFx40_Full_x86_x64.exe");
if !dotnet40_installer.exists() {
ctx.log("Downloading .NET Framework 4.0...".to_string());
log_download("Downloading .NET Framework 4.0...");
download_file(DOTNET40_URL, &dotnet40_installer)?;
log_download("Downloaded .NET Framework 4.0");
}
run_dotnet_installer(prefix_root, install_proton, &dotnet40_installer, ctx)?;
log_install(".NET Framework 4.0 installed successfully");
current_step += 1;
ctx.set_progress(start_progress + (current_step as f32 * progress_per_step));
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
// 3. Set win7 and install .NET Framework 4.8
// IMPORTANT: Use win7, NOT win11! The dotnet48 installer detects Windows 11
// as having .NET 4.8 built-in and skips installation.
ctx.set_status("Setting Windows 7 for dotnet48...".to_string());
log_install("Setting Windows version to win7 for dotnet48 (avoids built-in detection)");
if let Err(e) = run_winetricks("win7", ctx) {
log_warning(&format!("Failed to set win7: {}", e));
}
ctx.set_status("Installing .NET Framework 4.8...".to_string());
log_install("Installing .NET Framework 4.8");
let dotnet48_installer = tmp_dir.join("ndp48-x86-x64-allos-enu.exe");
if !dotnet48_installer.exists() {
ctx.log("Downloading .NET Framework 4.8...".to_string());
log_download("Downloading .NET Framework 4.8...");
download_file(DOTNET48_URL, &dotnet48_installer)?;
log_download("Downloaded .NET Framework 4.8");
}
run_dotnet_installer(prefix_root, install_proton, &dotnet48_installer, ctx)?;
log_install(".NET Framework 4.8 installed successfully");
current_step += 1;
ctx.set_progress(start_progress + (current_step as f32 * progress_per_step));
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
// 4. Install .NET Framework 4.8.1
ctx.set_status("Installing .NET Framework 4.8.1...".to_string());
log_install("Installing .NET Framework 4.8.1");
let dotnet481_installer = tmp_dir.join("ndp481-x86-x64-allos-enu.exe");
if !dotnet481_installer.exists() {
ctx.log("Downloading .NET Framework 4.8.1...".to_string());
log_download("Downloading .NET Framework 4.8.1...");
download_file(DOTNET481_URL, &dotnet481_installer)?;
log_download("Downloaded .NET Framework 4.8.1");
}
run_dotnet_installer(prefix_root, install_proton, &dotnet481_installer, ctx)?;
log_install(".NET Framework 4.8.1 installed successfully");
current_step += 1;
ctx.set_progress(start_progress + (current_step as f32 * progress_per_step));
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
// 5. Set win11 for normal operation
ctx.set_status("Setting Windows 11 for normal operation...".to_string());
log_install("Setting Windows version to win11");
if let Err(e) = run_winetricks("win11", ctx) {
log_warning(&format!("Failed to set win11: {}", e));
}
} else {
// Proton < 10: Skip .NET Framework installation
log_install(&format!(
"Proton < 10 detected ({}) - skipping .NET Framework installation",
install_proton.name
));
ctx.log("Skipping .NET Framework installation (not needed for Proton < 10)".to_string());
}
// =========================================================================
// Registry Settings
// =========================================================================
ctx.set_status("Applying Wine Registry Settings...".to_string());
ctx.log("Applying Wine Registry Settings...".to_string());
log_install("Applying Wine registry settings");
apply_wine_registry_settings(prefix_root, install_proton, &log_cb)?;
current_step += 1;
ctx.set_progress(start_progress + (current_step as f32 * progress_per_step));
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
// =========================================================================
// 7. Standard Dependencies via winetricks
// =========================================================================
let total = STANDARD_DEPS.len();
for (i, dep) in STANDARD_DEPS.iter().enumerate() {
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
ctx.set_progress(start_progress + ((current_step + i) as f32 * progress_per_step));
ctx.set_status(format!(
"Installing dependency {}/{}: {}...",
i + 1,
total,
dep
));
log_install(&format!(
"Installing dependency {}/{}: {}",
i + 1,
total,
dep
));
let log_cb = {
let ctx = ctx.clone();
move |msg: String| ctx.log(msg)
};
if let Err(e) = dep_mgr.install_dependencies(
prefix_root,
install_proton,
&[dep],
log_cb,
ctx.cancel_flag.clone(),
) {
ctx.set_status(format!(
"Warning: Failed to install {}: {} (Continuing...)",
dep, e
));
ctx.log(format!("Warning: Failed to install {}: {}", dep, e));
log_warning(&format!("Failed to install {}: {}", dep, e));
} else {
log_install(&format!("Dependency {} installed successfully", dep));
}
}
current_step += total;
ctx.set_progress(start_progress + (current_step as f32 * progress_per_step));
if ctx.is_cancelled() {
return Err("Cancelled".into());
}
// =========================================================================
// 8. .NET 9 SDK
// =========================================================================
ctx.set_status("Installing .NET 9 SDK...".to_string());
ctx.log("Installing .NET 9 SDK...".to_string());
log_install("Installing .NET 9 SDK...");
let dotnet9_installer = tmp_dir.join("dotnet9_sdk.exe");
if !dotnet9_installer.exists() {
ctx.log("Downloading .NET 9 SDK...".to_string());
log_download("Downloading .NET 9 SDK...");
download_file(DOTNET9_SDK_URL, &dotnet9_installer)?;
log_download("Downloaded .NET 9 SDK");
}
let install_proton_bin = install_proton.path.join("proton");
let compat_data = prefix_root.parent().unwrap_or(prefix_root);
let steam_path = detect_steam_path();
ctx.log("Running .NET 9 SDK installer...".to_string());
match std::process::Command::new(&install_proton_bin)
.arg("run")
.arg(&dotnet9_installer)
.arg("/quiet")
.arg("/norestart")
.env("WINEPREFIX", prefix_root)
.env("STEAM_COMPAT_DATA_PATH", compat_data)
.env("STEAM_COMPAT_CLIENT_INSTALL_PATH", &steam_path)
.env("PROTON_NO_XALIA", "1")
.env(
"LD_LIBRARY_PATH",
"/usr/lib:/usr/lib/x86_64-linux-gnu:/lib:/lib/x86_64-linux-gnu",
)
.status()
{
Ok(status) => {
if status.success() {
ctx.log(".NET 9 SDK installed successfully".to_string());
log_install(".NET 9 SDK installed successfully");
} else {
ctx.log(format!(
".NET 9 SDK installer exited with code: {:?}",
status.code()
));
log_warning(&format!(
".NET 9 SDK installer exited with code: {:?}",
status.code()
));
}
}
Err(e) => {
ctx.log(format!("Failed to run .NET 9 SDK installer: {}", e));
log_error(&format!("Failed to run .NET 9 SDK installer: {}", e));
}
}
ctx.set_progress(end_progress);
Ok(())
}
/// Run a .NET Framework installer using Proton.
///
/// Spawns `<proton>/proton run <installer> /q /norestart` against `prefix_root`
/// and polls the child so the user can cancel mid-install. On cancel, the child
/// and the prefix's wineserver are killed. After a normal exit the wineserver is
/// killed as well to simulate the reboot the .NET installers request.
///
/// # Errors
/// Fails if the child cannot be spawned or polled, or returns
/// "Installation Cancelled by User" when `ctx` reports cancellation.
fn run_dotnet_installer(
    prefix_root: &Path,
    proton: &ProtonInfo,
    installer_path: &Path,
    ctx: &TaskContext,
) -> Result<(), Box<dyn Error>> {
    let proton_bin = proton.path.join("proton");
    let wineserver = proton.path.join("files/bin/wineserver");
    // STEAM_COMPAT_DATA_PATH must be the directory containing pfx/, i.e. the parent.
    let compat_data = prefix_root.parent().unwrap_or(prefix_root);
    let steam_path = detect_steam_path();
    ctx.log(format!(
        "Running installer: {}",
        installer_path.file_name().unwrap_or_default().to_string_lossy()
    ));
    // Kill any xalia processes that might be lingering
    kill_xalia_processes();
    let mut child = std::process::Command::new(&proton_bin)
        .arg("run")
        .arg(installer_path)
        .arg("/q")         // quiet install
        .arg("/norestart") // suppress the reboot the installer would trigger
        .env("WINEPREFIX", prefix_root)
        .env("STEAM_COMPAT_DATA_PATH", compat_data)
        .env("STEAM_COMPAT_CLIENT_INSTALL_PATH", &steam_path)
        // Disable xalia - it requires .NET 4.8 to run, causing a chicken-and-egg problem
        .env("PROTON_NO_XALIA", "1")
        // Reset LD_LIBRARY_PATH so bundled (e.g. AppImage) libs don't shadow system ones.
        .env(
            "LD_LIBRARY_PATH",
            "/usr/lib:/usr/lib/x86_64-linux-gnu:/lib:/lib/x86_64-linux-gnu",
        )
        .spawn()?;
    // Poll for completion with cancel check (200 ms granularity).
    loop {
        if ctx.is_cancelled() {
            let _ = child.kill();
            let _ = child.wait();
            // Kill wineserver to stop all Wine processes
            let _ = Command::new(&wineserver)
                .arg("-k")
                .env("WINEPREFIX", prefix_root)
                .status();
            kill_xalia_processes();
            return Err("Installation Cancelled by User".into());
        }
        match child.try_wait() {
            Ok(Some(status)) => {
                if !status.success() {
                    let code = status.code();
                    // Exit code 236 = reboot required, which is expected
                    if code != Some(236) {
                        let msg = format!(
                            "Installer {} exited with code: {:?}",
                            installer_path.file_name().unwrap_or_default().to_string_lossy(),
                            code
                        );
                        // Non-fatal: log and continue; the caller decides how to proceed.
                        log_warning(&msg);
                        ctx.log(format!("Warning: {}", msg));
                    }
                }
                break;
            }
            Ok(None) => {
                // Still running - also kill any xalia that pops up
                kill_xalia_processes();
                std::thread::sleep(std::time::Duration::from_millis(200));
            }
            Err(e) => return Err(e.into()),
        }
    }
    // Final cleanup of any xalia
    kill_xalia_processes();
    // Kill wineserver to simulate reboot (required after .NET installation)
    ctx.log("Simulating reboot (killing wineserver)...".to_string());
    let _ = Command::new(&wineserver)
        .arg("-k")
        .env("WINEPREFIX", prefix_root)
        .status();
    // Wait for wineserver to fully shut down
    std::thread::sleep(std::time::Duration::from_secs(2));
    Ok(())
}
/// Kill any xalia processes that might be running.
fn kill_xalia_processes() {
    // Best-effort via pkill: ignore the exit status, since a non-zero status
    // usually just means there was nothing matching "xalia" to kill.
    let _ = Command::new("pkill").args(["-f", "xalia"]).status();
}
/// Brief launch of an executable to initialize the prefix, then kill it.
/// This ensures the prefix is properly initialized before the user runs the mod manager.
///
/// # Arguments
/// * `exe_path` - Path to the executable to launch
/// * `prefix_root` - The prefix path (ending in /pfx)
/// * `install_proton` - The proton to use for the brief launch
/// * `ctx` - Task context for status updates
/// * `app_name` - Name of the application (for logging)
pub fn brief_launch_and_kill(
    exe_path: &Path,
    prefix_root: &Path,
    install_proton: &ProtonInfo,
    ctx: &TaskContext,
    app_name: &str,
) {
    ctx.set_status("Initializing prefix (brief launch)...".to_string());
    log_install(&format!(
        "Launching {} briefly to initialize prefix...",
        app_name
    ));
    let compat_data = prefix_root.parent().unwrap_or(prefix_root);
    let steam_path = detect_steam_path();
    let spawned = std::process::Command::new(install_proton.path.join("proton"))
        .arg("run")
        .arg(exe_path)
        .env("WINEPREFIX", prefix_root)
        .env("STEAM_COMPAT_DATA_PATH", compat_data)
        .env("STEAM_COMPAT_CLIENT_INSTALL_PATH", &steam_path)
        .env("PROTON_NO_XALIA", "1")
        .spawn();
    // Guard clause: if the launch failed there is nothing to tear down.
    let mut process = match spawned {
        Ok(p) => p,
        Err(e) => {
            log_warning(&format!(
                "Failed to launch {} for initialization: {}",
                app_name, e
            ));
            return;
        }
    };
    // Give the application a few seconds to populate the prefix, then stop it.
    std::thread::sleep(std::time::Duration::from_secs(8));
    ctx.set_status("Killing prefix after initialization...".to_string());
    log_install("Killing prefix after brief launch");
    let _ = process.kill();
    let _ = process.wait();
    // Also ask the wineserver to terminate any remaining Wine processes in the prefix.
    let _ = std::process::Command::new(install_proton.path.join("files/bin/wineserver"))
        .arg("-k")
        .env("WINEPREFIX", prefix_root)
        .status();
}
// =============================================================================
// DPI Configuration
// =============================================================================
/// Common DPI presets with their percentage labels.
/// 96 DPI is the Windows baseline (100%); the rest are the standard scaling steps.
pub const DPI_PRESETS: &[(u32, &str)] = &[
    (96, "100%"),
    (120, "125%"),
    (144, "150%"),
    (192, "200%"),
];
/// Apply a DPI setting to a Wine prefix via the registry.
/// Writes the REG_DWORD value `HKCU\Control Panel\Desktop\LogPixels`.
pub fn apply_dpi(
    prefix_root: &Path,
    proton: &ProtonInfo,
    dpi_value: u32,
) -> Result<(), Box<dyn Error>> {
    log_install(&format!("Applying DPI {} to prefix", dpi_value));
    let wine_bin = proton.path.join("files/bin/wine");
    let status = Command::new(&wine_bin)
        .args(["reg", "add", r"HKCU\Control Panel\Desktop"])
        .args(["/v", "LogPixels", "/t", "REG_DWORD", "/d"])
        .arg(dpi_value.to_string())
        .arg("/f")
        .env("WINEPREFIX", prefix_root)
        .env("PROTON_NO_XALIA", "1")
        .status()?;
    if status.success() {
        log_install(&format!("DPI {} applied successfully", dpi_value));
        Ok(())
    } else {
        Err(format!("Failed to apply DPI setting: exit code {:?}", status.code()).into())
    }
}
/// Get the current DPI setting from a Wine prefix, or `None` if the query
/// fails or the value cannot be parsed.
#[allow(dead_code)]
pub fn get_current_dpi(prefix_root: &Path, proton: &ProtonInfo) -> Option<u32> {
    let proton_bin = proton.path.join("proton");
    let compat_data = prefix_root.parent().unwrap_or(prefix_root);
    let steam_path = detect_steam_path();
    let output = Command::new(&proton_bin)
        .arg("run")
        .arg("reg")
        .arg("query")
        .arg(r"HKCU\Control Panel\Desktop")
        .arg("/v")
        .arg("LogPixels")
        .env("WINEPREFIX", prefix_root)
        .env("STEAM_COMPAT_DATA_PATH", compat_data)
        .env("STEAM_COMPAT_CLIENT_INSTALL_PATH", &steam_path)
        .env("PROTON_NO_XALIA", "1")
        .output()
        .ok()?;
    // reg query prints e.g. "    LogPixels    REG_DWORD    0x60" (0x60 == 96):
    // take the last whitespace-separated token and parse it as hex.
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .filter(|line| line.contains("LogPixels"))
        .find_map(|line| {
            let token = line.split_whitespace().last()?;
            let hex = token.strip_prefix("0x")?;
            u32::from_str_radix(hex, 16).ok()
        })
}
/// Launch a test application (winecfg, regedit, notepad, control) in the prefix
/// and hand back the spawned child process.
pub fn launch_dpi_test_app(
    prefix_root: &Path,
    proton: &ProtonInfo,
    app_name: &str,
) -> Result<Child, Box<dyn Error>> {
    let wine_bin = proton.path.join("files/bin/wine");
    log_install(&format!(
        "Launching {} with wine={:?} prefix={:?}",
        app_name, wine_bin, prefix_root
    ));
    // Fail fast with descriptive errors instead of an opaque spawn failure.
    if !wine_bin.exists() {
        return Err(format!("Wine binary not found: {:?}", wine_bin).into());
    }
    if !prefix_root.exists() {
        return Err(format!("Prefix not found: {:?}", prefix_root).into());
    }
    Command::new(&wine_bin)
        .arg(app_name)
        .env("WINEPREFIX", prefix_root)
        .env("PROTON_NO_XALIA", "1")
        .spawn()
        .map_err(|e| e.into())
}
/// Kill the wineserver for a prefix (terminates all Wine processes in that prefix).
pub fn kill_wineserver(prefix_root: &Path, proton: &ProtonInfo) {
    log_install("Killing wineserver for prefix");
    // `wineserver -k` asks the server to shut down every client in WINEPREFIX;
    // failures are ignored since the server may simply not be running.
    let _ = Command::new(proton.path.join("files/bin/wineserver"))
        .arg("-k")
        .env("WINEPREFIX", prefix_root)
        .status();
}
/// Kill specific processes by PID.
#[allow(dead_code)]
pub fn kill_processes(pids: &[u32]) {
    // SIGKILL each PID; errors (e.g. the process already exited) are ignored.
    for pid in pids.iter().map(|p| p.to_string()) {
        let _ = Command::new("kill").arg("-9").arg(pid).status();
    }
}
// Enderal Special Edition config files (embedded in binary).
// Shipped with NaK so fresh prefixes get working launcher settings; they are
// written into Documents/My Games/Enderal Special Edition when missing.
const ENDERAL_SE_INI: &str = include_str!("../../resources/game_configs/enderal_se/Enderal.ini");
const ENDERAL_SE_PREFS_INI: &str =
    include_str!("../../resources/game_configs/enderal_se/EnderalPrefs.ini");
/// Create game-specific folders in the Wine prefix.
///
/// Some games crash on startup if their Documents/My Games folder doesn't exist.
/// This creates the necessary folder structure for all supported Bethesda/SureAI
/// games, a "My Documents" -> "Documents" symlink, and copies premade config
/// files for games that need them (e.g. Enderal SE). All failures are logged as
/// warnings and never abort the setup.
pub fn create_game_folders(prefix_root: &Path) {
    let users_dir = prefix_root.join("drive_c/users");
    // Detect the prefix's user folder; Proton prefixes normally use "steamuser",
    // which is kept as the fallback when nothing better is found.
    let mut username = "steamuser".to_string();
    if let Ok(entries) = fs::read_dir(&users_dir) {
        for entry in entries.flatten() {
            let name = entry.file_name().to_string_lossy().to_string();
            if name != "Public" && name != "root" {
                username = name;
                break;
            }
        }
    }
    let user_dir = users_dir.join(&username);
    let documents_dir = user_dir.join("Documents");
    let my_games_dir = documents_dir.join("My Games");
    let appdata_local = user_dir.join("AppData/Local");
    // Games that need Documents/My Games/<name>/
    let my_games_folders = [
        "Enderal",
        "Enderal Special Edition",
        "Fallout3",
        "Fallout4",
        "Fallout4VR",
        "FalloutNV",
        "Morrowind",
        "Oblivion",
        "Skyrim",
        "Skyrim Special Edition",
        "Skyrim VR",
        "Starfield",
    ];
    // Games that also need AppData/Local/<name>/
    let appdata_folders = [
        "Fallout3",
        "Fallout4",
        "FalloutNV",
        "Oblivion",
        "Skyrim",
        "Skyrim Special Edition",
    ];
    // create_dir_all succeeds on already-existing directories, so no exists()
    // pre-check is needed; `label` keeps the warning messages unchanged.
    let ensure_dir = |dir: &Path, label: &str| {
        if let Err(e) = fs::create_dir_all(dir) {
            log_warning(&format!("Failed to create {}: {}", label, e));
        }
    };
    for game in &my_games_folders {
        ensure_dir(&my_games_dir.join(game), &format!("My Games/{}", game));
    }
    for game in &appdata_folders {
        ensure_dir(&appdata_local.join(game), &format!("AppData/Local/{}", game));
    }
    // Create "My Documents" symlink if nothing occupies the path (some games
    // expect it). symlink_metadata() does not follow links, so is_err() means
    // the path is completely vacant — not even a broken symlink.
    let my_documents_link = user_dir.join("My Documents");
    if fs::symlink_metadata(&my_documents_link).is_err() {
        // Relative symlink: "My Documents" -> "Documents"
        if let Err(e) = std::os::unix::fs::symlink("Documents", &my_documents_link) {
            log_warning(&format!("Failed to create My Documents symlink: {}", e));
        }
    }
    // Copy Enderal Special Edition config files. These fix the Enderal Launcher
    // not working properly and set sensible defaults; only written when missing
    // so a user's own edits are never clobbered.
    let enderal_se_dir = my_games_dir.join("Enderal Special Edition");
    if enderal_se_dir.exists() {
        let write_if_missing = |path: &Path, contents: &str, label: &str| {
            if !path.exists() {
                if let Err(e) = fs::write(path, contents) {
                    log_warning(&format!("Failed to write {}: {}", label, e));
                }
            }
        };
        write_if_missing(&enderal_se_dir.join("Enderal.ini"), ENDERAL_SE_INI, "Enderal.ini");
        write_if_missing(
            &enderal_se_dir.join("EnderalPrefs.ini"),
            ENDERAL_SE_PREFS_INI,
            "EnderalPrefs.ini",
        );
    }
    log_install("Created game folders in prefix (Documents/My Games, AppData/Local)");
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/installers/vortex.rs | src/installers/vortex.rs | //! Vortex installation
use std::error::Error;
use std::fs;
use std::path::PathBuf;
use std::time::Duration;
use wait_timeout::ChildExt;
use super::{
brief_launch_and_kill, create_game_folders, fetch_latest_vortex_release, get_install_proton,
install_all_dependencies, TaskContext,
};
use crate::config::AppConfig;
use crate::logging::{log_download, log_error, log_install};
use crate::scripts::ScriptGenerator;
use crate::utils::{detect_steam_path, download_file};
use crate::wine::{PrefixManager, ProtonInfo};
/// Download the latest Vortex release, install it into `target_install_path`
/// inside a brand-new Proton prefix, install all Wine dependencies, and
/// generate launcher/helper scripts with symlinks alongside Vortex.exe.
///
/// `proton` is the user's selected runtime Proton; for Proton 10+, dependency
/// installation is done with a pinned GE-Proton build instead (see
/// `get_install_proton`). Progress (0.0-1.0) and status are reported via `ctx`,
/// and cancellation is honored between major steps.
pub fn install_vortex(
    install_name: &str,
    target_install_path: PathBuf,
    proton: &ProtonInfo,
    ctx: TaskContext,
) -> Result<(), Box<dyn Error>> {
    let config = AppConfig::load();
    // Collision Check: suffix the prefix name if one with this name already exists.
    let prefix_mgr = PrefixManager::new();
    let base_name = format!("vortex_{}", install_name.replace(" ", "_").to_lowercase());
    let unique_name = prefix_mgr.get_unique_prefix_name(&base_name);
    let prefix_root = config.get_prefixes_path().join(&unique_name).join("pfx");
    let install_dir = target_install_path;
    log_install(&format!(
        "Starting Vortex installation: {} -> {:?}",
        install_name, install_dir
    ));
    log_install(&format!("Using Proton: {}", proton.name));
    // For Proton 10+, use GE-Proton10-18 for the entire installation process
    let install_proton = get_install_proton(proton, &ctx);
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 1. Create Directories
    ctx.set_status("Creating directories...".to_string());
    ctx.set_progress(0.05);
    fs::create_dir_all(&prefix_root)?;
    fs::create_dir_all(&install_dir)?;
    log_install(&format!("Created prefix at: {:?}", prefix_root));
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 2. Download Vortex
    ctx.set_status("Fetching Vortex release info...".to_string());
    let release = fetch_latest_vortex_release()?;
    ctx.log(format!("Found release: {}", release.tag_name));
    // Find asset: Vortex-setup-*.exe (Case insensitive)
    let asset = release.assets.iter().find(|a| {
        let name = a.name.to_lowercase();
        name.starts_with("vortex-setup") && name.ends_with(".exe")
    });
    if asset.is_none() {
        // List what WAS available to make the failure diagnosable from logs.
        ctx.log("Available assets:".to_string());
        for a in &release.assets {
            ctx.log(format!(" - {}", a.name));
        }
        log_error("No valid Vortex installer found (expected Vortex-setup-*.exe)");
        return Err("No valid Vortex installer found (expected Vortex-setup-*.exe)".into());
    }
    let asset = asset.unwrap();
    ctx.set_status(format!("Downloading {}...", asset.name));
    ctx.set_progress(0.10);
    log_download(&format!("Downloading Vortex: {}", asset.name));
    let tmp_dir = config.get_data_path().join("tmp");
    fs::create_dir_all(&tmp_dir)?;
    let installer_path = tmp_dir.join(&asset.name);
    download_file(&asset.browser_download_url, &installer_path)?;
    log_download(&format!("Vortex downloaded to: {:?}", installer_path));
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 3. Run Installer (Silent) - Use proton run, NOT wine directly
    ctx.set_status("Running Vortex Installer...".to_string());
    ctx.set_progress(0.15);
    let install_proton_bin = install_proton.path.join("proton");
    // Convert install_dir to Windows path Z:\... (NSIS /D= takes a Windows path)
    let win_install_path = format!("Z:{}", install_dir.to_string_lossy().replace("/", "\\"));
    // Get compat_data path (parent of prefix)
    let compat_data = prefix_root.parent().unwrap_or(&prefix_root);
    // Detect Steam path for proper DRM support
    let steam_path = detect_steam_path();
    ctx.log(format!("Using Steam path: {}", steam_path));
    // Run installer with proper Proton environment (matching Python implementation)
    let mut child = std::process::Command::new(&install_proton_bin)
        .arg("run")
        .arg(&installer_path)
        .arg("/S")
        .arg(format!("/D={}", win_install_path))
        .env("WINEPREFIX", &prefix_root)
        .env("STEAM_COMPAT_DATA_PATH", compat_data)
        .env("STEAM_COMPAT_CLIENT_INSTALL_PATH", &steam_path)
        // Reset LD_LIBRARY_PATH to prevent AppImage libs from breaking system binaries
        .env(
            "LD_LIBRARY_PATH",
            "/usr/lib:/usr/lib/x86_64-linux-gnu:/lib:/lib/x86_64-linux-gnu",
        )
        .env("PROTON_NO_XALIA", "1")
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;
    // Wait with timeout (5 minutes, same as Python).
    // NOTE(review): stdout/stderr are piped but not drained while waiting; a
    // very chatty installer could fill the pipe buffer and stall the child.
    // Consider reader threads if that is ever observed — TODO confirm.
    let timeout = Duration::from_secs(300);
    let status = match child.wait_timeout(timeout)? {
        Some(status) => status,
        None => {
            // Timeout - kill the process
            let _ = child.kill();
            log_error("Vortex installer timed out after 5 minutes");
            return Err("Vortex installer timed out after 5 minutes".into());
        }
    };
    if !status.success() {
        // Capture stdout and stderr for better error reporting
        let stdout = child.stdout.take()
            .map(|mut s| {
                let mut buf = String::new();
                use std::io::Read;
                let _ = s.read_to_string(&mut buf);
                buf
            })
            .unwrap_or_default();
        let stderr = child.stderr.take()
            .map(|mut s| {
                let mut buf = String::new();
                use std::io::Read;
                let _ = s.read_to_string(&mut buf);
                buf
            })
            .unwrap_or_default();
        ctx.log(format!("Installer exit code: {:?}", status.code()));
        log_error(&format!(
            "Vortex installer failed with exit code: {:?}",
            status.code()
        ));
        if !stdout.is_empty() {
            ctx.log(format!("Installer stdout:\n{}", stdout));
            log_error(&format!("Vortex installer stdout:\n{}", stdout));
        }
        if !stderr.is_empty() {
            ctx.log(format!("Installer stderr:\n{}", stderr));
            log_error(&format!("Vortex installer stderr:\n{}", stderr));
        }
        return Err(format!("Vortex installer failed with exit code: {:?}", status.code()).into());
    }
    // Wait for files to settle (same as Python)
    std::thread::sleep(Duration::from_secs(2));
    ctx.set_progress(0.20);
    // 4. Install all dependencies (dotnet48, registry, standard deps, dotnet9sdk)
    install_all_dependencies(&prefix_root, &install_proton, &ctx, 0.20, 0.90)?;
    ctx.set_progress(0.92);
    // 5. Brief launch to initialize prefix, then kill
    // Vortex.exe location might vary slightly
    let mut vortex_exe = install_dir.join("Vortex.exe");
    if !vortex_exe.exists() {
        // Check subdir
        let sub = install_dir.join("Vortex").join("Vortex.exe");
        if sub.exists() {
            vortex_exe = sub;
        } else {
            log_error("Vortex.exe not found after installation");
            return Err("Vortex.exe not found after installation".into());
        }
    }
    brief_launch_and_kill(&vortex_exe, &prefix_root, &install_proton, &ctx, "Vortex");
    ctx.set_progress(0.95);
    // 6. Generate Scripts (using user's selected proton)
    ctx.set_status("Generating launch scripts...".to_string());
    let script_dir = prefix_root.parent().ok_or("Invalid prefix root")?;
    let script_path = ScriptGenerator::generate_vortex_launch_script(
        &prefix_root,
        &vortex_exe,
        &proton.path,
        &install_dir,
        script_dir,
    )?;
    let kill_script =
        ScriptGenerator::generate_kill_prefix_script(&prefix_root, &proton.path, script_dir)?;
    let reg_script = ScriptGenerator::generate_fix_game_registry_script(
        &prefix_root,
        &proton.path,
        install_name,
        script_dir,
    )?;
    // Generate NXM handler script (Vortex.exe handles NXM links directly)
    let nxm_script = ScriptGenerator::generate_vortex_nxm_script(
        &prefix_root,
        &vortex_exe,
        &proton.path,
        script_dir,
    )?;
    // Create symlinks in the Vortex folder for easy access; remove any stale
    // entry first (symlink_metadata also catches broken symlinks).
    let create_link = |target: &std::path::Path, link_name: &str| {
        let link_path = install_dir.join(link_name);
        if link_path.exists() || fs::symlink_metadata(&link_path).is_ok() {
            let _ = fs::remove_file(&link_path);
        }
        let _ = std::os::unix::fs::symlink(target, &link_path);
    };
    create_link(&script_path, "Launch Vortex");
    create_link(&kill_script, "Kill Vortex Prefix");
    create_link(&reg_script, "Fix Game Registry");
    create_link(&nxm_script, "Handle NXM");
    log_install("Created shortcuts in Vortex folder: Launch Vortex, Kill Vortex Prefix, Fix Game Registry, Handle NXM");
    // Back-link from the prefix directory to the Vortex install directory.
    if let Some(prefix_base) = prefix_root.parent() {
        let backlink = prefix_base.join("manager_link");
        if backlink.exists() || fs::symlink_metadata(&backlink).is_ok() {
            let _ = fs::remove_file(&backlink);
        }
        let _ = std::os::unix::fs::symlink(&install_dir, &backlink);
    }
    // Create game folders (prevents crashes for games that require them)
    create_game_folders(&prefix_root);
    ctx.set_progress(1.0);
    ctx.set_status("Vortex Installed Successfully!".to_string());
    log_install(&format!("Vortex installation complete: {}", install_name));
    Ok(())
}
/// Setup an existing Vortex installation with a new prefix.
///
/// Unlike `install_vortex`, nothing is downloaded or installed into
/// `existing_path`; the path must already contain Vortex.exe (directly or
/// under a `Vortex/` subdirectory). A fresh prefix is created, dependencies
/// are installed into it, and the same helper scripts/symlinks are generated.
pub fn setup_existing_vortex(
    install_name: &str,
    existing_path: PathBuf,
    proton: &ProtonInfo,
    ctx: TaskContext,
) -> Result<(), Box<dyn Error>> {
    let config = AppConfig::load();
    // Verify Vortex exists at path
    let mut vortex_exe = existing_path.join("Vortex.exe");
    if !vortex_exe.exists() {
        // Check subdir
        let sub = existing_path.join("Vortex").join("Vortex.exe");
        if sub.exists() {
            vortex_exe = sub;
        } else {
            log_error("Vortex.exe not found at selected path");
            return Err("Vortex.exe not found at selected path".into());
        }
    }
    log_install(&format!(
        "Setting up existing Vortex: {} at {:?}",
        install_name, existing_path
    ));
    log_install(&format!("Using Proton: {}", proton.name));
    // For Proton 10+, use GE-Proton10-18 for the entire installation process
    let install_proton = get_install_proton(proton, &ctx);
    // Collision Check: suffix the prefix name if one with this name already exists.
    let prefix_mgr = PrefixManager::new();
    let base_name = format!("vortex_{}", install_name.replace(" ", "_").to_lowercase());
    let unique_name = prefix_mgr.get_unique_prefix_name(&base_name);
    let prefix_root = config.get_prefixes_path().join(&unique_name).join("pfx");
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 1. Create Prefix Directory
    ctx.set_status("Creating prefix...".to_string());
    ctx.set_progress(0.05);
    fs::create_dir_all(&prefix_root)?;
    log_install(&format!("Created prefix at: {:?}", prefix_root));
    if ctx.is_cancelled() {
        return Err("Cancelled".into());
    }
    // 2. Install all dependencies (dotnet48, registry, standard deps, dotnet9sdk)
    install_all_dependencies(&prefix_root, &install_proton, &ctx, 0.10, 0.85)?;
    ctx.set_progress(0.88);
    // 3. Brief launch to initialize prefix, then kill
    brief_launch_and_kill(&vortex_exe, &prefix_root, &install_proton, &ctx, "Vortex");
    ctx.set_progress(0.92);
    // 4. Generate Scripts (using user's selected proton for runtime)
    ctx.set_status("Generating launch scripts...".to_string());
    let script_dir = prefix_root.parent().ok_or("Invalid prefix root")?;
    let script_path = ScriptGenerator::generate_vortex_launch_script(
        &prefix_root,
        &vortex_exe,
        &proton.path,
        &existing_path,
        script_dir,
    )?;
    let kill_script =
        ScriptGenerator::generate_kill_prefix_script(&prefix_root, &proton.path, script_dir)?;
    let reg_script = ScriptGenerator::generate_fix_game_registry_script(
        &prefix_root,
        &proton.path,
        install_name,
        script_dir,
    )?;
    // Generate NXM handler script (Vortex.exe handles NXM links directly)
    let nxm_script = ScriptGenerator::generate_vortex_nxm_script(
        &prefix_root,
        &vortex_exe,
        &proton.path,
        script_dir,
    )?;
    // Create symlinks in the Vortex folder for easy access; remove any stale
    // entry first (symlink_metadata also catches broken symlinks).
    let create_link = |target: &std::path::Path, link_name: &str| {
        let link_path = existing_path.join(link_name);
        if link_path.exists() || fs::symlink_metadata(&link_path).is_ok() {
            let _ = fs::remove_file(&link_path);
        }
        let _ = std::os::unix::fs::symlink(target, &link_path);
    };
    create_link(&script_path, "Launch Vortex");
    create_link(&kill_script, "Kill Vortex Prefix");
    create_link(&reg_script, "Fix Game Registry");
    create_link(&nxm_script, "Handle NXM");
    log_install("Created shortcuts in Vortex folder: Launch Vortex, Kill Vortex Prefix, Fix Game Registry, Handle NXM");
    // Back-link from the prefix directory to the Vortex install directory.
    if let Some(prefix_base) = prefix_root.parent() {
        let backlink = prefix_base.join("manager_link");
        if backlink.exists() || fs::symlink_metadata(&backlink).is_ok() {
            let _ = fs::remove_file(&backlink);
        }
        let _ = std::os::unix::fs::symlink(&existing_path, &backlink);
    }
    // Create game folders (prevents crashes for games that require them)
    create_game_folders(&prefix_root);
    ctx.set_progress(1.0);
    ctx.set_status("Vortex Setup Complete!".to_string());
    log_install(&format!("Vortex setup complete: {}", install_name));
    Ok(())
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/wine/proton.rs | src/wine/proton.rs | //! Proton detection and GE-Proton management
use flate2::read::GzDecoder;
use serde::Deserialize;
use std::error::Error;
use std::fs;
use std::io::{Read, Write};
use std::path::PathBuf;
use tar::Archive;
use crate::config::AppConfig;
// ============================================================================
// Proton Info & Finder
// ============================================================================
/// A single discovered Proton installation.
#[derive(Debug, Clone, PartialEq)]
pub struct ProtonInfo {
    /// Display name, e.g. "Proton 9.0", "GE-Proton10-18", "proton-cachyos-...".
    pub name: String,
    /// Directory containing the `proton` launcher script.
    pub path: PathBuf,
    /// Human-readable version string (distinct from the parsed `version()` tuple).
    pub version: String,
    /// True for Steam's "Proton Experimental" builds.
    pub is_experimental: bool,
}
/// Locates Proton installations in Steam and in NaK's own download folders.
pub struct ProtonFinder {
    /// Steam root, typically ~/.steam/steam.
    pub steam_root: PathBuf,
    /// NaK-managed GE-Proton downloads ($DATA_PATH/ProtonGE).
    pub nak_proton_ge_root: PathBuf,
    /// NaK-managed Proton-CachyOS downloads ($DATA_PATH/ProtonCachyOS).
    pub nak_proton_cachyos_root: PathBuf,
}
impl ProtonFinder {
    /// Build a finder with the default Steam root and NaK data directories.
    ///
    /// # Panics
    /// Panics if the HOME environment variable is not set.
    pub fn new() -> Self {
        let home = std::env::var("HOME").expect("Failed to get HOME directory");
        let config = AppConfig::load();
        let data_path = config.get_data_path();
        Self {
            steam_root: PathBuf::from(format!("{}/.steam/steam", home)),
            nak_proton_ge_root: data_path.join("ProtonGE"),
            nak_proton_cachyos_root: data_path.join("ProtonCachyOS"),
        }
    }
    /// Collect every Proton installation from all known locations.
    pub fn find_all(&self) -> Vec<ProtonInfo> {
        let mut protons = Vec::new();
        // 1. Find Steam Proton Versions
        protons.extend(self.find_steam_protons());
        // 2. Find NaK Proton-GE Versions
        protons.extend(self.find_ge_protons());
        // 3. Find NaK Proton-CachyOS Versions
        protons.extend(self.find_cachyos_protons());
        protons
    }
    /// Scan steamapps/common for Steam-shipped "Proton*" directories.
    fn find_steam_protons(&self) -> Vec<ProtonInfo> {
        let mut found = Vec::new();
        let common_dir = self.steam_root.join("steamapps/common");
        if let Ok(entries) = fs::read_dir(common_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_dir() {
                    continue;
                }
                let name = entry.file_name().to_string_lossy().to_string();
                // Check for valid Proton directory (must have 'proton' script)
                if name.starts_with("Proton") && path.join("proton").exists() {
                    let is_experimental = name.contains("Experimental");
                    let version = if is_experimental {
                        "Experimental".to_string()
                    } else {
                        name.replace("Proton ", "")
                    };
                    found.push(ProtonInfo {
                        name: name.clone(),
                        path,
                        version,
                        is_experimental,
                    });
                }
            }
        }
        found
    }
    /// Shared scan for NaK-managed download roots: list directories whose name
    /// starts with `name_prefix`, resolve symlinks, and skip the 'active' link.
    /// Deduplicates the previously copy-pasted GE/CachyOS finders.
    fn find_nak_protons(root: &std::path::Path, name_prefix: &str) -> Vec<ProtonInfo> {
        let mut found = Vec::new();
        // Canonicalize the root path to resolve symlinks
        let root = fs::canonicalize(root).unwrap_or_else(|_| root.to_path_buf());
        if let Ok(entries) = fs::read_dir(&root) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_dir() {
                    continue;
                }
                let name = entry.file_name().to_string_lossy().to_string();
                // Skip the 'active' symlink — it aliases one of the real dirs.
                if name == "active" {
                    continue;
                }
                if name.starts_with(name_prefix) {
                    // Canonicalize the full path to resolve any symlinks
                    let real_path = fs::canonicalize(&path).unwrap_or(path);
                    found.push(ProtonInfo {
                        name: name.clone(),
                        path: real_path,
                        version: name.clone(),
                        is_experimental: false,
                    });
                }
            }
        }
        found
    }
    /// Find NaK-managed "GE-Proton*" installations.
    fn find_ge_protons(&self) -> Vec<ProtonInfo> {
        Self::find_nak_protons(&self.nak_proton_ge_root, "GE-Proton")
    }
    /// Find NaK-managed "proton-cachyos*" installations.
    fn find_cachyos_protons(&self) -> Vec<ProtonInfo> {
        Self::find_nak_protons(&self.nak_proton_cachyos_root, "proton-cachyos")
    }
}
/// Sets the 'active' symlink for the selected proton (at $DATA_PATH/ProtonGE/active).
///
/// Any existing entry at the link path — including a broken symlink — is removed
/// first so the symlink call cannot fail with EEXIST.
///
/// # Errors
/// Fails if the parent directory cannot be created or the symlink cannot be written.
pub fn set_active_proton(proton: &ProtonInfo) -> Result<(), Box<dyn std::error::Error>> {
    let config = AppConfig::load();
    let active_link = config.get_data_path().join("ProtonGE/active");
    // Ensure parent directory exists
    if let Some(parent) = active_link.parent() {
        fs::create_dir_all(parent)?;
    }
    // Remove any existing entry. symlink_metadata() does not follow links, so
    // is_ok() also covers broken symlinks; the previous extra exists() check
    // was subsumed by it and has been dropped.
    if fs::symlink_metadata(&active_link).is_ok() {
        let _ = fs::remove_file(&active_link);
    }
    // Create new symlink pointing to the selected proton
    std::os::unix::fs::symlink(&proton.path, &active_link)?;
    Ok(())
}
// ============================================================================
// GE-Proton10-18 Workaround for dotnet48
// ============================================================================
// Valve's Proton Experimental and some other Proton versions fail to install
// .NET Framework 4.8 properly. GE-Proton10-18 is known to work reliably.
/// Pinned Proton build used solely for .NET dependency installation.
const DOTNET48_PROTON_VERSION: &str = "GE-Proton10-18";
/// Upstream release tarball for the pinned build.
const DOTNET48_PROTON_URL: &str = "https://github.com/GloriousEggroll/proton-ge-custom/releases/download/GE-Proton10-18/GE-Proton10-18.tar.gz";
impl ProtonInfo {
    /// Split a version fragment such as "10-18" or "9.0" on `sep` and parse the
    /// first two components as numbers; anything missing or unparseable is 0.
    /// Shared by all three name formats below (was triplicated inline).
    fn parse_major_minor(ver: &str, sep: char) -> (u32, u32) {
        let mut parts = ver.split(sep);
        let major = parts.next().and_then(|s| s.parse().ok()).unwrap_or(0);
        let minor = parts.next().and_then(|s| s.parse().ok()).unwrap_or(0);
        (major, minor)
    }
    /// Parse the major and minor version numbers from the Proton name.
    /// Returns (major, minor) tuple, (0, 0) if parsing fails.
    pub fn version(&self) -> (u32, u32) {
        // GE-Proton format: "GE-Proton10-18" -> (10, 18)
        if let Some(ver) = self.name.strip_prefix("GE-Proton") {
            return Self::parse_major_minor(ver, '-');
        }
        // Valve Proton format: "Proton 9.0 (Beta)" or "Proton - Experimental"
        if self.name.starts_with("Proton") {
            // Experimental is always latest, assume 10+
            if self.name.contains("Experimental") {
                return (10, 0);
            }
            let version_str = self.name.replace("Proton ", "").replace(" (Beta)", "");
            return Self::parse_major_minor(&version_str, '.');
        }
        // CachyOS format: "proton-cachyos-10.0" -> (10, 0)
        // (a bare "proton-cachyos" without the trailing dash parses as (0, 0),
        // matching the previous behavior)
        if let Some(ver) = self.name.strip_prefix("proton-cachyos-") {
            return Self::parse_major_minor(ver, '.');
        }
        (0, 0)
    }
    /// Check if this Proton version is 10 or higher (requires manual dotnet installation).
    pub fn is_proton_10_plus(&self) -> bool {
        self.version().0 >= 10
    }
    /// Check if this Proton version needs the GE-Proton10-18 workaround for dotnet installation.
    /// ALL Proton 10+ versions use GE-Proton10-18 for dependency installation, then switch
    /// to the user's selected Proton for runtime.
    pub fn needs_ge_proton_workaround(&self) -> bool {
        self.is_proton_10_plus()
    }
}
/// Ensures GE-Proton10-18 is available for dotnet installation workaround.
/// Returns the ProtonInfo for GE-Proton10-18.
///
/// If the build is already unpacked under $DATA_PATH/ProtonGE it is returned
/// immediately; otherwise the release tarball is downloaded to a scratch
/// directory, extracted into the install root, and the tarball removed.
pub fn ensure_dotnet48_proton<S>(status_callback: S) -> Result<ProtonInfo, Box<dyn Error>>
where
    S: Fn(&str) + Send + 'static,
{
    let data_path = AppConfig::load().get_data_path();
    let install_root = data_path.join("ProtonGE");
    let proton_path = install_root.join(DOTNET48_PROTON_VERSION);

    // Fast path: a previous run already unpacked this build.
    if proton_path.exists() && proton_path.join("proton").exists() {
        status_callback(&format!("{} already available", DOTNET48_PROTON_VERSION));
        return Ok(ProtonInfo {
            name: DOTNET48_PROTON_VERSION.to_string(),
            path: proton_path,
            version: DOTNET48_PROTON_VERSION.to_string(),
            is_experimental: false,
        });
    }

    // Slow path: fetch the release tarball.
    status_callback(&format!(
        "Downloading {} for dotnet compatibility...",
        DOTNET48_PROTON_VERSION
    ));
    let temp_dir = data_path.join("tmp");
    fs::create_dir_all(&install_root)?;
    fs::create_dir_all(&temp_dir)?;
    let archive_path = temp_dir.join(format!("{}.tar.gz", DOTNET48_PROTON_VERSION));

    // Stream the body straight to disk; no progress reporting is needed here.
    let response = ureq::get(DOTNET48_PROTON_URL)
        .set("User-Agent", "NaK-Rust-Agent")
        .call()?;
    let mut out = fs::File::create(&archive_path)?;
    std::io::copy(&mut response.into_reader(), &mut out)?;

    // Unpack into the install root (the fast path above expects the
    // GE-Proton directory to appear directly underneath it).
    status_callback("Extracting archive...");
    let tarball = fs::File::open(&archive_path)?;
    let mut archive = Archive::new(GzDecoder::new(tarball));
    archive.unpack(&install_root)?;

    // Drop the tarball and report success.
    fs::remove_file(&archive_path)?;
    status_callback(&format!("{} ready", DOTNET48_PROTON_VERSION));
    Ok(ProtonInfo {
        name: DOTNET48_PROTON_VERSION.to_string(),
        path: proton_path,
        version: DOTNET48_PROTON_VERSION.to_string(),
        is_experimental: false,
    })
}
// ============================================================================
// GitHub Release Types (shared for GE-Proton, MO2, Vortex)
// ============================================================================
/// Subset of the GitHub "release" JSON object that NaK deserializes.
#[derive(Deserialize, Debug, Clone)]
#[allow(dead_code)]
pub struct GithubRelease {
    /// Git tag of the release, e.g. "GE-Proton10-18".
    pub tag_name: String,
    /// Web page of the release on github.com.
    pub html_url: String,
    /// Downloadable files attached to this release.
    pub assets: Vec<GithubAsset>,
}
/// A single downloadable file attached to a GitHub release.
#[derive(Deserialize, Debug, Clone)]
#[allow(dead_code)]
pub struct GithubAsset {
    /// File name of the asset, e.g. "GE-Proton10-18.tar.gz".
    pub name: String,
    /// Direct download URL for the asset.
    pub browser_download_url: String,
    /// Asset size in bytes as reported by GitHub.
    pub size: u64,
}
// ============================================================================
// GE-Proton Download/Delete
// ============================================================================
/// Fetches the latest 100 releases from GitHub
///
/// Queries the GloriousEggroll/proton-ge-custom releases API and
/// deserializes the JSON response.
pub fn fetch_ge_releases() -> Result<Vec<GithubRelease>, Box<dyn Error>> {
    const URL: &str =
        "https://api.github.com/repos/GloriousEggroll/proton-ge-custom/releases?per_page=100";
    let response = ureq::get(URL).set("User-Agent", "NaK-Rust-Agent").call()?;
    Ok(response.into_json::<Vec<GithubRelease>>()?)
}
/// Downloads and extracts a GE-Proton release with progress tracking
///
/// Streams `asset_url` into $DATA_PATH/tmp/`file_name`, reporting
/// (downloaded, total) through `progress_callback` whenever the server
/// supplied a Content-Length, then unpacks the .tar.gz into
/// $DATA_PATH/ProtonGE and deletes the temporary archive.
pub fn download_ge_proton<F, S>(
    asset_url: String,
    file_name: String,
    progress_callback: F,
    status_callback: S,
) -> Result<(), Box<dyn Error>>
where
    F: Fn(u64, u64) + Send + 'static,
    S: Fn(&str) + Send + 'static,
{
    let data_path = AppConfig::load().get_data_path();
    let install_root = data_path.join("ProtonGE");
    let temp_dir = data_path.join("tmp");
    fs::create_dir_all(&install_root)?;
    fs::create_dir_all(&temp_dir)?;
    let archive_path = temp_dir.join(&file_name);

    // 1. Download with progress reporting.
    let response = ureq::get(&asset_url)
        .set("User-Agent", "NaK-Rust-Agent")
        .call()?;
    // Total size is only advisory; 0 means "unknown" and disables progress.
    let total: u64 = response
        .header("Content-Length")
        .and_then(|v| v.parse().ok())
        .unwrap_or(0);
    let mut out = fs::File::create(&archive_path)?;
    let mut src = response.into_reader();
    let mut chunk = [0u8; 65536]; // 64KB buffer for faster downloads
    let mut written: u64 = 0;
    loop {
        let n = src.read(&mut chunk)?;
        if n == 0 {
            break;
        }
        out.write_all(&chunk[..n])?;
        written += n as u64;
        if total > 0 {
            progress_callback(written, total);
        }
    }

    // 2. Extract into ~/NaK/ProtonGE/
    status_callback("Extracting archive (this may take a moment)...");
    let mut archive = Archive::new(GzDecoder::new(fs::File::open(&archive_path)?));
    archive.unpack(&install_root)?;

    // 3. Cleanup
    fs::remove_file(&archive_path)?;
    Ok(())
}
/// Deletes a GE-Proton version
///
/// Removes $DATA_PATH/ProtonGE/`version_name` recursively; a missing
/// directory is treated as success.
pub fn delete_ge_proton(version_name: &str) -> Result<(), Box<dyn Error>> {
    let target = AppConfig::load()
        .get_data_path()
        .join("ProtonGE")
        .join(version_name);
    if target.exists() {
        fs::remove_dir_all(&target)?;
    }
    Ok(())
}
// ============================================================================
// CachyOS Proton Download/Delete
// ============================================================================
/// Fetches the latest releases from CachyOS Proton GitHub
///
/// Queries the CachyOS/proton-cachyos releases API (up to 50 entries) and
/// deserializes the JSON response.
pub fn fetch_cachyos_releases() -> Result<Vec<GithubRelease>, Box<dyn Error>> {
    const URL: &str = "https://api.github.com/repos/CachyOS/proton-cachyos/releases?per_page=50";
    let response = ureq::get(URL).set("User-Agent", "NaK-Rust-Agent").call()?;
    Ok(response.into_json::<Vec<GithubRelease>>()?)
}
/// Downloads and extracts a CachyOS Proton release with progress tracking
/// Note: CachyOS uses .tar.xz format
///
/// Mirrors `download_ge_proton` but unpacks into $DATA_PATH/ProtonCachyOS
/// and decompresses with xz instead of gzip.
pub fn download_cachyos_proton<F, S>(
    asset_url: String,
    file_name: String,
    progress_callback: F,
    status_callback: S,
) -> Result<(), Box<dyn Error>>
where
    F: Fn(u64, u64) + Send + 'static,
    S: Fn(&str) + Send + 'static,
{
    use xz2::read::XzDecoder;
    let data_path = AppConfig::load().get_data_path();
    let install_root = data_path.join("ProtonCachyOS");
    let temp_dir = data_path.join("tmp");
    fs::create_dir_all(&install_root)?;
    fs::create_dir_all(&temp_dir)?;
    let archive_path = temp_dir.join(&file_name);

    // 1. Download with progress reporting.
    let response = ureq::get(&asset_url)
        .set("User-Agent", "NaK-Rust-Agent")
        .call()?;
    // Total size is only advisory; 0 means "unknown" and disables progress.
    let total: u64 = response
        .header("Content-Length")
        .and_then(|v| v.parse().ok())
        .unwrap_or(0);
    let mut out = fs::File::create(&archive_path)?;
    let mut src = response.into_reader();
    let mut chunk = [0u8; 65536]; // 64KB buffer for faster downloads
    let mut written: u64 = 0;
    loop {
        let n = src.read(&mut chunk)?;
        if n == 0 {
            break;
        }
        out.write_all(&chunk[..n])?;
        written += n as u64;
        if total > 0 {
            progress_callback(written, total);
        }
    }

    // 2. Extract the .tar.xz into ~/NaK/ProtonCachyOS/
    status_callback("Extracting archive (this may take a moment)...");
    let mut archive = Archive::new(XzDecoder::new(fs::File::open(&archive_path)?));
    archive.unpack(&install_root)?;

    // 3. Cleanup
    fs::remove_file(&archive_path)?;
    Ok(())
}
/// Deletes a CachyOS Proton version
///
/// Removes $DATA_PATH/ProtonCachyOS/`version_name` recursively; a missing
/// directory is treated as success.
pub fn delete_cachyos_proton(version_name: &str) -> Result<(), Box<dyn Error>> {
    let target = AppConfig::load()
        .get_data_path()
        .join("ProtonCachyOS")
        .join(version_name);
    if target.exists() {
        fs::remove_dir_all(&target)?;
    }
    Ok(())
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/wine/deps.rs | src/wine/deps.rs | //! Dependency management via Winetricks
use std::error::Error;
use std::fs;
use std::io::BufRead;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use super::ProtonInfo;
use crate::config::AppConfig;
use crate::logging::{log_error, log_info, log_warning};
/// Kill any xalia processes that might be running (accessibility helper that needs .NET 4.8)
///
/// Best-effort: pkill's exit status (including "no process matched") is
/// deliberately ignored.
fn kill_xalia_processes() {
    let _ = Command::new("pkill").args(["-f", "xalia"]).status();
}
// ============================================================================
// NaK Bin Directory (for bundled tools like cabextract)
// ============================================================================
/// Get the NaK data directory path from config
pub fn get_nak_real_path() -> PathBuf {
    let config = AppConfig::load();
    config.get_data_path()
}
/// Get the NaK bin directory path ($DATA_PATH/bin)
pub fn get_nak_bin_path() -> PathBuf {
    let mut root = get_nak_real_path();
    root.push("bin");
    root
}
/// Resolve a path - with config-based paths, just canonicalize if possible
///
/// Falls back to the input path unchanged when canonicalization fails
/// (e.g. the path does not exist).
pub fn resolve_nak_path(path: &Path) -> PathBuf {
    match fs::canonicalize(path) {
        Ok(real) => real,
        Err(_) => path.to_path_buf(),
    }
}
/// Check if a command exists (either in system PATH or NaK bin)
pub fn check_command_available(cmd: &str) -> bool {
    // `which` succeeding means the command resolves on the system PATH.
    let on_system_path = Command::new("which")
        .arg(cmd)
        .output()
        .map_or(false, |out| out.status.success());
    if on_system_path {
        return true;
    }
    // Otherwise fall back to NaK's own bin directory.
    get_nak_bin_path().join(cmd).exists()
}
// ============================================================================
// Cabextract Download (for SteamOS/immutable systems)
// ============================================================================
/// URL for static cabextract binary (zip file)
/// Used as a fallback when the system has no cabextract installed
/// (e.g. SteamOS / immutable distros).
const CABEXTRACT_URL: &str =
    "https://github.com/SulfurNitride/NaK/releases/download/Cabextract/cabextract-linux-x86_64.zip";
/// Ensures cabextract is available (either system or downloaded)
///
/// Resolution order:
/// 1. A `cabextract` on the system PATH — returned as the bare name
///    "cabextract" so callers invoke it through PATH.
/// 2. A previously-downloaded copy at $DATA_PATH/bin/cabextract.
/// 3. A fresh download of the static binary zip, extracted with `unzip`
///    (with a python3 zipfile fallback).
///
/// Returns the path to a usable cabextract, or an error if the download
/// fails or no binary could be extracted.
pub fn ensure_cabextract() -> Result<PathBuf, Box<dyn Error>> {
    // First check if system has cabextract
    if Command::new("which")
        .arg("cabextract")
        .output()
        .map(|o| o.status.success())
        .unwrap_or(false)
    {
        // Return a marker that system cabextract is available
        return Ok(PathBuf::from("cabextract"));
    }
    // Check if we already downloaded it
    let bin_dir = get_nak_bin_path();
    let cabextract_path = bin_dir.join("cabextract");
    if cabextract_path.exists() {
        return Ok(cabextract_path);
    }
    // Download cabextract zip
    log_warning("System cabextract not found, downloading...");
    fs::create_dir_all(&bin_dir)?;
    let response = ureq::get(CABEXTRACT_URL).call().map_err(|e| {
        format!(
            "Failed to download cabextract: {}. Please install cabextract manually.",
            e
        )
    })?;
    // Download to temp zip file
    let zip_path = bin_dir.join("cabextract.zip");
    let mut zip_file = fs::File::create(&zip_path)?;
    std::io::copy(&mut response.into_reader(), &mut zip_file)?;
    // Extract using unzip command (available on most systems including SteamOS)
    let status = Command::new("unzip")
        .arg("-o")
        .arg(&zip_path)
        .arg("-d")
        .arg(&bin_dir)
        .status()?;
    if !status.success() {
        // Fallback: extract with python3's zipfile module. Best-effort —
        // the exit status is ignored; the existence check below is the
        // single source of truth for success.
        let _ = Command::new("python3")
            .arg("-c")
            .arg(format!(
                "import zipfile; zipfile.ZipFile('{}').extractall('{}')",
                zip_path.display(),
                bin_dir.display()
            ))
            .status();
    }
    // Clean up zip file
    let _ = fs::remove_file(&zip_path);
    // Make executable (the zip does not preserve the exec bit reliably)
    if cabextract_path.exists() {
        let mut perms = fs::metadata(&cabextract_path)?.permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&cabextract_path, perms)?;
        log_info(&format!("cabextract downloaded to {:?}", cabextract_path));
        Ok(cabextract_path)
    } else {
        log_error("Failed to extract cabextract from zip");
        Err("Failed to extract cabextract from zip".into())
    }
}
// ============================================================================
// Winetricks Download
// ============================================================================
/// Ensures winetricks is downloaded and available (stored in $DATA_PATH/bin)
///
/// Downloads the upstream script on first use, marks it executable, and
/// returns its path. Subsequent calls return the cached copy.
pub fn ensure_winetricks() -> Result<PathBuf, Box<dyn Error>> {
    let bin_dir = get_nak_bin_path();
    let winetricks_path = bin_dir.join("winetricks");
    fs::create_dir_all(&bin_dir)?;
    // Already present: nothing to do (version checking could be added later).
    if winetricks_path.exists() {
        return Ok(winetricks_path);
    }
    println!("Downloading winetricks...");
    let response = ureq::get(
        "https://raw.githubusercontent.com/Winetricks/winetricks/master/src/winetricks",
    )
    .call()?;
    let mut file = fs::File::create(&winetricks_path)?;
    std::io::copy(&mut response.into_reader(), &mut file)?;
    // Make executable (chmod +x)
    let mut perms = fs::metadata(&winetricks_path)?.permissions();
    perms.set_mode(0o755);
    fs::set_permissions(&winetricks_path, perms)?;
    println!("Winetricks downloaded to {:?}", winetricks_path);
    Ok(winetricks_path)
}
// ============================================================================
// Dependency Manager
// ============================================================================
/// Runs winetricks against NaK-managed Wine prefixes.
///
/// Stateless: the winetricks binary and all proton paths are re-resolved on
/// every call rather than cached in the struct.
pub struct DependencyManager;
impl DependencyManager {
    /// The `_winetricks_path` argument is currently unused — the path is
    /// re-resolved from the NaK bin directory on each call instead.
    pub fn new(_winetricks_path: PathBuf) -> Self {
        Self
    }
    /// Installs a batch of winetricks verbs into `prefix_path` using the
    /// given Proton's bundled wine.
    ///
    /// Winetricks output is streamed through `status_callback` (filtered to
    /// progress/result-looking lines). `cancel_flag` is polled every 100ms;
    /// on cancel the winetricks child, the wineserver and any xalia popups
    /// are killed and an error is returned. Also errors if winetricks cannot
    /// be found/spawned, the wine binary is missing, or winetricks exits
    /// non-zero.
    pub fn install_dependencies(
        &self,
        prefix_path: &Path,
        proton: &ProtonInfo,
        dependencies: &[&str],
        status_callback: impl Fn(String) + Clone + Send + 'static,
        cancel_flag: Arc<AtomicBool>,
    ) -> Result<(), Box<dyn Error>> {
        // Get winetricks from NaK bin directory (handles symlinks properly)
        let nak_bin = get_nak_bin_path();
        let winetricks_real = nak_bin.join("winetricks");
        if !winetricks_real.exists() {
            return Err(format!("Winetricks not found at {:?}", winetricks_real).into());
        }
        // Prepare environment - resolve proton path through NaK symlink
        let proton_real = resolve_nak_path(&proton.path);
        let wine_bin = proton_real.join("files/bin/wine");
        let wineserver = proton_real.join("files/bin/wineserver");
        // Also resolve prefix path through NaK symlink
        let prefix_real = resolve_nak_path(prefix_path);
        // Include NaK bin directory for bundled tools (cabextract, winetricks, etc.)
        let path_env = format!(
            "{}:{}:{}",
            proton_real.join("files/bin").to_string_lossy(),
            nak_bin.to_string_lossy(),
            std::env::var("PATH").unwrap_or_default()
        );
        if !wine_bin.exists() {
            return Err(format!("Wine binary not found at {:?}", wine_bin).into());
        }
        status_callback(format!(
            "Installing dependencies: {}",
            dependencies.join(", ")
        ));
        let mut cmd = Command::new(&winetricks_real);
        cmd.arg("--unattended")
            .args(dependencies)
            .env("WINEPREFIX", &prefix_real)
            .env("WINE", &wine_bin)
            .env("WINESERVER", &wineserver)
            .env("PATH", &path_env)
            // Disable xalia - it requires .NET 4.8 to run, causing popups during .NET install
            .env("PROTON_NO_XALIA", "1")
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());
        let mut child = cmd.spawn().map_err(|e| {
            format!(
                "Failed to spawn winetricks: {} | winetricks={:?} wine={:?} prefix={:?}",
                e, winetricks_real, wine_bin, prefix_real
            )
        })?;
        // Stream Stdout on a background thread so the poll loop below never blocks.
        let stdout = child.stdout.take().unwrap();
        let cb_out = status_callback.clone();
        thread::spawn(move || {
            let reader = std::io::BufReader::new(stdout);
            for line in reader.lines().map_while(Result::ok) {
                // Hard block Wine internal logs
                if line.contains(":err:") || line.contains(":fixme:") || line.contains(":warn:") {
                    continue;
                }
                // Forward only lines that look like progress/result messages.
                let l = line.to_lowercase();
                if l.contains("executing")
                    || l.contains("installing")
                    || l.contains("downloading")
                    || l.contains("completed")
                    || l.contains("success")
                    || l.contains("fail")
                    || l.contains("error")
                {
                    cb_out(format!("[WINETRICKS] {}", line));
                }
            }
        });
        // Stream Stderr with the same filtering.
        let stderr = child.stderr.take().unwrap();
        let cb_err = status_callback.clone();
        thread::spawn(move || {
            let reader = std::io::BufReader::new(stderr);
            for line in reader.lines().map_while(Result::ok) {
                // Hard block Wine internal logs
                if line.contains(":err:") || line.contains(":fixme:") || line.contains(":warn:") {
                    continue;
                }
                let l = line.to_lowercase();
                if l.contains("executing")
                    || l.contains("installing")
                    || l.contains("downloading")
                    || l.contains("completed")
                    || l.contains("success")
                    || l.contains("fail")
                    || l.contains("error")
                {
                    cb_err(format!("[WINETRICKS] {}", line));
                }
            }
        });
        // Poll loop: check for cancellation or child exit every 100ms.
        loop {
            if cancel_flag.load(Ordering::Relaxed) {
                let _ = child.kill();
                let _ = child.wait(); // Clean up zombie
                // Kill wineserver so all Wine processes in the prefix stop too.
                let _ = Command::new(&wineserver)
                    .arg("-k")
                    .env("WINEPREFIX", &prefix_real)
                    .status();
                kill_xalia_processes();
                return Err("Installation Cancelled by User".into());
            }
            match child.try_wait() {
                Ok(Some(status)) => {
                    if !status.success() {
                        return Err(format!("Winetricks exited with code: {}", status).into());
                    }
                    break;
                }
                Ok(None) => {
                    // Still running - kill any xalia popups
                    kill_xalia_processes();
                    thread::sleep(Duration::from_millis(100));
                }
                Err(e) => return Err(e.into()),
            }
        }
        status_callback("Dependencies installed successfully.".to_string());
        Ok(())
    }
    /// Runs a single winetricks `verb` against `prefix_path` with the given
    /// Proton's bundled wine.
    ///
    /// Like `install_dependencies`, but streams stdout unfiltered (stderr is
    /// not captured here beyond being piped) and reports a simpler error on a
    /// non-zero exit. Same 100ms cancellation polling and cleanup behavior.
    pub fn run_winetricks_command(
        &self,
        prefix_path: &Path,
        proton: &ProtonInfo,
        verb: &str,
        status_callback: impl Fn(String) + Clone + Send + 'static,
        cancel_flag: Arc<AtomicBool>,
    ) -> Result<(), Box<dyn Error>> {
        // Get winetricks from NaK bin directory (handles symlinks properly)
        let nak_bin = get_nak_bin_path();
        let winetricks_real = nak_bin.join("winetricks");
        if !winetricks_real.exists() {
            return Err(format!("Winetricks not found at {:?}", winetricks_real).into());
        }
        // Prepare environment - resolve proton path through NaK symlink
        let proton_real = resolve_nak_path(&proton.path);
        let wine_bin = proton_real.join("files/bin/wine");
        let wineserver = proton_real.join("files/bin/wineserver");
        // Also resolve prefix path through NaK symlink
        let prefix_real = resolve_nak_path(prefix_path);
        // Include NaK bin directory for bundled tools (cabextract, winetricks, etc.)
        let path_env = format!(
            "{}:{}:{}",
            proton_real.join("files/bin").to_string_lossy(),
            nak_bin.to_string_lossy(),
            std::env::var("PATH").unwrap_or_default()
        );
        if !wine_bin.exists() {
            return Err(format!("Wine binary not found at {:?}", wine_bin).into());
        }
        status_callback(format!("Running winetricks verb: {}", verb));
        let mut cmd = Command::new(&winetricks_real);
        cmd.arg("--unattended")
            .arg(verb)
            .env("WINEPREFIX", &prefix_real)
            .env("WINE", &wine_bin)
            .env("WINESERVER", &wineserver)
            .env("PATH", &path_env)
            // Disable xalia - it requires .NET 4.8 to run, causing popups during .NET install
            .env("PROTON_NO_XALIA", "1")
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());
        let mut child = cmd.spawn().map_err(|e| {
            format!(
                "Failed to spawn winetricks: {} | winetricks={:?} wine={:?}",
                e, winetricks_real, wine_bin
            )
        })?;
        // Stream Stdout (Simplified for single command)
        let stdout = child.stdout.take().unwrap();
        let cb_out = status_callback.clone();
        thread::spawn(move || {
            let reader = std::io::BufReader::new(stdout);
            for line in reader.lines().map_while(Result::ok) {
                cb_out(format!("[STDOUT] {}", line));
            }
        });
        // Poll for completion with cancel check
        loop {
            if cancel_flag.load(Ordering::Relaxed) {
                let _ = child.kill();
                let _ = child.wait(); // Clean up zombie
                // Kill wineserver to stop all Wine processes
                let _ = Command::new(&wineserver)
                    .arg("-k")
                    .env("WINEPREFIX", &prefix_real)
                    .status();
                kill_xalia_processes();
                return Err("Installation Cancelled by User".into());
            }
            match child.try_wait() {
                Ok(Some(status)) => {
                    if !status.success() {
                        return Err(format!("Winetricks verb '{}' failed", verb).into());
                    }
                    break;
                }
                Ok(None) => {
                    // Still running - kill any xalia popups
                    kill_xalia_processes();
                    thread::sleep(Duration::from_millis(100));
                }
                Err(e) => return Err(e.into()),
            }
        }
        Ok(())
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/wine/runtime.rs | src/wine/runtime.rs | //! Steam Linux Runtime Management
//! Detects (and eventually downloads) the Steam Linux Runtime (Sniper).
use std::error::Error;
use std::fs;
use std::io::{Read, Write};
use std::path::PathBuf;
use xz2::read::XzDecoder;
use tar::Archive;
use crate::config::AppConfig;
/// Locates the Steam Linux Runtime (Sniper) inside NaK's own data directory.
///
/// Returns the runtime directory only when its `_v2-entry-point` script is
/// present; Steam-installed copies are deliberately not considered.
pub fn find_steam_runtime_sniper() -> Option<PathBuf> {
    // Check NaK standalone installation ONLY
    let candidate = AppConfig::load()
        .get_data_path()
        .join("Runtime/SteamLinuxRuntime_sniper");
    candidate
        .join("_v2-entry-point")
        .exists()
        .then_some(candidate)
}
/// Path to the runtime's `_v2-entry-point` script, if the runtime is installed.
pub fn get_entry_point() -> Option<PathBuf> {
    let runtime_dir = find_steam_runtime_sniper()?;
    Some(runtime_dir.join("_v2-entry-point"))
}
/// True when the Steam Linux Runtime (Sniper) is present in NaK's data dir.
pub fn is_runtime_installed() -> bool {
    find_steam_runtime_sniper().is_some()
}
/// Downloads and installs the Steam Linux Runtime (Sniper).
///
/// Streams the latest public-stable snapshot tarball into $DATA_PATH/tmp,
/// reporting (downloaded, total) through `progress_callback` when the server
/// supplied a Content-Length, unpacks the .tar.xz into $DATA_PATH/Runtime,
/// removes the tarball and returns the runtime directory.
pub fn download_runtime<F>(progress_callback: F) -> Result<PathBuf, Box<dyn Error>>
where
    F: Fn(u64, u64) + Send + 'static,
{
    const URL: &str = "https://repo.steampowered.com/steamrt-images-sniper/snapshots/latest-public-stable/SteamLinuxRuntime_sniper.tar.xz";

    let data_path = AppConfig::load().get_data_path();
    let install_root = data_path.join("Runtime");
    let temp_dir = data_path.join("tmp");
    fs::create_dir_all(&install_root)?;
    fs::create_dir_all(&temp_dir)?;
    let archive_path = temp_dir.join("SteamLinuxRuntime_sniper.tar.xz");

    // 1. Download with progress reporting.
    let response = ureq::get(URL)
        .set("User-Agent", "NaK-Rust-Agent")
        .call()?;
    // Total size is advisory; 0 means "unknown" and disables progress.
    let total: u64 = response
        .header("Content-Length")
        .and_then(|v| v.parse().ok())
        .unwrap_or(0);
    let mut out = fs::File::create(&archive_path)?;
    let mut src = response.into_reader();
    let mut chunk = [0u8; 65536]; // 64KB buffer for faster downloads
    let mut written: u64 = 0;
    loop {
        let n = src.read(&mut chunk)?;
        if n == 0 {
            break;
        }
        out.write_all(&chunk[..n])?;
        written += n as u64;
        if total > 0 {
            progress_callback(written, total);
        }
    }

    // 2. Extract the .tar.xz. Pin progress at 100% so the UI can show the
    // extraction phase.
    progress_callback(total, total);
    let mut archive = Archive::new(XzDecoder::new(fs::File::open(&archive_path)?));
    archive.unpack(&install_root)?;

    // 3. Cleanup and return the runtime path.
    fs::remove_file(&archive_path)?;
    Ok(install_root.join("SteamLinuxRuntime_sniper"))
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/wine/mod.rs | src/wine/mod.rs | //! Wine/Proton related functionality
mod deps;
mod prefixes;
mod proton;
pub mod runtime;
pub use deps::{check_command_available, ensure_cabextract, ensure_winetricks, DependencyManager};
pub use prefixes::{NakPrefix, PrefixManager};
pub use proton::{delete_cachyos_proton, download_cachyos_proton, fetch_cachyos_releases};
pub use proton::{delete_ge_proton, download_ge_proton, ensure_dotnet48_proton, fetch_ge_releases};
pub use proton::{set_active_proton, GithubRelease, ProtonFinder, ProtonInfo};
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/wine/prefixes.rs | src/wine/prefixes.rs | //! Wine prefix management
use std::fs;
use std::path::PathBuf;
use crate::config::AppConfig;
/// A Wine prefix managed by NaK under $DATA_PATH/Prefixes.
#[derive(Clone)]
#[allow(dead_code)]
pub struct NakPrefix {
    /// Directory name of the prefix under the prefixes root.
    pub name: String,
    /// Path to the inner `pfx` directory (the actual WINEPREFIX).
    pub path: PathBuf,
    /// True when the prefix's `manager_link` symlink is broken, i.e. the
    /// managing tool it pointed at no longer exists.
    pub is_orphaned: bool,
}
/// Scans, names and deletes NaK-managed Wine prefixes.
#[allow(dead_code)]
pub struct PrefixManager {
    // Root directory holding all prefixes (from AppConfig::get_prefixes_path).
    prefixes_root: PathBuf,
}
#[allow(dead_code)]
impl PrefixManager {
    /// Creates a manager rooted at the configured prefixes directory.
    pub fn new() -> Self {
        Self {
            prefixes_root: AppConfig::load().get_prefixes_path(),
        }
    }

    /// Scans for NaK prefixes in $DATA_PATH/Prefixes
    ///
    /// Returns one entry per subdirectory, sorted by name. Read errors (or a
    /// missing root) yield an empty list.
    pub fn scan_prefixes(&self) -> Vec<NakPrefix> {
        let entries = match fs::read_dir(&self.prefixes_root) {
            Ok(e) => e,
            Err(_) => return Vec::new(),
        };
        let mut found: Vec<NakPrefix> = entries
            .flatten()
            .filter(|entry| entry.path().is_dir())
            .map(|entry| {
                let dir = entry.path();
                // A broken `manager_link` symlink (is_symlink true but
                // exists false, since exists() follows the link) marks the
                // prefix as orphaned.
                let link = dir.join("manager_link");
                let orphaned = link.is_symlink() && !link.exists();
                NakPrefix {
                    name: entry.file_name().to_string_lossy().into_owned(),
                    path: dir.join("pfx"), // Points to the pfx inside
                    is_orphaned: orphaned,
                }
            })
            .collect();
        found.sort_by(|a, b| a.name.cmp(&b.name));
        found
    }

    /// Recursively removes the named prefix; missing prefixes are a no-op.
    pub fn delete_prefix(&self, name: &str) -> std::io::Result<()> {
        let target = self.prefixes_root.join(name);
        if !target.exists() {
            return Ok(());
        }
        fs::remove_dir_all(target)
    }

    /// Returns `base_name`, or `base_name_2`, `base_name_3`, … — the first
    /// variant that does not already exist under the prefixes root.
    pub fn get_unique_prefix_name(&self, base_name: &str) -> String {
        if !self.prefixes_root.exists() || !self.prefixes_root.join(base_name).exists() {
            return base_name.to_string();
        }
        (2..)
            .map(|i| format!("{}_{}", base_name, i))
            .find(|candidate| !self.prefixes_root.join(candidate).exists())
            .expect("unbounded counter always yields a free name")
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/ui/game_fixer.rs | src/ui/game_fixer.rs | //! Game Modding page UI
//!
//! Allows users to find Steam/Heroic games and apply modding fixes to their Wine prefixes.
//! For users who don't use a dedicated mod manager like MO2 or Vortex.
use eframe::egui;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::thread;
use crate::app::MyApp;
use crate::games::{DetectedGame, GameFixer, GameSource};
use crate::installers::STANDARD_DEPS;
/// Top-level "Game Modding" page.
///
/// Shows a search/refresh toolbar, a Proton picker (persisted to config) and
/// the filtered list of detected games. While a fix job is running it swaps
/// to the progress view instead.
pub fn render_game_fixer(app: &mut MyApp, ui: &mut egui::Ui) {
    ui.heading("Game Modding");
    ui.label("Apply modding fixes directly to game prefixes - for modding without MO2 or Vortex.");
    ui.separator();
    // Check if we're currently applying fixes; if so the whole page is
    // replaced by the status/progress view.
    let is_applying = *app.is_applying_game_fix.lock().unwrap();
    if is_applying {
        render_applying_status(app, ui);
        return;
    }
    // Top bar with search, refresh, and proton selection
    ui.horizontal(|ui| {
        ui.label("Search:");
        ui.add(egui::TextEdit::singleline(&mut app.game_search_query).desired_width(200.0));
        ui.add_space(20.0);
        if ui.button("Refresh Games").clicked() {
            app.refresh_detected_games();
        }
        ui.add_space(10.0);
        ui.label(format!("{} games found", app.detected_games.len()));
    });
    ui.add_space(5.0);
    // Proton selection in a compact row; a change is written back to config
    // immediately.
    ui.horizontal(|ui| {
        ui.label("Proton:");
        let mut selected = app.config.selected_proton.clone();
        egui::ComboBox::from_id_salt("game_fix_proton")
            .width(200.0)
            .selected_text(selected.as_deref().unwrap_or("Select Proton"))
            .show_ui(ui, |ui| {
                for p in &app.proton_versions {
                    ui.selectable_value(&mut selected, Some(p.name.clone()), &p.name);
                }
            });
        if app.config.selected_proton != selected {
            app.config.selected_proton = selected;
            app.config.save();
        }
        ui.add_space(20.0);
        ui.label(
            egui::RichText::new("Applies same fixes as Mod Managers")
                .size(11.0)
                .color(egui::Color32::from_gray(150)),
        );
    });
    ui.add_space(10.0);
    ui.separator();
    // Filter games by search query (case-insensitive substring match).
    let search_query = app.game_search_query.to_lowercase();
    let filtered_games: Vec<DetectedGame> = app
        .detected_games
        .iter()
        .filter(|g| search_query.is_empty() || g.name.to_lowercase().contains(&search_query))
        .cloned()
        .collect();
    let total_games = app.detected_games.len();
    let no_games = filtered_games.is_empty();
    // Distinguish "nothing detected at all" from "search matched nothing".
    let no_results = no_games && total_games > 0;
    // Games list
    ui.strong("Detected Games");
    ui.add_space(5.0);
    egui::ScrollArea::vertical()
        .id_salt("games_list")
        .show(ui, |ui| {
            if no_games {
                if no_results {
                    ui.label("No games match your search.");
                } else {
                    ui.label("No games detected. Make sure Steam or Heroic is installed.");
                }
            }
            for game in &filtered_games {
                render_game_card(app, ui, game);
            }
        });
}
/// One framed row in the games list: name, source, prefix status and the
/// "Apply Fixes" / "Open" action buttons.
fn render_game_card(app: &mut MyApp, ui: &mut egui::Ui, game: &DetectedGame) {
    let has_prefix = game.has_prefix;
    // Human-readable origin, e.g. "Steam (12345)" or "Heroic (gog)".
    let source_text = match &game.source {
        GameSource::Steam { app_id } => format!("Steam ({})", app_id),
        GameSource::Heroic { store } => format!("Heroic ({})", store),
    };
    egui::Frame::group(ui.style())
        .rounding(egui::Rounding::same(4.0))
        .fill(egui::Color32::from_gray(28))
        .stroke(egui::Stroke::new(1.0, egui::Color32::from_gray(50)))
        .inner_margin(8.0)
        .show(ui, |ui| {
            ui.horizontal(|ui| {
                ui.vertical(|ui| {
                    ui.strong(&game.name);
                    ui.label(
                        egui::RichText::new(&source_text)
                            .size(11.0)
                            .color(egui::Color32::from_gray(150)),
                    );
                    // Prefix status: fixes can only be applied once the game
                    // has been run and a Wine prefix exists.
                    if has_prefix {
                        ui.label(
                            egui::RichText::new("[OK] Has Wine prefix")
                                .size(11.0)
                                .color(egui::Color32::GREEN),
                        );
                    } else {
                        ui.label(
                            egui::RichText::new("[--] No prefix (run game first)")
                                .size(11.0)
                                .color(egui::Color32::YELLOW),
                        );
                    }
                });
                ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                    // "Apply Fixes" needs a prefix, winetricks, and a selected
                    // Proton; the disabled-hover text explains which is missing.
                    let can_fix = has_prefix
                        && app.winetricks_path.lock().unwrap().is_some()
                        && app.config.selected_proton.is_some();
                    if ui
                        .add_enabled(can_fix, egui::Button::new("Apply Fixes"))
                        .on_disabled_hover_text(if !has_prefix {
                            "Run the game first to create a Wine prefix"
                        } else if app.config.selected_proton.is_none() {
                            "Select a Proton version first"
                        } else {
                            "Winetricks not available"
                        })
                        .clicked()
                    {
                        apply_fixes_to_game(app, game.clone());
                    }
                    if ui
                        .button("Open")
                        .on_hover_text("Open install folder")
                        .clicked()
                    {
                        // Fire-and-forget: open the install dir in the file manager.
                        let _ = std::process::Command::new("xdg-open")
                            .arg(&game.install_path)
                            .spawn();
                    }
                });
            });
        });
    ui.add_space(4.0);
}
/// Full-page progress view shown while fixes are being applied: current
/// status line, a scrollable log tail, and a Cancel button.
fn render_applying_status(app: &mut MyApp, ui: &mut egui::Ui) {
    ui.vertical_centered(|ui| {
        ui.add_space(50.0);
        ui.heading("Applying Fixes...");
        ui.add_space(20.0);
        let status = app.game_fix_status.lock().unwrap().clone();
        ui.label(&status);
        ui.add_space(20.0);
        // Show logs
        let logs = app.game_fix_logs.lock().unwrap().clone();
        egui::ScrollArea::vertical()
            .max_height(300.0)
            .show(ui, |ui| {
                for log in &logs {
                    ui.label(
                        egui::RichText::new(log)
                            .size(12.0)
                            .color(egui::Color32::from_gray(180)),
                    );
                }
            });
        ui.add_space(20.0);
        // NOTE(review): Cancel only clears the UI gate; the worker thread's
        // cancel flag (local to apply_fixes_to_game) is not reachable from
        // here, so the fix job itself keeps running in the background.
        // Confirm whether real cancellation is intended.
        if ui.button("Cancel").clicked() {
            *app.is_applying_game_fix.lock().unwrap() = false;
        }
    });
}
/// Kicks off a background thread that applies the standard modding fixes
/// (STANDARD_DEPS minus dotnet48, plus registry tweaks) to `game`'s prefix.
///
/// Preconditions checked up front: a Proton version must be selected in
/// config and a winetricks path available; otherwise an error status is set
/// and nothing is spawned. Progress flows through `app.game_fix_status` /
/// `app.game_fix_logs`, and `app.is_applying_game_fix` gates the UI.
fn apply_fixes_to_game(app: &mut MyApp, game: DetectedGame) {
    // Get proton: resolve the configured name against the detected versions.
    let selected_proton = app.config.selected_proton.clone();
    let proton = if let Some(ref name) = selected_proton {
        app.proton_versions.iter().find(|p| &p.name == name).cloned()
    } else {
        None
    };
    let Some(proton) = proton else {
        *app.game_fix_status.lock().unwrap() = "Error: No Proton version selected!".to_string();
        return;
    };
    let winetricks = app.winetricks_path.lock().unwrap().clone();
    let Some(winetricks_path) = winetricks else {
        *app.game_fix_status.lock().unwrap() = "Error: Winetricks not available!".to_string();
        return;
    };
    // Set state so the UI switches to the progress view.
    *app.is_applying_game_fix.lock().unwrap() = true;
    *app.game_fix_status.lock().unwrap() = format!("Preparing to fix {}...", game.name);
    app.game_fix_logs.lock().unwrap().clear();
    let is_applying = app.is_applying_game_fix.clone();
    let status = app.game_fix_status.clone();
    let logs = app.game_fix_logs.clone();
    // NOTE(review): this flag is created locally and handed to the worker but
    // never stored anywhere the UI can reach — the Cancel button only clears
    // `is_applying_game_fix`, so it cannot actually signal this flag. Confirm
    // whether cancellation is meant to work for game fixes.
    let cancel_flag = Arc::new(AtomicBool::new(false));
    // Use STANDARD_DEPS but exclude dotnet48
    let deps: Vec<&str> = STANDARD_DEPS
        .iter()
        .filter(|d| *d != &"dotnet48")
        .copied()
        .collect();
    thread::spawn(move || {
        // Mirrors every message into both the one-line status and the log list.
        let log_callback = {
            let logs = logs.clone();
            let status = status.clone();
            move |msg: String| {
                if let Ok(mut guard) = status.lock() {
                    *guard = msg.clone();
                }
                if let Ok(mut guard) = logs.lock() {
                    guard.push(msg);
                }
            }
        };
        match GameFixer::apply_fixes(
            &game,
            &proton,
            &winetricks_path,
            &deps,
            true, // apply registry
            log_callback,
            cancel_flag,
        ) {
            Ok(()) => {
                if let Ok(mut guard) = status.lock() {
                    *guard = format!("Fixes applied successfully to {}!", game.name);
                }
            }
            Err(e) => {
                if let Ok(mut guard) = status.lock() {
                    *guard = format!("Error: {}", e);
                }
            }
        }
        // Keep the dialog open for a moment to show results
        std::thread::sleep(std::time::Duration::from_secs(2));
        if let Ok(mut guard) = is_applying.lock() {
            *guard = false;
        }
    });
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/ui/mod_managers.rs | src/ui/mod_managers.rs | //! Mod Managers page UI
use eframe::egui;
use std::sync::atomic::Ordering;
use std::thread;
use crate::app::{InstallWizard, ModManagerView, MyApp, WizardStep};
use crate::config::AppConfig;
use crate::installers::{
apply_dpi, apply_wine_registry_settings, install_mo2, install_vortex, kill_wineserver,
launch_dpi_test_app, setup_existing_mo2, setup_existing_vortex, TaskContext, DPI_PRESETS,
};
use crate::logging::log_action;
use crate::nxm::NxmHandler;
use crate::scripts::ScriptGenerator;
/// Render the "Mod Managers & Prefixes" page, dispatching to whichever
/// sub-view is currently active.
pub fn render_mod_managers(app: &mut MyApp, ui: &mut egui::Ui) {
    let busy = *app.is_installing_manager.lock().unwrap();

    ui.heading("Mod Managers & Prefixes");
    ui.separator();
    ui.add_space(10.0);

    // Breadcrumb: every non-dashboard view gets a back button, disabled
    // while an installation is in flight.
    if app.mod_manager_view != ModManagerView::Dashboard {
        let back_button = egui::Button::new("⬅ Back to Dashboard");
        if ui.add_enabled(!busy, back_button).clicked() {
            app.mod_manager_view = ModManagerView::Dashboard;
            // Discard any in-progress wizard state when leaving the view.
            app.install_wizard = InstallWizard::default();
        }
        ui.add_space(10.0);
    }

    // Main content for the active sub-view, inside a vertical scroll area.
    egui::ScrollArea::vertical().show(ui, |ui| {
        match app.mod_manager_view {
            ModManagerView::Dashboard => render_dashboard(app, ui),
            ModManagerView::PrefixManager => render_prefix_manager(app, ui),
            ModManagerView::Mo2Manager => render_manager_view(app, ui, "MO2"),
            ModManagerView::VortexManager => render_manager_view(app, ui, "Vortex"),
        }
    });
}
/// Render the dashboard with one large entry button per sub-view.
fn render_dashboard(app: &mut MyApp, ui: &mut egui::Ui) {
    ui.label("Select a manager to configure:");
    ui.add_space(10.0);

    let size = egui::vec2(ui.available_width(), 80.0);
    let big_button = |text: &str| egui::Button::new(egui::RichText::new(text).heading());

    if ui.add_sized(size, big_button("🍷 Prefix Manager")).clicked() {
        app.mod_manager_view = ModManagerView::PrefixManager;
    }

    // The two installer views share identical setup apart from their labels,
    // so they are driven from a small table: (button label, view, wizard type).
    let managers = [
        ("Mod Organizer 2", ModManagerView::Mo2Manager, "MO2"),
        ("Vortex", ModManagerView::VortexManager, "Vortex"),
    ];
    for (label, view, wizard_type) in managers {
        ui.add_space(10.0);
        if ui.add_sized(size, big_button(label)).clicked() {
            app.mod_manager_view = view;
            app.install_wizard.manager_type = wizard_type.to_string();
            // Seed the wizard's SLR toggle from the saved preference.
            app.install_wizard.use_slr = app.config.use_steam_runtime;
        }
    }
}
/// Render the Prefix Manager view: scan for, list, and act on detected Wine
/// prefixes (winetricks, open folder, NXM toggle, registry fix, script
/// regeneration, SLR toggle, DPI settings, delete).
fn render_prefix_manager(app: &mut MyApp, ui: &mut egui::Ui) {
    ui.heading("Prefix Manager");
    ui.label("Manage your Wine prefixes directly.");
    ui.add_space(5.0);
    ui.horizontal(|ui| {
        if ui.button("Scan for New Prefixes").clicked() {
            app.detected_prefixes = app.prefix_manager.scan_prefixes();
        }
    });
    ui.add_space(10.0);
    // Clone prefixes to safely iterate while mutating app later
    let prefixes = app.detected_prefixes.clone();
    if prefixes.is_empty() {
        // Empty state: explain how prefixes come to exist and offer shortcuts
        // straight into the MO2 / Vortex installer views.
        ui.add_space(10.0);
        egui::Frame::none()
            .fill(egui::Color32::from_rgb(40, 40, 50))
            .rounding(egui::Rounding::same(8.0))
            .inner_margin(15.0)
            .show(ui, |ui| {
                ui.label(egui::RichText::new("No prefixes detected!").size(16.0).strong());
                ui.add_space(8.0);
                ui.label("To get started, install MO2 or Vortex from the Mod Managers page.");
                ui.add_space(5.0);
                ui.label(
                    egui::RichText::new("You can have multiple MO2 instances and multiple Vortex instances.")
                        .color(egui::Color32::LIGHT_GRAY)
                        .size(12.0),
                );
                ui.add_space(10.0);
                ui.horizontal(|ui| {
                    if ui.button("Install MO2").clicked() {
                        app.mod_manager_view = crate::app::ModManagerView::Mo2Manager;
                        app.install_wizard.manager_type = "MO2".to_string();
                        app.install_wizard.use_slr = app.config.use_steam_runtime;
                    }
                    if ui.button("Install Vortex").clicked() {
                        app.mod_manager_view = crate::app::ModManagerView::VortexManager;
                        app.install_wizard.manager_type = "Vortex".to_string();
                        app.install_wizard.use_slr = app.config.use_steam_runtime;
                    }
                });
            });
    }
    // One framed card per detected prefix.
    for prefix in &prefixes {
        egui::Frame::group(ui.style())
            .rounding(egui::Rounding::same(6.0))
            .fill(egui::Color32::from_gray(32))
            .stroke(egui::Stroke::new(1.0, egui::Color32::from_gray(60)))
            .inner_margin(10.0)
            .show(ui, |ui| {
                // Header: Name + Orphan Status
                ui.horizontal(|ui| {
                    ui.heading(&prefix.name);
                    if prefix.is_orphaned {
                        ui.add_space(10.0);
                        ui.colored_label(egui::Color32::RED, "Orphaned");
                    }
                });
                // Path
                ui.label(egui::RichText::new(prefix.path.to_string_lossy()).color(egui::Color32::from_gray(180)).size(12.0));
                ui.add_space(5.0);
                // Actions row: winetricks, open folder, NXM, registry fix,
                // regenerate scripts, delete.
                ui.horizontal(|ui| {
                    // Winetricks button is only enabled once the binary has been resolved.
                    let winetricks_ready = app.winetricks_path.lock().unwrap().is_some();
                    if ui.add_enabled(winetricks_ready, egui::Button::new("Winetricks")).clicked() {
                        launch_winetricks(app, prefix.path.clone());
                    }
                    if ui.button("Open Folder").clicked() {
                        // Best-effort: spawn failure is deliberately ignored.
                        let _ = std::process::Command::new("xdg-open")
                            .arg(&prefix.path)
                            .spawn();
                    }
                    render_nxm_toggle(app, ui, prefix);
                    // Regenerate Script Button
                    // NOTE(review): `parent()` is assumed to always exist
                    // (prefix.path is the `pfx` dir inside an instance dir);
                    // the unwrap would panic on a root path — confirm.
                    let prefix_base = prefix.path.parent().unwrap();
                    let manager_link = prefix_base.join("manager_link");
                    // Treat the prefix as "managed" if a manager_link entry exists,
                    // including a dangling symlink (exists() follows the link).
                    let is_managed = manager_link.exists() || std::fs::symlink_metadata(&manager_link).is_ok();
                    // Fix Game Registry button (for MO2 global instances)
                    let registry_fix_script = prefix_base.join("game_registry_fix.sh");
                    if registry_fix_script.exists()
                        && ui.button("Fix Registry").on_hover_text("Run game registry fix (for global instances)").clicked()
                    {
                        // Launch the registry fix script in a terminal,
                        // trying common terminal emulators in order.
                        let script_path = registry_fix_script.to_string_lossy().to_string();
                        let _ = std::process::Command::new("sh")
                            .arg("-c")
                            .arg(format!(
                                "x-terminal-emulator -e '{}' || gnome-terminal -- '{}' || konsole -e '{}' || xterm -e '{}'",
                                script_path, script_path, script_path, script_path
                            ))
                            .spawn();
                    }
                    if is_managed && ui.button("Regenerate Scripts").clicked() {
                        // Resolve target to pass correct exe path
                        let target = std::fs::read_link(&manager_link).unwrap_or(manager_link.clone());
                        regenerate_script(app, prefix, &target);
                    }
                    // Delete sits flush right; actual deletion is confirmed
                    // via `pending_prefix_delete` in the dialog layer.
                    ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                        if ui.button("Delete").on_hover_text("Delete Prefix").clicked() {
                            app.pending_prefix_delete = Some(prefix.name.clone());
                        }
                    });
                });
                // SLR toggle row (below main actions)
                // Recomputed here because the earlier bindings were scoped to
                // the actions-row closure above.
                let prefix_base = prefix.path.parent().unwrap();
                let manager_link = prefix_base.join("manager_link");
                let is_managed = manager_link.exists() || std::fs::symlink_metadata(&manager_link).is_ok();
                if is_managed {
                    let script_path = prefix_base.join("start.sh");
                    // None means the script could not be inspected / state unknown.
                    let current_slr = ScriptGenerator::script_uses_slr(&script_path);
                    ui.horizontal(|ui| {
                        ui.label("SLR:");
                        match current_slr {
                            Some(true) => {
                                ui.colored_label(
                                    egui::Color32::from_rgb(100, 200, 100),
                                    "Enabled",
                                );
                                if ui.small_button("Disable SLR").on_hover_text("Regenerate scripts without Steam Linux Runtime").clicked() {
                                    let target = std::fs::read_link(&manager_link).unwrap_or(manager_link.clone());
                                    regenerate_script_with_slr(app, prefix, &target, false);
                                }
                            }
                            Some(false) => {
                                ui.colored_label(
                                    egui::Color32::from_rgb(255, 200, 100),
                                    "Disabled",
                                );
                                // Enabling requires the Steam Linux Runtime to be installed.
                                let slr_available = crate::wine::runtime::is_runtime_installed();
                                if ui.add_enabled(slr_available, egui::Button::new("Enable SLR").small())
                                    .on_hover_text(if slr_available {
                                        "Regenerate scripts with Steam Linux Runtime"
                                    } else {
                                        "SLR not installed - download in Proton Picker"
                                    })
                                    .clicked()
                                {
                                    let target = std::fs::read_link(&manager_link).unwrap_or(manager_link.clone());
                                    regenerate_script_with_slr(app, prefix, &target, true);
                                }
                            }
                            None => {
                                ui.colored_label(
                                    egui::Color32::GRAY,
                                    "Unknown",
                                );
                            }
                        }
                    });
                    // DPI Settings row
                    render_dpi_settings(app, ui, prefix);
                }
            });
        ui.add_space(8.0);
    }
}
/// Render DPI settings for a prefix in the prefix manager.
///
/// Shows the preset buttons plus a free-form custom input (accepted range
/// 72-480). Applying a value first kills any running wineserver for the
/// prefix, then writes the DPI via the selected Proton. With no Proton
/// selected only a hint label is shown.
fn render_dpi_settings(app: &mut MyApp, ui: &mut egui::Ui, prefix: &crate::wine::NakPrefix) {
    // Resolve the Proton build that DPI changes will be applied with.
    let proton = app
        .config
        .selected_proton
        .as_ref()
        .and_then(|name| app.proton_versions.iter().find(|p| &p.name == name).cloned());
    // Owned copies for use inside the UI closures.
    let prefix_path = prefix.path.clone();
    let prefix_name = prefix.name.clone();
    ui.horizontal(|ui| {
        ui.label("DPI:");
        if let Some(proton) = &proton {
            // Preset buttons labelled "<percentage> (<dpi>)".
            for (dpi_value, label) in DPI_PRESETS {
                let btn_text = format!("{} ({})", label, dpi_value);
                if ui.small_button(&btn_text).on_hover_text(format!("Set DPI to {}", dpi_value)).clicked() {
                    // Kill any running processes so the change takes effect cleanly.
                    kill_wineserver(&prefix_path, proton);
                    // Apply the new DPI; failures are logged, not surfaced in the UI.
                    if let Err(e) = apply_dpi(&prefix_path, proton, *dpi_value) {
                        crate::logging::log_error(&format!("Failed to apply DPI {}: {}", dpi_value, e));
                    } else {
                        crate::logging::log_info(&format!("Set DPI to {} for {}", dpi_value, prefix_name));
                    }
                }
            }
            // Custom DPI input
            ui.add_space(10.0);
            ui.add(
                egui::TextEdit::singleline(&mut app.prefix_custom_dpi_input)
                    .desired_width(50.0)
                    .hint_text("DPI")
            );
            if ui.small_button("Set").on_hover_text("Apply custom DPI (72-480)").clicked() {
                // Unparseable or out-of-range input is silently ignored.
                if let Ok(custom_dpi) = app.prefix_custom_dpi_input.trim().parse::<u32>() {
                    if (72..=480).contains(&custom_dpi) {
                        kill_wineserver(&prefix_path, proton);
                        if let Err(e) = apply_dpi(&prefix_path, proton, custom_dpi) {
                            crate::logging::log_error(&format!("Failed to apply DPI {}: {}", custom_dpi, e));
                        } else {
                            crate::logging::log_info(&format!("Set DPI to {} for {}", custom_dpi, prefix_name));
                        }
                    }
                }
            }
        } else {
            ui.colored_label(egui::Color32::GRAY, "No Proton selected");
        }
    });
}
/// Render the per-manager view (MO2 or Vortex): drives the install wizard and
/// executes whichever `WizardAction` it bubbles up this frame.
fn render_manager_view(app: &mut MyApp, ui: &mut egui::Ui, manager_name: &str) {
    ui.heading(format!("{} Manager", manager_name));
    ui.label(format!("Manage your {} installations.", manager_name));
    ui.add_space(10.0);
    // Snapshot the shared installation state for this frame.
    let is_busy = *app.is_installing_manager.lock().unwrap();
    let status = app.install_status.lock().unwrap().clone();
    let progress = *app.install_progress.lock().unwrap();
    let proton_selected = app.config.selected_proton.is_some();
    // Render Wizard
    let action = render_install_wizard(
        &mut app.install_wizard,
        is_busy,
        &status,
        progress,
        proton_selected,
        manager_name,
        ui
    );
    // Execute the action the wizard requested (at most one per frame).
    match action {
        WizardAction::Start => start_installation(app),
        WizardAction::Reset => {
            // Reset wizard to start over
            app.install_wizard = InstallWizard::default();
            // Ensure we are back on the right manager page if reset changes things (it shouldn't here)
            app.install_wizard.manager_type = manager_name.to_string();
            // Initialize SLR toggle from config
            app.install_wizard.use_slr = app.config.use_steam_runtime;
            // Clear installation status to prevent state detection issues
            *app.install_status.lock().unwrap() = String::new();
            *app.install_progress.lock().unwrap() = 0.0;
            app.logs.lock().unwrap().clear();
            // Clear DPI test processes
            app.dpi_test_processes.lock().unwrap().clear();
            // Reset cancel flag so new installations can start
            app.cancel_install.store(false, Ordering::Relaxed);
        }
        WizardAction::Cancel => {
            // Cooperative cancel: the worker thread polls this flag.
            app.cancel_install.store(true, Ordering::Relaxed);
            *app.install_status.lock().unwrap() = "Cancelling...".to_string();
        }
        WizardAction::ApplyDpi(dpi_value) => {
            handle_apply_dpi(app, dpi_value);
        }
        WizardAction::LaunchTestApp(app_name) => {
            handle_launch_test_app(app, &app_name);
        }
        WizardAction::ConfirmDpi => {
            handle_confirm_dpi(app);
        }
        WizardAction::None => {}
    }
}
/// One-shot actions bubbled up from the install wizard UI to
/// `render_manager_view`, which owns the `MyApp` state needed to execute them.
enum WizardAction {
    /// No interaction this frame.
    None,
    /// Begin the installation with the wizard's current settings.
    Start,
    /// Reset the wizard (and installation status) back to the initial state.
    Reset,
    /// Request cancellation of an in-progress installation.
    Cancel,
    ApplyDpi(u32), // Apply the given DPI value to the new prefix
    LaunchTestApp(String), // Launch a DPI test app by its wine command name
    ConfirmDpi, // Confirm the chosen DPI and proceed to the Finished step
}
fn render_install_wizard(
wizard: &mut InstallWizard,
is_busy: bool,
status: &str,
_progress: f32,
proton_selected: bool,
manager_name: &str,
ui: &mut egui::Ui
) -> WizardAction {
let mut action = WizardAction::None;
// We use a mutable reference to action inside the closure to bubble up the event
let action_ref = &mut action;
if is_busy {
ui.vertical_centered(|ui| {
ui.add_space(20.0);
// Global progress bar is shown at the top of the window, so we just show status here
ui.label(egui::RichText::new("Installation in Progress...").heading());
ui.add_space(10.0);
ui.label(status);
ui.add_space(20.0);
if ui.button("Cancel").clicked() {
*action_ref = WizardAction::Cancel;
}
});
return action;
}
// Handle installation completion states
if !is_busy && wizard.step != WizardStep::Finished && wizard.step != WizardStep::Selection && wizard.step != WizardStep::DpiSetup {
if status.contains("Cancelled") {
// Installation was cancelled - reset wizard to allow retry
wizard.step = WizardStep::Selection;
wizard.last_install_error = None;
wizard.name.clear();
wizard.path.clear();
wizard.validation_error = None;
wizard.force_install = false;
} else if status.starts_with("Error:") {
// Installation failed with error
wizard.last_install_error = Some(status.to_string());
wizard.step = WizardStep::Finished;
} else if status.contains("Complete!") {
// Installation succeeded - move to DPI setup
wizard.last_install_error = None;
wizard.step = WizardStep::DpiSetup;
}
}
match wizard.step {
WizardStep::Selection => {
// Allow buttons to expand
let btn_size = egui::vec2(ui.available_width() / 2.0 - 10.0, 50.0);
ui.horizontal(|ui| {
if ui.add_sized(btn_size, egui::Button::new(format!("Install New {}", manager_name))).clicked() {
wizard.install_type = "New".to_string();
wizard.step = WizardStep::NameInput;
}
if ui.add_sized(btn_size, egui::Button::new(format!("Setup Existing {}", manager_name))).clicked() {
wizard.install_type = "Existing".to_string();
wizard.step = WizardStep::NameInput;
}
});
},
WizardStep::NameInput => {
ui.heading("Step 1: Instance Name");
ui.label("Give your installation a unique name. This will be used to identify the prefix folder.");
ui.add_space(5.0);
ui.text_edit_singleline(&mut wizard.name);
ui.add_space(10.0);
ui.horizontal(|ui| {
if ui.button("Back").clicked() {
wizard.step = WizardStep::Selection;
}
if !wizard.name.trim().is_empty() && ui.button("Next").clicked() {
wizard.step = WizardStep::PathInput;
wizard.force_install = false;
// Validate existing path if any
if !wizard.path.is_empty() {
validate_path(wizard);
} else {
wizard.validation_error = None;
}
}
});
},
WizardStep::PathInput => {
ui.heading("Step 2: Install Location");
if wizard.install_type == "New" {
ui.label("Select an EMPTY directory where you want to install.");
} else {
ui.label(format!("Select the existing {} directory.", manager_name));
}
ui.add_space(5.0);
ui.horizontal(|ui| {
let old_path = wizard.path.clone();
ui.text_edit_singleline(&mut wizard.path);
// Validate when path changes (either by typing or browsing)
if wizard.path != old_path {
validate_path(wizard);
}
if ui.button("Browse").clicked() {
if let Some(path) = rfd::FileDialog::new().pick_folder() {
wizard.path = path.to_string_lossy().to_string();
validate_path(wizard);
}
}
});
// Live Validation feedback
if let Some(err) = &wizard.validation_error {
ui.colored_label(egui::Color32::RED, err);
// If it's the "not empty" warning, offer override
if err.contains("not empty") {
ui.checkbox(&mut wizard.force_install, "I acknowledge this folder is not empty and files might be overwritten.");
}
} else if !wizard.path.is_empty() {
ui.colored_label(egui::Color32::GREEN, "Path looks good");
}
ui.add_space(15.0);
ui.separator();
ui.add_space(10.0);
// SLR toggle
let slr_available = crate::wine::runtime::is_runtime_installed();
ui.horizontal(|ui| {
ui.label("Use Steam Linux Runtime (SLR):");
if slr_available {
ui.checkbox(&mut wizard.use_slr, "");
if wizard.use_slr {
ui.colored_label(egui::Color32::from_rgb(100, 200, 100), "Enabled");
} else {
ui.colored_label(egui::Color32::from_rgb(255, 200, 100), "Disabled");
}
} else {
wizard.use_slr = false;
ui.colored_label(egui::Color32::GRAY, "Not installed");
ui.label("(Download in Proton Picker)");
}
});
ui.label(egui::RichText::new("SLR provides better compatibility but might cause unexpected issues.").small().weak());
// Show warning if no Proton selected
if !proton_selected {
ui.add_space(5.0);
ui.colored_label(
egui::Color32::from_rgb(255, 150, 100),
"⚠ Please select a Proton version in 'Proton Picker' first",
);
}
ui.add_space(10.0);
ui.horizontal(|ui| {
if ui.button("Back").clicked() {
wizard.step = WizardStep::NameInput;
}
// Proceed logic
let mut can_proceed = !wizard.path.trim().is_empty();
if let Some(err) = &wizard.validation_error {
if err.contains("not empty") {
// Allow if forced
if !wizard.force_install {
can_proceed = false;
}
} else {
// Block on other errors (like doesn't exist for Existing install)
can_proceed = false;
}
}
if ui.add_enabled(can_proceed && proton_selected, egui::Button::new(if wizard.install_type == "New" { "Start Installation" } else { "Setup Instance" }))
.on_disabled_hover_text(if !proton_selected { "Please select a Proton version in 'Proton Picker' first."} else { "Please resolve the path issues." })
.clicked()
{
*action_ref = WizardAction::Start;
}
});
},
WizardStep::DpiSetup => {
ui.heading("Step 3: DPI Scaling");
ui.label("Configure display scaling for your mod manager.");
ui.add_space(5.0);
ui.label(
egui::RichText::new("Select a DPI value and test with sample applications to find the best setting.")
.size(12.0)
.color(egui::Color32::GRAY),
);
ui.add_space(15.0);
// Current DPI display
let current_label = DPI_PRESETS.iter()
.find(|(v, _)| *v == wizard.selected_dpi)
.map(|(_, l)| format!("{} ({})", l, wizard.selected_dpi))
.unwrap_or_else(|| format!("Custom ({})", wizard.selected_dpi));
ui.label(format!("Current: {}", current_label));
ui.add_space(10.0);
// DPI Preset buttons
ui.label(egui::RichText::new("Presets:").strong());
ui.add_space(5.0);
ui.horizontal(|ui| {
for (dpi_value, label) in DPI_PRESETS {
let is_selected = wizard.selected_dpi == *dpi_value;
let btn_text = format!("{} ({})", label, dpi_value);
let button = egui::Button::new(&btn_text)
.fill(if is_selected {
egui::Color32::from_rgb(60, 100, 60)
} else {
egui::Color32::from_gray(45)
});
if ui.add_sized([100.0, 35.0], button).clicked() {
wizard.custom_dpi_input = dpi_value.to_string();
*action_ref = WizardAction::ApplyDpi(*dpi_value);
}
}
});
ui.add_space(10.0);
// Custom DPI input
ui.horizontal(|ui| {
ui.label("Custom DPI:");
let response = ui.add(
egui::TextEdit::singleline(&mut wizard.custom_dpi_input)
.desired_width(80.0)
.hint_text("e.g. 110")
);
if ui.button("Apply").clicked() || (response.lost_focus() && ui.input(|i| i.key_pressed(egui::Key::Enter))) {
if let Ok(custom_dpi) = wizard.custom_dpi_input.trim().parse::<u32>() {
if (72..=480).contains(&custom_dpi) {
*action_ref = WizardAction::ApplyDpi(custom_dpi);
}
}
}
ui.label(
egui::RichText::new("(72-480)")
.size(11.0)
.color(egui::Color32::GRAY),
);
});
ui.add_space(15.0);
ui.separator();
ui.add_space(10.0);
// Test Applications
ui.label(egui::RichText::new("Test Applications:").strong());
ui.label(
egui::RichText::new("Launch these to preview how your DPI setting looks.")
.size(11.0)
.color(egui::Color32::GRAY),
);
ui.add_space(5.0);
ui.horizontal(|ui| {
let test_apps = [
("winecfg", "Wine Config"),
("regedit", "Registry Editor"),
("notepad", "Notepad"),
("control", "Control Panel"),
];
for (app_cmd, app_label) in test_apps {
if ui.button(app_label).clicked() {
*action_ref = WizardAction::LaunchTestApp(app_cmd.to_string());
}
}
});
ui.add_space(10.0);
ui.label(
egui::RichText::new("Note: Changing DPI or confirming will close all test applications.")
.size(11.0)
.color(egui::Color32::from_rgb(255, 200, 100)),
);
ui.add_space(20.0);
ui.separator();
ui.add_space(10.0);
// Confirm button
ui.horizontal(|ui| {
if ui.button("Skip (Use 96/100%)").clicked() {
wizard.selected_dpi = 96;
*action_ref = WizardAction::ConfirmDpi;
}
ui.add_space(20.0);
let confirm_label = DPI_PRESETS.iter()
.find(|(v, _)| *v == wizard.selected_dpi)
.map(|(v, l)| format!("{} ({})", l, v))
.unwrap_or_else(|| format!("{}", wizard.selected_dpi));
if ui.add(egui::Button::new(format!("Confirm DPI: {}", confirm_label))
.fill(egui::Color32::from_rgb(60, 100, 60)))
.clicked()
{
*action_ref = WizardAction::ConfirmDpi;
}
});
},
WizardStep::Finished => {
ui.vertical_centered(|ui| {
ui.add_space(20.0);
if let Some(error_msg) = &wizard.last_install_error {
ui.heading(egui::RichText::new("Installation Failed!").color(egui::Color32::RED));
ui.add_space(10.0);
ui.label(format!("Error: {}", error_msg));
ui.add_space(10.0);
ui.label(egui::RichText::new("Please check the logs for more details:").strong());
let config = AppConfig::load();
ui.label(config.get_data_path().join("logs").to_string_lossy().to_string());
ui.add_space(20.0);
ui.horizontal(|ui| {
if ui.button("Try Again").clicked() {
*action_ref = WizardAction::Reset;
}
ui.add_space(10.0);
if ui.button("Return to Dashboard").clicked() {
*action_ref = WizardAction::Reset;
}
});
} else {
ui.heading(egui::RichText::new("Installation Successful!").color(egui::Color32::from_rgb(100, 255, 100)));
ui.add_space(15.0);
ui.label(format!("{} has been set up successfully.", manager_name));
ui.add_space(15.0);
// Build paths
let prefix_name = format!(
"{}_{}",
wizard.manager_type.to_lowercase(),
wizard.name.to_lowercase().replace(' ', "_")
);
let config = AppConfig::load();
let prefix_path = config.get_prefixes_path().join(&prefix_name);
let script_path = prefix_path.join("start.sh");
// How to launch
ui.label(egui::RichText::new("How to Launch").strong().size(14.0));
ui.add_space(4.0);
ui.label("Double-click the shortcut in your mod manager folder:");
ui.label(egui::RichText::new(format!("{}/Launch {}", wizard.path, manager_name)).monospace().color(egui::Color32::from_rgb(150, 200, 255)));
ui.add_space(6.0);
ui.label("Or run the script directly:");
ui.label(egui::RichText::new(script_path.to_string_lossy().to_string()).monospace().size(11.0).color(egui::Color32::from_rgb(150, 200, 255)));
ui.add_space(6.0);
ui.colored_label(
egui::Color32::from_rgb(255, 200, 100),
"Do NOT launch the .exe directly - it won't work correctly!",
);
ui.add_space(15.0);
// Installation paths
ui.label(egui::RichText::new("Installation Paths").strong());
ui.add_space(4.0);
ui.label(egui::RichText::new(format!("Prefix: {}", prefix_path.display())).monospace().size(11.0));
ui.label(egui::RichText::new(format!("{}: {}", manager_name, &wizard.path)).monospace().size(11.0));
ui.add_space(15.0);
// Steam Deck tip
ui.label(egui::RichText::new("Steam Deck / Game Mode").strong());
ui.add_space(4.0);
ui.label("Add the script above to Steam as a non-Steam game");
ui.label("to launch your mod manager from Game Mode.");
ui.add_space(20.0);
if ui.button("Return to Menu").clicked() {
*action_ref = WizardAction::Reset;
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | true |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/ui/mod.rs | src/ui/mod.rs | //! UI components and rendering
mod game_fixer;
mod mod_managers;
mod pages;
mod proton_tools;
mod sidebar;
pub use game_fixer::render_game_fixer;
pub use mod_managers::render_mod_managers;
pub use pages::{render_first_run_setup, render_getting_started, render_marketplace, render_settings, render_updater};
pub use proton_tools::render_proton_tools;
pub use sidebar::render_sidebar;
use eframe::egui;
use std::sync::atomic::Ordering;
use crate::app::MyApp;
use crate::logging::{log_action, log_error, log_info};
use crate::wine::{delete_cachyos_proton, delete_ge_proton};
// ============================================================================
// UI Extension Trait
// ============================================================================
/// Small extension trait adding NaK-specific helpers to `egui::Ui`.
pub trait UiExt {
    /// Render a secondary heading: spacing above, 16pt bold label, spacing below.
    fn subheading(&mut self, text: &str);
}
impl UiExt for egui::Ui {
    fn subheading(&mut self, text: &str) {
        self.add_space(10.0);
        self.label(egui::RichText::new(text).size(16.0).strong());
        self.add_space(5.0);
    }
}
// ============================================================================
// eframe::App Implementation
// ============================================================================
impl eframe::App for MyApp {
    /// Per-frame update: process background-completion flags, apply global
    /// styling, render dialogs, sidebar, and the current page.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        // Check for background download completion (using atomic flag);
        // swap(false) both reads and clears it in one step.
        if self.download_needs_refresh.swap(false, Ordering::Relaxed) {
            self.should_refresh_proton = true;
        }
        if self.should_refresh_proton {
            self.refresh_proton_versions();
            self.should_refresh_proton = false;
        }
        // Global Style Tweaks: rounded corners on widgets.
        let mut style = (*ctx.style()).clone();
        style.visuals.widgets.active.rounding = egui::Rounding::same(4.0);
        style.visuals.widgets.inactive.rounding = egui::Rounding::same(4.0);
        style.visuals.widgets.open.rounding = egui::Rounding::same(4.0);
        ctx.set_style(style);
        let is_busy = *self.is_installing_manager.lock().unwrap();
        // Render confirmation dialogs (modal windows) before the main layout.
        render_confirmation_dialogs(self, ctx);
        // Hide sidebar during first-run setup
        if self.current_page != crate::app::Page::FirstRunSetup {
            egui::SidePanel::left("sidebar_panel")
                .resizable(false)
                .default_width(180.0)
                .show(ctx, |ui| {
                    // Sidebar navigation is disabled while an install is running.
                    render_sidebar(self, ctx, ui, !is_busy);
                });
        }
        egui::CentralPanel::default().show(ctx, |ui| {
            // If busy, show overlay/status at top
            if is_busy {
                ui.vertical_centered(|ui| {
                    ui.add_space(10.0);
                    ui.heading("Installation in Progress...");
                    let status = self.install_status.lock().unwrap().clone();
                    ui.label(&status);
                    let p = *self.install_progress.lock().unwrap();
                    ui.add(egui::ProgressBar::new(p).animate(true));
                    if ui.button("Cancel").clicked() {
                        // Cooperative cancel: worker threads poll this flag.
                        self.cancel_install.store(true, Ordering::Relaxed);
                        *self.install_status.lock().unwrap() = "Cancelling...".to_string();
                    }
                    ui.add_space(10.0);
                    ui.separator();
                });
            }
            match self.current_page {
                // These pages manage their own busy state, so they stay interactive.
                crate::app::Page::FirstRunSetup => {
                    render_first_run_setup(self, ui);
                }
                crate::app::Page::ModManagers => {
                    render_mod_managers(self, ui);
                }
                crate::app::Page::GameFixer => {
                    render_game_fixer(self, ui);
                }
                // All remaining pages are disabled wholesale while busy.
                _ => {
                    ui.add_enabled_ui(!is_busy, |ui| match self.current_page {
                        crate::app::Page::GettingStarted => render_getting_started(self, ui),
                        crate::app::Page::Marketplace => render_marketplace(self, ui),
                        crate::app::Page::ProtonTools => render_proton_tools(self, ui),
                        crate::app::Page::Settings => render_settings(self, ui),
                        crate::app::Page::Updater => render_updater(self, ui),
                        _ => {}
                    });
                }
            }
        });
    }
}
/// Render confirmation dialogs for destructive actions (prefix deletion and
/// Proton uninstall). Each dialog is driven by a `pending_*` Option on the
/// app: set to open, cleared to close.
fn render_confirmation_dialogs(app: &mut MyApp, ctx: &egui::Context) {
    // Prefix deletion confirmation
    if let Some(prefix_name) = app.pending_prefix_delete.clone() {
        egui::Window::new("Confirm Delete")
            .collapsible(false)
            .resizable(false)
            .anchor(egui::Align2::CENTER_CENTER, [0.0, 0.0])
            .show(ctx, |ui| {
                ui.vertical_centered(|ui| {
                    ui.add_space(10.0);
                    ui.label(egui::RichText::new("Delete Prefix?").size(18.0).strong());
                    ui.add_space(10.0);
                    ui.label(format!("Are you sure you want to delete '{}'?", prefix_name));
                    ui.label(egui::RichText::new("This will permanently remove all data in this prefix.")
                        .color(egui::Color32::from_rgb(255, 150, 150)));
                    ui.add_space(15.0);
                    ui.horizontal(|ui| {
                        if ui.button("Cancel").clicked() {
                            app.pending_prefix_delete = None;
                        }
                        ui.add_space(20.0);
                        if ui.button(egui::RichText::new("Delete").color(egui::Color32::RED)).clicked() {
                            log_action(&format!("Confirmed delete prefix: {}", prefix_name));
                            // Check if this prefix is the active NXM handler before deleting
                            // (the path must be computed before the prefix dir is gone).
                            let prefix_path = app.config.get_prefixes_path().join(&prefix_name).join("pfx");
                            let prefix_path_str = prefix_path.to_string_lossy().to_string();
                            let is_active_nxm = app.config.active_nxm_prefix.as_ref() == Some(&prefix_path_str);
                            if let Err(e) = app.prefix_manager.delete_prefix(&prefix_name) {
                                log_error(&format!("Failed to delete prefix '{}': {}", prefix_name, e));
                            } else {
                                log_info(&format!("Prefix '{}' deleted successfully", prefix_name));
                                // If this was the active NXM prefix, clear the NXM handler state
                                if is_active_nxm {
                                    log_info("Cleared active NXM handler (deleted prefix was active)");
                                    app.config.active_nxm_prefix = None;
                                    app.config.save();
                                    // Also remove the active_nxm_game symlink
                                    // (symlink_metadata catches dangling links that exists() misses).
                                    let nxm_link = app.config.get_data_path().join("active_nxm_game");
                                    if nxm_link.exists() || std::fs::symlink_metadata(&nxm_link).is_ok() {
                                        let _ = std::fs::remove_file(&nxm_link);
                                    }
                                }
                                // Refresh the list so the deleted prefix disappears immediately.
                                app.detected_prefixes = app.prefix_manager.scan_prefixes();
                            }
                            // Close the dialog whether deletion succeeded or failed.
                            app.pending_prefix_delete = None;
                        }
                    });
                    ui.add_space(10.0);
                });
            });
    }
    // Proton deletion confirmation
    if let Some((proton_name, proton_type)) = app.pending_proton_delete.clone() {
        egui::Window::new("Confirm Uninstall")
            .collapsible(false)
            .resizable(false)
            .anchor(egui::Align2::CENTER_CENTER, [0.0, 0.0])
            .show(ctx, |ui| {
                ui.vertical_centered(|ui| {
                    ui.add_space(10.0);
                    ui.label(egui::RichText::new("Uninstall Proton?").size(18.0).strong());
                    ui.add_space(10.0);
                    ui.label(format!("Are you sure you want to uninstall '{}'?", proton_name));
                    ui.add_space(15.0);
                    ui.horizontal(|ui| {
                        if ui.button("Cancel").clicked() {
                            app.pending_proton_delete = None;
                        }
                        ui.add_space(20.0);
                        if ui.button(egui::RichText::new("Uninstall").color(egui::Color32::RED)).clicked() {
                            log_action(&format!("Confirmed uninstall proton: {} ({})", proton_name, proton_type));
                            // Dispatch on build flavour: "ge" vs CachyOS.
                            let result = if proton_type == "ge" {
                                delete_ge_proton(&proton_name)
                            } else {
                                delete_cachyos_proton(&proton_name)
                            };
                            if let Err(e) = result {
                                log_error(&format!("Failed to uninstall '{}': {}", proton_name, e));
                            } else {
                                log_info(&format!("Proton '{}' uninstalled successfully", proton_name));
                                // Trigger a re-scan on the next frame.
                                app.should_refresh_proton = true;
                            }
                            app.pending_proton_delete = None;
                        }
                    });
                    ui.add_space(10.0);
                });
            });
    }
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/ui/sidebar.rs | src/ui/sidebar.rs | //! Sidebar navigation
use crate::app::{MyApp, Page};
use eframe::egui;
/// Render the left sidebar: Steam/dependency warnings, update notification,
/// and the page navigation buttons. `is_enabled` gates navigation (false
/// while an installation is running).
pub fn render_sidebar(app: &mut MyApp, _ctx: &egui::Context, ui: &mut egui::Ui, is_enabled: bool) {
    ui.heading("NaK");
    ui.add_space(10.0);
    // Steam detection warning (critical)
    if !app.steam_detected {
        egui::Frame::none()
            .fill(egui::Color32::from_rgb(80, 20, 20))
            .rounding(egui::Rounding::same(4.0))
            .inner_margin(8.0)
            .show(ui, |ui| {
                ui.colored_label(egui::Color32::RED, "STEAM NOT DETECTED");
                ui.colored_label(
                    egui::Color32::from_rgb(255, 150, 150),
                    "NaK requires Steam to be installed.",
                );
                ui.colored_label(
                    egui::Color32::from_rgb(255, 150, 150),
                    "Please install Steam first.",
                );
            });
        ui.add_space(5.0);
        ui.separator();
    } else if let Some(path) = &app.steam_path {
        // Steam found: show the detected install path.
        ui.small(format!("Steam: {}", path));
        ui.add_space(2.0);
    }
    // Missing system dependencies, if the background check found any.
    let missing = app.missing_deps.lock().unwrap();
    if !missing.is_empty() {
        ui.colored_label(egui::Color32::RED, "Missing Deps:");
        for dep in missing.iter() {
            ui.colored_label(egui::Color32::RED, format!("- {}", dep));
        }
        ui.small("Please install via OS package manager.");
        ui.separator();
    }
    drop(missing); // Release lock early
    // Update available notification
    let update_available = app.update_info.lock().unwrap()
        .as_ref()
        .map(|i| i.is_update_available)
        .unwrap_or(false);
    if update_available {
        egui::Frame::none()
            .fill(egui::Color32::from_rgb(40, 80, 40))
            .rounding(egui::Rounding::same(4.0))
            .inner_margin(8.0)
            .show(ui, |ui| {
                ui.colored_label(egui::Color32::from_rgb(100, 255, 100), "UPDATE AVAILABLE");
                if let Some(info) = app.update_info.lock().unwrap().as_ref() {
                    ui.colored_label(
                        egui::Color32::from_rgb(200, 255, 200),
                        format!("v{} -> v{}", info.current_version, info.latest_version),
                    );
                }
                if ui.small_button("View Update").clicked() {
                    app.current_page = Page::Updater;
                }
            });
        ui.add_space(5.0);
        ui.separator();
    }
    // Page navigation, rendered in this fixed order.
    let navigation_buttons = [
        (Page::GettingStarted, "Getting Started"),
        (Page::ModManagers, "Mod Managers"),
        (Page::GameFixer, "Game Modding"),
        (Page::Marketplace, "Marketplace"),
        (Page::ProtonTools, "Proton Picker"),
        (Page::Settings, "Settings"),
        (Page::Updater, if update_available { "Version (NEW!)" } else { "Version" }),
    ];
    for (page, label) in navigation_buttons {
        let is_selected = app.current_page == page;
        let is_enabled_page = page != Page::Marketplace; // Disable Marketplace
        // Highlight Version button if update available
        let button = if page == Page::Updater && update_available {
            egui::Button::new(egui::RichText::new(label).color(egui::Color32::from_rgb(100, 255, 100)))
                .min_size(egui::vec2(150.0, 30.0))
        } else {
            egui::Button::new(label).min_size(egui::vec2(150.0, 30.0))
        };
        // Current page's button stays disabled so it can't be re-clicked.
        if ui
            .add_enabled(!is_selected && is_enabled_page && is_enabled, button)
            .clicked()
        {
            app.current_page = page;
        }
        ui.add_space(5.0);
    }
    // Version string pinned to the bottom of the sidebar.
    ui.with_layout(egui::Layout::bottom_up(egui::Align::Min), |ui| {
        ui.label(format!("v{}", env!("CARGO_PKG_VERSION")));
        ui.separator();
    });
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/ui/pages.rs | src/ui/pages.rs | //! Simple pages: Getting Started, Marketplace, Settings, First Run Setup
use crate::app::{MyApp, Page};
use crate::config::{AppConfig, StorageManager};
use crate::logging::log_action;
use crate::wine::runtime;
use eframe::egui;
/// First-run setup page - shown on first launch to configure SLR preference.
///
/// Explains what the Steam Linux Runtime (SLR) is, then offers Yes/No buttons.
/// Either choice sets `config.use_steam_runtime`, marks `first_run_completed`,
/// saves the config, and navigates to the Getting Started page. Choosing "Yes"
/// also starts the SLR download when the runtime is not installed yet.
pub fn render_first_run_setup(app: &mut MyApp, ui: &mut egui::Ui) {
    // Wrap in scroll area for fullscreen/small window support
    egui::ScrollArea::vertical().show(ui, |ui| {
        ui.vertical_centered(|ui| {
            ui.add_space(20.0);
            ui.heading(egui::RichText::new("Welcome to NaK!").size(28.0).strong());
            ui.add_space(5.0);
            ui.label(
                egui::RichText::new("Linux Modding Helper")
                    .size(16.0)
                    .color(egui::Color32::GRAY),
            );
            ui.add_space(20.0);
            ui.separator();
            ui.add_space(15.0);
            // SLR Configuration Section
            ui.heading(egui::RichText::new("Runtime Configuration").size(20.0));
            ui.add_space(10.0);
            // Calculate max width based on available space (capped at 600px)
            let max_width = ui.available_width().min(600.0);
            // Explanation panel: what SLR is, pros/cons, disk requirement.
            egui::Frame::none()
                .fill(egui::Color32::from_rgb(35, 35, 45))
                .rounding(egui::Rounding::same(8.0))
                .inner_margin(15.0)
                .show(ui, |ui| {
                    ui.set_max_width(max_width);
                    ui.label(egui::RichText::new("What is Steam Linux Runtime (SLR)?").strong().size(14.0));
                    ui.add_space(8.0);
                    ui.label("SLR is a containerized environment that provides consistent libraries for running Windows applications via Proton.");
                    ui.add_space(10.0);
                    ui.colored_label(
                        egui::Color32::from_rgb(100, 200, 100),
                        "Pros:",
                    );
                    ui.label(" - Functions more like Steam does when running games");
                    ui.label(" - More consistent behavior across different Linux distributions");
                    ui.add_space(8.0);
                    ui.colored_label(
                        egui::Color32::from_rgb(255, 180, 100),
                        "Cons:",
                    );
                    ui.label(" - Might cause unexpected issues");
                    ui.add_space(8.0);
                    ui.colored_label(
                        egui::Color32::from_rgb(150, 150, 150),
                        "Requirements:",
                    );
                    ui.label(" - ~750MB disk space for runtime files");
                });
            ui.add_space(15.0);
            // Important note: setting can be changed later in Settings.
            egui::Frame::none()
                .fill(egui::Color32::from_rgb(50, 50, 30))
                .rounding(egui::Rounding::same(8.0))
                .inner_margin(12.0)
                .show(ui, |ui| {
                    ui.set_max_width(max_width);
                    ui.label(
                        egui::RichText::new("Note: You can change this setting anytime in Settings > Advanced or per-prefix in the Prefix Manager. If you choose 'No' now and want to enable SLR later, you'll just need to wait for the download to complete.")
                            .color(egui::Color32::from_rgb(255, 220, 100)),
                    );
                });
            ui.add_space(20.0);
            // Choice buttons
            ui.label(egui::RichText::new("Would you like to use Steam Linux Runtime?").size(16.0));
            ui.add_space(15.0);
            let slr_installed = runtime::is_runtime_installed();
            let is_downloading = *app.is_downloading.lock().unwrap();
            // Center buttons using a centered horizontal layout
            ui.horizontal(|ui| {
                let button_width = 200.0;
                let total_buttons_width = button_width * 2.0 + 20.0; // 2 buttons + spacing
                let available = ui.available_width();
                // Left padding so the button pair ends up centered.
                let padding = ((available - total_buttons_width) / 2.0).max(0.0);
                ui.add_space(padding);
                // Yes button — label reflects current install/download state.
                let yes_text = if slr_installed {
                    "Yes, use SLR (Already installed)"
                } else if is_downloading {
                    "Yes, use SLR (Downloading...)"
                } else {
                    "Yes, use SLR (Recommended)"
                };
                if ui.add_sized(
                    [button_width, 40.0],
                    egui::Button::new(egui::RichText::new(yes_text).size(13.0))
                ).clicked() {
                    log_action("First-run setup: User chose to use SLR");
                    app.config.use_steam_runtime = true;
                    app.config.first_run_completed = true;
                    app.config.save();
                    // Start SLR download if not installed (and not already running)
                    if !slr_installed && !is_downloading {
                        app.start_slr_download();
                    }
                    app.current_page = Page::GettingStarted;
                }
                ui.add_space(20.0);
                // No button — persist the opt-out and move on.
                if ui.add_sized(
                    [button_width, 40.0],
                    egui::Button::new(egui::RichText::new("No, use Direct Proton").size(13.0))
                ).clicked() {
                    log_action("First-run setup: User chose Direct Proton (no SLR)");
                    app.config.use_steam_runtime = false;
                    app.config.first_run_completed = true;
                    app.config.save();
                    app.current_page = Page::GettingStarted;
                }
            });
            ui.add_space(15.0);
            // Show download progress if a download is already in flight.
            if is_downloading {
                let status = app.download_status.lock().unwrap().clone();
                let progress = *app.download_progress.lock().unwrap();
                ui.add_space(5.0);
                ui.set_max_width(max_width);
                ui.label(&status);
                ui.add(egui::ProgressBar::new(progress).animate(true));
            }
            ui.add_space(20.0);
            // Reassurance for undecided users.
            ui.label(
                egui::RichText::new("Not sure? 'Yes' works for most users. You can always change this later.")
                    .size(12.0)
                    .color(egui::Color32::GRAY),
            );
            ui.add_space(20.0);
        });
    });
}
/// Getting Started page: a three-step onboarding guide with navigation
/// shortcuts to the Proton Picker, Mod Managers, and Game Modding pages,
/// plus external help links (FAQ, GitHub issues, Discord, Ko-Fi).
pub fn render_getting_started(app: &mut MyApp, ui: &mut egui::Ui) {
    egui::ScrollArea::vertical().show(ui, |ui| {
        // ============================================================
        // Header
        // ============================================================
        ui.heading(egui::RichText::new("Welcome to NaK!").size(24.0).strong());
        ui.label(
            egui::RichText::new("Linux Modding Helper")
                .size(14.0)
                .color(egui::Color32::GRAY),
        );
        ui.separator();
        ui.add_space(10.0);
        ui.label("NaK makes it easy to run Windows modding tools on Linux using Proton.");
        ui.label(
            egui::RichText::new("Get started by following these three simple steps:")
                .color(egui::Color32::LIGHT_GRAY),
        );
        ui.add_space(15.0);
        // ============================================================
        // Step 1: Pick a Proton Version
        // ============================================================
        ui.label(
            egui::RichText::new("1. Pick a Proton Version")
                .size(16.0)
                .strong(),
        );
        ui.label(
            egui::RichText::new(" NaK needs Proton to run Windows modding tools")
                .size(12.0)
                .color(egui::Color32::LIGHT_GRAY),
        );
        ui.add_space(5.0);
        ui.label(" - Recommended: Download Proton-GE (best compatibility)");
        ui.label(" - Alternative: Use system Proton if you prefer");
        ui.add_space(5.0);
        if ui.button("Open Proton Picker").clicked() {
            log_action("Navigate to Proton Picker from Getting Started");
            app.current_page = Page::ProtonTools;
        }
        ui.add_space(20.0);
        ui.separator();
        ui.add_space(10.0);
        // ============================================================
        // Step 2: Choose Your Modding Approach
        // ============================================================
        ui.label(
            egui::RichText::new("2. Choose Your Modding Approach")
                .size(16.0)
                .strong(),
        );
        ui.label(
            egui::RichText::new(" Pick the option that fits your modding style")
                .size(12.0)
                .color(egui::Color32::LIGHT_GRAY),
        );
        ui.add_space(10.0);
        // Option A: Mod Managers (full mod-management workflow)
        ui.horizontal(|ui| {
            if ui.button("Mod Managers").clicked() {
                log_action("Navigate to Mod Managers from Getting Started");
                app.current_page = Page::ModManagers;
            }
            ui.label(
                egui::RichText::new("Install MO2 or Vortex for full mod management")
                    .color(egui::Color32::LIGHT_GRAY),
            );
        });
        ui.label(" Best for: Heavy modding with load order management, mod profiles, etc.");
        ui.add_space(8.0);
        // Option B: Game Modding (direct prefix fixes)
        ui.horizontal(|ui| {
            if ui.button("Game Modding").clicked() {
                log_action("Navigate to Game Modding from Getting Started");
                app.current_page = Page::GameFixer;
            }
            ui.label(
                egui::RichText::new("Apply fixes directly to game prefixes")
                    .color(egui::Color32::LIGHT_GRAY),
            );
        });
        ui.label(" Best for: Simple modding without a dedicated mod manager");
        ui.add_space(20.0);
        ui.separator();
        ui.add_space(10.0);
        // ============================================================
        // Step 3: Check FAQ & Known Issues
        // ============================================================
        ui.label(
            egui::RichText::new("3. Check FAQ & Known Issues")
                .size(16.0)
                .strong(),
        );
        ui.label(
            egui::RichText::new(" Solutions to common problems and setup tips")
                .size(12.0)
                .color(egui::Color32::LIGHT_GRAY),
        );
        ui.add_space(5.0);
        // xdg-open hands the URL to the user's default browser (Linux);
        // spawn failures are deliberately ignored (best-effort).
        if ui.button("View FAQ & Known Issues").clicked() {
            log_action("Open FAQ in browser");
            let _ = std::process::Command::new("xdg-open")
                .arg("https://github.com/SulfurNitride/NaK/blob/main/docs/FAQ.md")
                .spawn();
        }
        ui.add_space(15.0);
        // Support links
        ui.horizontal(|ui| {
            ui.label("Need help?");
            if ui.link("GitHub Issues").clicked() {
                let _ = std::process::Command::new("xdg-open")
                    .arg("https://github.com/SulfurNitride/NaK/issues")
                    .spawn();
            }
            ui.label("|");
            if ui.link("Discord").clicked() {
                let _ = std::process::Command::new("xdg-open")
                    .arg("https://discord.gg/9JWQzSeUWt")
                    .spawn();
            }
            ui.label("|");
            if ui.link("Ko-Fi (Donate)").clicked() {
                let _ = std::process::Command::new("xdg-open")
                    .arg("https://ko-fi.com/sulfurnitride")
                    .spawn();
            }
        });
    });
}
/// Marketplace page: currently a placeholder grid of six dummy plugin cards.
/// The app state is unused until real marketplace content exists.
pub fn render_marketplace(_app: &MyApp, ui: &mut egui::Ui) {
    ui.heading("Marketplace");
    ui.separator();
    // Placeholder grid of marketplace items until real content is wired up.
    egui::ScrollArea::vertical().show(ui, |ui| {
        ui.horizontal_wrapped(|ui| {
            for slot in 1..=6 {
                // Each card is a fixed-size framed group with centered content.
                let card = egui::Frame::group(ui.style())
                    .rounding(egui::Rounding::same(4.0))
                    .inner_margin(10.0);
                card.show(ui, |ui| {
                    ui.set_width(150.0);
                    ui.set_height(120.0);
                    ui.vertical_centered(|ui| {
                        ui.label(egui::RichText::new("[Plugin]").size(16.0));
                        ui.strong(format!("Plugin {}", slot));
                        ui.small("Description of plugin...");
                        ui.add_space(5.0);
                        // Install does nothing yet; kept for layout preview.
                        let _ = ui.button("Install").clicked();
                    });
                });
                ui.add_space(10.0);
            }
        });
    });
}
/// Settings page: storage/cache information and controls, data migration,
/// advanced options (Steam Linux Runtime toggle, launch-script regeneration),
/// and an About section with version and folder shortcuts.
pub fn render_settings(app: &mut MyApp, ui: &mut egui::Ui) {
    ui.heading("Settings");
    ui.separator();
    egui::ScrollArea::vertical().show(ui, |ui| {
        // ============================================================
        // Storage & Cache Section
        // ============================================================
        egui::CollapsingHeader::new("Storage & Cache")
            .default_open(true)
            .show(ui, |ui| {
                ui.add_space(5.0);
                // Cache storage info - only refresh every 5 seconds
                // (disk scanning is too expensive to run every frame).
                let should_refresh = app.storage_info_last_update.elapsed().as_secs() > 5
                    || app.cached_storage_info.is_none();
                if should_refresh {
                    app.cached_storage_info = Some(StorageManager::get_storage_info(&app.config.get_data_path()));
                    app.storage_info_last_update = std::time::Instant::now();
                }
                let storage_info = app.cached_storage_info.clone().unwrap_or_default();
                // Manual refresh button bypasses the 5-second throttle.
                ui.horizontal(|ui| {
                    ui.label(egui::RichText::new("Storage Info").strong());
                    if ui.small_button("Refresh").clicked() {
                        app.cached_storage_info = Some(StorageManager::get_storage_info(&app.config.get_data_path()));
                        app.storage_info_last_update = std::time::Instant::now();
                    }
                });
                // --- Location Info ---
                ui.horizontal(|ui| {
                    ui.label("Data Path:");
                    ui.monospace(&storage_info.data_path);
                });
                ui.label(
                    egui::RichText::new("Config stored in: ~/.config/nak/")
                        .size(11.0)
                        .color(egui::Color32::GRAY),
                );
                // --- Usage Breakdown (only when the data dir exists) ---
                if storage_info.exists {
                    ui.add_space(5.0);
                    ui.horizontal(|ui| {
                        ui.label("Total Used:");
                        ui.strong(format!("{:.2} GB", storage_info.used_space_gb));
                        ui.label(" | Free:");
                        ui.strong(format!("{:.2} GB", storage_info.free_space_gb));
                    });
                    ui.add_space(5.0);
                    ui.indent("storage_breakdown", |ui| {
                        ui.horizontal(|ui| {
                            ui.label("• Prefixes:");
                            ui.strong(format!("{:.2} GB", storage_info.prefixes_size_gb));
                        });
                        ui.horizontal(|ui| {
                            ui.label("• Proton Versions:");
                            ui.strong(format!("{:.2} GB", storage_info.proton_size_gb));
                        });
                        ui.horizontal(|ui| {
                            ui.label("• Cache:");
                            ui.strong(format!("{:.2} GB", storage_info.cache_size_gb));
                        });
                        // "Other" is hidden when negligible (< ~10 MB).
                        if storage_info.other_size_gb > 0.01 {
                            ui.horizontal(|ui| {
                                ui.label("• Other:");
                                ui.strong(format!("{:.2} GB", storage_info.other_size_gb))
                                    .on_hover_text("Includes logs, tmp files, and binaries");
                            });
                        }
                    });
                }
                ui.add_space(15.0);
                ui.separator();
                ui.add_space(10.0);
                // --- Cache Controls ---
                // Each checkbox persists immediately on change.
                ui.label(egui::RichText::new("Cache Configuration").strong());
                let mut cache_enabled = app.cache_config.cache_enabled;
                if ui.checkbox(&mut cache_enabled, "Enable Caching").changed() {
                    app.cache_config.cache_enabled = cache_enabled;
                    app.cache_config.save();
                    log_action(&format!("Cache enabled: {}", cache_enabled));
                }
                // Sub-options are greyed out when caching is disabled.
                ui.add_enabled_ui(cache_enabled, |ui| {
                    ui.indent("cache_options", |ui| {
                        let mut cache_deps = app.cache_config.cache_dependencies;
                        if ui.checkbox(&mut cache_deps, "Cache Dependencies (~1.7GB)").changed() {
                            app.cache_config.cache_dependencies = cache_deps;
                            app.cache_config.save();
                        }
                        let mut cache_mo2 = app.cache_config.cache_mo2;
                        if ui.checkbox(&mut cache_mo2, "Cache MO2 Downloads").changed() {
                            app.cache_config.cache_mo2 = cache_mo2;
                            app.cache_config.save();
                        }
                        let mut cache_vortex = app.cache_config.cache_vortex;
                        if ui.checkbox(&mut cache_vortex, "Cache Vortex Downloads").changed() {
                            app.cache_config.cache_vortex = cache_vortex;
                            app.cache_config.save();
                        }
                    });
                });
                ui.add_space(5.0);
                if ui.button("Clear Cache").clicked() {
                    log_action("Clear cache clicked");
                    if let Err(e) = app.cache_config.clear_cache(&app.config) {
                        eprintln!("Failed to clear cache: {}", e);
                    }
                }
                ui.add_space(15.0);
                ui.separator();
                ui.add_space(10.0);
                // --- Migration Controls ---
                ui.label(egui::RichText::new("Move Installation").strong());
                ui.label("Move the entire NaK folder to a different drive (e.g., SSD).");
                ui.horizontal(|ui| {
                    ui.add(
                        egui::TextEdit::singleline(&mut app.migration_path_input)
                            .hint_text("/path/to/new/location")
                            .desired_width(300.0),
                    );
                    if ui.button("Browse").clicked() {
                        log_action("Browse for migration path clicked");
                        if let Some(path) = rfd::FileDialog::new().pick_folder() {
                            app.migration_path_input = path.to_string_lossy().to_string();
                        }
                    }
                });
                ui.horizontal(|ui| {
                    // Only allow the move when a non-blank target path is entered.
                    let can_move = !app.migration_path_input.trim().is_empty();
                    if ui.add_enabled(can_move, egui::Button::new("Move NaK Here")).clicked() {
                        log_action(&format!("Move NaK to: {}", app.migration_path_input));
                        let path = std::path::PathBuf::from(app.migration_path_input.trim());
                        match StorageManager::move_data(&mut app.config, &path) {
                            Ok(msg) => {
                                crate::logging::log_info(&msg);
                                app.migration_path_input.clear();
                                // Force refresh storage info on the next frame
                                app.cached_storage_info = None;
                                // Refresh prefix manager and detected prefixes (paths changed)
                                app.prefix_manager = crate::wine::PrefixManager::new();
                                app.detected_prefixes = app.prefix_manager.scan_prefixes();
                            }
                            Err(e) => {
                                crate::logging::log_error(&format!("Migration failed: {}", e));
                            }
                        }
                    }
                });
            });
        ui.add_space(10.0);
        ui.separator();
        ui.add_space(10.0);
        // ============================================================
        // Advanced Settings Section
        // ============================================================
        egui::CollapsingHeader::new("Advanced Settings")
            .default_open(false)
            .show(ui, |ui| {
                ui.add_space(5.0);
                // --- Steam Linux Runtime Toggle ---
                ui.label(egui::RichText::new("Steam Linux Runtime (Pressure Vessel)").strong());
                ui.label(
                    egui::RichText::new("Container runtime that provides consistent library environment")
                        .size(11.0)
                        .color(egui::Color32::GRAY),
                );
                ui.add_space(5.0);
                let mut use_slr = app.config.use_steam_runtime;
                if ui.checkbox(&mut use_slr, "Use Steam Linux Runtime").changed() {
                    app.config.use_steam_runtime = use_slr;
                    app.config.save();
                    log_action(&format!("Steam Linux Runtime: {}", if use_slr { "enabled" } else { "disabled" }));
                }
                // Status blurb mirroring the current toggle state.
                ui.indent("slr_info", |ui| {
                    if use_slr {
                        ui.colored_label(
                            egui::Color32::from_rgb(100, 200, 100),
                            "Enabled - Launch scripts use containerized runtime",
                        );
                        ui.label(
                            egui::RichText::new("Recommended for most systems")
                                .size(11.0)
                                .color(egui::Color32::GRAY),
                        );
                    } else {
                        ui.colored_label(
                            egui::Color32::from_rgb(255, 200, 100),
                            "Disabled - Launch scripts use direct Proton execution",
                        );
                        ui.label(
                            egui::RichText::new("Use if you experience SLR-related errors")
                                .size(11.0)
                                .color(egui::Color32::GRAY),
                        );
                    }
                });
                ui.add_space(10.0);
                // --- Regenerate Scripts Button ---
                ui.label(egui::RichText::new("Regenerate Launch Scripts").strong());
                ui.label(
                    egui::RichText::new("Update all existing installation scripts with current settings")
                        .size(11.0)
                        .color(egui::Color32::GRAY),
                );
                ui.add_space(5.0);
                if ui.button("Regenerate All Scripts").clicked() {
                    log_action("Regenerate all scripts clicked");
                    match crate::scripts::regenerate_all_prefix_scripts() {
                        Ok(count) => {
                            crate::logging::log_info(&format!("Regenerated scripts for {} prefix(es)", count));
                        }
                        Err(e) => {
                            crate::logging::log_error(&format!("Failed to regenerate scripts: {}", e));
                        }
                    }
                }
                ui.label(
                    egui::RichText::new("Use after changing Steam Linux Runtime setting")
                        .size(11.0)
                        .color(egui::Color32::GRAY),
                );
                ui.add_space(15.0);
                ui.separator();
                ui.add_space(10.0);
                // --- Pre-Release Updates Toggle ---
                // NOTE(review): the header and description render but no checkbox
                // follows — the pre-release toggle appears unimplemented; confirm.
                ui.label(egui::RichText::new("Pre-Release Updates").strong());
                ui.label(
                    egui::RichText::new("Get notified about pre-release versions (beta/testing builds)")
                        .size(11.0)
                        .color(egui::Color32::GRAY),
                );
                ui.add_space(5.0);
            });
        ui.add_space(10.0);
        ui.separator();
        ui.add_space(10.0);
        // ============================================================
        // About Section
        // ============================================================
        egui::CollapsingHeader::new("About")
            .default_open(false)
            .show(ui, |ui| {
                ui.label("NaK - Linux Modding Helper (Rust Edition)");
                ui.label(format!("Version: {}", env!("CARGO_PKG_VERSION")));
                ui.add_space(5.0);
                ui.hyperlink_to("GitHub Repository", "https://github.com/SulfurNitride/NaK");
                ui.add_space(10.0);
                // Folder shortcuts open in the system file manager via xdg-open;
                // spawn failures are ignored (best-effort).
                ui.horizontal(|ui| {
                    if ui.button("Open NaK Folder").clicked() {
                        let config = AppConfig::load();
                        let nak_path = config.get_data_path();
                        let _ = std::process::Command::new("xdg-open").arg(&nak_path).spawn();
                    }
                    if ui.button("Open Logs Folder").clicked() {
                        let config = AppConfig::load();
                        let logs_path = config.get_data_path().join("logs");
                        let _ = std::process::Command::new("xdg-open").arg(&logs_path).spawn();
                    }
                });
            });
    });
}
pub fn render_updater(app: &mut MyApp, ui: &mut egui::Ui) {
ui.heading("Version");
ui.separator();
egui::ScrollArea::vertical().show(ui, |ui| {
// Current version info
ui.add_space(10.0);
ui.label(egui::RichText::new("NaK - Linux Modding Helper").size(18.0));
ui.label(format!("Current Version: {}", env!("CARGO_PKG_VERSION")));
ui.add_space(10.0);
ui.separator();
ui.add_space(10.0);
// Update status
let is_checking = *app.is_checking_update.lock().unwrap();
let is_installing = *app.is_installing_update.lock().unwrap();
let update_installed = *app.update_installed.lock().unwrap();
let update_info = app.update_info.lock().unwrap().clone();
let update_error = app.update_error.lock().unwrap().clone();
// Show restart prompt if update was installed
if update_installed {
egui::Frame::none()
.fill(egui::Color32::from_rgb(40, 80, 40))
.rounding(egui::Rounding::same(4.0))
.inner_margin(10.0)
.show(ui, |ui| {
ui.colored_label(
egui::Color32::from_rgb(150, 255, 150),
"Update installed successfully!",
);
ui.label("Please restart NaK to use the new version.");
ui.add_space(5.0);
if ui.button("Restart Now").clicked() {
let exe = std::env::current_exe().ok();
if let Some(exe_path) = exe {
let _ = std::process::Command::new(&exe_path).spawn();
std::process::exit(0);
}
}
});
ui.add_space(10.0);
}
if is_checking {
ui.horizontal(|ui| {
ui.spinner();
ui.label("Checking for updates...");
});
} else if is_installing {
ui.horizontal(|ui| {
ui.spinner();
ui.label("Installing update...");
});
} else if let Some(ref info) = update_info {
if info.is_update_available {
egui::Frame::none()
.fill(egui::Color32::from_rgb(40, 60, 40))
.rounding(egui::Rounding::same(4.0))
.inner_margin(10.0)
.show(ui, |ui| {
ui.colored_label(
egui::Color32::from_rgb(100, 200, 100),
format!("Update available: v{}", info.latest_version),
);
ui.label(format!("Current: v{}", info.current_version));
});
if !info.release_notes.is_empty() {
ui.add_space(10.0);
egui::CollapsingHeader::new("Release Notes")
.default_open(true)
.show(ui, |ui| {
egui::ScrollArea::vertical()
.max_height(200.0)
.show(ui, |ui| {
ui.label(&info.release_notes);
});
});
}
ui.add_space(10.0);
if info.download_url.is_some() {
if crate::updater::can_self_update() {
if ui.button("Install Update").clicked() {
log_action("Install update clicked");
let url = info.download_url.clone().unwrap();
let is_installing = app.is_installing_update.clone();
let update_error = app.update_error.clone();
let update_installed = app.update_installed.clone();
*is_installing.lock().unwrap() = true;
*update_error.lock().unwrap() = None;
std::thread::spawn(move || {
match crate::updater::install_update(&url) {
Ok(_) => {
*update_installed.lock().unwrap() = true;
}
Err(e) => {
*update_error.lock().unwrap() = Some(e.to_string());
}
}
*is_installing.lock().unwrap() = false;
});
}
} else {
ui.colored_label(
egui::Color32::from_rgb(255, 200, 100),
"Cannot self-update (no write permission to executable location)",
);
if ui.button("Copy Download URL").clicked() {
if let Some(url) = &info.download_url {
ui.output_mut(|o| o.copied_text = url.clone());
}
}
}
} else {
ui.label("No Linux binary found in release");
if ui.link("Download from GitHub").clicked() {
let _ = std::process::Command::new("xdg-open")
.arg("https://github.com/SulfurNitride/NaK/releases/latest")
.spawn();
}
}
} else {
ui.colored_label(
egui::Color32::from_rgb(100, 200, 100),
format!("You're up to date! (v{})", info.current_version),
);
}
} else {
ui.label("Click 'Check for Updates' to see if a new version is available.");
}
if let Some(error) = update_error {
ui.add_space(5.0);
ui.colored_label(egui::Color32::RED, format!("Error: {}", error));
}
ui.add_space(15.0);
ui.horizontal(|ui| {
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | true |
SulfurNitride/NaK | https://github.com/SulfurNitride/NaK/blob/112deef949a5a4acf846c040d97c24250fed25ee/src/ui/proton_tools.rs | src/ui/proton_tools.rs | //! Proton Picker page UI
use eframe::egui;
use std::sync::atomic::Ordering;
use std::thread;
use super::UiExt;
use crate::app::MyApp;
use crate::wine::{
download_cachyos_proton, download_ge_proton, runtime, set_active_proton, GithubRelease,
};
/// Proton Picker page: active-version selector, Steam Linux Runtime status,
/// and a unified download section for GE-Proton / CachyOS Proton releases.
pub fn render_proton_tools(app: &mut MyApp, ui: &mut egui::Ui) {
    ui.heading("Proton Picker");
    ui.separator();
    // Active version dropdown; selection is persisted and symlinked on change.
    ui.horizontal(|ui| {
        ui.label("Active Proton:");
        let mut selected = app.config.selected_proton.clone();
        egui::ComboBox::from_id_salt("proton_combo")
            .selected_text(selected.as_deref().unwrap_or("Select Version"))
            .show_ui(ui, |ui| {
                for p in &app.proton_versions {
                    ui.selectable_value(&mut selected, Some(p.name.clone()), &p.name);
                }
            });
        // Save if changed and update active symlink
        if app.config.selected_proton != selected {
            app.config.selected_proton = selected.clone();
            app.config.save();
            // Update the 'active' symlink for the selected proton
            // (best-effort: symlink errors are ignored).
            if let Some(name) = &selected {
                if let Some(proton) = app.proton_versions.iter().find(|p| &p.name == name) {
                    let _ = set_active_proton(proton);
                }
            }
        }
    });
    // =========================================================================
    // Steam Runtime Section
    // =========================================================================
    ui.add_space(20.0);
    ui.subheading("Steam Linux Runtime");
    let is_downloading_any = *app.is_downloading.lock().unwrap();
    let runtime_path = runtime::find_steam_runtime_sniper();
    if let Some(path) = runtime_path {
        // Runtime present: show status and where it was found.
        ui.horizontal(|ui| {
            ui.label("Status:");
            ui.colored_label(egui::Color32::GREEN, "Installed");
        });
        ui.label(
            egui::RichText::new(path.to_string_lossy())
                .size(12.0)
                .color(egui::Color32::from_gray(180)),
        );
    } else {
        // Runtime missing: offer a manual download (disabled mid-download).
        ui.horizontal(|ui| {
            ui.label("Status:");
            if is_downloading_any {
                ui.colored_label(egui::Color32::YELLOW, "Downloading...");
            } else {
                ui.colored_label(egui::Color32::RED, "Missing");
            }
        });
        ui.label("Required for stable containerized gaming. (Auto-downloads on startup)");
        ui.add_space(5.0);
        if ui
            .add_enabled(
                !is_downloading_any,
                egui::Button::new("Manually Download (~500MB)"),
            )
            .clicked()
        {
            download_runtime_ui(app);
        }
    }
    // =========================================================================
    // Unified Proton Downloads Section
    // =========================================================================
    ui.add_space(20.0);
    ui.subheading("Download Proton");
    // Source toggle buttons ("ge" vs "cachyos" select the release feed below)
    ui.horizontal(|ui| {
        let is_ge = app.proton_download_source == "ge";
        let is_cachyos = app.proton_download_source == "cachyos";
        if ui
            .selectable_label(is_ge, "GE-Proton")
            .on_hover_text("GloriousEggroll's Proton fork with game fixes")
            .clicked()
        {
            app.proton_download_source = "ge".to_string();
        }
        if ui
            .selectable_label(is_cachyos, "CachyOS Proton")
            .on_hover_text("CachyOS optimized Proton builds")
            .clicked()
        {
            app.proton_download_source = "cachyos".to_string();
        }
        ui.add_space(20.0);
        // Show fetching status for whichever source is selected.
        let is_fetching = if app.proton_download_source == "ge" {
            *app.is_fetching_ge.lock().unwrap()
        } else {
            *app.is_fetching_cachyos.lock().unwrap()
        };
        if is_fetching {
            ui.spinner();
            ui.label("Fetching...");
        } else {
            // Refresh button re-queries the release list for the active source.
            if ui.button("⟳ Refresh").on_hover_text("Refresh available versions").clicked() {
                if app.proton_download_source == "ge" {
                    refresh_ge_releases(app);
                } else {
                    refresh_cachyos_releases(app);
                }
            }
        }
    });
    ui.add_space(10.0);
    // Single search bar shared by both sources
    ui.horizontal(|ui| {
        ui.label("Search:");
        ui.add(
            egui::TextEdit::singleline(&mut app.proton_search_query).desired_width(300.0),
        );
    });
    // Download Status Bar: progress while active, last message otherwise.
    let status = app.download_status.lock().unwrap().clone();
    let progress = *app.download_progress.lock().unwrap();
    let is_downloading_any = *app.is_downloading.lock().unwrap();
    if is_downloading_any {
        ui.add_space(5.0);
        ui.add(egui::ProgressBar::new(progress).text(&status).animate(true));
    } else if !status.is_empty() {
        ui.add_space(5.0);
        ui.colored_label(egui::Color32::LIGHT_BLUE, format!("ℹ {}", status));
    }
    ui.add_space(10.0);
    // Collect installed version names for lookup (linear scan; list is small)
    let installed_names: Vec<String> = app.proton_versions.iter().map(|p| p.name.clone()).collect();
    // Show releases based on selected source
    if app.proton_download_source == "ge" {
        render_ge_releases(app, ui, &installed_names, is_downloading_any);
    } else {
        render_cachyos_releases(app, ui, &installed_names, is_downloading_any);
    }
}
/// Renders the GE-Proton release list (at most 10 entries after filtering),
/// with per-release Download / Installed / Uninstall controls.
///
/// `installed_names` holds the locally installed proton version names;
/// `is_downloading` disables Download buttons while a transfer is active.
fn render_ge_releases(
    app: &mut MyApp,
    ui: &mut egui::Ui,
    installed_names: &[String],
    is_downloading: bool,
) {
    // Lowercase the query once instead of once per release (loop-invariant).
    let query = app.proton_search_query.to_lowercase();
    // Clone matching releases out so the lock isn't held while rendering.
    let filtered_releases: Vec<GithubRelease> = {
        let releases = app.available_ge_versions.lock().unwrap();
        releases
            .iter()
            .filter(|r| query.is_empty() || r.tag_name.to_lowercase().contains(&query))
            .take(10)
            .cloned()
            .collect()
    };
    if filtered_releases.is_empty() {
        // Distinguish "no match for the search" from "nothing fetched yet".
        let releases_empty = app.available_ge_versions.lock().unwrap().is_empty();
        if !releases_empty {
            ui.label("No matching versions found.");
        } else if !*app.is_fetching_ge.lock().unwrap() {
            ui.label("No versions available (Check internet connection).");
        }
        return;
    }
    egui::ScrollArea::vertical()
        .id_salt("ge_releases")
        .show(ui, |ui| {
            for release in filtered_releases {
                ui.horizontal(|ui| {
                    ui.strong(&release.tag_name);
                    ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                        if installed_names.contains(&release.tag_name) {
                            // Record the delete request; actual removal is
                            // handled elsewhere via `pending_proton_delete`.
                            if ui.button("Uninstall").clicked() {
                                app.pending_proton_delete =
                                    Some((release.tag_name.clone(), "ge".to_string()));
                            }
                            ui.add_enabled(false, egui::Button::new("Installed"));
                        } else if ui
                            .add_enabled(!is_downloading, egui::Button::new("Download"))
                            .clicked()
                        {
                            download_ge_proton_ui(app, &release);
                        }
                    });
                });
                ui.separator();
            }
        });
}
/// Renders the CachyOS Proton release list (at most 10 entries after
/// filtering). Only releases shipping a `_v2.tar.xz` asset are shown,
/// since that is the variant the downloader installs.
fn render_cachyos_releases(
    app: &mut MyApp,
    ui: &mut egui::Ui,
    installed_names: &[String],
    is_downloading: bool,
) {
    // Lowercase the query once instead of once per release (loop-invariant).
    let query = app.proton_search_query.to_lowercase();
    // Clone matching releases out so the lock isn't held while rendering.
    let filtered_releases: Vec<GithubRelease> = {
        let releases = app.available_cachyos_versions.lock().unwrap();
        releases
            .iter()
            // Must have a v2 tar.xz asset
            .filter(|r| r.assets.iter().any(|a| a.name.contains("_v2.tar.xz")))
            .filter(|r| query.is_empty() || r.tag_name.to_lowercase().contains(&query))
            .take(10)
            .cloned()
            .collect()
    };
    if filtered_releases.is_empty() {
        // Distinguish "no match for the search" from "nothing fetched yet".
        let releases_empty = app.available_cachyos_versions.lock().unwrap().is_empty();
        if !releases_empty {
            ui.label("No matching versions found.");
        } else if !*app.is_fetching_cachyos.lock().unwrap() {
            ui.label("No versions available (Check internet connection).");
        }
        return;
    }
    egui::ScrollArea::vertical()
        .id_salt("cachyos_releases")
        .show(ui, |ui| {
            for release in filtered_releases {
                ui.horizontal(|ui| {
                    ui.strong(&release.tag_name);
                    ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
                        // Match installed builds by the 8-digit date segment
                        // embedded in the release tag.
                        let date_part = release
                            .tag_name
                            .split('-')
                            .find(|s| s.len() == 8 && s.chars().all(|c| c.is_ascii_digit()))
                            .unwrap_or("");
                        // Empty date_part would match everything via
                        // `contains("")`, so guard against it first.
                        let matching_installed = installed_names.iter().find(|n| {
                            !date_part.is_empty()
                                && n.contains("proton-cachyos")
                                && n.contains(date_part)
                        });
                        if let Some(installed_name) = matching_installed {
                            let name_to_delete = installed_name.clone();
                            // Record the delete request; actual removal is
                            // handled elsewhere via `pending_proton_delete`.
                            if ui.button("Uninstall").clicked() {
                                app.pending_proton_delete =
                                    Some((name_to_delete, "cachyos".to_string()));
                            }
                            ui.add_enabled(false, egui::Button::new("Installed"));
                        } else if ui
                            .add_enabled(!is_downloading, egui::Button::new("Download"))
                            .clicked()
                        {
                            download_cachyos_proton_ui(app, &release);
                        }
                    });
                });
                ui.separator();
            }
        });
}
/// Starts a background download of a CachyOS Proton release.
///
/// Picks the `_v2.tar.xz` asset from `release`, claims the shared
/// one-download-at-a-time flag, and spawns a worker thread that streams
/// progress/status back through the shared `Mutex` state on `app`.
/// Returns immediately; the UI polls the shared state for updates.
/// No-ops if another download is already running.
fn download_cachyos_proton_ui(app: &MyApp, release: &GithubRelease) {
    let is_downloading = app.is_downloading.clone();
    let status_msg = app.download_status.clone();
    let progress_val = app.download_progress.clone();
    let needs_refresh = app.download_needs_refresh.clone();

    // CachyOS ships several asset variants; only the v2 tarball is installed.
    let Some(asset) = release.assets.iter().find(|a| a.name.contains("_v2.tar.xz")) else {
        if let Ok(mut guard) = status_msg.lock() {
            *guard = "Error: No v2.tar.xz asset found".to_string();
        }
        return;
    };
    let file_name = asset.name.clone();
    let download_url = asset.browser_download_url.clone();

    // Claim the single download slot; bail out if one is already active.
    {
        let mut dl_guard = is_downloading.lock().unwrap();
        if *dl_guard {
            return;
        }
        *dl_guard = true;
    }

    // Reset shared progress/status before the worker starts.
    if let Ok(mut guard) = progress_val.lock() {
        *guard = 0.0;
    }
    if let Ok(mut guard) = status_msg.lock() {
        *guard = format!("Starting download: {}...", file_name);
    }

    thread::spawn(move || {
        let cb_progress_inner = progress_val.clone();
        let cb_status_progress = status_msg.clone();
        // Invoked by the downloader with (bytes_so_far, total_bytes).
        let progress_callback = move |current: u64, total: u64| {
            if total > 0 {
                let p = current as f32 / total as f32;
                if let Ok(mut guard) = cb_progress_inner.lock() {
                    *guard = p;
                }
                if let Ok(mut guard) = cb_status_progress.lock() {
                    *guard = format!("Downloading: {:.1}%", p * 100.0);
                }
            }
        };
        let cb_status_extract = status_msg.clone();
        // Invoked with human-readable status lines (e.g. during extraction).
        let status_callback = move |msg: &str| {
            if let Ok(mut guard) = cb_status_extract.lock() {
                *guard = msg.to_string();
            }
        };
        match download_cachyos_proton(download_url, file_name, progress_callback, status_callback) {
            Ok(_) => {
                if let Ok(mut guard) = status_msg.lock() {
                    *guard = "Download complete!".to_string();
                }
                if let Ok(mut guard) = progress_val.lock() {
                    *guard = 1.0;
                }
                // Signal UI to refresh proton list
                needs_refresh.store(true, Ordering::Relaxed);
            }
            Err(e) => {
                if let Ok(mut guard) = status_msg.lock() {
                    *guard = format!("Error: {}", e);
                }
            }
        }
        // Always release the download slot, even on failure.
        if let Ok(mut guard) = is_downloading.lock() {
            *guard = false;
        }
    });
}
/// Starts a background download of a GE-Proton release.
///
/// Picks the `.tar.gz` asset from `release`, claims the shared
/// one-download-at-a-time flag, and spawns a worker thread that streams
/// progress/status back through the shared `Mutex` state on `app`.
/// Returns immediately; the UI polls the shared state for updates.
/// No-ops if another download is already running.
fn download_ge_proton_ui(app: &MyApp, release: &GithubRelease) {
    let is_downloading = app.is_downloading.clone();
    let status_msg = app.download_status.clone();
    let progress_val = app.download_progress.clone();
    let needs_refresh = app.download_needs_refresh.clone();

    // GE releases ship the proton build as a single .tar.gz asset.
    let Some(asset) = release.assets.iter().find(|a| a.name.ends_with(".tar.gz")) else {
        if let Ok(mut guard) = status_msg.lock() {
            *guard = "Error: No .tar.gz asset found".to_string();
        }
        return;
    };
    let file_name = asset.name.clone();
    let download_url = asset.browser_download_url.clone();

    // Claim the single download slot; bail out if one is already active.
    {
        let mut dl_guard = is_downloading.lock().unwrap();
        if *dl_guard {
            return;
        }
        *dl_guard = true;
    }

    // Reset shared progress/status before the worker starts.
    if let Ok(mut guard) = progress_val.lock() {
        *guard = 0.0;
    }
    if let Ok(mut guard) = status_msg.lock() {
        *guard = format!("Starting download: {}...", file_name);
    }

    thread::spawn(move || {
        let cb_progress_inner = progress_val.clone();
        let cb_status_progress = status_msg.clone();
        // Invoked by the downloader with (bytes_so_far, total_bytes).
        let progress_callback = move |current: u64, total: u64| {
            if total > 0 {
                let p = current as f32 / total as f32;
                if let Ok(mut guard) = cb_progress_inner.lock() {
                    *guard = p;
                }
                if let Ok(mut guard) = cb_status_progress.lock() {
                    *guard = format!("Downloading: {:.1}%", p * 100.0);
                }
            }
        };
        let cb_status_extract = status_msg.clone();
        // Invoked with human-readable status lines (e.g. during extraction).
        let status_callback = move |msg: &str| {
            if let Ok(mut guard) = cb_status_extract.lock() {
                *guard = msg.to_string();
            }
        };
        match download_ge_proton(download_url, file_name, progress_callback, status_callback) {
            Ok(_) => {
                if let Ok(mut guard) = status_msg.lock() {
                    *guard = "Download complete!".to_string();
                }
                if let Ok(mut guard) = progress_val.lock() {
                    *guard = 1.0;
                }
                // Signal UI to refresh proton list
                needs_refresh.store(true, Ordering::Relaxed);
            }
            Err(e) => {
                if let Ok(mut guard) = status_msg.lock() {
                    *guard = format!("Error: {}", e);
                }
            }
        }
        // Always release the download slot, even on failure.
        if let Ok(mut guard) = is_downloading.lock() {
            *guard = false;
        }
    });
}
fn download_runtime_ui(app: &MyApp) {
let is_downloading = app.is_downloading.clone();
let status_msg = app.download_status.clone();
let progress_val = app.download_progress.clone();
let needs_refresh = app.download_needs_refresh.clone();
let mut dl_guard = is_downloading.lock().unwrap();
if *dl_guard {
return;
}
*dl_guard = true;
drop(dl_guard);
if let Ok(mut guard) = progress_val.lock() {
*guard = 0.0;
}
if let Ok(mut guard) = status_msg.lock() {
*guard = "Downloading Steam Runtime...".to_string();
}
thread::spawn(move || {
let cb_progress_inner = progress_val.clone();
let cb_status_inner = status_msg.clone();
let callback = move |current: u64, total: u64| {
if total > 0 {
let p = current as f32 / total as f32;
if let Ok(mut guard) = cb_progress_inner.lock() {
*guard = p;
}
if let Ok(mut guard) = cb_status_inner.lock() {
*guard = format!("Downloading Runtime: {:.1}%", p * 100.0);
}
}
};
match crate::wine::runtime::download_runtime(callback) {
Ok(_) => {
if let Ok(mut guard) = status_msg.lock() {
*guard = "Runtime Installed!".to_string();
}
if let Ok(mut guard) = progress_val.lock() {
*guard = 1.0;
}
// Signal UI to refresh
needs_refresh.store(true, Ordering::Relaxed);
}
Err(e) => {
if let Ok(mut guard) = status_msg.lock() {
*guard = format!("Error: {}", e);
}
}
}
if let Ok(mut guard) = is_downloading.lock() {
*guard = false;
}
});
}
/// Refresh GE-Proton releases from GitHub
fn refresh_ge_releases(app: &MyApp) {
use crate::wine::fetch_ge_releases;
let is_fetching = app.is_fetching_ge.clone();
let versions = app.available_ge_versions.clone();
// Don't start if already fetching
if *is_fetching.lock().unwrap() {
return;
}
*is_fetching.lock().unwrap() = true;
thread::spawn(move || {
match fetch_ge_releases() {
Ok(releases) => {
*versions.lock().unwrap() = releases;
}
Err(e) => {
eprintln!("Failed to fetch GE releases: {}", e);
}
}
*is_fetching.lock().unwrap() = false;
});
}
/// Refresh CachyOS Proton releases from GitHub
fn refresh_cachyos_releases(app: &MyApp) {
use crate::wine::fetch_cachyos_releases;
let is_fetching = app.is_fetching_cachyos.clone();
let versions = app.available_cachyos_versions.clone();
// Don't start if already fetching
if *is_fetching.lock().unwrap() {
return;
}
*is_fetching.lock().unwrap() = true;
thread::spawn(move || {
match fetch_cachyos_releases() {
Ok(releases) => {
*versions.lock().unwrap() = releases;
}
Err(e) => {
eprintln!("Failed to fetch CachyOS releases: {}", e);
}
}
*is_fetching.lock().unwrap() = false;
});
}
| rust | MIT | 112deef949a5a4acf846c040d97c24250fed25ee | 2026-01-04T20:14:01.598686Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/src/main.rs | src/main.rs | fn main() {
println!("usage: `cargo test`");
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-tests.rs | tests/compile-tests.rs | extern crate compiletest_rs as compiletest;
use std::path::PathBuf;
fn run_mode(mode: &'static str) {
let mut config = compiletest::Config::default();
config.mode = mode.parse().expect("Invalid mode");
config.src_base = PathBuf::from(format!("tests/{}", mode));
config.link_deps(); // Populate config.target_rustcflags with dependencies on the path
config.clean_rmeta(); // If your tests import the parent crate, this helps with E0464
config.target_rustcflags = Some("-D unused -D warnings".to_string());
compiletest::run_tests(&config);
}
#[test]
fn compile_test() {
run_mode("compile-fail");
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_13_3.rs | tests/compile-fail/Rule_13_3.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_22_2.rs | tests/compile-fail/Rule_22_2.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_17_1.rs | tests/compile-fail/Rule_17_1.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_5_8.rs | tests/compile-fail/Rule_5_8.rs | pub static count: i32 = 1;
fn main() {
let count: i32 = 1;
//~^ ERROR let bindings cannot shadow statics
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_6_1.rs | tests/compile-fail/Rule_6_1.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_13_4.rs | tests/compile-fail/Rule_13_4.rs | fn main() {
let a: u8;
if a = 1 {
//~^ ERROR mismatched types
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_16_7.rs | tests/compile-fail/Rule_16_7.rs | fn main() {
let i = true;
match i as bool {
//~^ ERROR Non-compliant - match on a boolean expression
false => {
let _ = 1;
}
_ => {
let _ = 2;
}
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_12_4.rs | tests/compile-fail/Rule_12_4.rs | fn main() {
let u8a: u8 = 0;
let _x = u8a - 10;
//~^ERROR Non-compliant - attempt to subtract with overflow
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_16_2.rs | tests/compile-fail/Rule_16_2.rs | fn main() {
let x = 0..1;
for i in x {
match i {
0 => {
1 => { /* Malformed match. */ } //~ ERROR mismatched types
//~^ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `=>`
},
_ => { /*'default' case.*/ }
}
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_16_5.rs | tests/compile-fail/Rule_16_5.rs | fn main() {
let i = 1;
match i {
0 => {}
_ => {}
1 => {} //~ ERROR unreachable pattern
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_10_1.rs | tests/compile-fail/Rule_10_1.rs | #[allow(unused_variables)]
fn main() {
let x: i32 = 0xFF;
let y = x << 2;
//~^ ERROR Non-compliant - inappropriate essential type
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_5_3.rs | tests/compile-fail/Rule_5_3.rs | fn main() {
let i: i16 = 1;
if true {
let i: i16 = 0; //~ ERROR Non-compliant - `i` shadows outer scope.
let _ = i;
}
let _ = i;
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_10_8.rs | tests/compile-fail/Rule_10_8.rs | fn main() {
let x: u16 = 1;
let y: u16 = 2;
let _: u32 = (x + y) as u32;
//~^ ERROR Non-compliant - composite cast to wider type
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_20_7.rs | tests/compile-fail/Rule_20_7.rs | macro_rules! three {
() => 3; //~ ERROR macro rhs must be delimited
}
fn main() {
let _ = three!();
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_6_2.rs | tests/compile-fail/Rule_6_2.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_4_1.rs | tests/compile-fail/Rule_4_1.rs | fn main() {
let x = "\x41\x4g";
//~^ ERROR invalid character in numeric character escape: g
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_21_4.rs | tests/compile-fail/Rule_21_4.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_5_6.rs | tests/compile-fail/Rule_5_6.rs | #[allow(dead_code)]
fn main() {
type U8 = bool;
{
type U8 = u8;
//~^ ERROR Non-compliant - type name shadows U8
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_20_5.rs | tests/compile-fail/Rule_20_5.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_20_6.rs | tests/compile-fail/Rule_20_6.rs | macro_rules! double_it {
($e:expr) => {
$e * $e
};
}
fn main() {
let _ = double_it!(
#[cfg(feature = 10)]
10
#[cfg(feature = 100)]
//~^ ERROR no rules expected the token `#`
100
);
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_21_5.rs | tests/compile-fail/Rule_21_5.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_12_2.rs | tests/compile-fail/Rule_12_2.rs | fn main() {
let u8a: u8 = 1;
let _: u8 = u8a << 8;
//~^ ERROR attempt to shift left with overflow
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_12_1.rs | tests/compile-fail/Rule_12_1.rs | fn main() {
let x: usize = 1;
if x >= 2 << 2 + 1 as usize {
//~^ ERROR Non-compliant - operator precedence can trip the unwary
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_7_1.rs | tests/compile-fail/Rule_7_1.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_1_1.rs | tests/compile-fail/Rule_1_1.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_7_4.rs | tests/compile-fail/Rule_7_4.rs | fn main() {
let mut _l = "string literal";
//~^ ERROR Non-compliant - string literal not const-qualified.
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_8_5.rs | tests/compile-fail/Rule_8_5.rs | pub const PUBLIC_ZERO: u64 = 0u64;
pub const PUBLIC_ZERO: u64 = 0u64;
//~^ ERROR the name `PUBLIC_ZERO` is defined multiple times
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_20_11.rs | tests/compile-fail/Rule_20_11.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_19_1.rs | tests/compile-fail/Rule_19_1.rs | union UnionA {
f1: i16,
f2: i32,
}
fn main() {
let u = UnionA { f2: 0 };
u.f1 = u.f2;
//~^ ERROR mismatched types
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_11_2.rs | tests/compile-fail/Rule_11_2.rs | #[derive(Debug)]
struct Type {
f: f64,
}
fn main() {
let t = Type { f: 3.14 };
let r1: &Type = t as &Type;
//~^ ERROR non-primitive cast: `Type` as `&Type`
let r2: &Type = From::from(t);
//~^ ERROR the trait bound `&Type: std::convert::From<Type>` is not satisfied
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_14_3.rs | tests/compile-fail/Rule_14_3.rs | fn main() {
let a: i32 = 0;
if (a < 10) && (a > 20) {
//~^ ERROR Non-compliant - always true
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_11_8.rs | tests/compile-fail/Rule_11_8.rs | fn main() {
let a = 1;
*(&mut a) = 2;
//~^ ERROR cannot borrow immutable local variable `a` as mutable
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_2_6.rs | tests/compile-fail/Rule_2_6.rs | #[allow(unused_labels)]
#[deny(unreachable_code)]
fn main() {
'outer: loop {
'inner: loop {
break 'outer;
}
'unreachable: loop {
//~^ ERROR unreachable expression
break 'outer;
}
}
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_22_3.rs | tests/compile-fail/Rule_22_3.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_4_2.rs | tests/compile-fail/Rule_4_2.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_18_4.rs | tests/compile-fail/Rule_18_4.rs | fn main() {
let a: [u8; 10] = [0; 10];
let ptr = &a[5];
(ptr + 5) = 0;
//~^ ERROR invalid left-hand side expression
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_7_3.rs | tests/compile-fail/Rule_7_3.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_21_6.rs | tests/compile-fail/Rule_21_6.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_13_2.rs | tests/compile-fail/Rule_13_2.rs | /// This function has a side effect.
fn increment(x: &mut u8) -> &mut u8 {
*x += 1;
x
}
/// This function does not have a side effect.
fn add(x: u8, y: u8) -> u8 {
x + y
}
fn main() {
let mut x: u8 = 0;
let _ = add(*increment(&mut x), x);
//~ ERROR Non-compliant - evaluation order effects result
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_8_8.rs | tests/compile-fail/Rule_8_8.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_3_2.rs | tests/compile-fail/Rule_3_2.rs | fn main() {
// \
compile_error_if_not_commented(true);
//~^ ERROR cannot find function `compile_error_if_not_commented` in this scope
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_15_1.rs | tests/compile-fail/Rule_15_1.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_12_3.rs | tests/compile-fail/Rule_12_3.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_9_5.rs | tests/compile-fail/Rule_9_5.rs | fn main() {
let x: [i32] = [0, 1]; //~ ERROR mismatched types
//~^ ERROR the size for value values of type `[i32]` cannot be known at compilation time
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_17_3.rs | tests/compile-fail/Rule_17_3.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_5_2.rs | tests/compile-fail/Rule_5_2.rs | #[allow(unused_variables)]
fn main() {
let engine_exhaust_gas_temperature_raw: i32 = 0;
let engine_exhaust_gas_temperature_scaled: i32 = 1;
//~^ ERROR Non-compliant - variable name shadows engine_exhaust_gas_temperature_raw
}
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
PolySync/misra-rust | https://github.com/PolySync/misra-rust/blob/caa5e5bf9133f277f60a5807e79da6789a1fbb1e/tests/compile-fail/Rule_13_6.rs | tests/compile-fail/Rule_13_6.rs | "N/A"
//~^ ERROR expected item, found `"N/A"`
| rust | MIT | caa5e5bf9133f277f60a5807e79da6789a1fbb1e | 2026-01-04T20:22:35.104590Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.