repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-macros/src/scope.rs | crates/typst-macros/src/scope.rs | use heck::ToKebabCase;
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::{MetaNameValue, Result, Token, parse_quote};
use crate::util::{BareType, foundations, kw, parse_flag};
/// Expand the `#[scope]` macro.
///
/// Takes an impl block and produces a `NativeScope` implementation that
/// exposes the block's constants, functions, and bare type/element items as
/// scope definitions.
pub fn scope(stream: TokenStream, item: syn::Item) -> Result<TokenStream> {
    let meta: Meta = syn::parse2(stream)?;

    let syn::Item::Impl(mut item) = item else {
        bail!(item, "expected module or impl item");
    };

    let self_ty = &item.self_ty;

    // With `#[scope(ext)]` on a plain path type, the definitions are routed
    // through a generated `{Ident}Ext` extension trait rather than an
    // inherent impl (see `rewrite_primitive_base`).
    let mut primitive_ident_ext = None;
    if let syn::Type::Path(syn::TypePath { path, .. }) = self_ty.as_ref()
        && let Some(ident) = path.get_ident()
        && meta.ext
    {
        let ident_ext = quote::format_ident!("{ident}Ext");
        primitive_ident_ext = Some(ident_ext);
    }

    // How associated items are referenced in the generated code: either
    // `Ty::item` or `<Ty as TyExt>::item`.
    let self_ty_expr = match &primitive_ident_ext {
        None => quote! { #self_ty },
        Some(ident_ext) => quote! { <#self_ty as #ident_ext> },
    };

    let mut definitions = vec![];
    let mut constructor = quote! { None };
    for child in &mut item.items {
        let bare: BareType;
        let (mut def, attrs) = match child {
            syn::ImplItem::Const(item) => {
                (handle_const(&self_ty_expr, item)?, &item.attrs)
            }
            syn::ImplItem::Fn(item) => (
                match handle_fn(self_ty, item)? {
                    FnKind::Member(tokens) => tokens,
                    // A constructor is registered separately instead of
                    // being added to the scope's definitions.
                    FnKind::Constructor(tokens) => {
                        constructor = tokens;
                        continue;
                    }
                },
                &item.attrs,
            ),
            // Bare `type`/`elem` items are not valid Rust, so they arrive
            // as verbatim token streams and are parsed manually.
            syn::ImplItem::Verbatim(item) => {
                bare = syn::parse2(item.clone())?;
                (handle_type_or_elem(&bare)?, &bare.attrs)
            }
            _ => bail!(child, "unexpected item in scope"),
        };

        // Attach deprecation metadata from a `#[deprecated]` attribute:
        // either `#[deprecated = "msg"]` or
        // `#[deprecated(message = "..", until = "..")]`.
        if let Some(attr) = attrs.iter().find(|attr| attr.path().is_ident("deprecated")) {
            match &attr.meta {
                syn::Meta::NameValue(pair) if pair.path.is_ident("deprecated") => {
                    let message = &pair.value;
                    def = quote! { #def.deprecated(#message) }
                }
                syn::Meta::List(list) if list.path.is_ident("deprecated") => {
                    let args = list.parse_args_with(
                        Punctuated::<MetaNameValue, Token![,]>::parse_separated_nonempty,
                    )?;
                    let mut deprecation =
                        quote! { crate::foundations::Deprecation::new() };
                    if let Some(message) = args.iter().find_map(|pair| {
                        pair.path.is_ident("message").then_some(&pair.value)
                    }) {
                        deprecation = quote! { #deprecation.with_message(#message) }
                    }
                    if let Some(version) = args.iter().find_map(|pair| {
                        pair.path.is_ident("until").then_some(&pair.value)
                    }) {
                        deprecation = quote! { #deprecation.with_until(#version) }
                    }
                    def = quote! { #def.deprecated(#deprecation) }
                }
                _ => {}
            }
        }

        definitions.push(def);
    }

    // The verbatim items were macro-only pseudo-syntax; drop them from the
    // emitted impl block.
    item.items.retain(|item| !matches!(item, syn::ImplItem::Verbatim(_)));

    let base = match &primitive_ident_ext {
        None => quote! { #item },
        Some(ident_ext) => rewrite_primitive_base(&item, ident_ext),
    };

    Ok(quote! {
        #base

        impl #foundations::NativeScope for #self_ty {
            fn constructor() -> ::std::option::Option<&'static #foundations::NativeFuncData> {
                #constructor
            }

            #[allow(deprecated)]
            fn scope() -> #foundations::Scope {
                let mut scope = #foundations::Scope::deduplicating();
                #(#definitions;)*
                scope
            }
        }
    })
}
/// The `..` in `#[scope(..)]`.
struct Meta {
    /// Whether the scope should be implemented through an extension
    /// trait instead of an inherent impl.
    ext: bool,
}
impl Parse for Meta {
    /// Parse the single optional `ext` flag.
    fn parse(input: ParseStream) -> Result<Self> {
        let ext = parse_flag::<kw::ext>(input)?;
        Ok(Self { ext })
    }
}
/// Process a const item and return the `scope.define(..)` call for it.
///
/// The constant is exposed under the kebab-cased version of its Rust name.
fn handle_const(self_ty: &TokenStream, item: &syn::ImplItemConst) -> Result<TokenStream> {
    let ident = &item.ident;
    let kebab = ident.to_string().to_kebab_case();
    Ok(quote! { scope.define(#kebab, #self_ty::#ident) })
}
/// Process a bare type or element item and return its scope definition.
///
/// Items carrying an `#[elem]` attribute are registered as elements, all
/// others as plain types.
fn handle_type_or_elem(item: &BareType) -> Result<TokenStream> {
    let ident = &item.ident;
    let is_elem = item.attrs.iter().any(|attr| attr.path().is_ident("elem"));
    let define = if is_elem {
        quote! { define_elem }
    } else {
        quote! { define_type }
    };
    Ok(quote! { scope.#define::<#ident>() })
}
/// Process a function, return its definition, and register it as a constructor
/// if applicable.
fn handle_fn(self_ty: &syn::Type, item: &mut syn::ImplItemFn) -> Result<FnKind> {
    // Every scope function must carry `#[func]`; the generated code refers
    // to a `{ident}_data` getter, which is expected to be provided by the
    // `#[func]` expansion.
    let Some(attr) = item.attrs.iter_mut().find(|attr| attr.meta.path().is_ident("func"))
    else {
        bail!(item, "scope function is missing #[func] attribute");
    };

    let ident_data = quote::format_ident!("{}_data", item.sig.ident);

    match &mut attr.meta {
        // Plain `#[func]`: rewrite to record the parent type.
        syn::Meta::Path(_) => {
            *attr = parse_quote! { #[func(parent = #self_ty)] };
        }
        // `#[func(..)]` with arguments: append `parent = ..` and detect
        // whether the function is marked as a constructor.
        syn::Meta::List(list) => {
            let tokens = &list.tokens;
            let meta: crate::func::Meta = syn::parse2(tokens.clone())?;
            list.tokens = quote! { #tokens, parent = #self_ty };
            if meta.constructor {
                return Ok(FnKind::Constructor(quote! { Some(#self_ty::#ident_data()) }));
            }
        }
        syn::Meta::NameValue(_) => bail!(attr.meta, "invalid func attribute"),
    }

    Ok(FnKind::Member(quote! { scope.define_func_with_data(#self_ty::#ident_data()) }))
}
/// The kind of function encountered by [`handle_fn`].
enum FnKind {
    /// A constructor: becomes the scope's `constructor()` instead of a
    /// regular definition.
    Constructor(TokenStream),
    /// A regular member function defined in the scope.
    Member(TokenStream),
}
/// Rewrite an impl block for a primitive into a trait + trait impl.
///
/// Emits an `#ident_ext` trait containing the signatures of all functions
/// and constants from the impl block, together with an implementation of
/// that trait for the primitive type.
fn rewrite_primitive_base(item: &syn::ItemImpl, ident_ext: &syn::Ident) -> TokenStream {
    let mut sigs = vec![];
    let mut items = vec![];
    for sub in &item.items {
        match sub.clone() {
            syn::ImplItem::Fn(mut func) => {
                // Trait methods have no own visibility.
                func.vis = syn::Visibility::Inherited;
                items.push(func.clone());
                let mut sig = func.sig;
                // Strip attributes from the arguments; they are not valid
                // in a bare trait method signature.
                let inputs = sig.inputs.iter().cloned().map(|mut input| {
                    if let syn::FnArg::Typed(typed) = &mut input {
                        typed.attrs.clear();
                    }
                    input
                });
                sig.inputs = parse_quote! { #(#inputs),* };
                // Also declare the `{ident}_data` getter so it is reachable
                // through the trait (presumably generated by `#[func]` —
                // see `handle_fn`).
                let ident_data = quote::format_ident!("{}_data", sig.ident);
                sigs.push(quote! { #sig; });
                sigs.push(quote! {
                    fn #ident_data() -> &'static #foundations::NativeFuncData;
                });
            }
            syn::ImplItem::Const(cons) => {
                sigs.push(quote! { #cons });
            }
            _ => {}
        }
    }

    let self_ty = &item.self_ty;
    quote! {
        #[allow(non_camel_case_types)]
        trait #ident_ext {
            #(#sigs)*
        }

        impl #ident_ext for #self_ty {
            #(#items)*
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-macros/src/cast.rs | crates/typst-macros/src/cast.rs | use heck::ToKebabCase;
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::{DeriveInput, Ident, Result, Token};
use crate::util::{documentation, foundations};
/// Expand the `#[derive(Cast)]` macro.
///
/// Generates a `cast!` invocation that maps each enum variant to a string:
/// the kebab-cased variant name by default, or the name given with
/// `#[string("..")]`.
pub fn derive_cast(item: DeriveInput) -> Result<TokenStream> {
    let ty = &item.ident;

    let syn::Data::Enum(data) = &item.data else {
        bail!(item, "only enums are supported");
    };

    // Collect the string representation and docs for each variant.
    let mut variants = vec![];
    for variant in &data.variants {
        if let Some((_, expr)) = &variant.discriminant {
            bail!(expr, "explicit discriminant is not allowed");
        }

        // `#[string("..")]` overrides the default kebab-case name.
        let string = if let Some(attr) =
            variant.attrs.iter().find(|attr| attr.path().is_ident("string"))
        {
            attr.parse_args::<syn::LitStr>()?.value()
        } else {
            variant.ident.to_string().to_kebab_case()
        };

        variants.push(Variant {
            ident: variant.ident.clone(),
            string,
            docs: documentation(&variant.attrs),
        });
    }

    // Arms casting a string to a variant ...
    let strs_to_variants = variants.iter().map(|Variant { ident, string, docs }| {
        quote! {
            #[doc = #docs]
            #string => Self::#ident
        }
    });

    // ... and arms turning a variant back into a string.
    let variants_to_strs = variants.iter().map(|Variant { ident, string, .. }| {
        quote! {
            #ty::#ident => #string
        }
    });

    Ok(quote! {
        #foundations::cast! {
            #ty,
            self => #foundations::IntoValue::into_value(match self {
                #(#variants_to_strs),*
            }),
            #(#strs_to_variants),*
        }
    })
}
/// An enum variant in a `derive(Cast)`.
struct Variant {
    /// The variant's Rust identifier.
    ident: Ident,
    /// The string the variant is cast from and to.
    string: String,
    /// The variant's documentation.
    docs: String,
}
/// Expand the `cast!` macro.
///
/// Generates `Reflect`, `IntoValue`, and `FromValue` implementations for the
/// given type; each impl is only emitted when the corresponding parts of the
/// input call for it.
pub fn cast(stream: TokenStream) -> Result<TokenStream> {
    let input: CastInput = syn::parse2(stream)?;
    let ty = &input.ty;

    let castable_body = create_castable_body(&input);
    let input_body = create_input_body(&input);
    let output_body = create_output_body(&input);
    let into_value_body = create_into_value_body(&input);
    let from_value_body = create_from_value_body(&input);

    // `Reflect` and `FromValue` require casts from values (or a dynamic
    // type); `IntoValue` requires a `self => ..` arm (or a dynamic type).
    let reflect = (!input.from_value.is_empty() || input.dynamic).then(|| {
        quote! {
            impl #foundations::Reflect for #ty {
                fn input() -> #foundations::CastInfo {
                    #input_body
                }

                fn output() -> #foundations::CastInfo {
                    #output_body
                }

                fn castable(value: &#foundations::Value) -> bool {
                    #castable_body
                }
            }
        }
    });

    let into_value = (input.into_value.is_some() || input.dynamic).then(|| {
        quote! {
            impl #foundations::IntoValue for #ty {
                fn into_value(self) -> #foundations::Value {
                    #into_value_body
                }
            }
        }
    });

    let from_value = (!input.from_value.is_empty() || input.dynamic).then(|| {
        quote! {
            impl #foundations::FromValue for #ty {
                fn from_value(value: #foundations::Value) -> ::typst_library::diag::HintedStrResult<Self> {
                    #from_value_body
                }
            }
        }
    });

    Ok(quote! {
        #reflect
        #into_value
        #from_value
    })
}
/// The input to `cast!`.
struct CastInput {
    /// The type the casts are implemented for.
    ty: syn::Type,
    /// Whether the type was prefixed with `type`, marking it as dynamic
    /// (represented as `Value::Dyn` at runtime).
    dynamic: bool,
    /// The expression of the optional `self => ..` arm that converts the
    /// type into a value.
    into_value: Option<syn::Expr>,
    /// The casts from a value into the type.
    from_value: Punctuated<Cast, Token![,]>,
}
impl Parse for CastInput {
    fn parse(input: ParseStream) -> Result<Self> {
        // A leading `type` keyword marks the type as dynamic.
        let mut dynamic = false;
        if input.peek(syn::Token![type]) {
            let _: syn::Token![type] = input.parse()?;
            dynamic = true;
        }

        let ty = input.parse()?;
        let _: syn::Token![,] = input.parse()?;

        // An optional `self => expr,` arm describes the conversion into a
        // value.
        let mut to_value = None;
        if input.peek(syn::Token![self]) {
            let _: syn::Token![self] = input.parse()?;
            let _: syn::Token![=>] = input.parse()?;
            to_value = Some(input.parse()?);
            let _: syn::Token![,] = input.parse()?;
        }

        // All remaining comma-separated arms are casts from a value.
        let from_value = Punctuated::parse_terminated(input)?;

        Ok(Self { ty, dynamic, into_value: to_value, from_value })
    }
}
impl Parse for Cast {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(syn::Attribute::parse_outer)?;
let pattern = input.parse()?;
let _: syn::Token![=>] = input.parse()?;
let expr = input.parse()?;
Ok(Self { attrs, pattern, expr })
}
}
impl Parse for Pattern {
    /// Parses either a string literal or a `pat: ty` pair.
    fn parse(input: ParseStream) -> Result<Self> {
        if !input.peek(syn::LitStr) {
            // Not a string: must be a binding pattern with a source type.
            let pat = syn::Pat::parse_single(input)?;
            input.parse::<syn::Token![:]>()?;
            let ty = input.parse()?;
            return Ok(Pattern::Ty(pat, ty));
        }
        Ok(Pattern::Str(input.parse()?))
    }
}
/// A single cast, e.g. `v: i64 => Self::Int(v)`.
struct Cast {
    /// Attributes (documentation) attached to the cast.
    attrs: Vec<syn::Attribute>,
    /// The pattern left of the `=>`.
    pattern: Pattern,
    /// The expression producing the cast result.
    expr: syn::Expr,
}

/// A pattern in a cast, e.g.`"ascender"` or `v: i64`.
#[allow(clippy::large_enum_variant)]
enum Pattern {
    /// A string literal matched against string values.
    Str(syn::LitStr),
    /// A binding pattern together with the source type to cast from.
    Ty(syn::Pat, syn::Type),
}
/// Creates the body of the generated `Reflect::castable` implementation: a
/// sequence of checks that return `true` as soon as one cast applies.
fn create_castable_body(input: &CastInput) -> TokenStream {
    let mut strings = vec![];
    let mut casts = vec![];

    for cast in &input.from_value {
        match &cast.pattern {
            // String patterns are collected into a single match over the
            // string's contents.
            Pattern::Str(lit) => {
                strings.push(quote! { #lit => return true });
            }
            // Type patterns delegate to that type's castability.
            Pattern::Ty(_, ty) => {
                casts.push(quote! {
                    if <#ty as #foundations::Reflect>::castable(value) {
                        return true;
                    }
                });
            }
        }
    }

    // A dynamic value is castable if it holds exactly this type.
    let dynamic_check = input.dynamic.then(|| {
        quote! {
            if let #foundations::Value::Dyn(dynamic) = &value {
                if dynamic.is::<Self>() {
                    return true;
                }
            }
        }
    });

    let str_check = (!strings.is_empty()).then(|| {
        quote! {
            if let #foundations::Value::Str(string) = &value {
                match string.as_str() {
                    #(#strings,)*
                    _ => {}
                }
            }
        }
    });

    quote! {
        #dynamic_check
        #str_check
        #(#casts)*
        false
    }
}
/// Creates the body of the generated `Reflect::input` implementation: the
/// `CastInfo`s of all accepted inputs.
fn create_input_body(input: &CastInput) -> TokenStream {
    let mut infos = vec![];

    for cast in &input.from_value {
        let docs = documentation(&cast.attrs);
        infos.push(match &cast.pattern {
            Pattern::Str(lit) => {
                quote! {
                    #foundations::CastInfo::Value(
                        #foundations::IntoValue::into_value(#lit),
                        #docs,
                    )
                }
            }
            Pattern::Ty(_, ty) => {
                quote! { <#ty as #foundations::Reflect>::input() }
            }
        });
    }

    if input.dynamic {
        infos.push(quote! {
            #foundations::CastInfo::Type(#foundations::Type::of::<Self>())
        });
    }

    // The infos are combined with `+` (presumably a union of the
    // alternatives — see `CastInfo`'s `Add` impl).
    quote! {
        #(#infos)+*
    }
}
/// Creates the body of the generated `Reflect::output` implementation.
fn create_output_body(input: &CastInput) -> TokenStream {
    // Non-dynamic types describe their output as their input; dynamic
    // types describe themselves.
    if !input.dynamic {
        return quote! { <Self as #foundations::Reflect>::input() };
    }
    quote! { #foundations::CastInfo::Type(#foundations::Type::of::<Self>()) }
}
/// Creates the body of the generated `IntoValue::into_value` implementation.
fn create_into_value_body(input: &CastInput) -> TokenStream {
    // Use the user-provided `self => ..` expression if present; otherwise
    // wrap the value dynamically.
    match &input.into_value {
        Some(expr) => quote! { #expr },
        None => quote! { #foundations::Value::dynamic(self) },
    }
}
/// Creates the body of the generated `FromValue::from_value` implementation:
/// tries each cast in turn and errors if none applies.
fn create_from_value_body(input: &CastInput) -> TokenStream {
    let mut string_arms = vec![];
    let mut cast_checks = vec![];

    for cast in &input.from_value {
        let expr = &cast.expr;
        match &cast.pattern {
            Pattern::Str(lit) => {
                string_arms.push(quote! { #lit => return Ok(#expr) });
            }
            Pattern::Ty(binding, ty) => {
                // Check castability by reference first so that `value` is
                // only moved into the successful cast.
                cast_checks.push(quote! {
                    if <#ty as #foundations::Reflect>::castable(&value) {
                        let #binding = <#ty as #foundations::FromValue>::from_value(value)?;
                        return Ok(#expr);
                    }
                });
            }
        }
    }

    // A dynamic value converts if it holds exactly this type.
    let dynamic_check = input.dynamic.then(|| {
        quote! {
            if let #foundations::Value::Dyn(dynamic) = &value {
                if let Some(concrete) = dynamic.downcast::<Self>() {
                    return Ok(concrete.clone());
                }
            }
        }
    });

    let str_check = (!string_arms.is_empty()).then(|| {
        quote! {
            if let #foundations::Value::Str(string) = &value {
                match string.as_str() {
                    #(#string_arms,)*
                    _ => {}
                }
            }
        }
    });

    quote! {
        #dynamic_check
        #str_check
        #(#cast_checks)*
        Err(<Self as #foundations::Reflect>::error(&value))
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-macros/src/ty.rs | crates/typst-macros/src/ty.rs | use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::{Parse, ParseStream};
use syn::{Attribute, Ident, Result};
use crate::util::{
BareType, determine_name_and_title, documentation, foundations, kw, parse_flag,
parse_string, parse_string_array,
};
/// Expand the `#[ty]` macro.
pub fn ty(stream: TokenStream, item: syn::Item) -> Result<TokenStream> {
    let meta: Meta = syn::parse2(stream)?;
    let bare: BareType;
    // Determine the identifier and attributes, and whether the item itself
    // is kept in the output (bare verbatim items are macro-only syntax).
    let (ident, attrs, keep) = match &item {
        syn::Item::Struct(item) => (&item.ident, &item.attrs, true),
        syn::Item::Type(item) => (&item.ident, &item.attrs, true),
        syn::Item::Enum(item) => (&item.ident, &item.attrs, true),
        syn::Item::Verbatim(item) => {
            bare = syn::parse2(item.clone())?;
            (&bare.ident, &bare.attrs, false)
        }
        _ => bail!(item, "invalid type item"),
    };
    let ty = parse(meta, ident.clone(), attrs)?;
    Ok(create(&ty, keep.then_some(&item)))
}
/// Holds all relevant parsed data about a type.
struct Type {
    /// The parsed `#[ty(..)]` arguments.
    meta: Meta,
    /// The name for this type given in Rust.
    ident: Ident,
    /// The type's identifier as exposed to Typst.
    name: String,
    /// The type's long name (the lowercased title).
    long: String,
    /// The type's title case name.
    title: String,
    /// The documentation for this type as a string.
    docs: String,
}
/// The `..` in `#[ty(..)]`.
struct Meta {
    /// Whether this element has an associated scope defined by the `#[scope]` macro.
    scope: bool,
    /// Whether a custom cast implementation will be defined for this type.
    cast: bool,
    /// An optional override for the type's Typst name.
    name: Option<String>,
    /// An optional override for the type's title.
    title: Option<String>,
    /// Additional keywords, passed through to `NativeTypeData`.
    keywords: Vec<String>,
}
impl Parse for Meta {
    /// Parses the flags and key-value settings in their fixed order.
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Self {
            scope: parse_flag::<kw::scope>(input)?,
            cast: parse_flag::<kw::cast>(input)?,
            name: parse_string::<kw::name>(input)?,
            title: parse_string::<kw::title>(input)?,
            keywords: parse_string_array::<kw::keywords>(input)?,
        })
    }
}
/// Parse details about the type from its definition.
fn parse(meta: Meta, ident: Ident, attrs: &[Attribute]) -> Result<Type> {
    let docs = documentation(attrs);
    // Name and title may be overridden via the macro's meta arguments.
    let (name, title) =
        determine_name_and_title(meta.name.clone(), meta.title.clone(), &ident, None)?;
    // The long name is the lowercased title.
    let long = title.to_lowercase();
    Ok(Type { meta, ident, name, long, title, docs })
}
/// Produce the output of the macro: the (optionally kept) item, a default
/// cast implementation, and the `NativeType` impl.
fn create(ty: &Type, item: Option<&syn::Item>) -> TokenStream {
    let Type { ident, name, long, title, docs, meta, .. } = ty;
    let Meta { keywords, .. } = meta;

    // With `scope`, constructor and scope come from the type's
    // `NativeScope` impl; otherwise there is no constructor and the scope
    // is empty.
    let constructor = if meta.scope {
        quote! { <#ident as #foundations::NativeScope>::constructor() }
    } else {
        quote! { None }
    };

    let scope = if meta.scope {
        quote! { <#ident as #foundations::NativeScope>::scope() }
    } else {
        quote! { #foundations::Scope::new() }
    };

    // Unless a custom cast implementation was promised via `cast`, emit a
    // minimal dynamic `cast!` invocation for the type.
    let cast = (!meta.cast).then(|| {
        quote! {
            #foundations::cast! { type #ident, }
        }
    });

    let data = quote! {
        #foundations::NativeTypeData {
            name: #name,
            long_name: #long,
            title: #title,
            docs: #docs,
            keywords: &[#(#keywords),*],
            constructor: ::std::sync::LazyLock::new(|| #constructor),
            scope: ::std::sync::LazyLock::new(|| #scope),
        }
    };

    // The attribute is only emitted when the original item is kept.
    let attr = item.map(|_| {
        quote! {
            #[allow(rustdoc::broken_intra_doc_links)]
        }
    });

    quote! {
        #attr
        #item
        #cast

        impl #foundations::NativeType for #ident {
            const NAME: &'static str = #name;

            fn data() -> &'static #foundations::NativeTypeData {
                static DATA: #foundations::NativeTypeData = #data;
                &DATA
            }
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-timing/src/lib.rs | crates/typst-timing/src/lib.rs | //! Performance timing for Typst.
use std::io::Write;
use std::num::NonZeroU64;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use parking_lot::Mutex;
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
/// Creates a timing scope around an expression.
///
/// The output of the expression is returned.
///
/// The scope will be named `name` and will have the span `span`. The span is
/// optional. Events are only recorded while timing is enabled (see
/// [`enable`]).
///
/// ## Example
///
/// ```rs
/// // With a scope name and span.
/// timed!(
///     "my scope",
///     span = Span::detached(),
///     std::thread::sleep(std::time::Duration::from_secs(1)),
/// );
///
/// // With a scope name and no span.
/// timed!(
///     "my scope",
///     std::thread::sleep(std::time::Duration::from_secs(1)),
/// );
/// ```
#[macro_export]
macro_rules! timed {
    ($name:expr, span = $span:expr, $body:expr $(,)?) => {{
        // Binding the scope keeps it alive until the end of the block, so
        // its `Drop` records the end event after the body has finished.
        let __scope = $crate::TimingScope::with_span($name, Some($span));
        $body
    }};
    ($name:expr, $body:expr $(,)?) => {{
        let __scope = $crate::TimingScope::new($name);
        $body
    }};
}
thread_local! {
    /// Data that is initialized once per thread.
    static THREAD_DATA: ThreadData = ThreadData {
        id: {
            // A global counter hands out a unique ID per thread.
            // We only need atomicity and no synchronization of other
            // operations, so `Relaxed` is fine.
            static COUNTER: AtomicU64 = AtomicU64::new(1);
            COUNTER.fetch_add(1, Ordering::Relaxed)
        },
        #[cfg(all(target_arch = "wasm32", feature = "wasm"))]
        timer: WasmTimer::new(),
    };
}
/// Whether the timer is enabled. Defaults to `false`.
static ENABLED: AtomicBool = AtomicBool::new(false);

/// The list of collected events, shared between all threads.
static EVENTS: Mutex<Vec<Event>> = Mutex::new(Vec::new());
/// Enable the timer.
#[inline]
pub fn enable() {
    // We only need atomicity and no synchronization of other
    // operations, so `Relaxed` is fine.
    ENABLED.store(true, Ordering::Relaxed);
}

/// Disable the timer.
///
/// Already-collected events are kept; use [`clear`] to drop them.
#[inline]
pub fn disable() {
    // We only need atomicity and no synchronization of other
    // operations, so `Relaxed` is fine.
    ENABLED.store(false, Ordering::Relaxed);
}

/// Whether the timer is enabled.
#[inline]
pub fn is_enabled() -> bool {
    ENABLED.load(Ordering::Relaxed)
}

/// Clears the recorded events.
#[inline]
pub fn clear() {
    EVENTS.lock().clear();
}
/// Export data as JSON for Chrome's tracing tool.
///
/// The `source` function is called for each span to get the source code
/// location of the span. The first element of the tuple is the file path and
/// the second element is the line number.
///
/// Returns a human-readable error string if serialization fails.
pub fn export_json<W: Write>(
    writer: W,
    mut source: impl FnMut(NonZeroU64) -> (String, u32),
) -> Result<(), String> {
    /// A single entry in the trace event list.
    #[derive(Serialize)]
    struct Entry {
        name: &'static str,
        cat: &'static str,
        ph: &'static str,
        ts: f64,
        pid: u64,
        tid: u64,
        args: Option<Args>,
    }

    /// The source location attached to an entry.
    #[derive(Serialize)]
    struct Args {
        file: String,
        line: u32,
    }

    let lock = EVENTS.lock();
    let events = lock.as_slice();

    let mut serializer = serde_json::Serializer::new(writer);
    let mut seq = serializer
        .serialize_seq(Some(events.len()))
        .map_err(|e| format!("failed to serialize events: {e}"))?;

    for event in events.iter() {
        seq.serialize_element(&Entry {
            name: event.name,
            cat: "typst",
            // "B" marks a scope begin, "E" a scope end.
            ph: match event.kind {
                EventKind::Start => "B",
                EventKind::End => "E",
            },
            // Timestamps are reported relative to the first recorded event.
            ts: event.timestamp.micros_since(events[0].timestamp),
            pid: 1,
            tid: event.thread_id,
            args: event.span.map(&mut source).map(|(file, line)| Args { file, line }),
        })
        .map_err(|e| format!("failed to serialize event: {e}"))?;
    }

    seq.end().map_err(|e| format!("failed to serialize events: {e}"))?;
    Ok(())
}
/// A scope that records an event when it is dropped.
pub struct TimingScope {
    /// The name shared by the scope's start and end events.
    name: &'static str,
    /// The raw span value attached to the scope's events, if any.
    span: Option<NonZeroU64>,
    /// The ID of the thread the scope was created on.
    thread_id: u64,
}
impl TimingScope {
    /// Create a new scope if timing is enabled.
    #[inline]
    pub fn new(name: &'static str) -> Option<Self> {
        Self::with_span(name, None)
    }

    /// Create a new scope with a span if timing is enabled.
    ///
    /// The span is a raw number because `typst-timing` can't depend on
    /// `typst-syntax` (or else `typst-syntax` couldn't depend on
    /// `typst-timing`).
    #[inline]
    pub fn with_span(name: &'static str, span: Option<NonZeroU64>) -> Option<Self> {
        if is_enabled() {
            return Some(Self::new_impl(name, span));
        }
        None
    }

    /// Create a new scope without checking if timing is enabled.
    ///
    /// Pushes the start event immediately; the matching end event is
    /// pushed when the scope is dropped.
    fn new_impl(name: &'static str, span: Option<NonZeroU64>) -> Self {
        // Fetch thread ID and timestamp in a single thread-local access.
        let (thread_id, timestamp) =
            THREAD_DATA.with(|data| (data.id, Timestamp::now_with(data)));
        EVENTS.lock().push(Event {
            kind: EventKind::Start,
            timestamp,
            name,
            span,
            thread_id,
        });
        Self { name, span, thread_id }
    }
}
impl Drop for TimingScope {
    /// Records the end event matching the start event pushed on creation.
    fn drop(&mut self) {
        let timestamp = Timestamp::now();
        EVENTS.lock().push(Event {
            kind: EventKind::End,
            timestamp,
            name: self.name,
            span: self.span,
            thread_id: self.thread_id,
        });
    }
}
/// An event that has been recorded.
struct Event {
    /// Whether this is a start or end event.
    kind: EventKind,
    /// The time at which this event occurred.
    timestamp: Timestamp,
    /// The name of this event (shared between a scope's start and end).
    name: &'static str,
    /// The raw value of the span of code that this event was recorded in.
    span: Option<NonZeroU64>,
    /// The thread ID of this event.
    thread_id: u64,
}
/// Whether an event marks the start or end of a scope.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum EventKind {
    /// The scope was entered (exported as "B").
    Start,
    /// The scope was exited (exported as "E").
    End,
}
/// A cross-platform way to get the current time.
#[derive(Copy, Clone)]
struct Timestamp {
    /// A wall-clock time on native targets.
    #[cfg(not(target_arch = "wasm32"))]
    inner: std::time::SystemTime,
    /// Milliseconds since the shared JS time origin on WebAssembly (see
    /// `WasmTimer`); `micros_since` converts accordingly.
    #[cfg(target_arch = "wasm32")]
    inner: f64,
}
impl Timestamp {
    /// The current time, going through the thread-local data on
    /// WebAssembly.
    fn now() -> Self {
        #[cfg(target_arch = "wasm32")]
        return THREAD_DATA.with(Self::now_with);

        #[cfg(not(target_arch = "wasm32"))]
        Self { inner: std::time::SystemTime::now() }
    }

    /// The current time, given already-retrieved thread-local data.
    #[allow(unused_variables)]
    fn now_with(data: &ThreadData) -> Self {
        #[cfg(all(target_arch = "wasm32", feature = "wasm"))]
        return Self { inner: data.timer.now() };

        // Without the `wasm` feature there is no clock available; all
        // timestamps collapse to zero.
        #[cfg(all(target_arch = "wasm32", not(feature = "wasm")))]
        return Self { inner: 0.0 };

        #[cfg(not(target_arch = "wasm32"))]
        Self::now()
    }

    /// The duration from `start` to `self` in microseconds.
    fn micros_since(self, start: Self) -> f64 {
        // On wasm, `inner` is in milliseconds.
        #[cfg(target_arch = "wasm32")]
        return (self.inner - start.inner) * 1000.0;

        // On native targets, saturate to zero if time went backwards.
        #[cfg(not(target_arch = "wasm32"))]
        (self
            .inner
            .duration_since(start.inner)
            .unwrap_or(std::time::Duration::ZERO)
            .as_nanos() as f64
            / 1_000.0)
    }
}
/// Per-thread data.
struct ThreadData {
    /// The thread's ID.
    ///
    /// In contrast to `std::thread::current().id()`, this is wasm-compatible
    /// and also a bit cheaper to access because the std version does a bit more
    /// stuff (including cloning an `Arc`).
    ///
    /// IDs start at 1 and are assigned in thread-initialization order.
    id: u64,
    /// A way to get the time from WebAssembly.
    #[cfg(all(target_arch = "wasm32", feature = "wasm"))]
    timer: WasmTimer,
}
/// A way to get the time from WebAssembly.
#[cfg(all(target_arch = "wasm32", feature = "wasm"))]
struct WasmTimer {
    /// The cached JS performance handle for the thread.
    perf: web_sys::Performance,
    /// The cached JS time origin.
    time_origin: f64,
}
#[cfg(all(target_arch = "wasm32", feature = "wasm"))]
impl WasmTimer {
    /// Caches the performance handle and time origin for this thread.
    ///
    /// Panics if no JS performance handle is available.
    fn new() -> Self {
        // Retrieve `performance` from global object, either the window or
        // globalThis.
        let perf = web_sys::window()
            .and_then(|window| window.performance())
            .or_else(|| {
                use web_sys::wasm_bindgen::JsCast;
                web_sys::js_sys::global()
                    .dyn_into::<web_sys::WorkerGlobalScope>()
                    .ok()
                    .and_then(|scope| scope.performance())
            })
            .expect("failed to get JS performance handle");

        // Every thread gets its own time origin. To make the results consistent
        // across threads, we need to add this to each `now()` call.
        let time_origin = perf.time_origin();

        Self { perf, time_origin }
    }

    /// The current origin-adjusted time in milliseconds.
    fn now(&self) -> f64 {
        self.time_origin + self.perf.now()
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-render/src/paint.rs | crates/typst-render/src/paint.rs | use std::sync::Arc;
use tiny_skia as sk;
use typst_library::layout::{Axes, Point, Ratio, Size};
use typst_library::visualize::{Color, Gradient, Paint, RelativeTo, Tiling};
use crate::{AbsExt, State};
/// Trait for sampling of a paint, used as a generic
/// abstraction over solid colors, gradients, and tilings.
pub trait PaintSampler: Copy {
    /// Sample the color at the `pos` in the pixmap.
    fn sample(self, pos: (u32, u32)) -> sk::PremultipliedColorU8;
}
impl PaintSampler for sk::PremultipliedColorU8 {
    /// A solid color is the same at every position.
    fn sample(self, _: (u32, u32)) -> sk::PremultipliedColorU8 {
        self
    }
}
/// State used when sampling colors for text.
///
/// It caches the inverse transform to the parent, so that we can
/// reuse it instead of recomputing it for each pixel.
#[derive(Copy, Clone)]
pub struct GradientSampler<'a> {
    /// The gradient to sample from.
    gradient: &'a Gradient,
    /// The size of the container the gradient is relative to.
    container_size: Size,
    /// The cached inverse of the parent's transform (identity when the
    /// gradient is relative to the item itself).
    transform_to_parent: sk::Transform,
}
impl<'a> GradientSampler<'a> {
    /// Create a sampler for the given gradient.
    ///
    /// Depending on what the gradient is relative to, the container size
    /// and transform are taken from the item itself or from its parent.
    pub fn new(
        gradient: &'a Gradient,
        state: &State,
        item_size: Size,
        on_text: bool,
    ) -> Self {
        let relative = gradient.unwrap_relative(on_text);
        let container_size = match relative {
            RelativeTo::Self_ => item_size,
            RelativeTo::Parent => state.size,
        };
        let fill_transform = match relative {
            RelativeTo::Self_ => sk::Transform::identity(),
            RelativeTo::Parent => state.container_transform.invert().unwrap(),
        };
        Self {
            gradient,
            container_size,
            transform_to_parent: fill_transform,
        }
    }
}
impl PaintSampler for GradientSampler<'_> {
    /// Samples a single point in a glyph.
    fn sample(self, (x, y): (u32, u32)) -> sk::PremultipliedColorU8 {
        // Compute the point in the gradient's coordinate space.
        let mut point = sk::Point { x: x as f32, y: y as f32 };
        self.transform_to_parent.map_point(&mut point);

        // Sample the gradient and premultiply for the pixmap.
        to_sk_color_u8(self.gradient.sample_at(
            (point.x, point.y),
            (self.container_size.x.to_f32(), self.container_size.y.to_f32()),
        ))
        .premultiply()
    }
}
/// State used when sampling tilings for text.
///
/// It caches the inverse transform to the parent, so that we can
/// reuse it instead of recomputing it for each pixel.
#[derive(Copy, Clone)]
pub struct TilingSampler<'a> {
    /// The size of one tile (including spacing), scaled to pixels.
    size: Size,
    /// The cached inverse of the parent's transform (identity when the
    /// tiling is relative to the item itself).
    transform_to_parent: sk::Transform,
    /// The pre-rendered tile to sample from.
    pixmap: &'a sk::Pixmap,
    /// The number of pixels per point.
    pixel_per_pt: f32,
}
impl<'a> TilingSampler<'a> {
    /// Create a sampler for the given tiling and its pre-rendered pixmap.
    pub fn new(
        tilings: &'a Tiling,
        pixmap: &'a sk::Pixmap,
        state: &State,
        on_text: bool,
    ) -> Self {
        let relative = tilings.unwrap_relative(on_text);
        let fill_transform = match relative {
            RelativeTo::Self_ => sk::Transform::identity(),
            RelativeTo::Parent => state.container_transform.invert().unwrap(),
        };
        Self {
            pixmap,
            // The tile size (including spacing) is scaled to pixels.
            size: (tilings.size() + tilings.spacing()) * state.pixel_per_pt as f64,
            transform_to_parent: fill_transform,
            pixel_per_pt: state.pixel_per_pt,
        }
    }
}
impl PaintSampler for TilingSampler<'_> {
    /// Samples a single point in a glyph.
    fn sample(self, (x, y): (u32, u32)) -> sk::PremultipliedColorU8 {
        // Compute the point in the tilings's coordinate space.
        let mut point = sk::Point { x: x as f32, y: y as f32 };
        self.transform_to_parent.map_point(&mut point);

        // Wrap the pixel coordinates into the tile via `rem_euclid` so that
        // the tiling repeats.
        let x =
            (point.x * self.pixel_per_pt).rem_euclid(self.size.x.to_f32()).floor() as u32;
        let y =
            (point.y * self.pixel_per_pt).rem_euclid(self.size.y.to_f32()).floor() as u32;

        // Sample the tilings. The wrapped coordinates are assumed to be
        // in-bounds for the pre-rendered pixmap — TODO(review) confirm.
        self.pixmap.pixel(x, y).unwrap()
    }
}
/// Transforms a [`Paint`] into a [`sk::Paint`].
/// Applying the necessary transform, if the paint is a gradient.
///
/// `gradient_map` is used to scale and move the gradient being sampled,
/// this is used to line up the stroke and the fill of a shape.
///
/// For gradients and tilings, the rendered pixmap is stored in `pixmap` so
/// that it outlives the returned paint, which borrows it as a pattern.
pub fn to_sk_paint<'a>(
    paint: &Paint,
    state: State,
    item_size: Size,
    on_text: bool,
    fill_transform: Option<sk::Transform>,
    pixmap: &'a mut Option<Arc<sk::Pixmap>>,
    gradient_map: Option<(Point, Axes<Ratio>)>,
) -> sk::Paint<'a> {
    /// Actual sampling of the gradient, cached for performance.
    #[comemo::memoize]
    fn cached(
        gradient: &Gradient,
        width: u32,
        height: u32,
        gradient_map: Option<(Point, Axes<Ratio>)>,
    ) -> Arc<sk::Pixmap> {
        let (offset, scale) =
            gradient_map.unwrap_or_else(|| (Point::zero(), Axes::splat(Ratio::one())));
        // At least 1x1 so pixmap creation cannot fail on degenerate sizes.
        let mut pixmap = sk::Pixmap::new(width.max(1), height.max(1)).unwrap();
        for x in 0..width {
            for y in 0..height {
                let color = gradient.sample_at(
                    (
                        (x as f32 + offset.x.to_f32()) * scale.x.get() as f32,
                        (y as f32 + offset.y.to_f32()) * scale.y.get() as f32,
                    ),
                    (width as f32, height as f32),
                );

                pixmap.pixels_mut()[(y * width + x) as usize] =
                    to_sk_color(color).premultiply().to_color_u8();
            }
        }

        Arc::new(pixmap)
    }

    let mut sk_paint: sk::Paint<'_> = sk::Paint::default();
    match paint {
        Paint::Solid(color) => {
            sk_paint.set_color(to_sk_color(*color));
            sk_paint.anti_alias = true;
        }
        Paint::Gradient(gradient) => {
            // Paints relative to the parent use the container's size and
            // transform instead of the item's.
            let relative = gradient.unwrap_relative(on_text);
            let container_size = match relative {
                RelativeTo::Self_ => item_size,
                RelativeTo::Parent => state.size,
            };
            let fill_transform = match relative {
                RelativeTo::Self_ => fill_transform.unwrap_or_default(),
                RelativeTo::Parent => state
                    .container_transform
                    .post_concat(state.transform.invert().unwrap()),
            };
            let gradient_map = match relative {
                RelativeTo::Self_ => gradient_map,
                RelativeTo::Parent => None,
            };
            // Render at native resolution, with at least one point's worth
            // of pixels in each dimension.
            let width =
                (container_size.x.to_f32().abs() * state.pixel_per_pt).ceil() as u32;
            let height =
                (container_size.y.to_f32().abs() * state.pixel_per_pt).ceil() as u32;
            *pixmap = Some(cached(
                gradient,
                width.max(state.pixel_per_pt.ceil() as u32),
                height.max(state.pixel_per_pt.ceil() as u32),
                gradient_map,
            ));

            // We can use FilterQuality::Nearest here because we're
            // rendering to a pixmap that is already at native resolution.
            sk_paint.shader = sk::Pattern::new(
                pixmap.as_ref().unwrap().as_ref().as_ref(),
                sk::SpreadMode::Pad,
                sk::FilterQuality::Nearest,
                1.0,
                // Negative dimensions are handled via the sign in the
                // pattern's scale.
                fill_transform.pre_scale(
                    container_size.x.signum() as f32 / state.pixel_per_pt,
                    container_size.y.signum() as f32 / state.pixel_per_pt,
                ),
            );

            sk_paint.anti_alias = gradient.anti_alias();
        }
        Paint::Tiling(tilings) => {
            let relative = tilings.unwrap_relative(on_text);
            let fill_transform = match relative {
                RelativeTo::Self_ => fill_transform.unwrap_or_default(),
                RelativeTo::Parent => state
                    .container_transform
                    .post_concat(state.transform.invert().unwrap()),
            };

            // Render one tile; the pattern shader repeats it.
            let canvas = render_tiling_frame(&state, tilings);
            *pixmap = Some(Arc::new(canvas));

            // Offset derived from the gradient map so that stroke and fill
            // line up.
            let offset = match relative {
                RelativeTo::Self_ => {
                    gradient_map.map(|(offset, _)| -offset).unwrap_or_default()
                }
                RelativeTo::Parent => Point::zero(),
            };

            // Create the shader
            sk_paint.shader = sk::Pattern::new(
                pixmap.as_ref().unwrap().as_ref().as_ref(),
                sk::SpreadMode::Repeat,
                sk::FilterQuality::Nearest,
                1.0,
                fill_transform
                    .pre_scale(1.0 / state.pixel_per_pt, 1.0 / state.pixel_per_pt)
                    .pre_translate(offset.x.to_f32(), offset.y.to_f32()),
            );
        }
    }
    sk_paint
}
/// Transforms a [`Color`] into a [`sk::Color`].
pub fn to_sk_color(color: Color) -> sk::Color {
    let (r, g, b, a) = color.to_rgb().into_components();
    sk::Color::from_rgba(r, g, b, a)
        .expect("components must always be in the range [0..=1]")
}
/// Transforms a [`Color`] into an 8-bit-per-channel [`sk::ColorU8`].
pub fn to_sk_color_u8(color: Color) -> sk::ColorU8 {
    let (r, g, b, a) = color.to_rgb().into_format::<u8, u8>().into_components();
    sk::ColorU8::from_rgba(r, g, b, a)
}
/// Renders one tile of a tiling (including its spacing) into a new pixmap at
/// the state's pixel density.
pub fn render_tiling_frame(state: &State, tilings: &Tiling) -> sk::Pixmap {
    let size = tilings.size() + tilings.spacing();
    let mut canvas = sk::Pixmap::new(
        (size.x.to_f32() * state.pixel_per_pt).round() as u32,
        (size.y.to_f32() * state.pixel_per_pt).round() as u32,
    )
    .unwrap();

    // Render the tilings into a new canvas.
    let ts = sk::Transform::from_scale(state.pixel_per_pt, state.pixel_per_pt);
    let temp_state = State::new(tilings.size(), ts, state.pixel_per_pt);
    crate::render_frame(&mut canvas, temp_state, tilings.frame());
    canvas
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-render/src/shape.rs | crates/typst-render/src/shape.rs | use tiny_skia as sk;
use typst_library::layout::{Abs, Axes, Point, Ratio, Size};
use typst_library::visualize::{
Curve, CurveItem, DashPattern, FillRule, FixedStroke, Geometry, LineCap, LineJoin,
Shape,
};
use crate::{AbsExt, State, paint};
/// Render a geometrical shape into the canvas.
pub fn render_shape(canvas: &mut sk::Pixmap, state: State, shape: &Shape) -> Option<()> {
let ts = state.transform;
let path = match &shape.geometry {
Geometry::Line(target) => {
let mut builder = sk::PathBuilder::new();
builder.line_to(target.x.to_f32(), target.y.to_f32());
builder.finish()?
}
Geometry::Rect(size) => {
let w = size.x.to_f32();
let h = size.y.to_f32();
let rect = if w < 0.0 || h < 0.0 {
// Skia doesn't normally allow for negative dimensions, but
// Typst supports them, so we apply a transform if needed
// Because this operation is expensive according to tiny-skia's
// docs, we prefer to not apply it if not needed
let transform = sk::Transform::from_scale(w.signum(), h.signum());
let rect = sk::Rect::from_xywh(0.0, 0.0, w.abs(), h.abs())?;
rect.transform(transform)?
} else {
sk::Rect::from_xywh(0.0, 0.0, w, h)?
};
sk::PathBuilder::from_rect(rect)
}
Geometry::Curve(curve) => convert_curve(curve)?,
};
if let Some(fill) = &shape.fill {
let mut pixmap = None;
let mut paint: sk::Paint = paint::to_sk_paint(
fill,
state,
shape.geometry.bbox_size(),
false,
None,
&mut pixmap,
None,
);
if matches!(shape.geometry, Geometry::Rect(_)) {
paint.anti_alias = false;
}
let rule = match shape.fill_rule {
FillRule::NonZero => sk::FillRule::Winding,
FillRule::EvenOdd => sk::FillRule::EvenOdd,
};
canvas.fill_path(&path, &paint, rule, ts, state.mask);
}
if let Some(FixedStroke { paint, thickness, cap, join, dash, miter_limit }) =
&shape.stroke
{
let width = thickness.to_f32();
// Don't draw zero-pt stroke.
if width > 0.0 {
let dash = dash.as_ref().and_then(to_sk_dash_pattern);
let bbox = shape.geometry.bbox_size();
let offset_bbox = if !matches!(shape.geometry, Geometry::Line(..)) {
offset_bounding_box(bbox, *thickness)
} else {
bbox
};
let fill_transform =
(!matches!(shape.geometry, Geometry::Line(..))).then(|| {
sk::Transform::from_translate(
-thickness.to_f32(),
-thickness.to_f32(),
)
});
let gradient_map =
(!matches!(shape.geometry, Geometry::Line(..))).then(|| {
(
Point::new(
-*thickness * state.pixel_per_pt as f64,
-*thickness * state.pixel_per_pt as f64,
),
Axes::new(
Ratio::new(offset_bbox.x / bbox.x),
Ratio::new(offset_bbox.y / bbox.y),
),
)
});
let mut pixmap = None;
let paint = paint::to_sk_paint(
paint,
state,
offset_bbox,
false,
fill_transform,
&mut pixmap,
gradient_map,
);
let stroke = sk::Stroke {
width,
line_cap: to_sk_line_cap(*cap),
line_join: to_sk_line_join(*join),
dash,
miter_limit: miter_limit.get() as f32,
};
canvas.stroke_path(&path, &paint, &stroke, ts, state.mask);
}
}
Some(())
}
/// Convert a Typst curve into a tiny-skia path.
pub fn convert_curve(curve: &Curve) -> Option<sk::Path> {
let mut builder = sk::PathBuilder::new();
for elem in &curve.0 {
match elem {
CurveItem::Move(p) => {
builder.move_to(p.x.to_f32(), p.y.to_f32());
}
CurveItem::Line(p) => {
builder.line_to(p.x.to_f32(), p.y.to_f32());
}
CurveItem::Cubic(p1, p2, p3) => {
builder.cubic_to(
p1.x.to_f32(),
p1.y.to_f32(),
p2.x.to_f32(),
p2.y.to_f32(),
p3.x.to_f32(),
p3.y.to_f32(),
);
}
CurveItem::Close => {
builder.close();
}
};
}
builder.finish()
}
fn offset_bounding_box(bbox: Size, stroke_width: Abs) -> Size {
Size::new(bbox.x + stroke_width * 2.0, bbox.y + stroke_width * 2.0)
}
pub fn to_sk_line_cap(cap: LineCap) -> sk::LineCap {
match cap {
LineCap::Butt => sk::LineCap::Butt,
LineCap::Round => sk::LineCap::Round,
LineCap::Square => sk::LineCap::Square,
}
}
pub fn to_sk_line_join(join: LineJoin) -> sk::LineJoin {
match join {
LineJoin::Miter => sk::LineJoin::Miter,
LineJoin::Round => sk::LineJoin::Round,
LineJoin::Bevel => sk::LineJoin::Bevel,
}
}
pub fn to_sk_dash_pattern(dash: &DashPattern<Abs, Abs>) -> Option<sk::StrokeDash> {
// tiny-skia only allows dash patterns with an even number of elements,
// while pdf allows any number.
let pattern_len = dash.array.len();
let len = if pattern_len % 2 == 1 { 2 * pattern_len } else { pattern_len };
let dash_array = dash.array.iter().map(|l| l.to_f32()).cycle().take(len).collect();
sk::StrokeDash::new(dash_array, dash.phase.to_f32())
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-render/src/lib.rs | crates/typst-render/src/lib.rs | //! Rendering of Typst documents into raster images.
mod image;
mod paint;
mod shape;
mod text;
use tiny_skia as sk;
use typst_library::layout::{
Abs, Axes, Frame, FrameItem, FrameKind, GroupItem, Page, PagedDocument, Point, Size,
Transform,
};
use typst_library::visualize::{Color, Geometry, Paint};
/// Export a page into a raster image.
///
/// This renders the page at the given number of pixels per point and returns
/// the resulting `tiny-skia` pixel buffer.
#[typst_macros::time(name = "render")]
pub fn render(page: &Page, pixel_per_pt: f32) -> sk::Pixmap {
let size = page.frame.size();
let pxw = (pixel_per_pt * size.x.to_f32()).round().max(1.0) as u32;
let pxh = (pixel_per_pt * size.y.to_f32()).round().max(1.0) as u32;
let ts = sk::Transform::from_scale(pixel_per_pt, pixel_per_pt);
let state = State::new(size, ts, pixel_per_pt);
let mut canvas = sk::Pixmap::new(pxw, pxh).unwrap();
if let Some(fill) = page.fill_or_white() {
if let Paint::Solid(color) = fill {
canvas.fill(paint::to_sk_color(color));
} else {
let rect = Geometry::Rect(page.frame.size()).filled(fill);
shape::render_shape(&mut canvas, state, &rect);
}
}
render_frame(&mut canvas, state, &page.frame);
canvas
}
/// Export a document with potentially multiple pages into a single raster image.
pub fn render_merged(
document: &PagedDocument,
pixel_per_pt: f32,
gap: Abs,
fill: Option<Color>,
) -> sk::Pixmap {
let pixmaps: Vec<_> =
document.pages.iter().map(|page| render(page, pixel_per_pt)).collect();
let gap = (pixel_per_pt * gap.to_f32()).round() as u32;
let pxw = pixmaps.iter().map(sk::Pixmap::width).max().unwrap_or_default();
let pxh = pixmaps.iter().map(|pixmap| pixmap.height()).sum::<u32>()
+ gap * pixmaps.len().saturating_sub(1) as u32;
let mut canvas = sk::Pixmap::new(pxw, pxh).unwrap();
if let Some(fill) = fill {
canvas.fill(paint::to_sk_color(fill));
}
let mut y = 0;
for pixmap in pixmaps {
canvas.draw_pixmap(
0,
y as i32,
pixmap.as_ref(),
&sk::PixmapPaint::default(),
sk::Transform::identity(),
None,
);
y += pixmap.height() + gap;
}
canvas
}
/// Additional metadata carried through the rendering process.
#[derive(Default, Copy, Clone)]
struct State<'a> {
/// The transform of the current item.
transform: sk::Transform,
/// The transform of the first hard frame in the hierarchy.
container_transform: sk::Transform,
/// The mask of the current item.
mask: Option<&'a sk::Mask>,
/// The pixel per point ratio.
pixel_per_pt: f32,
/// The size of the first hard frame in the hierarchy.
size: Size,
}
impl<'a> State<'a> {
fn new(size: Size, transform: sk::Transform, pixel_per_pt: f32) -> Self {
Self {
size,
transform,
container_transform: transform,
pixel_per_pt,
..Default::default()
}
}
/// Pre translate the current item's transform.
fn pre_translate(self, pos: Point) -> Self {
Self {
transform: self.transform.pre_translate(pos.x.to_f32(), pos.y.to_f32()),
..self
}
}
fn pre_scale(self, scale: Axes<Abs>) -> Self {
Self {
transform: self.transform.pre_scale(scale.x.to_f32(), scale.y.to_f32()),
..self
}
}
/// Pre concat the current item's transform.
fn pre_concat(self, transform: sk::Transform) -> Self {
Self {
transform: self.transform.pre_concat(transform),
..self
}
}
/// Sets the current mask.
///
/// If no mask is provided, the parent mask is used.
fn with_mask(self, mask: Option<&'a sk::Mask>) -> State<'a> {
State { mask: mask.or(self.mask), ..self }
}
/// Sets the size of the first hard frame in the hierarchy.
fn with_size(self, size: Size) -> Self {
Self { size, ..self }
}
/// Pre concat the container's transform.
fn pre_concat_container(self, transform: sk::Transform) -> Self {
Self {
container_transform: self.container_transform.pre_concat(transform),
..self
}
}
}
/// Render a frame into the canvas.
fn render_frame(canvas: &mut sk::Pixmap, state: State, frame: &Frame) {
for (pos, item) in frame.items() {
match item {
FrameItem::Group(group) => {
render_group(canvas, state, *pos, group);
}
FrameItem::Text(text) => {
text::render_text(canvas, state.pre_translate(*pos), text);
}
FrameItem::Shape(shape, _) => {
shape::render_shape(canvas, state.pre_translate(*pos), shape);
}
FrameItem::Image(image, size, _) => {
image::render_image(canvas, state.pre_translate(*pos), image, *size);
}
FrameItem::Link(_, _) => {}
FrameItem::Tag(_) => {}
}
}
}
/// Render a group frame with optional transform and clipping into the canvas.
fn render_group(canvas: &mut sk::Pixmap, state: State, pos: Point, group: &GroupItem) {
let sk_transform = to_sk_transform(&group.transform);
let state = match group.frame.kind() {
FrameKind::Soft => state.pre_translate(pos).pre_concat(sk_transform),
FrameKind::Hard => state
.pre_translate(pos)
.pre_concat(sk_transform)
.pre_concat_container(
state
.transform
.post_concat(state.container_transform.invert().unwrap()),
)
.pre_concat_container(to_sk_transform(&Transform::translate(pos.x, pos.y)))
.pre_concat_container(sk_transform)
.with_size(group.frame.size()),
};
let mut mask = state.mask;
let storage;
if let Some(clip_curve) = group.clip.as_ref()
&& let Some(path) = shape::convert_curve(clip_curve)
.and_then(|path| path.transform(state.transform))
{
if let Some(mask) = mask {
let mut mask = mask.clone();
mask.intersect_path(
&path,
sk::FillRule::default(),
true,
sk::Transform::default(),
);
storage = mask;
} else {
let pxw = canvas.width();
let pxh = canvas.height();
let Some(mut mask) = sk::Mask::new(pxw, pxh) else {
// Fails if clipping rect is empty. In that case we just
// clip everything by returning.
return;
};
mask.fill_path(
&path,
sk::FillRule::default(),
true,
sk::Transform::default(),
);
storage = mask;
};
mask = Some(&storage);
}
render_frame(canvas, state.with_mask(mask), &group.frame);
}
fn to_sk_transform(transform: &Transform) -> sk::Transform {
let Transform { sx, ky, kx, sy, tx, ty } = *transform;
sk::Transform::from_row(
sx.get() as _,
ky.get() as _,
kx.get() as _,
sy.get() as _,
tx.to_f32(),
ty.to_f32(),
)
}
/// Additional methods for [`Abs`].
trait AbsExt {
/// Convert to a number of points as f32.
fn to_f32(self) -> f32;
}
impl AbsExt for Abs {
fn to_f32(self) -> f32 {
self.to_pt() as f32
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-render/src/image.rs | crates/typst-render/src/image.rs | use hayro::{FontData, FontQuery, InterpreterSettings, RenderSettings, StandardFont};
use image::imageops::FilterType;
use image::{GenericImageView, Rgba};
use std::sync::Arc;
use tiny_skia as sk;
use tiny_skia::IntSize;
use typst_library::foundations::Smart;
use typst_library::layout::Size;
use typst_library::visualize::{Image, ImageKind, ImageScaling, PdfImage};
use crate::{AbsExt, State};
/// Render a raster or SVG image into the canvas.
pub fn render_image(
canvas: &mut sk::Pixmap,
state: State,
image: &Image,
size: Size,
) -> Option<()> {
let ts = state.transform;
let view_width = size.x.to_f32();
let view_height = size.y.to_f32();
// For better-looking output, resize `image` to its final size before
// painting it to `canvas`. For the math, see:
// https://github.com/typst/typst/issues/1404#issuecomment-1598374652
let theta = f32::atan2(-ts.kx, ts.sx);
// To avoid division by 0, choose the one of { sin, cos } that is
// further from 0.
let prefer_sin = theta.sin().abs() > std::f32::consts::FRAC_1_SQRT_2;
let scale_x =
f32::abs(if prefer_sin { ts.kx / theta.sin() } else { ts.sx / theta.cos() });
let aspect = (image.width() as f32) / (image.height() as f32);
let w = (scale_x * view_width.max(aspect * view_height)).ceil() as u32;
let h = ((w as f32) / aspect).ceil() as u32;
let pixmap = build_texture(image, w, h)?;
let paint_scale_x = view_width / pixmap.width() as f32;
let paint_scale_y = view_height / pixmap.height() as f32;
let paint = sk::Paint {
shader: sk::Pattern::new(
(*pixmap).as_ref(),
sk::SpreadMode::Pad,
sk::FilterQuality::Nearest,
1.0,
sk::Transform::from_scale(paint_scale_x, paint_scale_y),
),
..Default::default()
};
let rect = sk::Rect::from_xywh(0.0, 0.0, view_width, view_height)?;
canvas.fill_rect(rect, &paint, ts, state.mask);
Some(())
}
/// Prepare a texture for an image at a scaled size.
#[comemo::memoize]
fn build_texture(image: &Image, w: u32, h: u32) -> Option<Arc<sk::Pixmap>> {
let texture = match image.kind() {
ImageKind::Raster(raster) => {
let mut texture = sk::Pixmap::new(w, h)?;
let w = texture.width();
let h = texture.height();
let buf;
let dynamic = raster.dynamic();
let resized = if (w, h) == (dynamic.width(), dynamic.height()) {
// Small optimization to not allocate in case image is not resized.
dynamic
} else {
let upscale = w > dynamic.width();
let filter = match image.scaling() {
Smart::Custom(ImageScaling::Pixelated) => FilterType::Nearest,
_ if upscale => FilterType::CatmullRom,
_ => FilterType::Lanczos3, // downscale
};
buf = dynamic.resize_exact(w, h, filter);
&buf
};
for ((_, _, src), dest) in resized.pixels().zip(texture.pixels_mut()) {
let Rgba([r, g, b, a]) = src;
*dest = sk::ColorU8::from_rgba(r, g, b, a).premultiply();
}
texture
}
ImageKind::Svg(svg) => {
let mut texture = sk::Pixmap::new(w, h)?;
let tree = svg.tree();
let ts = tiny_skia::Transform::from_scale(
w as f32 / tree.size().width(),
h as f32 / tree.size().height(),
);
resvg::render(tree, ts, &mut texture.as_mut());
texture
}
ImageKind::Pdf(pdf) => build_pdf_texture(pdf, w, h)?,
};
Some(Arc::new(texture))
}
// Keep this in sync with `typst-svg`!
fn build_pdf_texture(pdf: &PdfImage, w: u32, h: u32) -> Option<sk::Pixmap> {
let select_standard_font = move |font: StandardFont| -> Option<(FontData, u32)> {
let bytes = match font {
StandardFont::Helvetica => typst_assets::pdf::SANS,
StandardFont::HelveticaBold => typst_assets::pdf::SANS_BOLD,
StandardFont::HelveticaOblique => typst_assets::pdf::SANS_ITALIC,
StandardFont::HelveticaBoldOblique => typst_assets::pdf::SANS_BOLD_ITALIC,
StandardFont::Courier => typst_assets::pdf::FIXED,
StandardFont::CourierBold => typst_assets::pdf::FIXED_BOLD,
StandardFont::CourierOblique => typst_assets::pdf::FIXED_ITALIC,
StandardFont::CourierBoldOblique => typst_assets::pdf::FIXED_BOLD_ITALIC,
StandardFont::TimesRoman => typst_assets::pdf::SERIF,
StandardFont::TimesBold => typst_assets::pdf::SERIF_BOLD,
StandardFont::TimesItalic => typst_assets::pdf::SERIF_ITALIC,
StandardFont::TimesBoldItalic => typst_assets::pdf::SERIF_BOLD_ITALIC,
StandardFont::ZapfDingBats => typst_assets::pdf::DING_BATS,
StandardFont::Symbol => typst_assets::pdf::SYMBOL,
};
Some((Arc::new(bytes), 0))
};
let interpreter_settings = InterpreterSettings {
font_resolver: Arc::new(move |query| match query {
FontQuery::Standard(s) => select_standard_font(*s),
FontQuery::Fallback(f) => select_standard_font(f.pick_standard_font()),
}),
warning_sink: Arc::new(|_| {}),
};
let render_settings = RenderSettings {
x_scale: w as f32 / pdf.width(),
y_scale: h as f32 / pdf.height(),
width: Some(w as u16),
height: Some(h as u16),
};
let hayro_pix = hayro::render(pdf.page(), &interpreter_settings, &render_settings);
sk::Pixmap::from_vec(hayro_pix.take_u8(), IntSize::from_wh(w, h)?)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-render/src/text.rs | crates/typst-render/src/text.rs | use std::sync::Arc;
use pixglyph::Bitmap;
use tiny_skia as sk;
use ttf_parser::{GlyphId, OutlineBuilder};
use typst_library::layout::{Abs, Axes, Point, Size};
use typst_library::text::color::{glyph_frame, should_outline};
use typst_library::text::{Font, TextItem};
use typst_library::visualize::{FixedStroke, Paint};
use crate::paint::{self, GradientSampler, PaintSampler, TilingSampler};
use crate::{AbsExt, State, shape};
/// Render a text run into the canvas.
pub fn render_text(canvas: &mut sk::Pixmap, state: State, text: &TextItem) {
let mut x = Abs::zero();
let mut y = Abs::zero();
for glyph in &text.glyphs {
let id = GlyphId(glyph.id);
let x_offset = x + glyph.x_offset.at(text.size);
let y_offset = y + glyph.y_offset.at(text.size);
if should_outline(&text.font, glyph) {
let state = state.pre_translate(Point::new(x_offset, -y_offset));
render_outline_glyph(canvas, state, text, id);
} else {
let upem = text.font.units_per_em();
let text_scale = text.size / upem;
let state = state
.pre_translate(Point::new(x_offset, -y_offset - text.size))
.pre_scale(Axes::new(text_scale, text_scale));
let (glyph_frame, _) = glyph_frame(&text.font, glyph.id);
crate::render_frame(canvas, state, &glyph_frame);
}
x += glyph.x_advance.at(text.size);
y += glyph.y_advance.at(text.size);
}
}
/// Render an outline glyph into the canvas. This is the "normal" case.
fn render_outline_glyph(
canvas: &mut sk::Pixmap,
state: State,
text: &TextItem,
id: GlyphId,
) -> Option<()> {
let ts = &state.transform;
let ppem = text.size.to_f32() * ts.sy;
// Render a glyph directly as a path. This only happens when the fast glyph
// rasterization can't be used due to very large text size or weird
// scale/skewing transforms.
if ppem > 100.0
|| ts.kx != 0.0
|| ts.ky != 0.0
|| ts.sx != ts.sy
|| text.stroke.is_some()
|| text.size < Abs::zero()
{
let path = {
let mut builder = WrappedPathBuilder(sk::PathBuilder::new());
text.font.ttf().outline_glyph(id, &mut builder)?;
builder.0.finish()?
};
let scale = text.size.to_f32() / text.font.units_per_em() as f32;
let mut pixmap = None;
let rule = sk::FillRule::default();
// Flip vertically because font design coordinate
// system is Y-up.
let ts = ts.pre_scale(scale, -scale);
let state_ts = state.pre_concat(sk::Transform::from_scale(scale, -scale));
let paint = paint::to_sk_paint(
&text.fill,
state_ts,
Size::zero(),
true,
None,
&mut pixmap,
None,
);
canvas.fill_path(&path, &paint, rule, ts, state.mask);
if let Some(FixedStroke { paint, thickness, cap, join, dash, miter_limit }) =
&text.stroke
&& thickness.to_f32() > 0.0
{
let dash = dash.as_ref().and_then(shape::to_sk_dash_pattern);
let paint = paint::to_sk_paint(
paint,
state_ts,
Size::zero(),
true,
None,
&mut pixmap,
None,
);
let stroke = sk::Stroke {
width: thickness.to_f32() / scale, // When we scale the path, we need to scale the stroke width, too.
line_cap: shape::to_sk_line_cap(*cap),
line_join: shape::to_sk_line_join(*join),
dash,
miter_limit: miter_limit.get() as f32,
};
canvas.stroke_path(&path, &paint, &stroke, ts, state.mask);
}
return Some(());
}
// Rasterize the glyph with `pixglyph`.
#[comemo::memoize]
fn rasterize(
font: &Font,
id: GlyphId,
x: u32,
y: u32,
size: u32,
) -> Option<Arc<Bitmap>> {
let glyph = pixglyph::Glyph::load(font.ttf(), id)?;
Some(Arc::new(glyph.rasterize(
f32::from_bits(x),
f32::from_bits(y),
f32::from_bits(size),
)))
}
// Try to retrieve a prepared glyph or prepare it from scratch if it
// doesn't exist, yet.
let bitmap =
rasterize(&text.font, id, ts.tx.to_bits(), ts.ty.to_bits(), ppem.to_bits())?;
match &text.fill {
Paint::Gradient(gradient) => {
let sampler = GradientSampler::new(gradient, &state, Size::zero(), true);
write_bitmap(canvas, &bitmap, &state, sampler)?;
}
Paint::Solid(color) => {
write_bitmap(
canvas,
&bitmap,
&state,
paint::to_sk_color_u8(*color).premultiply(),
)?;
}
Paint::Tiling(tiling) => {
let pixmap = paint::render_tiling_frame(&state, tiling);
let sampler = TilingSampler::new(tiling, &pixmap, &state, true);
write_bitmap(canvas, &bitmap, &state, sampler)?;
}
}
Some(())
}
fn write_bitmap<S: PaintSampler>(
canvas: &mut sk::Pixmap,
bitmap: &Bitmap,
state: &State,
sampler: S,
) -> Option<()> {
// If we have a clip mask we first render to a pixmap that we then blend
// with our canvas
if state.mask.is_some() {
let cw = canvas.width() as i32;
let ch = canvas.height() as i32;
let mw = bitmap.width;
let mh = bitmap.height;
let left = bitmap.left;
let top = bitmap.top;
// Pad the pixmap with 1 pixel in each dimension so that we do
// not get any problem with floating point errors along their border
let mut pixmap = sk::Pixmap::new(mw + 2, mh + 2)?;
let pixels = bytemuck::cast_slice_mut::<u8, u32>(pixmap.data_mut());
for x in 0..mw {
for y in 0..mh {
let alpha = bitmap.coverage[(y * mw + x) as usize];
// To sample at the correct position, we need to convert each
// pixel's position in the bitmap (x and y) to its final
// expected position in the canvas. Due to padding, this
// pixel's position in the pixmap will be (x + 1, y + 1).
// Then, when drawing the pixmap to the canvas, we place its
// top-left corner at position (left - 1, top - 1). Therefore,
// the final position of this pixel in the canvas is given by
// (left - 1 + x + 1, top - 1 + y + 1) = (left + x, top + y).
let sample_pos = (
(left + x as i32).clamp(0, cw) as u32,
(top + y as i32).clamp(0, ch) as u32,
);
let color = sampler.sample(sample_pos);
let color = bytemuck::cast(color);
let applied = alpha_mul(color, alpha as u32);
pixels[((y + 1) * (mw + 2) + (x + 1)) as usize] = applied;
}
}
canvas.draw_pixmap(
left - 1,
top - 1,
pixmap.as_ref(),
&sk::PixmapPaint::default(),
sk::Transform::identity(),
state.mask,
);
} else {
let cw = canvas.width() as i32;
let ch = canvas.height() as i32;
let mw = bitmap.width as i32;
let mh = bitmap.height as i32;
// Determine the pixel bounding box that we actually need to draw.
let left = bitmap.left;
let right = left + mw;
let top = bitmap.top;
let bottom = top + mh;
// Blend the glyph bitmap with the existing pixels on the canvas.
let pixels = bytemuck::cast_slice_mut::<u8, u32>(canvas.data_mut());
for x in left.clamp(0, cw)..right.clamp(0, cw) {
for y in top.clamp(0, ch)..bottom.clamp(0, ch) {
let ai = ((y - top) * mw + (x - left)) as usize;
let cov = bitmap.coverage[ai];
if cov == 0 {
continue;
}
let color = sampler.sample((x as _, y as _));
let color = bytemuck::cast(color);
let pi = (y * cw + x) as usize;
// Fast path if color is opaque.
if cov == u8::MAX && color & 0xFF == 0xFF {
pixels[pi] = color;
continue;
}
let applied = alpha_mul(color, cov as u32);
pixels[pi] = blend_src_over(applied, pixels[pi]);
}
}
}
Some(())
}
/// Allows to build tiny-skia paths from glyph outlines.
struct WrappedPathBuilder(sk::PathBuilder);
impl OutlineBuilder for WrappedPathBuilder {
fn move_to(&mut self, x: f32, y: f32) {
self.0.move_to(x, y);
}
fn line_to(&mut self, x: f32, y: f32) {
self.0.line_to(x, y);
}
fn quad_to(&mut self, x1: f32, y1: f32, x: f32, y: f32) {
self.0.quad_to(x1, y1, x, y);
}
fn curve_to(&mut self, x1: f32, y1: f32, x2: f32, y2: f32, x: f32, y: f32) {
self.0.cubic_to(x1, y1, x2, y2, x, y);
}
fn close(&mut self) {
self.0.close();
}
}
// Alpha multiplication and blending are ported from:
// https://skia.googlesource.com/skia/+/refs/heads/main/include/core/SkColorPriv.h
/// Blends two premulitplied, packed 32-bit RGBA colors. Alpha channel must be
/// in the 8 high bits.
fn blend_src_over(src: u32, dst: u32) -> u32 {
src + alpha_mul(dst, 256 - (src >> 24))
}
/// Alpha multiply a color.
fn alpha_mul(color: u32, scale: u32) -> u32 {
let mask = 0xff00ff;
let rb = ((color & mask) * scale) >> 8;
let ag = ((color >> 8) & mask) * scale;
(rb & mask) | (ag & !mask)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/lexer.rs | crates/typst-syntax/src/lexer.rs | use std::num::IntErrorKind;
use ecow::{EcoString, eco_format};
use typst_utils::default_math_class;
use unicode_ident::{is_xid_continue, is_xid_start};
use unicode_math_class::MathClass;
use unicode_script::{Script, UnicodeScript};
use unicode_segmentation::UnicodeSegmentation;
use unscanny::Scanner;
use crate::{SyntaxError, SyntaxKind, SyntaxMode, SyntaxNode};
/// An iterator over a source code string which returns tokens.
#[derive(Clone)]
pub(super) struct Lexer<'s> {
/// The scanner: contains the underlying string and location as a "cursor".
s: Scanner<'s>,
/// The mode the lexer is in. This determines which kinds of tokens it
/// produces.
mode: SyntaxMode,
/// Whether the last token contained a newline.
newline: bool,
/// An error for the last token.
error: Option<SyntaxError>,
}
impl<'s> Lexer<'s> {
/// Create a new lexer with the given mode and a prefix to offset column
/// calculations.
pub fn new(text: &'s str, mode: SyntaxMode) -> Self {
Self {
s: Scanner::new(text),
mode,
newline: false,
error: None,
}
}
/// Get the current lexing mode.
pub fn mode(&self) -> SyntaxMode {
self.mode
}
/// Change the lexing mode.
pub fn set_mode(&mut self, mode: SyntaxMode) {
self.mode = mode;
}
/// The index in the string at which the last token ends and next token
/// will start.
pub fn cursor(&self) -> usize {
self.s.cursor()
}
/// Jump to the given index in the string.
pub fn jump(&mut self, index: usize) {
self.s.jump(index);
}
/// Whether the last token contained a newline.
pub fn newline(&self) -> bool {
self.newline
}
/// The number of characters until the most recent newline from an index.
pub fn column(&self, index: usize) -> usize {
let mut s = self.s; // Make a new temporary scanner (cheap).
s.jump(index);
s.before().chars().rev().take_while(|&c| !is_newline(c)).count()
}
}
impl Lexer<'_> {
/// Construct a full-positioned syntax error.
fn error(&mut self, message: impl Into<EcoString>) -> SyntaxKind {
self.error = Some(SyntaxError::new(message));
SyntaxKind::Error
}
/// If the current node is an error, adds a hint.
fn hint(&mut self, message: impl Into<EcoString>) {
if let Some(error) = &mut self.error {
error.hints.push(message.into());
}
}
}
/// Shared methods with all [`SyntaxMode`].
impl Lexer<'_> {
/// Return the next token in our text. Returns both the [`SyntaxNode`]
/// and the raw [`SyntaxKind`] to make it more ergonomic to check the kind
pub fn next(&mut self) -> (SyntaxKind, SyntaxNode) {
debug_assert!(self.error.is_none());
let start = self.s.cursor();
self.newline = false;
let kind = match self.s.eat() {
Some(c) if is_space(c, self.mode) => self.whitespace(start, c),
Some('#') if start == 0 && self.s.eat_if('!') => self.shebang(),
Some('/') if self.s.eat_if('/') => self.line_comment(),
Some('/') if self.s.eat_if('*') => self.block_comment(),
Some('*') if self.s.eat_if('/') => {
let kind = self.error("unexpected end of block comment");
self.hint(
"consider escaping the `*` with a backslash or \
opening the block comment with `/*`",
);
kind
}
Some('`') if self.mode != SyntaxMode::Math => return self.raw(),
Some(c) => match self.mode {
SyntaxMode::Markup => self.markup(start, c),
SyntaxMode::Math => match self.math(start, c) {
(kind, None) => kind,
(kind, Some(node)) => return (kind, node),
},
SyntaxMode::Code => self.code(start, c),
},
None => SyntaxKind::End,
};
let text = self.s.from(start);
let node = match self.error.take() {
Some(error) => SyntaxNode::error(error, text),
None => SyntaxNode::leaf(kind, text),
};
(kind, node)
}
/// Eat whitespace characters greedily.
fn whitespace(&mut self, start: usize, c: char) -> SyntaxKind {
let more = self.s.eat_while(|c| is_space(c, self.mode));
let newlines = match c {
// Optimize eating a single space.
' ' if more.is_empty() => 0,
_ => count_newlines(self.s.from(start)),
};
self.newline = newlines > 0;
if self.mode == SyntaxMode::Markup && newlines >= 2 {
SyntaxKind::Parbreak
} else {
SyntaxKind::Space
}
}
fn shebang(&mut self) -> SyntaxKind {
self.s.eat_until(is_newline);
SyntaxKind::Shebang
}
fn line_comment(&mut self) -> SyntaxKind {
self.s.eat_until(is_newline);
SyntaxKind::LineComment
}
fn block_comment(&mut self) -> SyntaxKind {
let mut state = '_';
let mut depth = 1;
// Find the first `*/` that does not correspond to a nested `/*`.
while let Some(c) = self.s.eat() {
state = match (state, c) {
('*', '/') => {
depth -= 1;
if depth == 0 {
break;
}
'_'
}
('/', '*') => {
depth += 1;
'_'
}
_ => c,
}
}
SyntaxKind::BlockComment
}
}
/// Markup.
impl Lexer<'_> {
fn markup(&mut self, start: usize, c: char) -> SyntaxKind {
match c {
'\\' => self.backslash(),
'h' if self.s.eat_if("ttp://") => self.link(),
'h' if self.s.eat_if("ttps://") => self.link(),
'<' if self.s.at(is_id_continue) => self.label(),
'@' if self.s.at(is_id_continue) => self.ref_marker(),
'.' if self.s.eat_if("..") => SyntaxKind::Shorthand,
'-' if self.s.eat_if("--") => SyntaxKind::Shorthand,
'-' if self.s.eat_if('-') => SyntaxKind::Shorthand,
'-' if self.s.eat_if('?') => SyntaxKind::Shorthand,
'-' if self.s.at(char::is_numeric) => SyntaxKind::Shorthand,
'*' if !self.in_word() => SyntaxKind::Star,
'_' if !self.in_word() => SyntaxKind::Underscore,
'#' => SyntaxKind::Hash,
'[' => SyntaxKind::LeftBracket,
']' => SyntaxKind::RightBracket,
'\'' => SyntaxKind::SmartQuote,
'"' => SyntaxKind::SmartQuote,
'$' => SyntaxKind::Dollar,
'~' => SyntaxKind::Shorthand,
':' => SyntaxKind::Colon,
'=' => {
self.s.eat_while('=');
if self.space_or_end() { SyntaxKind::HeadingMarker } else { self.text() }
}
'-' if self.space_or_end() => SyntaxKind::ListMarker,
'+' if self.space_or_end() => SyntaxKind::EnumMarker,
'/' if self.space_or_end() => SyntaxKind::TermMarker,
'0'..='9' => self.numbering(start),
_ => self.text(),
}
}
fn backslash(&mut self) -> SyntaxKind {
if self.s.eat_if("u{") {
let hex = self.s.eat_while(char::is_ascii_alphanumeric);
if !self.s.eat_if('}') {
return self.error("unclosed Unicode escape sequence");
}
if u32::from_str_radix(hex, 16)
.ok()
.and_then(std::char::from_u32)
.is_none()
{
return self.error(eco_format!("invalid Unicode codepoint: {}", hex));
}
return SyntaxKind::Escape;
}
if self.s.done() || self.s.at(char::is_whitespace) {
SyntaxKind::Linebreak
} else {
self.s.eat();
SyntaxKind::Escape
}
}
/// We parse entire raw segments in the lexer as a convenience to avoid
/// going to and from the parser for each raw section. See comments in
/// [`Self::blocky_raw`] and [`Self::inline_raw`] for specific details.
fn raw(&mut self) -> (SyntaxKind, SyntaxNode) {
    // The caller already consumed the first backtick, hence `- 1`.
    let start = self.s.cursor() - 1;
    // Determine number of opening backticks.
    let mut backticks = 1;
    while self.s.eat_if('`') {
        backticks += 1;
    }
    // Special case for ``: an empty inline raw consisting of just the two
    // delimiters with no text in between.
    if backticks == 2 {
        let nodes = vec![
            SyntaxNode::leaf(SyntaxKind::RawDelim, "`"),
            SyntaxNode::leaf(SyntaxKind::RawDelim, "`"),
        ];
        return (SyntaxKind::Raw, SyntaxNode::inner(SyntaxKind::Raw, nodes));
    }
    // Find end of raw text: scan for a run of `backticks` consecutive
    // backticks; `found` counts the current run and resets on any other
    // character.
    let mut found = 0;
    while found < backticks {
        match self.s.eat() {
            Some('`') => found += 1,
            Some(_) => found = 0,
            None => {
                // Ran out of input before the closing delimiter.
                let msg = SyntaxError::new("unclosed raw text");
                let error = SyntaxNode::error(msg, self.s.from(start));
                return (SyntaxKind::Error, error);
            }
        }
    }
    let end = self.s.cursor();
    let mut nodes = Vec::with_capacity(3); // Will have at least 3.
    // A closure for pushing a node onto our raw vector. Assumes the caller
    // will move the scanner to the next location at each step. Each call
    // emits a leaf spanning from the previous call's cursor position to
    // the scanner's current one.
    let mut prev_start = start;
    let mut push_raw = |kind, s: &Scanner| {
        nodes.push(SyntaxNode::leaf(kind, s.from(prev_start)));
        prev_start = s.cursor();
    };
    // Opening delimiter.
    self.s.jump(start + backticks);
    push_raw(SyntaxKind::RawDelim, &self.s);
    // Three or more backticks start a block; one starts inline raw (two
    // were handled above). `end - backticks` is where the inner text ends.
    if backticks >= 3 {
        self.blocky_raw(end - backticks, &mut push_raw);
    } else {
        self.inline_raw(end - backticks, &mut push_raw);
    }
    // Closing delimiter.
    self.s.jump(end);
    push_raw(SyntaxKind::RawDelim, &self.s);
    (SyntaxKind::Raw, SyntaxNode::inner(SyntaxKind::Raw, nodes))
}
/// Raw blocks parse a language tag, have smart behavior for trimming
/// whitespace in the start/end lines, and trim common leading whitespace
/// from all other lines as the "dedent". The exact behavior is described
/// below.
///
/// ### The initial line:
/// - Text until the first whitespace or backtick is parsed as the language tag.
/// - We check the rest of the line and if all characters are whitespace,
///   trim it. Otherwise we trim a single leading space if present.
/// - If more trimmed characters follow on future lines, they will be
///   merged into the same trimmed element.
/// - If we didn't trim the entire line, the rest is kept as text.
///
/// ### Inner lines:
/// - We determine the "dedent" by iterating over the lines. The dedent is
///   the minimum number of leading whitespace characters (not bytes) before
///   each line that has any non-whitespace characters.
/// - The opening delimiter's line does not contribute to the dedent, but
///   the closing delimiter's line does (even if that line is entirely
///   whitespace up to the delimiter).
/// - We then trim the newline and dedent characters of each line, and add a
///   (potentially empty) text element of all remaining characters.
///
/// ### The final line:
/// - If the last line is entirely whitespace, it is trimmed.
/// - Otherwise its text is kept like an inner line. However, if the last
///   non-whitespace character of the final line is a backtick, then one
///   ascii space (if present) is trimmed from the end.
fn blocky_raw<F>(&mut self, inner_end: usize, mut push_raw: F)
where
    F: FnMut(SyntaxKind, &Scanner),
{
    // Language tag.
    let tag = self.s.eat_until(|c: char| c.is_whitespace() || c == '`');
    if !tag.is_empty() {
        push_raw(SyntaxKind::RawLang, &self.s);
    }
    // The rest of the function operates on the lines between the backticks.
    let mut lines = split_newlines(self.s.to(inner_end));
    // Determine dedent level. `skip(1)` excludes the opening delimiter's
    // line; whitespace-only lines are ignored.
    let dedent = lines
        .iter()
        .skip(1)
        .filter(|line| !line.chars().all(char::is_whitespace))
        // The line with the closing ``` is always taken into account
        .chain(lines.last())
        .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
        .min()
        .unwrap_or(0);
    // Trim whitespace from the last line. Will be added as a `RawTrimmed`
    // kind by the check for `self.s.cursor() != inner_end` below.
    if lines.last().is_some_and(|last| last.chars().all(char::is_whitespace)) {
        lines.pop();
    } else if let Some(last) = lines.last_mut() {
        // If last line ends in a backtick, try to trim a single space. This
        // check must happen before we add the first line since the last and
        // first lines might be the same.
        if last.trim_end().ends_with('`') {
            *last = last.strip_suffix(' ').unwrap_or(last);
        }
    }
    let mut lines = lines.into_iter();
    // Handle the first line: trim if all whitespace, or trim a single space
    // at the start. Note that the first line does not affect the dedent
    // value.
    if let Some(first_line) = lines.next() {
        if first_line.chars().all(char::is_whitespace) {
            self.s.advance(first_line.len());
            // This is the only spot we advance the scanner, but don't
            // immediately call `push_raw`. But the rest of the function
            // ensures we will always add this text to a `RawTrimmed` later.
            debug_assert!(self.s.cursor() != inner_end);
            // A proof by cases follows:
            // # First case: The loop runs
            // If the loop runs, there must be a newline following, so
            // `cursor != inner_end`. And if the loop runs, the first thing
            // it does is add a trimmed element.
            // # Second case: The final if-statement runs.
            // To _not_ reach the loop from here, we must have only one or
            // two lines:
            // 1. If one line, we cannot be here, because the first and last
            //    lines are the same, so this line will have been removed by
            //    the check for the last line being all whitespace.
            // 2. If two lines, the loop will run unless the last is fully
            //    whitespace, but if it is, it will have been popped, then
            //    the final if-statement will run because the text removed
            //    by the last line must include at least a newline, so
            //    `cursor != inner_end` here.
        } else {
            let line_end = self.s.cursor() + first_line.len();
            if self.s.eat_if(' ') {
                // Trim a single space after the lang tag on the first line.
                push_raw(SyntaxKind::RawTrimmed, &self.s);
            }
            // We know here that the rest of the line is non-empty.
            self.s.jump(line_end);
            push_raw(SyntaxKind::Text, &self.s);
        }
    }
    // Add lines.
    for line in lines {
        // The dedent is a char count; convert it to a byte offset for
        // this specific line before advancing.
        let offset: usize = line.chars().take(dedent).map(char::len_utf8).sum();
        self.s.eat_newline();
        self.s.advance(offset);
        push_raw(SyntaxKind::RawTrimmed, &self.s);
        self.s.advance(line.len() - offset);
        push_raw(SyntaxKind::Text, &self.s);
    }
    // Add final trimmed.
    if self.s.cursor() < inner_end {
        self.s.jump(inner_end);
        push_raw(SyntaxKind::RawTrimmed, &self.s);
    }
}
/// Inline raw text is split on lines with non-newlines as `Text` kinds and
/// newlines as `RawTrimmed`. Inline raw text does not dedent the text, all
/// non-newline whitespace is kept.
fn inline_raw<F>(&mut self, inner_end: usize, mut push_raw: F)
where
    F: FnMut(SyntaxKind, &Scanner),
{
    while self.s.cursor() < inner_end {
        if !self.s.at(is_newline) {
            // Ordinary character: extend the current text segment.
            self.s.eat();
        } else {
            // Close the current text segment, then emit the newline
            // itself as a trimmed element.
            push_raw(SyntaxKind::Text, &self.s);
            self.s.eat_newline();
            push_raw(SyntaxKind::RawTrimmed, &self.s);
        }
    }
    // The final (possibly empty) text segment.
    push_raw(SyntaxKind::Text, &self.s);
}
/// Lex an automatic hyperlink starting at the current position.
fn link(&mut self) -> SyntaxKind {
    // `link_prefix` returns the link text and whether its brackets were
    // balanced; unbalanced brackets are an error.
    let (prefix, balanced) = link_prefix(self.s.after());
    self.s.advance(prefix.len());
    match balanced {
        true => SyntaxKind::Link,
        false => self.error(
            "automatic links cannot contain unbalanced brackets, \
             use the `link` function instead",
        ),
    }
}
/// Lex a potential enum marker like `1.`; falls back to plain text if the
/// digits are not followed by a dot and space/end, or don't fit a `u64`.
fn numbering(&mut self, start: usize) -> SyntaxKind {
    self.s.eat_while(char::is_ascii_digit);
    let digits = self.s.from(start);
    // Note the order: the dot is eaten (mutating the scanner) before the
    // remaining checks, exactly as in a short-circuiting `&&` chain.
    let is_marker =
        self.s.eat_if('.') && self.space_or_end() && digits.parse::<u64>().is_ok();
    if is_marker { SyntaxKind::EnumMarker } else { self.text() }
}
/// Lex a reference marker (`@target`).
fn ref_marker(&mut self) -> SyntaxKind {
    self.s.eat_while(is_valid_in_label_literal);
    // Back off over trailing dots and colons, which are more likely
    // punctuation belonging to the surrounding text than to the target.
    while let Some('.' | ':') = self.s.scout(-1) {
        self.s.uneat();
    }
    SyntaxKind::RefMarker
}
/// Lex a label literal (`<name>`); the opening `<` was already consumed.
fn label(&mut self) -> SyntaxKind {
    let text = self.s.eat_while(is_valid_in_label_literal);
    if text.is_empty() {
        self.error("label cannot be empty")
    } else if !self.s.eat_if('>') {
        self.error("unclosed label")
    } else {
        SyntaxKind::Label
    }
}
/// Lex plain markup text, merging across characters that would become
/// text anyway.
fn text(&mut self) -> SyntaxKind {
    // Builds a 128-entry lookup table marking the ASCII characters that
    // can terminate a text node (markup-significant characters).
    macro_rules! table {
        ($(|$c:literal)*) => {
            static TABLE: [bool; 128] = {
                let mut t = [false; 128];
                $(t[$c as usize] = true;)*
                t
            };
        };
    }
    table! {
        | ' ' | '\t' | '\n' | '\x0b' | '\x0c' | '\r' | '\\' | '/'
        | '[' | ']' | '~' | '-' | '.' | '\'' | '"' | '*' | '_'
        | ':' | 'h' | '`' | '$' | '<' | '>' | '@' | '#'
    };
    loop {
        self.s.eat_until(|c: char| {
            // Non-ASCII characters fall outside the table; for those, only
            // whitespace terminates the text node.
            TABLE.get(c as usize).copied().unwrap_or_else(|| c.is_whitespace())
        });
        // Continue with the same text node if the thing would become text
        // anyway. We peek on a copy of the scanner and only commit it
        // (below) when the lookahead confirms the merge.
        let mut s = self.s;
        match s.eat() {
            Some(' ') if s.at(char::is_alphanumeric) => {}
            Some('/') if !s.at(['/', '*']) => {}
            Some('-') if !s.at(['-', '?']) => {}
            Some('.') if !s.at("..") => {}
            Some('h') if !s.at("ttp://") && !s.at("ttps://") => {}
            Some('@') if !s.at(is_valid_in_label_literal) => {}
            _ => break,
        }
        // Commit the speculative scanner and keep scanning.
        self.s = s;
    }
    SyntaxKind::Text
}
/// Whether the lexer is currently inside a word: both the character
/// before the last-eaten one and the upcoming character are "wordy".
fn in_word(&self) -> bool {
    // Characters from these scripts are excluded from the wordy check.
    let excluded_script = |c: char| {
        matches!(
            c.script(),
            Script::Han | Script::Hiragana | Script::Katakana | Script::Hangul
        )
    };
    let wordy =
        |c: Option<char>| c.is_some_and(|c| c.is_alphanumeric() && !excluded_script(c));
    wordy(self.s.scout(-2)) && wordy(self.s.peek())
}
/// Whether the scanner is at the end of input, at whitespace, or at the
/// start of a line or block comment.
fn space_or_end(&self) -> bool {
    if self.s.done() {
        return true;
    }
    self.s.at(char::is_whitespace) || self.s.at("//") || self.s.at("/*")
}
}
/// Math.
impl Lexer<'_> {
    /// Lex a single token in math mode. `c` is the already-eaten first
    /// character. Returns the token kind and, for identifier field
    /// accesses, a fully-built node.
    fn math(&mut self, start: usize, c: char) -> (SyntaxKind, Option<SyntaxNode>) {
        let kind = match c {
            '\\' => self.backslash(),
            '"' => self.string(),
            // Multi-character shorthands. Within each first-character
            // group, longer suffixes must be tried before their prefixes
            // (e.g. `>>` before `>`), so arm order is significant.
            '-' if self.s.eat_if(">>") => SyntaxKind::MathShorthand,
            '-' if self.s.eat_if('>') => SyntaxKind::MathShorthand,
            '-' if self.s.eat_if("->") => SyntaxKind::MathShorthand,
            ':' if self.s.eat_if('=') => SyntaxKind::MathShorthand,
            ':' if self.s.eat_if(":=") => SyntaxKind::MathShorthand,
            '!' if self.s.eat_if('=') => SyntaxKind::MathShorthand,
            '.' if self.s.eat_if("..") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("==>") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("-->") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("--") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("-<") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("->") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("<-") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("<<") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("=>") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("==") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if("~~") => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if('=') => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if('<') => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if('-') => SyntaxKind::MathShorthand,
            '<' if self.s.eat_if('~') => SyntaxKind::MathShorthand,
            '>' if self.s.eat_if("->") => SyntaxKind::MathShorthand,
            '>' if self.s.eat_if(">>") => SyntaxKind::MathShorthand,
            '=' if self.s.eat_if("=>") => SyntaxKind::MathShorthand,
            '=' if self.s.eat_if('>') => SyntaxKind::MathShorthand,
            '=' if self.s.eat_if(':') => SyntaxKind::MathShorthand,
            '>' if self.s.eat_if('=') => SyntaxKind::MathShorthand,
            '>' if self.s.eat_if('>') => SyntaxKind::MathShorthand,
            '|' if self.s.eat_if("->") => SyntaxKind::MathShorthand,
            '|' if self.s.eat_if("=>") => SyntaxKind::MathShorthand,
            '|' if self.s.eat_if('|') => SyntaxKind::MathShorthand,
            '~' if self.s.eat_if("~>") => SyntaxKind::MathShorthand,
            '~' if self.s.eat_if('>') => SyntaxKind::MathShorthand,
            // Single-character shorthands.
            '*' | '-' | '~' => SyntaxKind::MathShorthand,
            '.' => SyntaxKind::Dot,
            ',' => SyntaxKind::Comma,
            ';' => SyntaxKind::Semicolon,
            '#' => SyntaxKind::Hash,
            '_' => SyntaxKind::Underscore,
            '$' => SyntaxKind::Dollar,
            '/' => SyntaxKind::Slash,
            '^' => SyntaxKind::Hat,
            '&' => SyntaxKind::MathAlignPoint,
            '√' | '∛' | '∜' => SyntaxKind::Root,
            '!' => SyntaxKind::Bang,
            '\'' => {
                // Consecutive primes are grouped into one token.
                self.s.eat_while('\'');
                SyntaxKind::MathPrimes
            }
            // We lex delimiters as `{Left,Right}{Brace,Paren}` and convert back
            // to `MathText` or `MathShorthand` in the parser.
            '(' => SyntaxKind::LeftParen,
            ')' => SyntaxKind::RightParen,
            // TODO: We may instead want to add `MathOpening` and `MathClosing`
            // kinds for these.
            '[' if self.s.eat_if('|') => SyntaxKind::LeftBrace,
            '|' if self.s.eat_if(']') => SyntaxKind::RightBrace,
            c if default_math_class(c) == Some(MathClass::Opening) => {
                SyntaxKind::LeftBrace
            }
            c if default_math_class(c) == Some(MathClass::Closing) => {
                SyntaxKind::RightBrace
            }
            // Identifiers.
            c if is_math_id_start(c) && self.s.at(is_math_id_continue) => {
                self.s.eat_while(is_math_id_continue);
                let (last_index, _) =
                    self.s.from(start).grapheme_indices(true).next_back().unwrap();
                if last_index == 0 {
                    // If this was just a single grapheme.
                    SyntaxKind::MathText
                } else {
                    let (kind, node) = self.math_ident_or_field(start);
                    return (kind, Some(node));
                }
            }
            // Other math atoms.
            _ => self.math_text(start, c),
        };
        (kind, None)
    }
    /// Parse a single `MathIdent` or an entire `FieldAccess`.
    fn math_ident_or_field(&mut self, start: usize) -> (SyntaxKind, SyntaxNode) {
        let mut kind = SyntaxKind::MathIdent;
        let mut node = SyntaxNode::leaf(kind, self.s.from(start));
        // Each `.ident` suffix wraps the node so far in a `FieldAccess`,
        // building a left-nested chain for `a.b.c`.
        while let Some(ident) = self.maybe_dot_ident() {
            kind = SyntaxKind::FieldAccess;
            let field_children = vec![
                node,
                SyntaxNode::leaf(SyntaxKind::Dot, '.'),
                SyntaxNode::leaf(SyntaxKind::Ident, ident),
            ];
            node = SyntaxNode::inner(kind, field_children);
        }
        (kind, node)
    }
    /// If at a dot and a math identifier, eat and return the identifier.
    fn maybe_dot_ident(&mut self) -> Option<&str> {
        // Look one character past the dot before committing to eating it.
        if self.s.scout(1).is_some_and(is_math_id_start) && self.s.eat_if('.') {
            let ident_start = self.s.cursor();
            self.s.eat();
            self.s.eat_while(is_math_id_continue);
            Some(self.s.from(ident_start))
        } else {
            None
        }
    }
    /// Lex a plain math atom starting with `c` (already eaten).
    fn math_text(&mut self, start: usize, c: char) -> SyntaxKind {
        // Keep numbers and grapheme clusters together.
        if c.is_numeric() {
            self.s.eat_while(char::is_numeric);
            // Only include a decimal point if digits follow it; peek via a
            // scanner copy and commit only on success.
            let mut s = self.s;
            if s.eat_if('.') && !s.eat_while(char::is_numeric).is_empty() {
                self.s = s;
            }
        } else {
            // Take exactly one grapheme cluster (which may be several
            // chars) starting at `start`.
            let len = self
                .s
                .get(start..self.s.string().len())
                .graphemes(true)
                .next()
                .map_or(0, str::len);
            self.s.jump(start + len);
        }
        SyntaxKind::MathText
    }
    /// Handle named arguments in math function call.
    pub fn maybe_math_named_arg(&mut self, start: usize) -> Option<SyntaxNode> {
        let cursor = self.s.cursor();
        self.s.jump(start);
        if self.s.eat_if(is_id_start) {
            self.s.eat_while(is_id_continue);
            // Check that a colon directly follows the identifier, and not the
            // `:=` or `::=` math shorthands.
            if self.s.at(':') && !self.s.at(":=") && !self.s.at("::=") {
                // Check that the identifier is not just `_`.
                let node = if self.s.from(start) != "_" {
                    SyntaxNode::leaf(SyntaxKind::Ident, self.s.from(start))
                } else {
                    let msg = SyntaxError::new("expected identifier, found underscore");
                    SyntaxNode::error(msg, self.s.from(start))
                };
                return Some(node);
            }
        }
        // Not a named argument: restore the scanner position.
        self.s.jump(cursor);
        None
    }
    /// Handle spread arguments in math function call.
    pub fn maybe_math_spread_arg(&mut self, start: usize) -> Option<SyntaxNode> {
        let cursor = self.s.cursor();
        self.s.jump(start);
        if self.s.eat_if("..") {
            // We only infer a spread operator if it is not followed by:
            // - a space/trivia/end
            // - a dot (this would clash with the `...` math shorthand)
            // - an end of arg character: `,`, `;`, ')', `$` (spreads nothing)
            if !self.space_or_end() && !self.s.at(['.', ',', ';', ')', '$']) {
                let node = SyntaxNode::leaf(SyntaxKind::Dots, self.s.from(start));
                return Some(node);
            }
        }
        // Not a spread: restore the scanner position.
        self.s.jump(cursor);
        None
    }
}
/// Code.
impl Lexer<'_> {
/// Lex a single token in code mode. `c` is the already-eaten first
/// character and `start` its byte position.
fn code(&mut self, start: usize, c: char) -> SyntaxKind {
    match c {
        // A label only if an identifier character follows the `<`;
        // otherwise `<` falls through to the comparison operators below.
        '<' if self.s.at(is_id_continue) => self.label(),
        '0'..='9' => self.number(start, c),
        // A leading dot is a number only when digits follow (e.g. `.5`).
        '.' if self.s.at(char::is_ascii_digit) => self.number(start, c),
        '"' => self.string(),
        // Two-character operators; these must precede the one-character
        // fallbacks below, so arm order is significant.
        '=' if self.s.eat_if('=') => SyntaxKind::EqEq,
        '!' if self.s.eat_if('=') => SyntaxKind::ExclEq,
        '<' if self.s.eat_if('=') => SyntaxKind::LtEq,
        '>' if self.s.eat_if('=') => SyntaxKind::GtEq,
        '+' if self.s.eat_if('=') => SyntaxKind::PlusEq,
        // U+2212 (minus sign) is accepted as an alternative to `-`.
        '-' | '\u{2212}' if self.s.eat_if('=') => SyntaxKind::HyphEq,
        '*' if self.s.eat_if('=') => SyntaxKind::StarEq,
        '/' if self.s.eat_if('=') => SyntaxKind::SlashEq,
        '.' if self.s.eat_if('.') => SyntaxKind::Dots,
        '=' if self.s.eat_if('>') => SyntaxKind::Arrow,
        // One-character tokens.
        '{' => SyntaxKind::LeftBrace,
        '}' => SyntaxKind::RightBrace,
        '[' => SyntaxKind::LeftBracket,
        ']' => SyntaxKind::RightBracket,
        '(' => SyntaxKind::LeftParen,
        ')' => SyntaxKind::RightParen,
        '$' => SyntaxKind::Dollar,
        ',' => SyntaxKind::Comma,
        ';' => SyntaxKind::Semicolon,
        ':' => SyntaxKind::Colon,
        '.' => SyntaxKind::Dot,
        '+' => SyntaxKind::Plus,
        '-' | '\u{2212}' => SyntaxKind::Minus,
        '*' => SyntaxKind::Star,
        '/' => SyntaxKind::Slash,
        '=' => SyntaxKind::Eq,
        '<' => SyntaxKind::Lt,
        '>' => SyntaxKind::Gt,
        c if is_id_start(c) => self.ident(start),
        c => self.error(eco_format!("the character `{c}` is not valid in code")),
    }
}
/// Lex an identifier, underscore, or keyword in code.
fn ident(&mut self, start: usize) -> SyntaxKind {
    self.s.eat_while(is_id_continue);
    let ident = self.s.from(start);
    // Do not treat the identifier as a keyword directly after a `.` or
    // `@` — unless that dot is part of a `..` spread.
    let prev = self.s.get(0..start);
    let keyword_position = !prev.ends_with(['.', '@']) || prev.ends_with("..");
    if keyword_position {
        if let Some(kw) = keyword(ident) {
            return kw;
        }
    }
    match ident {
        "_" => SyntaxKind::Underscore,
        _ => SyntaxKind::Ident,
    }
}
fn number(&mut self, start: usize, first_c: char) -> SyntaxKind {
// Handle alternative integer bases.
let base = match first_c {
'0' if self.s.eat_if('b') => 2,
'0' if self.s.eat_if('o') => 8,
'0' if self.s.eat_if('x') => 16,
_ => 10,
};
// Read the initial digits.
if base == 16 {
self.s.eat_while(char::is_ascii_alphanumeric);
} else {
self.s.eat_while(char::is_ascii_digit);
}
// Read floating point digits and exponents.
let mut is_float = false;
if base == 10 {
// Read digits following a dot. Make sure not to confuse a spread
// operator or a method call for the decimal separator.
if first_c == '.' {
is_float = true; // We already ate the trailing digits above.
} else if !self.s.at("..")
&& !self.s.scout(1).is_some_and(is_id_start)
&& self.s.eat_if('.')
{
is_float = true;
self.s.eat_while(char::is_ascii_digit);
}
// Read the exponent.
if !self.s.at("em") && self.s.eat_if(['e', 'E']) {
is_float = true;
self.s.eat_if(['+', '-']);
self.s.eat_while(char::is_ascii_digit);
}
}
let number = self.s.from(start);
let suffix = self.s.eat_while(|c: char| c.is_ascii_alphanumeric() || c == '%');
// Parse large integer literals as floats
if base == 10
&& !is_float
&& let Err(e) = i64::from_str_radix(number, base)
&& matches!(e.kind(), IntErrorKind::PosOverflow | IntErrorKind::NegOverflow)
&& number.parse::<f64>().is_ok()
{
is_float = true;
}
let mut suffix_result = match suffix {
"" => Ok(None),
"pt" | "mm" | "cm" | "in" | "deg" | "rad" | "em" | "fr" | "%" => Ok(Some(())),
_ => Err(eco_format!("invalid number suffix: {suffix}")),
};
let number_result = if is_float && number.parse::<f64>().is_err() {
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | true |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/ast.rs | crates/typst-syntax/src/ast.rs | /*!
# Abstract Syntax Tree Interface
Typst's Abstract Syntax Tree (AST) is a lazy, typed view over the untyped
Concrete Syntax Tree (CST) and is rooted in the [`Markup`] node.
## The AST is a View
Most AST nodes are wrapper structs around [`SyntaxNode`] pointers. This summary
will use a running example of the [`Raw`] node type, which is declared (after
macro expansion) as: `struct Raw<'a>(&'a SyntaxNode);`.
[`SyntaxNode`]s are generated by the parser and constitute the Concrete Syntax
Tree (CST). The CST is _concrete_ because it has the property that an in-order
tree traversal will recreate the text of the source file exactly.
[`SyntaxNode`]s in the CST contain their [`SyntaxKind`], but don't themselves
provide access to the semantic meaning of their contents. That semantic meaning
is available through the Abstract Syntax Tree by iterating over CST nodes and
inspecting their contents. The format is prepared ahead-of-time by the parser so
that this module can unpack the abstract meaning from the CST's structure.
Raw nodes are parsed by recognizing paired backtick delimiters, which you will
find as CST nodes with the [`RawDelim`] kind. However, the AST doesn't include
these delimiters because it _abstracts_ over the backticks. Instead, the parent
raw node will only use its child [`RawDelim`] CST nodes to determine whether the
element is a block or inline.
## The AST is Typed
AST nodes all implement the [`AstNode`] trait, but nodes can also implement
their own unique methods. These unique methods are the "real" interface of the
AST, and provide access to the abstract, semantic, representation of each kind
of node. For example, the [`Raw`] node provides 3 methods that specify its
abstract representation: [`Raw::lines()`] returns the raw text as an iterator of
lines, [`Raw::lang()`] provides the optionally present [`RawLang`] language tag,
and [`Raw::block()`] gives a bool for whether the raw element is a block or
inline.
This semantic information is unavailable in the CST. Only by converting a CST
node to an AST struct will Rust let you call a method of that struct. This is a
safe interface because the only way to create an AST node outside this file is
to call [`AstNode::from_untyped`]. The `node!` macro implements `from_untyped`
by checking the node's kind before constructing it, returning `Some()` only if
the kind matches. So we know that it will have the expected children underneath,
otherwise the parser wouldn't have produced this node.
## The AST is rooted in the [`Markup`] node
The AST is rooted in the [`Markup`] node, which provides only one method:
[`Markup::exprs`]. This returns an iterator of the main [`Expr`] enum. [`Expr`]
is important because it contains the majority of expressions that Typst will
evaluate. Not just markup, but also math and code expressions. Not all
expression types are available from the parser at every step, but this does
decrease the amount of wrapper enums needed in the AST (and this file is long
enough already).
Expressions also branch off into the remaining tree. You can view enums in this
file as edges on a graph: areas where the tree has paths from one type to
another (accessed through methods), then structs are the nodes of the graph,
providing methods that return enums, etc. etc.
## The AST is Lazy
Being lazy means that the untyped CST nodes are converted to typed AST nodes
only as the tree is traversed. If we parse a file and a raw block is contained
in a branch of an if-statement that we don't take, then we won't pay the cost of
creating an iterator over the lines or checking whether it was a block or
inline (although it will still be parsed into nodes).
This is also a factor of the current "tree-interpreter" evaluation model. A
bytecode interpreter might instead eagerly convert the AST into bytecode, but it
would still traverse using this lazy interface. While the tree-interpreter
evaluation is straightforward and easy to add new features onto, it has to
re-traverse the AST every time a function is evaluated. A bytecode interpreter
using the lazy interface would only need to traverse each node once, improving
throughput at the cost of initial latency and development flexibility.
*/
use std::num::NonZeroUsize;
use std::ops::Deref;
use std::path::Path;
use std::str::FromStr;
use ecow::EcoString;
use unscanny::Scanner;
use crate::package::PackageSpec;
use crate::{Span, SyntaxKind, SyntaxNode, is_ident, is_newline};
/// A typed AST node.
pub trait AstNode<'a>: Sized {
    /// Convert a node into its typed variant.
    ///
    /// Returns `None` if the untyped node's kind does not match this
    /// type.
    fn from_untyped(node: &'a SyntaxNode) -> Option<Self>;
    /// A reference to the underlying syntax node.
    fn to_untyped(self) -> &'a SyntaxNode;
    /// The source code location.
    fn span(self) -> Span {
        self.to_untyped().span()
    }
}
// A generic interface for converting untyped nodes into typed AST nodes.
impl SyntaxNode {
    /// Whether the node can be cast to the given AST node.
    pub fn is<'a, T: AstNode<'a>>(&'a self) -> bool {
        self.cast::<T>().is_some()
    }
    /// Try to convert the node to a typed AST node.
    pub fn cast<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
        T::from_untyped(self)
    }
    /// Find the first child that can cast to the AST type `T`.
    fn try_cast_first<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
        self.children().filter_map(Self::cast).next()
    }
    /// Find the last child that can cast to the AST type `T`.
    fn try_cast_last<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
        self.children().rev().filter_map(Self::cast).next()
    }
    /// Get the first child of AST type `T` or a placeholder if none.
    fn cast_first<'a, T: AstNode<'a> + Default>(&'a self) -> T {
        match self.try_cast_first() {
            Some(child) => child,
            None => T::default(),
        }
    }
    /// Get the last child of AST type `T` or a placeholder if none.
    fn cast_last<'a, T: AstNode<'a> + Default>(&'a self) -> T {
        match self.try_cast_last() {
            Some(child) => child,
            None => T::default(),
        }
    }
}
/// Implements [`AstNode`] for a struct whose name matches a [`SyntaxKind`]
/// variant.
///
/// The struct becomes a wrapper around a [`SyntaxNode`] pointer, and the
/// implementation of [`AstNode::from_untyped`] checks that the pointer's kind
/// matches when converting, returning `Some` or `None` respectively.
///
/// The generated struct is the basis for typed accessor methods for properties
/// of this AST node. For example, the [`Raw`] struct has methods for accessing
/// its content by lines, its optional language tag, and whether the raw element
/// is inline or a block. These methods are accessible only _after_ a
/// `SyntaxNode` is coerced to the `Raw` struct type (via `from_untyped`),
/// guaranteeing their implementations will work with the expected structure.
macro_rules! node {
    ($(#[$attr:meta])* struct $name:ident) => {
        // Create the struct as a wrapper around a `SyntaxNode` reference.
        #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
        #[repr(transparent)]
        $(#[$attr])*
        pub struct $name<'a>(&'a SyntaxNode);
        impl<'a> AstNode<'a> for $name<'a> {
            #[inline]
            fn from_untyped(node: &'a SyntaxNode) -> Option<Self> {
                if node.kind() == SyntaxKind::$name {
                    Some(Self(node))
                } else {
                    // `Option::None` is written in full because this module
                    // also defines its own `None` AST node type.
                    Option::None
                }
            }
            #[inline]
            fn to_untyped(self) -> &'a SyntaxNode {
                self.0
            }
        }
        // `Default` yields a detached placeholder node of the right kind,
        // used by `cast_first`/`cast_last` when a child is missing.
        impl Default for $name<'_> {
            #[inline]
            fn default() -> Self {
                static PLACEHOLDER: SyntaxNode
                    = SyntaxNode::placeholder(SyntaxKind::$name);
                Self(&PLACEHOLDER)
            }
        }
    };
}
node! {
    /// The syntactical root capable of representing a full parsed document.
    struct Markup
}
impl<'a> Markup<'a> {
    /// The expressions.
    pub fn exprs(self) -> impl DoubleEndedIterator<Item = Expr<'a>> {
        // Stateful filter: `was_stmt` remembers whether the previous node
        // was a statement, so the space right after it can be dropped.
        let mut was_stmt = false;
        self.0
            .children()
            .filter(move |node| {
                // Ignore newline directly after statements without semicolons.
                let kind = node.kind();
                let keep = !was_stmt || node.kind() != SyntaxKind::Space;
                was_stmt = kind.is_stmt();
                keep
            })
            // `cast_with_space` keeps `Space` nodes that survived the
            // filter, which plain `Expr::from_untyped` would drop.
            .filter_map(Expr::cast_with_space)
    }
}
/// An expression in markup, math or code.
///
/// Note that the `Space` variant is only produced when casting via
/// `Expr::cast_with_space`; `AstNode::from_untyped` skips space nodes.
#[derive(Debug, Copy, Clone, Hash)]
pub enum Expr<'a> {
    /// Plain text without markup.
    Text(Text<'a>),
    /// Whitespace in markup or math. Has at most one newline in markup, as more
    /// indicate a paragraph break.
    Space(Space<'a>),
    /// A forced line break: `\`.
    Linebreak(Linebreak<'a>),
    /// A paragraph break, indicated by one or multiple blank lines.
    Parbreak(Parbreak<'a>),
    /// An escape sequence: `\#`, `\u{1F5FA}`.
    Escape(Escape<'a>),
    /// A shorthand for a unicode codepoint. For example, `~` for non-breaking
    /// space or `-?` for a soft hyphen.
    Shorthand(Shorthand<'a>),
    /// A smart quote: `'` or `"`.
    SmartQuote(SmartQuote<'a>),
    /// Strong content: `*Strong*`.
    Strong(Strong<'a>),
    /// Emphasized content: `_Emphasized_`.
    Emph(Emph<'a>),
    /// Raw text with optional syntax highlighting: `` `...` ``.
    Raw(Raw<'a>),
    /// A hyperlink: `https://typst.org`.
    Link(Link<'a>),
    /// A label: `<intro>`.
    Label(Label<'a>),
    /// A reference: `@target`, `@target[..]`.
    Ref(Ref<'a>),
    /// A section heading: `= Introduction`.
    Heading(Heading<'a>),
    /// An item in a bullet list: `- ...`.
    ListItem(ListItem<'a>),
    /// An item in an enumeration (numbered list): `+ ...` or `1. ...`.
    EnumItem(EnumItem<'a>),
    /// An item in a term list: `/ Term: Details`.
    TermItem(TermItem<'a>),
    /// A mathematical equation: `$x$`, `$ x^2 $`.
    Equation(Equation<'a>),
    /// The contents of a mathematical equation: `x^2 + 1`.
    Math(Math<'a>),
    /// A lone text fragment in math: `x`, `25`, `3.1415`, `=`, `[`.
    MathText(MathText<'a>),
    /// An identifier in math: `pi`.
    MathIdent(MathIdent<'a>),
    /// A shorthand for a unicode codepoint in math: `a <= b`.
    MathShorthand(MathShorthand<'a>),
    /// An alignment point in math: `&`.
    MathAlignPoint(MathAlignPoint<'a>),
    /// Matched delimiters in math: `[x + y]`.
    MathDelimited(MathDelimited<'a>),
    /// A base with optional attachments in math: `a_1^2`.
    MathAttach(MathAttach<'a>),
    /// Grouped primes in math: `a'''`.
    MathPrimes(MathPrimes<'a>),
    /// A fraction in math: `x/2`.
    MathFrac(MathFrac<'a>),
    /// A root in math: `√x`, `∛x` or `∜x`.
    MathRoot(MathRoot<'a>),
    /// An identifier: `left`.
    Ident(Ident<'a>),
    /// The `none` literal.
    None(None<'a>),
    /// The `auto` literal.
    Auto(Auto<'a>),
    /// A boolean: `true`, `false`.
    Bool(Bool<'a>),
    /// An integer: `120`.
    Int(Int<'a>),
    /// A floating-point number: `1.2`, `10e-4`.
    Float(Float<'a>),
    /// A numeric value with a unit: `12pt`, `3cm`, `2em`, `90deg`, `50%`.
    Numeric(Numeric<'a>),
    /// A quoted string: `"..."`.
    Str(Str<'a>),
    /// A code block: `{ let x = 1; x + 2 }`.
    CodeBlock(CodeBlock<'a>),
    /// A content block: `[*Hi* there!]`.
    ContentBlock(ContentBlock<'a>),
    /// A grouped expression: `(1 + 2)`.
    Parenthesized(Parenthesized<'a>),
    /// An array: `(1, "hi", 12cm)`.
    Array(Array<'a>),
    /// A dictionary: `(thickness: 3pt, dash: "solid")`.
    Dict(Dict<'a>),
    /// A unary operation: `-x`.
    Unary(Unary<'a>),
    /// A binary operation: `a + b`.
    Binary(Binary<'a>),
    /// A field access: `properties.age`.
    FieldAccess(FieldAccess<'a>),
    /// An invocation of a function or method: `f(x, y)`.
    FuncCall(FuncCall<'a>),
    /// A closure: `(x, y) => z`.
    Closure(Closure<'a>),
    /// A let binding: `let x = 1`.
    LetBinding(LetBinding<'a>),
    /// A destructuring assignment: `(x, y) = (1, 2)`.
    DestructAssignment(DestructAssignment<'a>),
    /// A set rule: `set text(...)`.
    SetRule(SetRule<'a>),
    /// A show rule: `show heading: it => emph(it.body)`.
    ShowRule(ShowRule<'a>),
    /// A contextual expression: `context text.lang`.
    Contextual(Contextual<'a>),
    /// An if-else conditional: `if x { y } else { z }`.
    Conditional(Conditional<'a>),
    /// A while loop: `while x { y }`.
    WhileLoop(WhileLoop<'a>),
    /// A for loop: `for x in y { z }`.
    ForLoop(ForLoop<'a>),
    /// A module import: `import "utils.typ": a, b, c`.
    ModuleImport(ModuleImport<'a>),
    /// A module include: `include "chapter1.typ"`.
    ModuleInclude(ModuleInclude<'a>),
    /// A break from a loop: `break`.
    LoopBreak(LoopBreak<'a>),
    /// A continue in a loop: `continue`.
    LoopContinue(LoopContinue<'a>),
    /// A return from a function: `return`, `return x + 1`.
    FuncReturn(FuncReturn<'a>),
}
impl<'a> Expr<'a> {
    /// Cast an untyped node into an expression, additionally accepting
    /// `Space` nodes, which `from_untyped` rejects.
    fn cast_with_space(node: &'a SyntaxNode) -> Option<Self> {
        if node.kind() == SyntaxKind::Space {
            Some(Self::Space(Space(node)))
        } else {
            Self::from_untyped(node)
        }
    }
}
impl<'a> AstNode<'a> for Expr<'a> {
fn from_untyped(node: &'a SyntaxNode) -> Option<Self> {
match node.kind() {
SyntaxKind::Space => Option::None, // Skipped unless using `cast_with_space`.
SyntaxKind::Linebreak => Some(Self::Linebreak(Linebreak(node))),
SyntaxKind::Parbreak => Some(Self::Parbreak(Parbreak(node))),
SyntaxKind::Text => Some(Self::Text(Text(node))),
SyntaxKind::Escape => Some(Self::Escape(Escape(node))),
SyntaxKind::Shorthand => Some(Self::Shorthand(Shorthand(node))),
SyntaxKind::SmartQuote => Some(Self::SmartQuote(SmartQuote(node))),
SyntaxKind::Strong => Some(Self::Strong(Strong(node))),
SyntaxKind::Emph => Some(Self::Emph(Emph(node))),
SyntaxKind::Raw => Some(Self::Raw(Raw(node))),
SyntaxKind::Link => Some(Self::Link(Link(node))),
SyntaxKind::Label => Some(Self::Label(Label(node))),
SyntaxKind::Ref => Some(Self::Ref(Ref(node))),
SyntaxKind::Heading => Some(Self::Heading(Heading(node))),
SyntaxKind::ListItem => Some(Self::ListItem(ListItem(node))),
SyntaxKind::EnumItem => Some(Self::EnumItem(EnumItem(node))),
SyntaxKind::TermItem => Some(Self::TermItem(TermItem(node))),
SyntaxKind::Equation => Some(Self::Equation(Equation(node))),
SyntaxKind::Math => Some(Self::Math(Math(node))),
SyntaxKind::MathText => Some(Self::MathText(MathText(node))),
SyntaxKind::MathIdent => Some(Self::MathIdent(MathIdent(node))),
SyntaxKind::MathShorthand => Some(Self::MathShorthand(MathShorthand(node))),
SyntaxKind::MathAlignPoint => {
Some(Self::MathAlignPoint(MathAlignPoint(node)))
}
SyntaxKind::MathDelimited => Some(Self::MathDelimited(MathDelimited(node))),
SyntaxKind::MathAttach => Some(Self::MathAttach(MathAttach(node))),
SyntaxKind::MathPrimes => Some(Self::MathPrimes(MathPrimes(node))),
SyntaxKind::MathFrac => Some(Self::MathFrac(MathFrac(node))),
SyntaxKind::MathRoot => Some(Self::MathRoot(MathRoot(node))),
SyntaxKind::Ident => Some(Self::Ident(Ident(node))),
SyntaxKind::None => Some(Self::None(None(node))),
SyntaxKind::Auto => Some(Self::Auto(Auto(node))),
SyntaxKind::Bool => Some(Self::Bool(Bool(node))),
SyntaxKind::Int => Some(Self::Int(Int(node))),
SyntaxKind::Float => Some(Self::Float(Float(node))),
SyntaxKind::Numeric => Some(Self::Numeric(Numeric(node))),
SyntaxKind::Str => Some(Self::Str(Str(node))),
SyntaxKind::CodeBlock => Some(Self::CodeBlock(CodeBlock(node))),
SyntaxKind::ContentBlock => Some(Self::ContentBlock(ContentBlock(node))),
SyntaxKind::Parenthesized => Some(Self::Parenthesized(Parenthesized(node))),
SyntaxKind::Array => Some(Self::Array(Array(node))),
SyntaxKind::Dict => Some(Self::Dict(Dict(node))),
SyntaxKind::Unary => Some(Self::Unary(Unary(node))),
SyntaxKind::Binary => Some(Self::Binary(Binary(node))),
SyntaxKind::FieldAccess => Some(Self::FieldAccess(FieldAccess(node))),
SyntaxKind::FuncCall => Some(Self::FuncCall(FuncCall(node))),
SyntaxKind::Closure => Some(Self::Closure(Closure(node))),
SyntaxKind::LetBinding => Some(Self::LetBinding(LetBinding(node))),
SyntaxKind::DestructAssignment => {
Some(Self::DestructAssignment(DestructAssignment(node)))
}
SyntaxKind::SetRule => Some(Self::SetRule(SetRule(node))),
SyntaxKind::ShowRule => Some(Self::ShowRule(ShowRule(node))),
SyntaxKind::Contextual => Some(Self::Contextual(Contextual(node))),
SyntaxKind::Conditional => Some(Self::Conditional(Conditional(node))),
SyntaxKind::WhileLoop => Some(Self::WhileLoop(WhileLoop(node))),
SyntaxKind::ForLoop => Some(Self::ForLoop(ForLoop(node))),
SyntaxKind::ModuleImport => Some(Self::ModuleImport(ModuleImport(node))),
SyntaxKind::ModuleInclude => Some(Self::ModuleInclude(ModuleInclude(node))),
SyntaxKind::LoopBreak => Some(Self::LoopBreak(LoopBreak(node))),
SyntaxKind::LoopContinue => Some(Self::LoopContinue(LoopContinue(node))),
SyntaxKind::FuncReturn => Some(Self::FuncReturn(FuncReturn(node))),
_ => Option::None,
}
}
    /// Reborrow the underlying untyped syntax node.
    ///
    /// Purely mechanical: every variant wraps a typed view over the same
    /// `&'a SyntaxNode`, so each arm just forwards to that view's
    /// `to_untyped`. Keep the arm list in sync with the `Expr` variants.
    fn to_untyped(self) -> &'a SyntaxNode {
        match self {
            Self::Text(v) => v.to_untyped(),
            Self::Space(v) => v.to_untyped(),
            Self::Linebreak(v) => v.to_untyped(),
            Self::Parbreak(v) => v.to_untyped(),
            Self::Escape(v) => v.to_untyped(),
            Self::Shorthand(v) => v.to_untyped(),
            Self::SmartQuote(v) => v.to_untyped(),
            Self::Strong(v) => v.to_untyped(),
            Self::Emph(v) => v.to_untyped(),
            Self::Raw(v) => v.to_untyped(),
            Self::Link(v) => v.to_untyped(),
            Self::Label(v) => v.to_untyped(),
            Self::Ref(v) => v.to_untyped(),
            Self::Heading(v) => v.to_untyped(),
            Self::ListItem(v) => v.to_untyped(),
            Self::EnumItem(v) => v.to_untyped(),
            Self::TermItem(v) => v.to_untyped(),
            Self::Equation(v) => v.to_untyped(),
            Self::Math(v) => v.to_untyped(),
            Self::MathText(v) => v.to_untyped(),
            Self::MathIdent(v) => v.to_untyped(),
            Self::MathShorthand(v) => v.to_untyped(),
            Self::MathAlignPoint(v) => v.to_untyped(),
            Self::MathDelimited(v) => v.to_untyped(),
            Self::MathAttach(v) => v.to_untyped(),
            Self::MathPrimes(v) => v.to_untyped(),
            Self::MathFrac(v) => v.to_untyped(),
            Self::MathRoot(v) => v.to_untyped(),
            Self::Ident(v) => v.to_untyped(),
            Self::None(v) => v.to_untyped(),
            Self::Auto(v) => v.to_untyped(),
            Self::Bool(v) => v.to_untyped(),
            Self::Int(v) => v.to_untyped(),
            Self::Float(v) => v.to_untyped(),
            Self::Numeric(v) => v.to_untyped(),
            Self::Str(v) => v.to_untyped(),
            Self::CodeBlock(v) => v.to_untyped(),
            Self::ContentBlock(v) => v.to_untyped(),
            Self::Array(v) => v.to_untyped(),
            Self::Dict(v) => v.to_untyped(),
            Self::Parenthesized(v) => v.to_untyped(),
            Self::Unary(v) => v.to_untyped(),
            Self::Binary(v) => v.to_untyped(),
            Self::FieldAccess(v) => v.to_untyped(),
            Self::FuncCall(v) => v.to_untyped(),
            Self::Closure(v) => v.to_untyped(),
            Self::LetBinding(v) => v.to_untyped(),
            Self::DestructAssignment(v) => v.to_untyped(),
            Self::SetRule(v) => v.to_untyped(),
            Self::ShowRule(v) => v.to_untyped(),
            Self::Contextual(v) => v.to_untyped(),
            Self::Conditional(v) => v.to_untyped(),
            Self::WhileLoop(v) => v.to_untyped(),
            Self::ForLoop(v) => v.to_untyped(),
            Self::ModuleImport(v) => v.to_untyped(),
            Self::ModuleInclude(v) => v.to_untyped(),
            Self::LoopBreak(v) => v.to_untyped(),
            Self::LoopContinue(v) => v.to_untyped(),
            Self::FuncReturn(v) => v.to_untyped(),
        }
    }
}
impl Expr<'_> {
    /// Can this expression be embedded into markup with a hash?
    ///
    /// True for all literals plus the structural expressions that are
    /// allowed to directly follow a `#` in markup.
    pub fn hash(self) -> bool {
        match self {
            Self::Ident(_)
            | Self::None(_)
            | Self::Auto(_)
            | Self::Bool(_)
            | Self::Int(_)
            | Self::Float(_)
            | Self::Numeric(_)
            | Self::Str(_)
            | Self::CodeBlock(_)
            | Self::ContentBlock(_)
            | Self::Array(_)
            | Self::Dict(_)
            | Self::Parenthesized(_)
            | Self::FieldAccess(_)
            | Self::FuncCall(_)
            | Self::LetBinding(_)
            | Self::SetRule(_)
            | Self::ShowRule(_)
            | Self::Contextual(_)
            | Self::Conditional(_)
            | Self::WhileLoop(_)
            | Self::ForLoop(_)
            | Self::ModuleImport(_)
            | Self::ModuleInclude(_)
            | Self::LoopBreak(_)
            | Self::LoopContinue(_)
            | Self::FuncReturn(_) => true,
            _ => false,
        }
    }

    /// Is this a literal?
    ///
    /// Literals are the atomic value expressions: `none`, `auto`, booleans,
    /// integers, floats, numeric values with units, and strings.
    pub fn is_literal(self) -> bool {
        match self {
            Self::None(_)
            | Self::Auto(_)
            | Self::Bool(_)
            | Self::Int(_)
            | Self::Float(_)
            | Self::Numeric(_)
            | Self::Str(_) => true,
            _ => false,
        }
    }
}
impl Default for Expr<'_> {
fn default() -> Self {
Expr::None(None::default())
}
}
node! {
    /// Plain text without markup.
    struct Text
}

impl<'a> Text<'a> {
    /// Get the text.
    ///
    /// Borrows directly from the underlying syntax node.
    pub fn get(self) -> &'a EcoString {
        self.0.text()
    }
}

node! {
    /// Whitespace in markup or math. Has at most one newline in markup, as more
    /// indicate a paragraph break.
    struct Space
}

node! {
    /// A forced line break: `\`.
    struct Linebreak
}

node! {
    /// A paragraph break, indicated by one or multiple blank lines.
    struct Parbreak
}
node! {
    /// An escape sequence: `\#`, `\u{1F5FA}`.
    struct Escape
}

impl Escape<'_> {
    /// Get the escaped character.
    ///
    /// Yields `'\0'` (the `char` default) if the unicode escape is malformed
    /// or does not denote a valid codepoint.
    pub fn get(self) -> char {
        let mut s = Scanner::new(self.0.text());
        s.expect('\\');
        if !s.eat_if("u{") {
            // A simple escape like `\#`: the escaped character directly
            // follows the backslash.
            return s.eat().unwrap_or_default();
        }
        // A unicode escape: parse the hexadecimal codepoint.
        let hex = s.eat_while(char::is_ascii_hexdigit);
        u32::from_str_radix(hex, 16)
            .ok()
            .and_then(std::char::from_u32)
            .unwrap_or_default()
    }
}
node! {
    /// A shorthand for a unicode codepoint. For example, `~` for a non-breaking
    /// space or `-?` for a soft hyphen.
    struct Shorthand
}

impl Shorthand<'_> {
    /// A list of all shorthands in markup mode.
    pub const LIST: &'static [(&'static str, char)] = &[
        ("...", '…'),
        ("~", '\u{00A0}'),
        ("-", '\u{2212}'), // Only before a digit
        ("--", '\u{2013}'),
        ("---", '\u{2014}'),
        ("-?", '\u{00AD}'),
    ];

    /// Get the shorthanded character.
    ///
    /// Yields `'\0'` (the `char` default) if the node's text is not a
    /// known shorthand.
    pub fn get(self) -> char {
        let text = self.0.text();
        for &(pattern, codepoint) in Self::LIST {
            if text == pattern {
                return codepoint;
            }
        }
        char::default()
    }
}
node! {
    /// A smart quote: `'` or `"`.
    struct SmartQuote
}

impl SmartQuote<'_> {
    /// Whether this is a double quote.
    ///
    /// Returns `false` for a single quote.
    pub fn double(self) -> bool {
        self.0.text() == "\""
    }
}
node! {
    /// Strong content: `*Strong*`.
    struct Strong
}

impl<'a> Strong<'a> {
    /// The contents of the strong node.
    ///
    /// The first castable child is the inner markup.
    pub fn body(self) -> Markup<'a> {
        self.0.cast_first()
    }
}

node! {
    /// Emphasized content: `_Emphasized_`.
    struct Emph
}

impl<'a> Emph<'a> {
    /// The contents of the emphasis node.
    ///
    /// The first castable child is the inner markup.
    pub fn body(self) -> Markup<'a> {
        self.0.cast_first()
    }
}
node! {
    /// Raw text with optional syntax highlighting: `` `...` ``.
    struct Raw
}

impl<'a> Raw<'a> {
    /// The lines in the raw block.
    pub fn lines(self) -> impl DoubleEndedIterator<Item = Text<'a>> {
        self.0.children().filter_map(|child| child.cast())
    }

    /// An optional identifier specifying the language to syntax-highlight in.
    pub fn lang(self) -> Option<RawLang<'a>> {
        // Only blocky literals (3+ backticks) are supposed to contain a
        // language tag.
        let delim: RawDelim = self.0.try_cast_first()?;
        if delim.0.len() >= 3 { self.0.try_cast_first() } else { Option::None }
    }

    /// Whether the raw text should be displayed in a separate block.
    ///
    /// True when the literal is blocky (3+ backticks) and its trimmed
    /// whitespace spans a newline.
    pub fn block(self) -> bool {
        let blocky = self
            .0
            .try_cast_first()
            .is_some_and(|delim: RawDelim| delim.0.len() >= 3);
        let multiline = self.0.children().any(|child| {
            child.kind() == SyntaxKind::RawTrimmed
                && child.text().chars().any(is_newline)
        });
        blocky && multiline
    }
}
node! {
    /// A language tag at the start of raw element: ``typ ``.
    struct RawLang
}

impl<'a> RawLang<'a> {
    /// Get the language tag.
    pub fn get(self) -> &'a EcoString {
        self.0.text()
    }
}

node! {
    /// A raw delimiter in single or 3+ backticks: `` ` ``.
    // The delimiter's byte length (1 vs >= 3) is what distinguishes inline
    // from blocky raw literals; see `Raw::lang` and `Raw::block`.
    struct RawDelim
}
node! {
    /// A hyperlink: `https://typst.org`.
    struct Link
}

impl<'a> Link<'a> {
    /// Get the URL.
    pub fn get(self) -> &'a EcoString {
        self.0.text()
    }
}

node! {
    /// A label: `<intro>`.
    struct Label
}

impl<'a> Label<'a> {
    /// Get the label's text.
    ///
    /// Strips the surrounding angle brackets (all leading `<` and all
    /// trailing `>`).
    pub fn get(self) -> &'a str {
        self.0.text().trim_start_matches('<').trim_end_matches('>')
    }
}
node! {
    /// A reference: `@target`, `@target[..]`.
    struct Ref
}

impl<'a> Ref<'a> {
    /// Get the target.
    ///
    /// Will not be empty.
    pub fn target(self) -> &'a str {
        // The `RefMarker` child carries `@target`; drop the leading `@`.
        for child in self.0.children() {
            if child.kind() == SyntaxKind::RefMarker {
                return child.text().trim_start_matches('@');
            }
        }
        ""
    }

    /// Get the supplement.
    pub fn supplement(self) -> Option<ContentBlock<'a>> {
        self.0.try_cast_last()
    }
}
node! {
    /// A section heading: `= Introduction`.
    struct Heading
}

impl<'a> Heading<'a> {
    /// The contents of the heading.
    pub fn body(self) -> Markup<'a> {
        self.0.cast_first()
    }

    /// The section depth (number of equals signs).
    ///
    /// Falls back to depth one if no marker is present.
    pub fn depth(self) -> NonZeroUsize {
        let marker = self
            .0
            .children()
            .find(|node| node.kind() == SyntaxKind::HeadingMarker);
        marker
            .and_then(|node| NonZeroUsize::new(node.len()))
            .unwrap_or(NonZeroUsize::MIN)
    }
}
node! {
    /// An item in a bullet list: `- ...`.
    struct ListItem
}

impl<'a> ListItem<'a> {
    /// The contents of the list item.
    ///
    /// The first castable child is the item's markup body.
    pub fn body(self) -> Markup<'a> {
        self.0.cast_first()
    }
}
node! {
/// An item in an enumeration (numbered list): `+ ...` or `1. ...`.
struct EnumItem
}
impl<'a> EnumItem<'a> {
/// The explicit numbering, if any: `23.`.
pub fn number(self) -> Option<u64> {
self.0.children().find_map(|node| match node.kind() {
SyntaxKind::EnumMarker => node.text().trim_end_matches('.').parse().ok(),
_ => Option::None,
})
}
/// The contents of the list item.
pub fn body(self) -> Markup<'a> {
self.0.cast_first()
}
}
node! {
    /// An item in a term list: `/ Term: Details`.
    struct TermItem
}

impl<'a> TermItem<'a> {
    /// The term described by the item.
    ///
    /// The first castable child; the description follows it.
    pub fn term(self) -> Markup<'a> {
        self.0.cast_first()
    }

    /// The description of the term.
    ///
    /// The last castable child of the item.
    pub fn description(self) -> Markup<'a> {
        self.0.cast_last()
    }
}
node! {
    /// A mathematical equation: `$x$`, `$ x^2 $`.
    struct Equation
}

impl<'a> Equation<'a> {
    /// The contained math.
    pub fn body(self) -> Math<'a> {
        self.0.cast_first()
    }

    /// Whether the equation should be displayed as a separate block.
    ///
    /// A block equation has whitespace right after the opening dollar sign
    /// and right before the closing one.
    pub fn block(self) -> bool {
        let leading = self.0.children().nth(1).map(SyntaxNode::kind);
        let trailing = self.0.children().nth_back(1).map(SyntaxNode::kind);
        leading == Some(SyntaxKind::Space) && trailing == Some(SyntaxKind::Space)
    }
}
node! {
    /// The contents of a mathematical equation: `x^2 + 1`.
    struct Math
}

impl<'a> Math<'a> {
    /// The expressions the mathematical content consists of.
    pub fn exprs(self) -> impl DoubleEndedIterator<Item = Expr<'a>> {
        self.0.children().filter_map(Expr::cast_with_space)
    }

    /// Whether this `Math` node was originally parenthesized.
    ///
    /// True when the first child is a left paren and the last a right paren.
    pub fn was_deparenthesized(self) -> bool {
        let mut children = self.0.children();
        let first = children.next().map(SyntaxNode::kind);
        let last = children.next_back().map(SyntaxNode::kind);
        first == Some(SyntaxKind::LeftParen) && last == Some(SyntaxKind::RightParen)
    }
}
node! {
    /// A lone text fragment in math: `x`, `25`, `3.1415`, `=`, `[`.
    struct MathText
}

/// The underlying text kind.
pub enum MathTextKind<'a> {
    /// A single non-numeric grapheme.
    Grapheme(&'a EcoString),
    /// A number, potentially spanning multiple characters.
    Number(&'a EcoString),
}

impl<'a> MathText<'a> {
    /// Return the underlying text.
    pub fn get(self) -> MathTextKind<'a> {
        let text = self.0.text();
        let first = text.chars().next().unwrap();
        if first.is_numeric() {
            // Numbers are potentially grouped as multiple characters. This is
            // done in `Lexer::math_text()`.
            MathTextKind::Number(text)
        } else {
            MathTextKind::Grapheme(text)
        }
    }
}
node! {
    /// An identifier in math: `pi`.
    struct MathIdent
}

impl<'a> MathIdent<'a> {
    /// Get the identifier.
    pub fn get(self) -> &'a EcoString {
        self.0.text()
    }

    /// Get the identifier as a string slice.
    ///
    /// Same data as [`get()`](Self::get), coerced to `&str`.
    pub fn as_str(self) -> &'a str {
        self.get()
    }
}

impl Deref for MathIdent<'_> {
    type Target = str;

    /// Dereference to a string. Note that this shortens the lifetime, so you
    /// may need to use [`get()`](Self::get) instead in some situations.
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}
node! {
    /// A shorthand for a unicode codepoint in math: `a <= b`.
    struct MathShorthand
}

impl MathShorthand<'_> {
    /// A list of all shorthands in math mode.
    pub const LIST: &'static [(&'static str, char)] = &[
        ("...", '…'),
        ("-", '−'),
        ("*", '∗'),
        ("~", '∼'),
        ("!=", '≠'),
        (":=", '≔'),
        ("::=", '⩴'),
        ("=:", '≕'),
        ("<<", '≪'),
        ("<<<", '⋘'),
        (">>", '≫'),
        (">>>", '⋙'),
        ("<=", '≤'),
        (">=", '≥'),
        ("->", '→'),
        ("-->", '⟶'),
        ("|->", '↦'),
        (">->", '↣'),
        ("->>", '↠'),
        ("<-", '←'),
        ("<--", '⟵'),
        ("<-<", '↢'),
        ("<<-", '↞'),
        ("<->", '↔'),
        ("<-->", '⟷'),
        ("~>", '⇝'),
        ("~~>", '⟿'),
        ("<~", '⇜'),
        ("<~~", '⬳'),
        ("=>", '⇒'),
        ("|=>", '⤇'),
        ("==>", '⟹'),
        ("<==", '⟸'),
        ("<=>", '⇔'),
        ("<==>", '⟺'),
        ("[|", '⟦'),
        ("|]", '⟧'),
        ("||", '‖'),
    ];

    /// Get the shorthanded character.
    ///
    /// Yields `'\0'` (the `char` default) if the node's text is not a
    /// known shorthand.
    pub fn get(self) -> char {
        let text = self.0.text();
        for &(pattern, codepoint) in Self::LIST {
            if text == pattern {
                return codepoint;
            }
        }
        char::default()
    }
}
node! {
    /// An alignment point in math: `&`.
    struct MathAlignPoint
}

node! {
    /// Matched delimiters in math: `[x + y]`.
    struct MathDelimited
}
impl<'a> MathDelimited<'a> {
/// The opening delimiter.
pub fn open(self) -> Expr<'a> {
self.0.cast_first()
}
/// The contents, including the delimiters.
pub fn body(self) -> Math<'a> {
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | true |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/node.rs | crates/typst-syntax/src/node.rs | use std::fmt::{self, Debug, Display, Formatter};
use std::ops::{Deref, Range};
use std::rc::Rc;
use std::sync::Arc;
use ecow::{EcoString, EcoVec, eco_format, eco_vec};
use crate::{FileId, Span, SyntaxKind};
/// A node in the untyped syntax tree.
///
/// A thin wrapper around the internal [`NodeKind`] representation.
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct SyntaxNode(NodeKind);

/// The three internal representations.
#[derive(Clone, Eq, PartialEq, Hash)]
enum NodeKind {
    /// A leaf node.
    Leaf(LeafNode),
    /// A reference-counted inner node. The `Arc` makes clones of whole
    /// subtrees cheap and enables copy-on-write mutation via
    /// `Arc::make_mut`.
    Inner(Arc<InnerNode>),
    /// An error node.
    Error(Arc<ErrorNode>),
}
impl SyntaxNode {
    /// Create a new leaf node.
    pub fn leaf(kind: SyntaxKind, text: impl Into<EcoString>) -> Self {
        Self(NodeKind::Leaf(LeafNode::new(kind, text)))
    }

    /// Create a new inner node with children.
    pub fn inner(kind: SyntaxKind, children: Vec<SyntaxNode>) -> Self {
        Self(NodeKind::Inner(Arc::new(InnerNode::new(kind, children))))
    }

    /// Create a new error node.
    pub fn error(error: SyntaxError, text: impl Into<EcoString>) -> Self {
        Self(NodeKind::Error(Arc::new(ErrorNode::new(error, text))))
    }

    /// Create a dummy node of the given kind.
    ///
    /// Panics if `kind` is `SyntaxKind::Error`.
    #[track_caller]
    pub const fn placeholder(kind: SyntaxKind) -> Self {
        // `matches!` instead of `==` because this is a `const fn`.
        if matches!(kind, SyntaxKind::Error) {
            panic!("cannot create error placeholder");
        }
        Self(NodeKind::Leaf(LeafNode {
            kind,
            text: EcoString::new(),
            span: Span::detached(),
        }))
    }

    /// The type of the node.
    pub fn kind(&self) -> SyntaxKind {
        match &self.0 {
            NodeKind::Leaf(leaf) => leaf.kind,
            NodeKind::Inner(inner) => inner.kind,
            NodeKind::Error(_) => SyntaxKind::Error,
        }
    }

    /// Return `true` if the length is 0.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The byte length of the node in the source text.
    pub fn len(&self) -> usize {
        match &self.0 {
            NodeKind::Leaf(leaf) => leaf.len(),
            // Inner nodes cache their total length on construction.
            NodeKind::Inner(inner) => inner.len,
            NodeKind::Error(node) => node.len(),
        }
    }

    /// The span of the node.
    pub fn span(&self) -> Span {
        match &self.0 {
            NodeKind::Leaf(leaf) => leaf.span,
            NodeKind::Inner(inner) => inner.span,
            NodeKind::Error(node) => node.error.span,
        }
    }

    /// The text of the node if it is a leaf or error node.
    ///
    /// Returns the empty string if this is an inner node.
    pub fn text(&self) -> &EcoString {
        // A static empty string so we can hand out a reference for inner
        // nodes, which store no text of their own.
        static EMPTY: EcoString = EcoString::new();
        match &self.0 {
            NodeKind::Leaf(leaf) => &leaf.text,
            NodeKind::Inner(_) => &EMPTY,
            NodeKind::Error(node) => &node.text,
        }
    }

    /// Extract the text from the node.
    ///
    /// Builds the string if this is an inner node, by concatenating the
    /// text of all descendants in order.
    pub fn into_text(self) -> EcoString {
        match self.0 {
            NodeKind::Leaf(leaf) => leaf.text,
            NodeKind::Inner(inner) => {
                inner.children.iter().cloned().map(Self::into_text).collect()
            }
            NodeKind::Error(node) => node.text.clone(),
        }
    }

    /// The node's children.
    pub fn children(&self) -> std::slice::Iter<'_, SyntaxNode> {
        match &self.0 {
            // Leaves and errors have no children; yield an empty iterator.
            NodeKind::Leaf(_) | NodeKind::Error(_) => [].iter(),
            NodeKind::Inner(inner) => inner.children.iter(),
        }
    }

    /// Whether the node or its children contain an error.
    pub fn erroneous(&self) -> bool {
        match &self.0 {
            NodeKind::Leaf(_) => false,
            // Cached on construction, so this is O(1).
            NodeKind::Inner(inner) => inner.erroneous,
            NodeKind::Error(_) => true,
        }
    }

    /// The error messages for this node and its descendants.
    pub fn errors(&self) -> Vec<SyntaxError> {
        // Fast path: the cached `erroneous` flag lets us skip clean subtrees.
        if !self.erroneous() {
            return vec![];
        }

        if let NodeKind::Error(node) = &self.0 {
            vec![node.error.clone()]
        } else {
            self.children()
                .filter(|node| node.erroneous())
                .flat_map(|node| node.errors())
                .collect()
        }
    }

    /// Add a user-presentable hint if this is an error node.
    ///
    /// Silently does nothing for non-error nodes.
    pub fn hint(&mut self, hint: impl Into<EcoString>) {
        if let NodeKind::Error(node) = &mut self.0 {
            // Copy-on-write: clones the error node only if it is shared.
            Arc::make_mut(node).hint(hint);
        }
    }

    /// Set a synthetic span for the node and all its descendants.
    pub fn synthesize(&mut self, span: Span) {
        match &mut self.0 {
            NodeKind::Leaf(leaf) => leaf.span = span,
            NodeKind::Inner(inner) => Arc::make_mut(inner).synthesize(span),
            NodeKind::Error(node) => Arc::make_mut(node).error.span = span,
        }
    }

    /// Whether the two syntax nodes are the same apart from spans.
    pub fn spanless_eq(&self, other: &Self) -> bool {
        match (&self.0, &other.0) {
            (NodeKind::Leaf(a), NodeKind::Leaf(b)) => a.spanless_eq(b),
            (NodeKind::Inner(a), NodeKind::Inner(b)) => a.spanless_eq(b),
            (NodeKind::Error(a), NodeKind::Error(b)) => a.spanless_eq(b),
            // Differing representations are never equal.
            _ => false,
        }
    }
}
impl SyntaxNode {
    /// Convert the child to another kind.
    ///
    /// Don't use this for converting to an error!
    #[track_caller]
    pub(super) fn convert_to_kind(&mut self, kind: SyntaxKind) {
        debug_assert!(!kind.is_error());
        match &mut self.0 {
            NodeKind::Leaf(leaf) => leaf.kind = kind,
            NodeKind::Inner(inner) => Arc::make_mut(inner).kind = kind,
            NodeKind::Error(_) => panic!("cannot convert error"),
        }
    }

    /// Convert the child to an error, if it isn't already one.
    ///
    /// Flattens the node's subtree into plain text, discarding structure.
    pub(super) fn convert_to_error(&mut self, message: impl Into<EcoString>) {
        if !self.kind().is_error() {
            let text = std::mem::take(self).into_text();
            *self = SyntaxNode::error(SyntaxError::new(message), text);
        }
    }

    /// Convert the child to an error stating that the given thing was
    /// expected, but the current kind was found.
    pub(super) fn expected(&mut self, expected: &str) {
        let kind = self.kind();
        self.convert_to_error(eco_format!("expected {expected}, found {}", kind.name()));
        // Keywords in identifier position get an extra, actionable hint.
        if kind.is_keyword() && matches!(expected, "identifier" | "pattern") {
            self.hint(eco_format!(
                "keyword `{text}` is not allowed as an identifier; try `{text}_` instead",
                text = self.text(),
            ));
        }
    }

    /// Convert the child to an error stating it was unexpected.
    pub(super) fn unexpected(&mut self) {
        self.convert_to_error(eco_format!("unexpected {}", self.kind().name()));
    }

    /// Assign spans to each node.
    ///
    /// Gives this node the midpoint of `within` (or distributes the
    /// interval across the subtree for inner nodes). Fails with
    /// `Unnumberable` if the interval is empty.
    pub(super) fn numberize(
        &mut self,
        id: FileId,
        within: Range<u64>,
    ) -> NumberingResult {
        if within.start >= within.end {
            return Err(Unnumberable);
        }

        let mid = Span::from_number(id, (within.start + within.end) / 2).unwrap();
        match &mut self.0 {
            NodeKind::Leaf(leaf) => leaf.span = mid,
            NodeKind::Inner(inner) => Arc::make_mut(inner).numberize(id, None, within)?,
            NodeKind::Error(node) => Arc::make_mut(node).error.span = mid,
        }

        Ok(())
    }

    /// Whether this is a leaf node.
    pub(super) fn is_leaf(&self) -> bool {
        matches!(self.0, NodeKind::Leaf(_))
    }

    /// The number of descendants, including the node itself.
    pub(super) fn descendants(&self) -> usize {
        match &self.0 {
            // Leaves and error nodes are their own only descendant.
            NodeKind::Leaf(_) | NodeKind::Error(_) => 1,
            NodeKind::Inner(inner) => inner.descendants,
        }
    }

    /// The node's children, mutably.
    pub(super) fn children_mut(&mut self) -> &mut [SyntaxNode] {
        match &mut self.0 {
            NodeKind::Leaf(_) | NodeKind::Error(_) => &mut [],
            NodeKind::Inner(inner) => &mut Arc::make_mut(inner).children,
        }
    }

    /// Replaces a range of children with a replacement.
    ///
    /// May have mutated the children if it returns `Err(_)`.
    pub(super) fn replace_children(
        &mut self,
        range: Range<usize>,
        replacement: Vec<SyntaxNode>,
    ) -> NumberingResult {
        // Only inner nodes have children; anything else is a silent no-op.
        if let NodeKind::Inner(inner) = &mut self.0 {
            Arc::make_mut(inner).replace_children(range, replacement)?;
        }
        Ok(())
    }

    /// Update this node after changes were made to one of its children.
    pub(super) fn update_parent(
        &mut self,
        prev_len: usize,
        new_len: usize,
        prev_descendants: usize,
        new_descendants: usize,
    ) {
        if let NodeKind::Inner(inner) = &mut self.0 {
            Arc::make_mut(inner).update_parent(
                prev_len,
                new_len,
                prev_descendants,
                new_descendants,
            );
        }
    }

    /// The upper bound of assigned numbers in this subtree.
    pub(super) fn upper(&self) -> u64 {
        match &self.0 {
            // For childless nodes, the bound is just past their own number.
            NodeKind::Leaf(leaf) => leaf.span.number() + 1,
            NodeKind::Inner(inner) => inner.upper,
            NodeKind::Error(node) => node.error.span.number() + 1,
        }
    }
}
impl Debug for SyntaxNode {
    /// Forward to the debug representation of whichever variant is active.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match &self.0 {
            NodeKind::Leaf(node) => node.fmt(f),
            NodeKind::Inner(node) => node.fmt(f),
            NodeKind::Error(node) => node.fmt(f),
        }
    }
}
impl Default for SyntaxNode {
    /// The default node is an empty `End` leaf.
    fn default() -> Self {
        Self::leaf(SyntaxKind::End, "")
    }
}
/// A leaf node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)]
struct LeafNode {
    /// What kind of node this is (each kind would have its own struct in a
    /// strongly typed AST).
    kind: SyntaxKind,
    /// The source text of the node.
    text: EcoString,
    /// The node's span.
    span: Span,
}
impl LeafNode {
    /// Create a new leaf node with a detached span.
    #[track_caller]
    fn new(kind: SyntaxKind, text: impl Into<EcoString>) -> Self {
        // Error kinds must go through `ErrorNode` instead.
        debug_assert!(!kind.is_error());
        Self { kind, span: Span::detached(), text: text.into() }
    }

    /// The byte length of the node in the source text.
    fn len(&self) -> usize {
        self.text.len()
    }

    /// Whether the two leaf nodes are the same apart from spans.
    fn spanless_eq(&self, other: &Self) -> bool {
        (self.kind, &self.text) == (other.kind, &other.text)
    }
}
impl Debug for LeafNode {
    /// Formats as `Kind: "text"`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{:?}: {:?}", self.kind, self.text)
    }
}
/// An inner node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)]
struct InnerNode {
    /// What kind of node this is (each kind would have its own struct in a
    /// strongly typed AST).
    kind: SyntaxKind,
    /// The byte length of the node in the source.
    len: usize,
    /// The node's span.
    span: Span,
    /// The number of nodes in the whole subtree, including this node.
    descendants: usize,
    /// Whether this node or any of its children are erroneous.
    erroneous: bool,
    /// The upper bound of this node's numbering range (exclusive bound of
    /// the span numbers assigned within this subtree).
    upper: u64,
    /// This node's children, losslessly make up this node.
    children: Vec<SyntaxNode>,
}
impl InnerNode {
    /// Create a new inner node with the given kind and children.
    ///
    /// Aggregates the cached metadata (byte length, descendant count,
    /// erroneousness) from the children in a single pass.
    #[track_caller]
    fn new(kind: SyntaxKind, children: Vec<SyntaxNode>) -> Self {
        debug_assert!(!kind.is_error());

        let mut len = 0;
        let mut descendants = 1;
        let mut erroneous = false;

        for child in &children {
            len += child.len();
            descendants += child.descendants();
            erroneous |= child.erroneous();
        }

        Self {
            kind,
            len,
            span: Span::detached(),
            descendants,
            erroneous,
            upper: 0,
            children,
        }
    }

    /// Set a synthetic span for the node and all its descendants.
    fn synthesize(&mut self, span: Span) {
        self.span = span;
        self.upper = span.number();
        for child in &mut self.children {
            child.synthesize(span);
        }
    }

    /// Assign span numbers `within` an interval to this node's subtree or just
    /// a `range` of its children.
    fn numberize(
        &mut self,
        id: FileId,
        range: Option<Range<usize>>,
        within: Range<u64>,
    ) -> NumberingResult {
        // Determine how many nodes we will number.
        let descendants = match &range {
            Some(range) if range.is_empty() => return Ok(()),
            Some(range) => self.children[range.clone()]
                .iter()
                .map(SyntaxNode::descendants)
                .sum::<usize>(),
            None => self.descendants,
        };

        // Determine the distance between two neighbouring assigned numbers. If
        // possible, we try to fit all numbers into the left half of `within`
        // so that there is space for future insertions.
        let space = within.end - within.start;
        let mut stride = space / (2 * descendants as u64);
        if stride == 0 {
            // NOTE(review): this fallback divides by the whole subtree's
            // `self.descendants` even when only a sub-`range` is being
            // renumbered — presumably a deliberately conservative choice;
            // confirm before changing.
            stride = space / self.descendants as u64;
            if stride == 0 {
                return Err(Unnumberable);
            }
        }

        // Number the node itself.
        let mut start = within.start;
        if range.is_none() {
            let end = start + stride;
            self.span = Span::from_number(id, (start + end) / 2).unwrap();
            self.upper = within.end;
            start = end;
        }

        // Number the children.
        let len = self.children.len();
        for child in &mut self.children[range.unwrap_or(0..len)] {
            // Each child gets a sub-interval proportional to its subtree size.
            let end = start + child.descendants() as u64 * stride;
            child.numberize(id, start..end)?;
            start = end;
        }

        Ok(())
    }

    /// Whether the two inner nodes are the same apart from spans.
    ///
    /// Compares the cheap cached metadata first so that mismatching subtrees
    /// bail out before the recursive child comparison.
    fn spanless_eq(&self, other: &Self) -> bool {
        self.kind == other.kind
            && self.len == other.len
            && self.descendants == other.descendants
            && self.erroneous == other.erroneous
            && self.children.len() == other.children.len()
            && self
                .children
                .iter()
                .zip(&other.children)
                .all(|(a, b)| a.spanless_eq(b))
    }

    /// Replaces a range of children with a replacement.
    ///
    /// May have mutated the children if it returns `Err(_)`.
    fn replace_children(
        &mut self,
        mut range: Range<usize>,
        replacement: Vec<SyntaxNode>,
    ) -> NumberingResult {
        let Some(id) = self.span.id() else { return Err(Unnumberable) };
        let mut replacement_range = 0..replacement.len();

        // Trim off common prefix.
        while range.start < range.end
            && replacement_range.start < replacement_range.end
            && self.children[range.start]
                .spanless_eq(&replacement[replacement_range.start])
        {
            range.start += 1;
            replacement_range.start += 1;
        }

        // Trim off common suffix.
        while range.start < range.end
            && replacement_range.start < replacement_range.end
            && self.children[range.end - 1]
                .spanless_eq(&replacement[replacement_range.end - 1])
        {
            range.end -= 1;
            replacement_range.end -= 1;
        }

        let mut replacement_vec = replacement;
        let replacement = &replacement_vec[replacement_range.clone()];
        let superseded = &self.children[range.clone()];

        // Compute the new byte length.
        self.len = self.len + replacement.iter().map(SyntaxNode::len).sum::<usize>()
            - superseded.iter().map(SyntaxNode::len).sum::<usize>();

        // Compute the new number of descendants.
        self.descendants = self.descendants
            + replacement.iter().map(SyntaxNode::descendants).sum::<usize>()
            - superseded.iter().map(SyntaxNode::descendants).sum::<usize>();

        // Determine whether we're still erroneous after the replacement. That's
        // the case if
        // - any of the new nodes is erroneous,
        // - or if we were erroneous before due to a non-superseded node.
        //
        // NOTE(review): this parses as `a || ((b && c) || d)` rather than
        // `a || (b && (c || d))`. The result is still correct, because an
        // erroneous suffix child by itself implies the node is erroneous —
        // but the grouping is easy to misread; confirm before refactoring.
        self.erroneous = replacement.iter().any(SyntaxNode::erroneous)
            || (self.erroneous
                && (self.children[..range.start].iter().any(SyntaxNode::erroneous))
                || self.children[range.end..].iter().any(SyntaxNode::erroneous));

        // Perform the replacement.
        self.children
            .splice(range.clone(), replacement_vec.drain(replacement_range.clone()));
        range.end = range.start + replacement_range.len();

        // Renumber the new children. Retries until it works, taking
        // exponentially more children into account.
        let mut left = 0;
        let mut right = 0;
        let max_left = range.start;
        let max_right = self.children.len() - range.end;
        loop {
            let renumber = range.start - left..range.end + right;

            // The minimum assignable number is either
            // - the upper bound of the node right before the to-be-renumbered
            //   children,
            // - or this inner node's span number plus one if renumbering starts
            //   at the first child.
            let start_number = renumber
                .start
                .checked_sub(1)
                .and_then(|i| self.children.get(i))
                .map_or(self.span.number() + 1, |child| child.upper());

            // The upper bound for renumbering is either
            // - the span number of the first child after the to-be-renumbered
            //   children,
            // - or this node's upper bound if renumbering ends behind the last
            //   child.
            let end_number = self
                .children
                .get(renumber.end)
                .map_or(self.upper, |next| next.span().number());

            // Try to renumber.
            let within = start_number..end_number;
            if self.numberize(id, Some(renumber), within).is_ok() {
                return Ok(());
            }

            // If it didn't even work with all children, we give up.
            if left == max_left && right == max_right {
                return Err(Unnumberable);
            }

            // Exponential expansion to both sides.
            left = (left + 1).next_power_of_two().min(max_left);
            right = (right + 1).next_power_of_two().min(max_right);
        }
    }

    /// Update this node after changes were made to one of its children.
    fn update_parent(
        &mut self,
        prev_len: usize,
        new_len: usize,
        prev_descendants: usize,
        new_descendants: usize,
    ) {
        self.len = self.len + new_len - prev_len;
        self.descendants = self.descendants + new_descendants - prev_descendants;
        // The erroneous flag cannot be updated incrementally; recompute it
        // from all children.
        self.erroneous = self.children.iter().any(SyntaxNode::erroneous);
    }
}
impl Debug for InnerNode {
    /// Formats as `Kind: len [child, ...]`, omitting the list when there
    /// are no children.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{:?}: {}", self.kind, self.len)?;
        if self.children.is_empty() {
            return Ok(());
        }
        f.write_str(" ")?;
        f.debug_list().entries(&self.children).finish()
    }
}
/// An error node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)]
struct ErrorNode {
    /// The source text of the node.
    text: EcoString,
    /// The syntax error.
    error: SyntaxError,
}
impl ErrorNode {
    /// Create new error node.
    fn new(error: SyntaxError, text: impl Into<EcoString>) -> Self {
        Self { error, text: text.into() }
    }

    /// The byte length of the node in the source text.
    fn len(&self) -> usize {
        self.text.len()
    }

    /// Add a user-presentable hint to this error node.
    fn hint(&mut self, hint: impl Into<EcoString>) {
        let hint = hint.into();
        self.error.hints.push(hint);
    }

    /// Whether the two error nodes are the same apart from spans.
    fn spanless_eq(&self, other: &Self) -> bool {
        self.error.spanless_eq(&other.error) && self.text == other.text
    }
}
impl Debug for ErrorNode {
    /// Formats as `Error: "text" (message)`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "Error: {:?} ({})", self.text, self.error.message)
    }
}
/// A syntactical error.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct SyntaxError {
    /// The node's span.
    pub span: Span,
    /// The error message.
    pub message: EcoString,
    /// Additional hints to the user, indicating how this error could be avoided
    /// or worked around.
    pub hints: EcoVec<EcoString>,
}
impl SyntaxError {
/// Create a new detached syntax error.
pub fn new(message: impl Into<EcoString>) -> Self {
Self {
span: Span::detached(),
message: message.into(),
hints: eco_vec![],
}
}
/// Whether the two errors are the same apart from spans.
fn spanless_eq(&self, other: &Self) -> bool {
self.message == other.message && self.hints == other.hints
}
}
/// A syntax node in a context.
///
/// Knows its exact offset in the file and provides access to its
/// children, parent and siblings.
///
/// **Note that all sibling and leaf accessors skip over trivia!**
#[derive(Clone)]
pub struct LinkedNode<'a> {
    /// The underlying syntax node.
    node: &'a SyntaxNode,
    /// The parent of this node. Reference-counted so that cloning a linked
    /// node does not re-walk the ancestor chain.
    parent: Option<Rc<Self>>,
    /// The index of this node in its parent's children array.
    index: usize,
    /// This node's byte offset in the source file.
    offset: usize,
}
impl<'a> LinkedNode<'a> {
    /// Start a new traversal at a root node.
    pub fn new(root: &'a SyntaxNode) -> Self {
        Self { node: root, parent: None, index: 0, offset: 0 }
    }

    /// Get the contained syntax node.
    pub fn get(&self) -> &'a SyntaxNode {
        self.node
    }

    /// The index of this node in its parent's children list.
    pub fn index(&self) -> usize {
        self.index
    }

    /// The absolute byte offset of this node in the source file.
    pub fn offset(&self) -> usize {
        self.offset
    }

    /// The byte range of this node in the source file.
    pub fn range(&self) -> Range<usize> {
        self.offset..self.offset + self.node.len()
    }

    /// An iterator over this node's children.
    pub fn children(&self) -> LinkedChildren<'a> {
        LinkedChildren {
            parent: Rc::new(self.clone()),
            iter: self.node.children().enumerate(),
            front: self.offset,
            back: self.offset + self.len(),
        }
    }

    /// Find a descendant with the given span.
    ///
    /// Prunes the search using the invariant that span numbers increase in
    /// document order.
    pub fn find(&self, span: Span) -> Option<LinkedNode<'a>> {
        if self.span() == span {
            return Some(self.clone());
        }

        // NOTE(review): `self.0` (and `self.span()`/`self.len()` above)
        // resolve through a `Deref` impl to `SyntaxNode` defined elsewhere
        // in this file — confirm when reading in isolation.
        if let NodeKind::Inner(inner) = &self.0 {
            // The parent of a subtree has a smaller span number than all of its
            // descendants. Therefore, we can bail out early if the target span's
            // number is smaller than our number.
            if span.number() < inner.span.number() {
                return None;
            }

            let mut children = self.children().peekable();
            while let Some(child) = children.next() {
                // Every node in this child's subtree has a smaller span number than
                // the next sibling. Therefore we only need to recurse if the next
                // sibling's span number is larger than the target span's number.
                if children
                    .peek()
                    .is_none_or(|next| next.span().number() > span.number())
                    && let Some(found) = child.find(span)
                {
                    return Some(found);
                }
            }
        }

        None
    }
}
/// Access to parents and siblings.
impl LinkedNode<'_> {
    /// Get this node's parent.
    pub fn parent(&self) -> Option<&Self> {
        self.parent.as_deref()
    }

    /// Get the first previous non-trivia sibling node.
    pub fn prev_sibling(&self) -> Option<Self> {
        let parent = self.parent.as_ref()?;
        let children = parent.node.children().as_slice();
        // Walk leftwards from our own offset, subtracting each skipped
        // sibling's length to keep the candidate's offset correct.
        let mut offset = self.offset;
        for (index, node) in children[..self.index].iter().enumerate().rev() {
            offset -= node.len();
            if !node.kind().is_trivia() {
                let parent = Some(parent.clone());
                return Some(Self { node, parent, index, offset });
            }
        }
        None
    }

    /// Get the next non-trivia sibling node.
    pub fn next_sibling(&self) -> Option<Self> {
        let parent = self.parent.as_ref()?;
        let children = parent.node.children();
        // Walk rightwards starting just past our own end offset.
        let mut offset = self.offset + self.len();
        for (index, node) in children.enumerate().skip(self.index + 1) {
            if !node.kind().is_trivia() {
                let parent = Some(parent.clone());
                return Some(Self { node, parent, index, offset });
            }
            offset += node.len();
        }
        None
    }

    /// Get the kind of this node's parent.
    pub fn parent_kind(&self) -> Option<SyntaxKind> {
        Some(self.parent()?.node.kind())
    }

    /// Get the kind of this node's first previous non-trivia sibling.
    pub fn prev_sibling_kind(&self) -> Option<SyntaxKind> {
        Some(self.prev_sibling()?.node.kind())
    }

    /// Get the kind of this node's next non-trivia sibling.
    pub fn next_sibling_kind(&self) -> Option<SyntaxKind> {
        Some(self.next_sibling()?.node.kind())
    }
}
/// Indicates whether the cursor is before the related byte index, or after.
#[derive(Debug, Clone)]
pub enum Side {
    /// The cursor is before the byte index: select the leaf whose range ends
    /// at or after the index (the index counts as part of the leaf even when
    /// it sits exactly at the leaf's end).
    Before,
    /// The cursor is after the byte index: select the leaf whose range starts
    /// at or before the index and strictly contains it.
    After,
}
/// Access to leaves.
impl LinkedNode<'_> {
    /// Get the rightmost non-trivia leaf before this node.
    pub fn prev_leaf(&self) -> Option<Self> {
        // Search earlier siblings right-to-left; if none of them contains a
        // suitable leaf, continue the search from the parent.
        let mut node = self.clone();
        while let Some(prev) = node.prev_sibling() {
            if let Some(leaf) = prev.rightmost_leaf() {
                return Some(leaf);
            }
            node = prev;
        }
        self.parent()?.prev_leaf()
    }

    /// Find the leftmost contained non-trivia leaf.
    pub fn leftmost_leaf(&self) -> Option<Self> {
        if self.is_leaf() && !self.kind().is_trivia() && !self.kind().is_error() {
            return Some(self.clone());
        }

        for child in self.children() {
            if let Some(leaf) = child.leftmost_leaf() {
                return Some(leaf);
            }
        }

        None
    }

    /// Get the leaf immediately before the specified byte offset.
    fn leaf_before(&self, cursor: usize) -> Option<Self> {
        // A leaf qualifies if the cursor lies within it or exactly at its end.
        if self.node.children().len() == 0 && cursor <= self.offset + self.len() {
            return Some(self.clone());
        }

        let mut offset = self.offset;
        let count = self.node.children().len();
        for (i, child) in self.children().enumerate() {
            let len = child.len();
            // Descend if the cursor is inside this child (end-inclusive), or
            // if the cursor sits exactly at the start of the final child.
            if (offset < cursor && cursor <= offset + len)
                || (offset == cursor && i + 1 == count)
            {
                return child.leaf_before(cursor);
            }
            offset += len;
        }

        None
    }

    /// Get the leaf after the specified byte offset.
    fn leaf_after(&self, cursor: usize) -> Option<Self> {
        // A leaf qualifies if the cursor lies strictly before its end
        // (start-inclusive, end-exclusive).
        if self.node.children().len() == 0 && cursor < self.offset + self.len() {
            return Some(self.clone());
        }

        let mut offset = self.offset;
        for child in self.children() {
            let len = child.len();
            if offset <= cursor && cursor < offset + len {
                return child.leaf_after(cursor);
            }
            offset += len;
        }

        None
    }

    /// Get the leaf at the specified byte offset.
    pub fn leaf_at(&self, cursor: usize, side: Side) -> Option<Self> {
        match side {
            Side::Before => self.leaf_before(cursor),
            Side::After => self.leaf_after(cursor),
        }
    }

    /// Find the rightmost contained non-trivia leaf.
    // NOTE(review): unlike `leftmost_leaf`, error nodes are *not* excluded
    // here — confirm this asymmetry is intentional.
    pub fn rightmost_leaf(&self) -> Option<Self> {
        if self.is_leaf() && !self.kind().is_trivia() {
            return Some(self.clone());
        }

        for child in self.children().rev() {
            if let Some(leaf) = child.rightmost_leaf() {
                return Some(leaf);
            }
        }

        None
    }

    /// Get the leftmost non-trivia leaf after this node.
    pub fn next_leaf(&self) -> Option<Self> {
        // Mirror image of `prev_leaf`: search later siblings left-to-right,
        // then fall back to the parent.
        let mut node = self.clone();
        while let Some(next) = node.next_sibling() {
            if let Some(leaf) = next.leftmost_leaf() {
                return Some(leaf);
            }
            node = next;
        }
        self.parent()?.next_leaf()
    }
}
impl Deref for LinkedNode<'_> {
    type Target = SyntaxNode;

    /// Dereference to a syntax node. Note that this shortens the lifetime, so
    /// you may need to use [`get()`](Self::get) instead in some situations.
    fn deref(&self) -> &Self::Target {
        self.get()
    }
}

impl Debug for LinkedNode<'_> {
    // Delegates to the underlying syntax node's `Debug` output; the linking
    // metadata (parent, index, offset) is not shown.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        self.node.fmt(f)
    }
}
/// An iterator over the children of a linked node.
///
/// Supports iteration from both ends; `front` and `back` track the byte
/// offsets of the next elements yielded from either side.
pub struct LinkedChildren<'a> {
    /// The parent whose children we're iterating.
    parent: Rc<LinkedNode<'a>>,
    /// The underlying syntax nodes and their indices.
    iter: std::iter::Enumerate<std::slice::Iter<'a, SyntaxNode>>,
    /// The byte offset of the next child's start.
    front: usize,
    /// The byte offset after the final child.
    back: usize,
}
impl<'a> Iterator for LinkedChildren<'a> {
type Item = LinkedNode<'a>;
fn next(&mut self) -> Option<Self::Item> {
let (index, node) = self.iter.next()?;
let offset = self.front;
self.front += node.len();
Some(LinkedNode {
node,
parent: Some(self.parent.clone()),
index,
offset,
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl DoubleEndedIterator for LinkedChildren<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
let (index, node) = self.iter.next_back()?;
self.back -= node.len();
Some(LinkedNode {
node,
parent: Some(self.parent.clone()),
index,
offset: self.back,
})
}
}
impl ExactSizeIterator for LinkedChildren<'_> {}
/// Result of numbering a node within an interval.
pub(super) type NumberingResult = Result<(), Unnumberable>;

/// Indicates that a node cannot be numbered within a given interval.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(super) struct Unnumberable;

impl Display for Unnumberable {
    // `pad` (rather than `write_str`) respects width/alignment flags.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.pad("cannot number within this interval")
    }
}

impl std::error::Error for Unnumberable {}
#[cfg(test)]
mod tests {
use super::*;
use crate::Source;
#[test]
fn test_linked_node() {
    let source = Source::detached("#set text(12pt, red)");

    // Find "text" with Before: offset 7 lies inside the "text" token
    // spanning bytes 5..9.
    let node = LinkedNode::new(source.root()).leaf_at(7, Side::Before).unwrap();
    assert_eq!(node.offset(), 5);
    assert_eq!(node.text(), "text");

    // Find "text" with After: same token, start-inclusive lookup.
    let node = LinkedNode::new(source.root()).leaf_at(7, Side::After).unwrap();
    assert_eq!(node.offset(), 5);
    assert_eq!(node.text(), "text");

    // Go back to "#set". Skips the space.
    let prev = node.prev_sibling().unwrap();
    assert_eq!(prev.offset(), 1);
    assert_eq!(prev.text(), "set");
}
#[test]
fn test_linked_node_non_trivia_leaf() {
let source = Source::detached("#set fun(12pt, red)");
let leaf = LinkedNode::new(source.root()).leaf_at(6, Side::Before).unwrap();
let prev = leaf.prev_leaf().unwrap();
assert_eq!(leaf.text(), "fun");
assert_eq!(prev.text(), "set");
// Check position 9 with Before.
let source = Source::detached("#let x = 10");
let leaf = LinkedNode::new(source.root()).leaf_at(9, Side::Before).unwrap();
let prev = leaf.prev_leaf().unwrap();
// ---- file: crates/typst-syntax/src/path.rs ----
use std::fmt::{self, Debug, Display, Formatter};
use std::path::{Component, Path, PathBuf};
/// An absolute path in the virtual file system of a project or package.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct VirtualPath(PathBuf);

impl VirtualPath {
    /// Create a new virtual path.
    ///
    /// Even if it doesn't start with `/` or `\`, it is still interpreted as
    /// starting from the root.
    pub fn new(path: impl AsRef<Path>) -> Self {
        Self::new_impl(path.as_ref())
    }

    /// Non generic new implementation.
    fn new_impl(path: &Path) -> Self {
        // Lexically normalize onto a rooted buffer: `.` segments vanish, `..`
        // pops a preceding normal segment when there is one and is kept
        // verbatim otherwise.
        let mut normalized = PathBuf::from(Component::RootDir.as_os_str());
        for part in path.components() {
            match part {
                Component::Prefix(_) | Component::RootDir | Component::CurDir => {}
                Component::ParentDir => {
                    if matches!(
                        normalized.components().next_back(),
                        Some(Component::Normal(_))
                    ) {
                        normalized.pop();
                    } else {
                        normalized.push(part);
                    }
                }
                Component::Normal(_) => normalized.push(part),
            }
        }
        Self(normalized)
    }

    /// Create a virtual path from a real path and a real root.
    ///
    /// Returns `None` if the file path is not contained in the root (i.e. if
    /// `root` is not a lexical prefix of `path`). No file system operations are
    /// performed.
    pub fn within_root(path: &Path, root: &Path) -> Option<Self> {
        let relative = path.strip_prefix(root).ok()?;
        Some(Self::new(relative))
    }

    /// Get the underlying path with a leading `/` or `\`.
    pub fn as_rooted_path(&self) -> &Path {
        &self.0
    }

    /// Get the underlying path without a leading `/` or `\`.
    pub fn as_rootless_path(&self) -> &Path {
        match self.0.strip_prefix(Component::RootDir) {
            Ok(rootless) => rootless,
            Err(_) => &self.0,
        }
    }

    /// Resolve the virtual path relative to an actual file system root
    /// (where the project or package resides).
    ///
    /// Returns `None` if the path lexically escapes the root. The path might
    /// still escape through symlinks.
    pub fn resolve(&self, root: &Path) -> Option<PathBuf> {
        let min_len = root.as_os_str().len();
        let mut real = root.to_path_buf();
        for part in self.0.components() {
            match part {
                // The buffer is already rooted at `root`.
                Component::Prefix(_) | Component::RootDir | Component::CurDir => {}
                Component::ParentDir => {
                    real.pop();
                    // Shrinking below the root's length means we escaped it.
                    if real.as_os_str().len() < min_len {
                        return None;
                    }
                }
                Component::Normal(_) => real.push(part),
            }
        }
        Some(real)
    }

    /// Resolve a path relative to this virtual path.
    pub fn join(&self, path: impl AsRef<Path>) -> Self {
        match self.0.parent() {
            Some(parent) => Self::new(parent.join(path)),
            None => Self::new(path),
        }
    }

    /// The same path, but with a different extension.
    pub fn with_extension(&self, extension: &str) -> Self {
        Self(self.0.with_extension(extension))
    }
}

impl Debug for VirtualPath {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // Forward the formatter directly so width/fill flags are respected.
        let displayed = self.0.display();
        Display::fmt(&displayed, f)
    }
}
// ---- file: crates/typst-syntax/src/source.rs ----
//! Source file management.
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::ops::Range;
use std::sync::Arc;
use typst_utils::LazyHash;
use crate::lines::Lines;
use crate::reparser::reparse;
use crate::{FileId, LinkedNode, Span, SyntaxNode, VirtualPath, parse};
/// A source file.
///
/// All line and column indices start at zero, just like byte indices. Only for
/// user-facing display, you should add 1 to them.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone)]
pub struct Source(Arc<SourceInner>);

/// The internal representation of a [`Source`].
#[derive(Clone)]
struct SourceInner {
    /// The file id this source was created with.
    id: FileId,
    /// The root node of the parsed syntax tree.
    root: LazyHash<SyntaxNode>,
    /// The raw text plus the line/column acceleration structure.
    lines: LazyHash<Lines<String>>,
}
impl Source {
    /// Create a new source file.
    pub fn new(id: FileId, text: String) -> Self {
        let _scope = typst_timing::TimingScope::new("create source");
        let mut root = parse(&text);
        // Assign span numbers over the full interval; numbering a freshly
        // parsed tree is expected to succeed here.
        root.numberize(id, Span::FULL).unwrap();
        Self(Arc::new(SourceInner {
            id,
            lines: LazyHash::new(Lines::new(text)),
            root: LazyHash::new(root),
        }))
    }

    /// Create a source file without a real id and path, usually for testing.
    pub fn detached(text: impl Into<String>) -> Self {
        Self::new(FileId::new(None, VirtualPath::new("main.typ")), text.into())
    }

    /// The root node of the file's untyped syntax tree.
    pub fn root(&self) -> &SyntaxNode {
        &self.0.root
    }

    /// The id of the source file.
    pub fn id(&self) -> FileId {
        self.0.id
    }

    /// The whole source as a string slice.
    pub fn text(&self) -> &str {
        self.0.lines.text()
    }

    /// An acceleration structure for conversion of UTF-8, UTF-16 and
    /// line/column indices.
    pub fn lines(&self) -> &Lines<String> {
        &self.0.lines
    }

    /// Fully replace the source text.
    ///
    /// This performs a naive (suffix/prefix-based) diff of the old and new text
    /// to produce the smallest single edit that transforms old into new and
    /// then calls [`edit`](Self::edit) with it.
    ///
    /// Returns the range in the new source that was ultimately reparsed.
    pub fn replace(&mut self, new: &str) -> Range<usize> {
        let _scope = typst_timing::TimingScope::new("replace source");
        // No replacement range means the texts are identical; nothing to do.
        let Some((prefix, suffix)) = self.0.lines.replacement_range(new) else {
            return 0..0;
        };
        let old = self.text();
        let replace = prefix..old.len() - suffix;
        let with = &new[prefix..new.len() - suffix];
        self.edit(replace, with)
    }

    /// Edit the source file by replacing the given range.
    ///
    /// Returns the range in the new source that was ultimately reparsed.
    ///
    /// The method panics if the `replace` range is out of bounds.
    #[track_caller]
    pub fn edit(&mut self, replace: Range<usize>, with: &str) -> Range<usize> {
        // Copy-on-write: clone the inner data only if it is shared.
        let inner = Arc::make_mut(&mut self.0);

        // Update the text and lines.
        inner.lines.edit(replace.clone(), with);

        // Incrementally reparse the replaced range.
        reparse(&mut inner.root, inner.lines.text(), replace, with.len())
    }

    /// Find the node with the given span.
    ///
    /// Returns `None` if the span does not point into this source file.
    pub fn find(&self, span: Span) -> Option<LinkedNode<'_>> {
        LinkedNode::new(self.root()).find(span)
    }

    /// Get the byte range for the given span in this file.
    ///
    /// Returns `None` if the span does not point into this source file.
    ///
    /// Typically, it's easier to use `WorldExt::range` instead.
    pub fn range(&self, span: Span) -> Option<Range<usize>> {
        Some(self.find(span)?.range())
    }
}
impl Debug for Source {
    // Shows only the virtual path, not the (potentially large) text or tree.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "Source({:?})", self.id().vpath())
    }
}

impl Hash for Source {
    // Hashes id, text, and syntax tree; the `LazyHash` wrappers presumably
    // cache the expensive hashes — see `typst_utils::LazyHash`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.id.hash(state);
        self.0.lines.hash(state);
        self.0.root.hash(state);
    }
}

impl AsRef<str> for Source {
    // Borrow the full source text.
    fn as_ref(&self) -> &str {
        self.text()
    }
}
// ---- file: crates/typst-syntax/src/lib.rs ----
//! Parser and syntax tree for Typst.
pub mod ast;
pub mod package;
mod file;
mod highlight;
mod kind;
mod lexer;
mod lines;
mod node;
mod parser;
mod path;
mod reparser;
mod set;
mod source;
mod span;
pub use self::file::FileId;
pub use self::highlight::{Tag, highlight, highlight_html};
pub use self::kind::SyntaxKind;
pub use self::lexer::{
is_id_continue, is_id_start, is_ident, is_newline, is_valid_label_literal_id,
link_prefix, split_newlines,
};
pub use self::lines::Lines;
pub use self::node::{LinkedChildren, LinkedNode, Side, SyntaxError, SyntaxNode};
pub use self::parser::{parse, parse_code, parse_math};
pub use self::path::VirtualPath;
pub use self::source::Source;
pub use self::span::{Span, Spanned};
use self::lexer::Lexer;
use self::parser::{reparse_block, reparse_markup};
/// The syntax mode of a portion of Typst code.
///
/// Selects which lexing/parsing rules apply (see [`parse`], [`parse_code`],
/// and [`parse_math`]).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum SyntaxMode {
    /// Text and markup, as in the top level.
    Markup,
    /// Math atoms, operators, etc., as in equations.
    Math,
    /// Keywords, literals and operators, as after hashes.
    Code,
}
// ---- file: crates/typst-syntax/src/highlight.rs ----
use crate::{LinkedNode, SyntaxKind, SyntaxNode, ast};
/// A syntax highlighting tag.
///
/// The declaration order matters: `tag as usize` is used as an index into
/// [`Tag::LIST`], so keep both in sync.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Tag {
    /// A line or block comment.
    Comment,
    /// Punctuation in code.
    Punctuation,
    /// An escape sequence or shorthand.
    Escape,
    /// Strong markup.
    Strong,
    /// Emphasized markup.
    Emph,
    /// A hyperlink.
    Link,
    /// Raw text.
    Raw,
    /// A label.
    Label,
    /// A reference to a label.
    Ref,
    /// A section heading.
    Heading,
    /// A marker of a list, enumeration, or term list.
    ListMarker,
    /// A term in a term list.
    ListTerm,
    /// The delimiters of an equation.
    MathDelimiter,
    /// An operator with special meaning in an equation.
    MathOperator,
    /// A keyword.
    Keyword,
    /// An operator in code.
    Operator,
    /// A numeric literal.
    Number,
    /// A string literal.
    String,
    /// A function or method name.
    Function,
    /// An interpolated variable in markup or math.
    Interpolated,
    /// A syntax error.
    Error,
}
impl Tag {
    /// The list of all tags, in the same order as they are defined.
    ///
    /// Can be used as the counter-part to `tag as usize`.
    pub const LIST: &'static [Tag] = &[
        Self::Comment,
        Self::Punctuation,
        Self::Escape,
        Self::Strong,
        Self::Emph,
        Self::Link,
        Self::Raw,
        Self::Label,
        Self::Ref,
        Self::Heading,
        Self::ListMarker,
        Self::ListTerm,
        Self::MathDelimiter,
        Self::MathOperator,
        Self::Keyword,
        Self::Operator,
        Self::Number,
        Self::String,
        Self::Function,
        Self::Interpolated,
        Self::Error,
    ];

    /// Return the recommended TextMate grammar scope for the given highlighting
    /// tag.
    pub fn tm_scope(&self) -> &'static str {
        match self {
            Self::Comment => "comment.typst",
            Self::Punctuation => "punctuation.typst",
            Self::Escape => "constant.character.escape.typst",
            Self::Strong => "markup.bold.typst",
            Self::Emph => "markup.italic.typst",
            Self::Link => "markup.underline.link.typst",
            Self::Raw => "markup.raw.typst",
            Self::MathDelimiter => "punctuation.definition.math.typst",
            Self::MathOperator => "keyword.operator.math.typst",
            Self::Heading => "markup.heading.typst",
            Self::ListMarker => "punctuation.definition.list.typst",
            Self::ListTerm => "markup.list.term.typst",
            Self::Label => "entity.name.label.typst",
            Self::Ref => "markup.other.reference.typst",
            Self::Keyword => "keyword.typst",
            Self::Operator => "keyword.operator.typst",
            Self::Number => "constant.numeric.typst",
            Self::String => "string.quoted.double.typst",
            Self::Function => "entity.name.function.typst",
            Self::Interpolated => "meta.interpolation.typst",
            Self::Error => "invalid.typst",
        }
    }

    /// The recommended CSS class for the highlighting tag.
    pub fn css_class(self) -> &'static str {
        match self {
            Self::Comment => "typ-comment",
            Self::Punctuation => "typ-punct",
            Self::Escape => "typ-escape",
            Self::Strong => "typ-strong",
            Self::Emph => "typ-emph",
            Self::Link => "typ-link",
            Self::Raw => "typ-raw",
            Self::Label => "typ-label",
            Self::Ref => "typ-ref",
            Self::Heading => "typ-heading",
            Self::ListMarker => "typ-marker",
            Self::ListTerm => "typ-term",
            Self::MathDelimiter => "typ-math-delim",
            Self::MathOperator => "typ-math-op",
            Self::Keyword => "typ-key",
            Self::Operator => "typ-op",
            Self::Number => "typ-num",
            Self::String => "typ-str",
            Self::Function => "typ-func",
            Self::Interpolated => "typ-pol",
            Self::Error => "typ-error",
        }
    }
}
/// Determine the highlight tag of a linked syntax node.
///
/// Returns `None` if the node should not be highlighted. A few kinds depend
/// on their context (parent or sibling kinds) rather than on the kind alone.
pub fn highlight(node: &LinkedNode) -> Option<Tag> {
    match node.kind() {
        // A term list's term markup, directly before the colon.
        SyntaxKind::Markup
            if node.parent_kind() == Some(SyntaxKind::TermItem)
                && node.next_sibling_kind() == Some(SyntaxKind::Colon) =>
        {
            Some(Tag::ListTerm)
        }

        // Markup.
        SyntaxKind::Markup => None,
        SyntaxKind::Text => None,
        SyntaxKind::Space => None,
        SyntaxKind::Linebreak => Some(Tag::Escape),
        SyntaxKind::Parbreak => None,
        SyntaxKind::Escape => Some(Tag::Escape),
        SyntaxKind::Shorthand => Some(Tag::Escape),
        SyntaxKind::SmartQuote => None,
        SyntaxKind::Strong => Some(Tag::Strong),
        SyntaxKind::Emph => Some(Tag::Emph),
        SyntaxKind::Raw => Some(Tag::Raw),
        SyntaxKind::RawLang => None,
        SyntaxKind::RawTrimmed => None,
        SyntaxKind::RawDelim => None,
        SyntaxKind::Link => Some(Tag::Link),
        SyntaxKind::Label => Some(Tag::Label),
        SyntaxKind::Ref => Some(Tag::Ref),
        SyntaxKind::RefMarker => None,
        SyntaxKind::Heading => Some(Tag::Heading),
        SyntaxKind::HeadingMarker => None,
        SyntaxKind::ListItem => None,
        SyntaxKind::ListMarker => Some(Tag::ListMarker),
        SyntaxKind::EnumItem => None,
        SyntaxKind::EnumMarker => Some(Tag::ListMarker),
        SyntaxKind::TermItem => None,
        SyntaxKind::TermMarker => Some(Tag::ListMarker),

        // Math.
        SyntaxKind::Equation => None,
        SyntaxKind::Math => None,
        SyntaxKind::MathText => None,
        SyntaxKind::MathIdent => highlight_ident(node),
        SyntaxKind::MathShorthand => Some(Tag::Escape),
        SyntaxKind::MathAlignPoint => Some(Tag::MathOperator),
        SyntaxKind::MathDelimited => None,
        SyntaxKind::MathAttach => None,
        SyntaxKind::MathFrac => None,
        SyntaxKind::MathRoot => None,
        SyntaxKind::MathPrimes => Some(Tag::MathOperator),

        // Punctuation and operators; some depend on the parent's kind.
        SyntaxKind::Hash => highlight_hash(node),
        SyntaxKind::LeftBrace => Some(Tag::Punctuation),
        SyntaxKind::RightBrace => Some(Tag::Punctuation),
        SyntaxKind::LeftBracket => Some(Tag::Punctuation),
        SyntaxKind::RightBracket => Some(Tag::Punctuation),
        SyntaxKind::LeftParen => Some(Tag::Punctuation),
        SyntaxKind::RightParen => Some(Tag::Punctuation),
        SyntaxKind::Comma => Some(Tag::Punctuation),
        SyntaxKind::Semicolon => Some(Tag::Punctuation),
        SyntaxKind::Colon => Some(Tag::Punctuation),
        SyntaxKind::Star => match node.parent_kind() {
            Some(SyntaxKind::Strong) => None,
            _ => Some(Tag::Operator),
        },
        SyntaxKind::Underscore => match node.parent_kind() {
            Some(SyntaxKind::MathAttach) => Some(Tag::MathOperator),
            _ => None,
        },
        SyntaxKind::Dollar => Some(Tag::MathDelimiter),
        SyntaxKind::Plus => Some(Tag::Operator),
        SyntaxKind::Minus => Some(Tag::Operator),
        SyntaxKind::Slash => Some(match node.parent_kind() {
            Some(SyntaxKind::MathFrac) => Tag::MathOperator,
            _ => Tag::Operator,
        }),
        SyntaxKind::Hat => Some(Tag::MathOperator),
        SyntaxKind::Dot => Some(Tag::Punctuation),
        SyntaxKind::Eq => match node.parent_kind() {
            Some(SyntaxKind::Heading) => None,
            _ => Some(Tag::Operator),
        },
        SyntaxKind::EqEq => Some(Tag::Operator),
        SyntaxKind::ExclEq => Some(Tag::Operator),
        SyntaxKind::Lt => Some(Tag::Operator),
        SyntaxKind::LtEq => Some(Tag::Operator),
        SyntaxKind::Gt => Some(Tag::Operator),
        SyntaxKind::GtEq => Some(Tag::Operator),
        SyntaxKind::PlusEq => Some(Tag::Operator),
        SyntaxKind::HyphEq => Some(Tag::Operator),
        SyntaxKind::StarEq => Some(Tag::Operator),
        SyntaxKind::SlashEq => Some(Tag::Operator),
        SyntaxKind::Dots => Some(Tag::Operator),
        SyntaxKind::Arrow => Some(Tag::Operator),
        SyntaxKind::Root => Some(Tag::MathOperator),
        SyntaxKind::Bang => None,

        // Keywords.
        SyntaxKind::Not => Some(Tag::Keyword),
        SyntaxKind::And => Some(Tag::Keyword),
        SyntaxKind::Or => Some(Tag::Keyword),
        SyntaxKind::None => Some(Tag::Keyword),
        SyntaxKind::Auto => Some(Tag::Keyword),
        SyntaxKind::Let => Some(Tag::Keyword),
        SyntaxKind::Set => Some(Tag::Keyword),
        SyntaxKind::Show => Some(Tag::Keyword),
        SyntaxKind::Context => Some(Tag::Keyword),
        SyntaxKind::If => Some(Tag::Keyword),
        SyntaxKind::Else => Some(Tag::Keyword),
        SyntaxKind::For => Some(Tag::Keyword),
        SyntaxKind::In => Some(Tag::Keyword),
        SyntaxKind::While => Some(Tag::Keyword),
        SyntaxKind::Break => Some(Tag::Keyword),
        SyntaxKind::Continue => Some(Tag::Keyword),
        SyntaxKind::Return => Some(Tag::Keyword),
        SyntaxKind::Import => Some(Tag::Keyword),
        SyntaxKind::Include => Some(Tag::Keyword),
        SyntaxKind::As => Some(Tag::Keyword),

        // Code literals and expressions (inner nodes are untagged; their
        // leaves carry the highlighting).
        SyntaxKind::Code => None,
        SyntaxKind::Ident => highlight_ident(node),
        SyntaxKind::Bool => Some(Tag::Keyword),
        SyntaxKind::Int => Some(Tag::Number),
        SyntaxKind::Float => Some(Tag::Number),
        SyntaxKind::Numeric => Some(Tag::Number),
        SyntaxKind::Str => Some(Tag::String),
        SyntaxKind::CodeBlock => None,
        SyntaxKind::ContentBlock => None,
        SyntaxKind::Parenthesized => None,
        SyntaxKind::Array => None,
        SyntaxKind::Dict => None,
        SyntaxKind::Named => None,
        SyntaxKind::Keyed => None,
        SyntaxKind::Unary => None,
        SyntaxKind::Binary => None,
        SyntaxKind::FieldAccess => None,
        SyntaxKind::FuncCall => None,
        SyntaxKind::Args => None,
        SyntaxKind::Spread => None,
        SyntaxKind::Closure => None,
        SyntaxKind::Params => None,
        SyntaxKind::LetBinding => None,
        SyntaxKind::SetRule => None,
        SyntaxKind::ShowRule => None,
        SyntaxKind::Contextual => None,
        SyntaxKind::Conditional => None,
        SyntaxKind::WhileLoop => None,
        SyntaxKind::ForLoop => None,
        SyntaxKind::ModuleImport => None,
        SyntaxKind::ImportItems => None,
        SyntaxKind::ImportItemPath => None,
        SyntaxKind::RenamedImportItem => None,
        SyntaxKind::ModuleInclude => None,
        SyntaxKind::LoopBreak => None,
        SyntaxKind::LoopContinue => None,
        SyntaxKind::FuncReturn => None,
        SyntaxKind::Destructuring => None,
        SyntaxKind::DestructAssignment => None,

        // Comments and errors.
        SyntaxKind::Shebang => Some(Tag::Comment),
        SyntaxKind::LineComment => Some(Tag::Comment),
        SyntaxKind::BlockComment => Some(Tag::Comment),
        SyntaxKind::Error => Some(Tag::Error),
        SyntaxKind::End => None,
    }
}
/// Highlight an identifier based on context.
///
/// Tries several contextual rules in order: call syntax, math mode,
/// show-rule position, hash interpolation, and field-access chains (the
/// last recurses through the chain's leading identifier).
fn highlight_ident(node: &LinkedNode) -> Option<Tag> {
    // Are we directly before an argument list?
    let next_leaf = node.next_leaf();
    if let Some(next) = &next_leaf
        && node.range().end == next.offset()
        && ((next.kind() == SyntaxKind::LeftParen
            && matches!(next.parent_kind(), Some(SyntaxKind::Args | SyntaxKind::Params)))
            || (next.kind() == SyntaxKind::LeftBracket
                && next.parent_kind() == Some(SyntaxKind::ContentBlock)))
    {
        return Some(Tag::Function);
    }

    // Are we in math?
    if node.kind() == SyntaxKind::MathIdent {
        return Some(Tag::Interpolated);
    }

    // Find the first non-field access ancestor.
    let mut ancestor = node;
    while ancestor.parent_kind() == Some(SyntaxKind::FieldAccess) {
        ancestor = ancestor.parent()?;
    }

    // Are we directly before or behind a show rule colon?
    if ancestor.parent_kind() == Some(SyntaxKind::ShowRule)
        && (next_leaf.map(|leaf| leaf.kind()) == Some(SyntaxKind::Colon)
            || node.prev_leaf().map(|leaf| leaf.kind()) == Some(SyntaxKind::Colon))
    {
        return Some(Tag::Function);
    }

    // Are we (or an ancestor field access) directly after a hash.
    if ancestor.prev_leaf().map(|leaf| leaf.kind()) == Some(SyntaxKind::Hash) {
        return Some(Tag::Interpolated);
    }

    // Are we behind a dot, that is behind another identifier?
    let prev = node.prev_leaf()?;
    if prev.kind() == SyntaxKind::Dot {
        let prev_prev = prev.prev_leaf()?;
        if is_ident(&prev_prev) {
            return highlight_ident(&prev_prev);
        }
    }

    None
}
/// Highlight a hash based on context.
///
/// The hash inherits the highlighting of the leftmost leaf of the expression
/// it introduces, but only if that expression is hash-prefixed syntax.
fn highlight_hash(node: &LinkedNode) -> Option<Tag> {
    let sibling = node.next_sibling()?;
    let hashable = sibling.cast::<ast::Expr>()?.hash();
    if hashable {
        highlight(&sibling.leftmost_leaf()?)
    } else {
        None
    }
}
/// Whether the node is one of the two identifier nodes.
fn is_ident(node: &LinkedNode) -> bool {
    let kind = node.kind();
    kind == SyntaxKind::Ident || kind == SyntaxKind::MathIdent
}
/// Highlight a node to an HTML `code` element.
///
/// This uses these [CSS classes for categories](Tag::css_class).
pub fn highlight_html(root: &SyntaxNode) -> String {
    let mut out = String::from("<code>");
    highlight_html_impl(&mut out, &LinkedNode::new(root));
    out.push_str("</code>");
    out
}
/// Highlight one source node, emitting HTML.
fn highlight_html_impl(html: &mut String, node: &LinkedNode) {
let mut span = false;
if let Some(tag) = highlight(node)
&& tag != Tag::Error
{
span = true;
html.push_str("<span class=\"");
html.push_str(tag.css_class());
html.push_str("\">");
}
let text = node.text();
if !text.is_empty() {
for c in text.chars() {
match c {
'<' => html.push_str("<"),
'>' => html.push_str(">"),
'&' => html.push_str("&"),
'\'' => html.push_str("'"),
'"' => html.push_str("""),
_ => html.push(c),
}
}
} else {
for child in node.children() {
highlight_html_impl(html, &child);
}
}
if span {
html.push_str("</span>");
}
}
#[cfg(test)]
mod tests {
    use std::ops::Range;

    use super::*;

    #[test]
    fn test_highlighting() {
        use Tag::*;

        // Parses `text`, collects every (range, tag) pair in tree order, and
        // compares against `goal`.
        #[track_caller]
        fn test(text: &str, goal: &[(Range<usize>, Tag)]) {
            let mut vec = vec![];
            let root = crate::parse(text);
            highlight_tree(&mut vec, &LinkedNode::new(&root));
            assert_eq!(vec, goal);
        }

        // Pre-order walk that records each node's highlight tag, if any.
        fn highlight_tree(tags: &mut Vec<(Range<usize>, Tag)>, node: &LinkedNode) {
            if let Some(tag) = highlight(node) {
                tags.push((node.range(), tag));
            }
            for child in node.children() {
                highlight_tree(tags, &child);
            }
        }

        test("= *AB*", &[(0..6, Heading), (2..6, Strong)]);

        test(
            "#f(x + 1)",
            &[
                (0..1, Function),
                (1..2, Function),
                (2..3, Punctuation),
                (5..6, Operator),
                (7..8, Number),
                (8..9, Punctuation),
            ],
        );

        test(
            "#let f(x) = x",
            &[
                (0..1, Keyword),
                (1..4, Keyword),
                (5..6, Function),
                (6..7, Punctuation),
                (8..9, Punctuation),
                (10..11, Operator),
            ],
        );
    }
}
// ---- file: crates/typst-syntax/src/parser.rs ----
use std::mem;
use std::ops::{DerefMut, Index, IndexMut, Range};
use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_utils::{default_math_class, defer};
use unicode_math_class::MathClass;
use crate::set::{SyntaxSet, syntax_set};
use crate::{Lexer, SyntaxError, SyntaxKind, SyntaxMode, SyntaxNode, ast, set};
// Maximum parser nesting depth, enforced by the recursion guards (see
// `check_depth_until` / `increase_depth` call sites). Picked by gut feeling.
const MAX_DEPTH: u32 = 256;
/// Parses a source file as top-level markup.
pub fn parse(text: &str) -> SyntaxNode {
    let _scope = typst_timing::TimingScope::new("parse");
    let mut p = Parser::new(text, 0, SyntaxMode::Markup);
    markup_exprs(&mut p, true, syntax_set!(End));
    // The whole file becomes a single `Markup` node.
    p.finish_into(SyntaxKind::Markup)
}

/// Parses top-level code.
pub fn parse_code(text: &str) -> SyntaxNode {
    let _scope = typst_timing::TimingScope::new("parse code");
    let mut p = Parser::new(text, 0, SyntaxMode::Code);
    code_exprs(&mut p, syntax_set!(End));
    p.finish_into(SyntaxKind::Code)
}

/// Parses top-level math.
pub fn parse_math(text: &str) -> SyntaxNode {
    let _scope = typst_timing::TimingScope::new("parse math");
    let mut p = Parser::new(text, 0, SyntaxMode::Math);
    math_exprs(&mut p, syntax_set!(End));
    p.finish_into(SyntaxKind::Math)
}
/// Parses markup expressions until a stop condition is met.
///
/// Everything parsed after the marker is wrapped into a `Markup` node. With
/// `wrap_trivia`, the marker is placed before any pending trivia and trivia
/// is flushed before wrapping, so surrounding trivia ends up inside the node.
fn markup(p: &mut Parser, at_start: bool, wrap_trivia: bool, stop_set: SyntaxSet) {
    let m = if wrap_trivia { p.before_trivia() } else { p.marker() };
    markup_exprs(p, at_start, stop_set);
    if wrap_trivia {
        p.flush_trivia();
    }
    p.wrap(m, SyntaxKind::Markup)
}
/// Parses a sequence of markup expressions.
///
/// `at_start` tracks whether we are at the start of a line, which enables
/// line-initial constructs (headings, list items, ...). A preceding newline
/// also counts as a line start.
fn markup_exprs(p: &mut Parser, mut at_start: bool, stop_set: SyntaxSet) {
    debug_assert!(stop_set.contains(SyntaxKind::End));
    // Bail out (consuming up to the stop set) once the depth limit is hit.
    let Some(p) = p.check_depth_until(stop_set) else { return };
    at_start |= p.had_newline();
    let mut nesting: usize = 0;
    // Keep going if we're at a nested right-bracket regardless of the stop set.
    while !p.at_set(stop_set) || (nesting > 0 && p.at(SyntaxKind::RightBracket)) {
        markup_expr(p, at_start, &mut nesting);
        at_start = p.had_newline();
    }
}
/// Reparses a subsection of markup incrementally.
///
/// Returns the newly parsed nodes only if the parse was balanced and stopped
/// exactly at the end of `range`; otherwise the caller must reparse a larger
/// region. `at_start` and `nesting` are carried state from the surrounding
/// markup and are updated in place.
pub(super) fn reparse_markup(
    text: &str,
    range: Range<usize>,
    at_start: &mut bool,
    nesting: &mut usize,
    top_level: bool,
) -> Option<Vec<SyntaxNode>> {
    let mut p = Parser::new(text, range.start, SyntaxMode::Markup);
    *at_start |= p.had_newline();
    while !p.end() && p.current_start() < range.end {
        // If not top-level and at a new RightBracket, stop the reparse.
        if !top_level && *nesting == 0 && p.at(SyntaxKind::RightBracket) {
            break;
        }
        markup_expr(&mut p, *at_start, nesting);
        *at_start = p.had_newline();
    }
    (p.balanced && p.current_start() == range.end).then(|| p.finish())
}
/// Parses a single markup expression. This includes markup elements like text,
/// headings, strong/emph, lists/enums, etc. This is also the entry point for
/// parsing math equations and embedded code expressions.
///
/// `nesting` counts unmatched `[` seen as plain text, so that matching `]`
/// can also be treated as text instead of ending the enclosing content block.
fn markup_expr(p: &mut Parser, at_start: bool, nesting: &mut usize) {
    // Bail out if the depth limit is reached.
    let Some(p) = &mut p.increase_depth() else { return };
    match p.current() {
        SyntaxKind::LeftBracket => {
            *nesting += 1;
            p.convert_and_eat(SyntaxKind::Text);
        }
        SyntaxKind::RightBracket if *nesting > 0 => {
            *nesting -= 1;
            p.convert_and_eat(SyntaxKind::Text);
        }
        SyntaxKind::RightBracket => {
            p.unexpected();
            p.hint("try using a backslash escape: \\]");
        }

        SyntaxKind::Shebang => p.eat(),

        SyntaxKind::Text
        | SyntaxKind::Linebreak
        | SyntaxKind::Escape
        | SyntaxKind::Shorthand
        | SyntaxKind::SmartQuote
        | SyntaxKind::Link
        | SyntaxKind::Label => p.eat(),

        SyntaxKind::Raw => p.eat(), // Raw is handled entirely in the Lexer.

        SyntaxKind::Hash => embedded_code_expr(p),
        SyntaxKind::Star => strong(p),
        SyntaxKind::Underscore => emph(p),
        SyntaxKind::HeadingMarker if at_start => heading(p),
        SyntaxKind::ListMarker if at_start => list_item(p),
        SyntaxKind::EnumMarker if at_start => enum_item(p),
        SyntaxKind::TermMarker if at_start => term_item(p),
        SyntaxKind::RefMarker => reference(p),
        SyntaxKind::Dollar => equation(p),

        // Mid-line markers are not special; degrade them to plain text.
        SyntaxKind::HeadingMarker
        | SyntaxKind::ListMarker
        | SyntaxKind::EnumMarker
        | SyntaxKind::TermMarker
        | SyntaxKind::Colon => p.convert_and_eat(SyntaxKind::Text),

        _ => p.unexpected(),
    }
}
/// Parses strong content: `*Strong*`.
fn strong(p: &mut Parser) {
    // The `StopParBreak` newline mode is active within the delimiters.
    p.with_nl_mode(AtNewline::StopParBreak, |p| {
        let m = p.marker();
        p.assert(SyntaxKind::Star);
        markup(p, false, true, syntax_set!(Star, RightBracket, End));
        p.expect_closing_delimiter(m, SyntaxKind::Star);
        p.wrap(m, SyntaxKind::Strong);
    });
}

/// Parses emphasized content: `_Emphasized_`.
///
/// Mirrors `strong`, but delimited by underscores.
fn emph(p: &mut Parser) {
    p.with_nl_mode(AtNewline::StopParBreak, |p| {
        let m = p.marker();
        p.assert(SyntaxKind::Underscore);
        markup(p, false, true, syntax_set!(Underscore, RightBracket, End));
        p.expect_closing_delimiter(m, SyntaxKind::Underscore);
        p.wrap(m, SyntaxKind::Emph);
    });
}
/// Parses a section heading: `= Introduction`.
fn heading(p: &mut Parser) {
    // `AtNewline::Stop` ends the heading at the end of the line.
    p.with_nl_mode(AtNewline::Stop, |p| {
        let m = p.marker();
        p.assert(SyntaxKind::HeadingMarker);
        // A label stops the heading text so it can attach to the heading.
        markup(p, false, false, syntax_set!(Label, RightBracket, End));
        p.wrap(m, SyntaxKind::Heading);
    });
}
/// Parses an item in a bullet list: `- ...`.
fn list_item(p: &mut Parser) {
    // `RequireColumn` lets the item continue onto following lines that are
    // indented past the marker's column (hanging indent).
    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
        let m = p.marker();
        p.assert(SyntaxKind::ListMarker);
        markup(p, true, false, syntax_set!(RightBracket, End));
        p.wrap(m, SyntaxKind::ListItem);
    });
}
/// Parses an item in an enumeration (numbered list): `+ ...` or `1. ...`.
fn enum_item(p: &mut Parser) {
    // Continuation lines must be indented past the marker's column.
    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
        let start = p.marker();
        p.assert(SyntaxKind::EnumMarker);
        markup(p, true, false, syntax_set!(RightBracket, End));
        p.wrap(start, SyntaxKind::EnumItem);
    });
}
/// Parses an item in a term list: `/ Term: Details`.
fn term_item(p: &mut Parser) {
    // The whole item continues onto indented follow-up lines...
    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
        let m = p.marker();
        // ...but the term itself must end on the marker's line.
        p.with_nl_mode(AtNewline::Stop, |p| {
            p.assert(SyntaxKind::TermMarker);
            markup(p, false, false, syntax_set!(Colon, RightBracket, End));
        });
        // The colon separates the term from its description.
        p.expect(SyntaxKind::Colon);
        markup(p, true, false, syntax_set!(RightBracket, End));
        p.wrap(m, SyntaxKind::TermItem);
    });
}
/// Parses a reference: `@target`, `@target[..]`.
fn reference(p: &mut Parser) {
    let start = p.marker();
    p.assert(SyntaxKind::RefMarker);
    // A content block directly after the marker (no space) belongs to the
    // reference as its supplement.
    if p.directly_at(SyntaxKind::LeftBracket) {
        content_block(p);
    }
    p.wrap(start, SyntaxKind::Ref);
}
/// Parses a mathematical equation: `$x$`, `$ x^2 $`.
fn equation(p: &mut Parser) {
    let m = p.marker();
    // Everything between the dollars is lexed and parsed in math mode.
    p.enter_modes(SyntaxMode::Math, AtNewline::Continue, |p| {
        p.assert(SyntaxKind::Dollar);
        math(p, syntax_set!(Dollar, End));
        p.expect_closing_delimiter(m, SyntaxKind::Dollar);
    });
    p.wrap(m, SyntaxKind::Equation);
}
/// Parses the contents of a mathematical equation: `x^2 + 1`.
fn math(p: &mut Parser, stop_set: SyntaxSet) {
    let start = p.marker();
    math_exprs(p, stop_set);
    // Group everything parsed so far into a single `Math` node.
    p.wrap(start, SyntaxKind::Math);
}
/// Parses a sequence of math expressions. Returns the number of expressions
/// parsed (including errors).
fn math_exprs(p: &mut Parser, stop_set: SyntaxSet) -> usize {
    debug_assert!(stop_set.contains(SyntaxKind::End));
    // On hitting the depth limit, report 1 rather than 0 — presumably so
    // callers (e.g. `math_arg`) don't treat the bailout as an empty
    // sequence; TODO confirm.
    let Some(p) = p.check_depth_until(stop_set) else { return 1 };
    let mut count = 0;
    while !p.at_set(stop_set) {
        if p.at_set(set::MATH_EXPR) {
            math_expr(p);
        } else {
            // Tokens that can't start a math expression become errors.
            p.unexpected();
        }
        count += 1;
    }
    count
}
/// Parses a single math expression: This includes math elements like
/// attachment, fractions, roots, and embedded code expressions.
fn math_expr(p: &mut Parser) {
    // Start with minimum precedence 0 and no extra stop tokens.
    math_expr_prec(p, 0, syntax_set!())
}
/// Parses a math expression with at least the given precedence, possibly
/// chaining with another operator by returning early.
///
/// This is a Pratt-style parser: `min_prec` bounds which infix/postfix
/// operators may be consumed here, and `stop_set` holds extra tokens that
/// terminate the expression (used for attachment chaining below).
fn math_expr_prec(p: &mut Parser, min_prec: u8, stop_set: SyntaxSet) {
    // Guard against stack overflow on deeply nested input.
    let Some(p) = &mut p.increase_depth() else { return };
    let m = p.marker();
    // Whether the primary we parse may combine with following delimiters
    // into an implicit function call (see below).
    let mut continuable = false;
    match p.current() {
        SyntaxKind::Hash => embedded_code_expr(p),
        // The lexer manages creating full FieldAccess nodes if needed.
        SyntaxKind::MathIdent | SyntaxKind::FieldAccess => {
            continuable = true;
            p.eat();
            // Parse a function call for an identifier or field access.
            if MATH_FUNC_PREC >= min_prec && p.directly_at(SyntaxKind::LeftParen) {
                math_args(p);
                p.wrap(m, SyntaxKind::FuncCall);
                continuable = false;
            }
        }
        SyntaxKind::LeftBrace | SyntaxKind::LeftParen => {
            math_delimited(p);
        }
        SyntaxKind::RightBrace if p.current_text() == "|]" => {
            p.convert_and_eat(SyntaxKind::MathShorthand);
        }
        // Punctuation with no math meaning here becomes plain math text.
        SyntaxKind::Dot
        | SyntaxKind::Bang
        | SyntaxKind::Comma
        | SyntaxKind::Semicolon
        | SyntaxKind::RightBrace
        | SyntaxKind::RightParen => {
            p.convert_and_eat(SyntaxKind::MathText);
        }
        SyntaxKind::MathText => {
            continuable = is_math_alphabetic(p.current_text());
            p.eat();
        }
        SyntaxKind::Linebreak
        | SyntaxKind::MathAlignPoint
        | SyntaxKind::MathShorthand => p.eat(),
        SyntaxKind::MathPrimes | SyntaxKind::Escape | SyntaxKind::Str => {
            continuable = true;
            p.eat();
        }
        SyntaxKind::Root => {
            p.eat();
            let m2 = p.marker();
            math_expr_prec(p, MATH_ROOT_PREC, syntax_set!());
            // A parenthesized radicand loses its parens: `√(x)` ≡ `√x`.
            math_unparen(p, m2);
            p.wrap(m, SyntaxKind::MathRoot);
        }
        _ => p.expected("expression"),
    }
    // Maybe recognize an implicit function call: a 'continuable' token followed
    // by delimiters will group as one with the precedence of a normal function.
    // E.g. `a(b)/c` parses as `(a(b))/c` when `a` is continuable.
    if continuable
        && MATH_FUNC_PREC >= min_prec
        && !p.had_trivia()
        && p.at_set(syntax_set!(LeftBrace, LeftParen))
    {
        math_delimited(p);
        p.wrap(m, SyntaxKind::Math);
    }
    // Parse infix and postfix operators. The general form of a parsed op looks
    // like: `MathAttach[ MathText("x"), Hat("^"), MathText("2") ]`.
    while !p.at_set(stop_set)
        && let op_kind = p.current()
        && let had_trivia = p.had_trivia()
        && let Some((wrapper, infix_assoc, prec)) = math_op(op_kind, had_trivia)
        && prec >= min_prec
    {
        // Prepare a chaining set for the attachment operators.
        let mut chain_set = if wrapper == SyntaxKind::MathAttach {
            // Hat can chain with Underscore, Underscore can chain with Hat, and
            // Prime can chain with either (but prime can't interrupt a chain,
            // see below).
            syntax_set!(Hat, Underscore).remove(op_kind)
        } else {
            syntax_set!()
        };
        // Eat the operator itself.
        if op_kind == SyntaxKind::Bang {
            // `!` has no dedicated math kind; it groups as plain text.
            p.convert_and_eat(SyntaxKind::MathText);
        } else {
            p.eat();
        }
        // Slash is the only operator that removes parens from its left operand.
        if wrapper == SyntaxKind::MathFrac {
            math_unparen(p, m);
        }
        // Parse the operator's right operand.
        if let Some(assoc) = infix_assoc {
            let prec = match assoc {
                // Left-associative: the operand must bind strictly tighter.
                ast::Assoc::Left => prec + 1,
                ast::Assoc::Right => prec,
            };
            let m_rhs = p.marker();
            math_expr_prec(p, prec, chain_set);
            math_unparen(p, m_rhs);
        }
        // Avoid interrupting a chain when initially parsing a prime.
        // For `a^b'_c^d` the grouping is `(a^(b')_c)^d` and not `a^(b'_c^d)`.
        if !(op_kind == SyntaxKind::MathPrimes && p.at_set(stop_set)) {
            // Parse chained attachment operators as a single attachment.
            while p.at_set(chain_set) {
                // Each of hat/underscore may appear only once per chain.
                chain_set = chain_set.remove(p.current());
                p.eat();
                let m_chain_rhs = p.marker();
                math_expr_prec(p, prec, chain_set);
                math_unparen(p, m_chain_rhs);
            }
        }
        // Finish the operator by wrapping from its left operand.
        p.wrap(m, wrapper);
    }
}
// These are declared here so they're easier to compare with `math_op`.
// Both sit at the same level as the attachment operators (precedence 2).
const MATH_FUNC_PREC: u8 = 2;
const MATH_ROOT_PREC: u8 = 2;
/// Precedence and wrapper kinds for infix and postfix math operators.
///
/// Returns `(wrapper kind, infix associativity, precedence)` for operator
/// tokens; `None` for the postfix operators means they take no right operand.
fn math_op(
    kind: SyntaxKind,
    had_trivia: bool,
) -> Option<(SyntaxKind, Option<ast::Assoc>, u8)> {
    match kind {
        SyntaxKind::Slash => Some((SyntaxKind::MathFrac, Some(ast::Assoc::Left), 1)),
        // Subscript and superscript share the attachment wrapper.
        SyntaxKind::Underscore | SyntaxKind::Hat => {
            Some((SyntaxKind::MathAttach, Some(ast::Assoc::Right), 2))
        }
        // Primes and `!` are postfix and must touch their base directly.
        SyntaxKind::MathPrimes if !had_trivia => {
            Some((SyntaxKind::MathAttach, None, 2))
        }
        SyntaxKind::Bang if !had_trivia => Some((SyntaxKind::Math, None, 3)),
        _ => None,
    }
}
/// Whether text counts as alphabetic in math. For the `Text` and `MathText`
/// kinds, this causes them to group with parens as an implicit function call.
fn is_math_alphabetic(text: &str) -> bool {
    let mut chars = text.chars();
    match (chars.next(), chars.next()) {
        // Exactly one character: also accept codepoints whose math class
        // is alphabetic.
        (Some(c), None) => {
            c.is_alphabetic() || default_math_class(c) == Some(MathClass::Alphabetic)
        }
        // Zero or multiple characters: all must be alphabetic (vacuously
        // true for the empty string, matching the original behavior).
        _ => text.chars().all(char::is_alphabetic),
    }
}
/// Parse matched delimiters in math: `[x + y]`.
///
/// The lexer produces `{Left,Right}{Brace,Paren}` for delimiters, and it's our
/// job to convert them back to `MathText` or `MathShorthand` before eating.
fn math_delimited(p: &mut Parser) {
    let m = p.marker();
    // `[|` is the floor/norm-style shorthand; all other opening delimiters
    // become plain math text.
    if p.current_text() == "[|" {
        p.convert_and_eat(SyntaxKind::MathShorthand);
    } else {
        p.convert_and_eat(SyntaxKind::MathText);
    }
    let m_body = p.marker();
    math_exprs(p, syntax_set!(Dollar, End, RightBrace, RightParen));
    if p.at_set(syntax_set!(RightBrace, RightParen)) {
        // Wrap the inner expressions, then eat the closing delimiter.
        p.wrap(m_body, SyntaxKind::Math);
        if p.current_text() == "|]" {
            p.convert_and_eat(SyntaxKind::MathShorthand);
        } else {
            p.convert_and_eat(SyntaxKind::MathText);
        }
        p.wrap(m, SyntaxKind::MathDelimited);
    } else {
        // If we had no closing delimiter, just produce a math sequence.
        p.wrap(m, SyntaxKind::Math);
    }
}
/// Remove one set of parentheses (if any) from a previously parsed expression
/// by converting to non-expression SyntaxKinds.
///
/// `m` must point at the node to inspect; if it isn't delimited math wrapped
/// in regular parens, nothing happens.
fn math_unparen(p: &mut Parser, m: Marker) {
    let Some(node) = p.nodes.get_mut(m.0) else { return };
    if node.kind() != SyntaxKind::MathDelimited {
        return;
    }
    // Check the first and last children, which hold the delimiter tokens.
    if let [first, .., last] = node.children_mut()
        && first.text() == "("
        && last.text() == ")"
    {
        first.convert_to_kind(SyntaxKind::LeftParen);
        last.convert_to_kind(SyntaxKind::RightParen);
        // Only convert if we did have regular parens.
        node.convert_to_kind(SyntaxKind::Math);
    }
}
/// Parse an argument list in math: `(a, b; c, d; size: #50%)`.
fn math_args(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::LeftParen);
    // Whether the most recent argument was positional (named/spread
    // arguments never join an array).
    let mut positional = true;
    // Whether any semicolon-separated array rows were seen.
    let mut has_arrays = false;
    // Start of the run of comma-separated arguments that a semicolon would
    // merge into an array.
    let mut maybe_array_start = p.marker();
    // Names of named arguments seen so far, for duplicate detection.
    let mut seen = FxHashSet::default();
    while !p.at_set(syntax_set!(End, Dollar, RightParen)) {
        positional = math_arg(p, &mut seen);
        match p.current() {
            SyntaxKind::Comma => {
                p.eat();
                // A non-positional argument resets the potential array run.
                if !positional {
                    maybe_array_start = p.marker();
                }
            }
            SyntaxKind::Semicolon => {
                if !positional {
                    maybe_array_start = p.marker();
                }
                // Parses an array: `a, b, c;`.
                // The semicolon merges preceding arguments separated by commas
                // into an array argument.
                p.wrap(maybe_array_start, SyntaxKind::Array);
                p.eat();
                maybe_array_start = p.marker();
                has_arrays = true;
            }
            SyntaxKind::End | SyntaxKind::Dollar | SyntaxKind::RightParen => {}
            _ => p.expected("comma or semicolon"),
        }
    }
    // Check if we need to wrap the preceding arguments in an array.
    if maybe_array_start != p.marker() && has_arrays && positional {
        p.wrap(maybe_array_start, SyntaxKind::Array);
    }
    p.expect_closing_delimiter(m, SyntaxKind::RightParen);
    p.wrap(m, SyntaxKind::Args);
}
/// Parses a single argument in a math argument list.
///
/// `seen` collects named-argument names across the whole list so duplicates
/// can be reported. Returns whether the parsed argument was positional or not.
fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) -> bool {
    let m = p.marker();
    let start = p.current_start();
    let mut arg_kind = None;
    // Spread and named arguments need re-lexing help, since math mode lexes
    // their prefixes differently.
    if p.at(SyntaxKind::Dot)
        && let Some(spread) = p.lexer.maybe_math_spread_arg(start)
    {
        // Parses a spread argument: `..args`.
        arg_kind = Some(SyntaxKind::Spread);
        p.token.node = spread;
        p.eat();
    } else if p.at_set(syntax_set!(MathText, MathIdent, Underscore))
        && let Some(named) = p.lexer.maybe_math_named_arg(start)
    {
        // Parses a named argument: `thickness: #12pt`.
        arg_kind = Some(SyntaxKind::Named);
        p.token.node = named;
        let text = p.current_text();
        p.eat();
        p.convert_and_eat(SyntaxKind::Colon);
        if !seen.insert(text) {
            p[m].convert_to_error(eco_format!("duplicate argument: {text}"));
        }
    }
    // Parses the argument itself.
    let m_arg = p.marker();
    let count = math_exprs(p, syntax_set!(End, Dollar, Comma, Semicolon, RightParen));
    if count == 0 {
        // This can't happen due to checks in `Lexer::maybe_math_spread_arg`.
        assert_ne!(arg_kind, Some(SyntaxKind::Spread));
        // Named arguments require a value.
        if arg_kind == Some(SyntaxKind::Named) {
            p.expected("expression");
        }
        // Flush trivia so that the new empty Math node will be wrapped _inside_
        // any `SyntaxKind::Array` elements created in `math_args`.
        // (And if we don't follow by wrapping in an array, it has no effect.)
        // The difference in node layout without this would look like:
        // - Expression: `$ mat( ;) $`
        // - Correct: [ .., Space(" "), Array[Math[], ], Semicolon(";"), .. ]
        // - Incorrect: [ .., Math[], Array[], Space(" "), Semicolon(";"), .. ]
        p.flush_trivia();
    }
    // Wrap math function arguments to join adjacent math content or create an
    // empty 'Math' node for when we have 0 args. We don't wrap when
    // `count == 1`, since wrapping would change the type of the expression
    // from potentially non-content to content. E.g. `$ func(#12pt) $` would
    // change the type of `#12pt` from size to content if wrapped.
    if count != 1 {
        p.wrap(m_arg, SyntaxKind::Math);
    }
    if let Some(kind) = arg_kind {
        p.wrap(m, kind);
    }
    arg_kind != Some(SyntaxKind::Named)
}
/// Parses the contents of a code block.
fn code(p: &mut Parser, stop_set: SyntaxSet) {
    let start = p.marker();
    code_exprs(p, stop_set);
    // Collect the parsed expressions into one `Code` node.
    p.wrap(start, SyntaxKind::Code);
}
/// Parses a sequence of code expressions.
fn code_exprs(p: &mut Parser, stop_set: SyntaxSet) {
    debug_assert!(stop_set.contains(SyntaxKind::End));
    // Bail out if the depth limit is reached, skipping to the stop set.
    let Some(p) = p.check_depth_until(stop_set) else { return };
    while !p.at_set(stop_set) {
        p.with_nl_mode(AtNewline::ContextualContinue, |p| {
            if !p.at_set(set::CODE_EXPR) {
                p.unexpected();
                return;
            }
            code_expr(p);
            // Expressions must be separated by a semicolon or a newline.
            if !p.at_set(stop_set) && !p.eat_if(SyntaxKind::Semicolon) {
                p.expected("semicolon or line break");
                // A trailing label is a common markup-vs-code confusion.
                if p.at(SyntaxKind::Label) {
                    p.hint("labels can only be applied in markup mode");
                    p.hint("try wrapping your code in a markup block (`[ ]`)");
                }
            }
        });
    }
}
/// Parses an atomic code expression embedded in markup or math.
fn embedded_code_expr(p: &mut Parser) {
    p.enter_modes(SyntaxMode::Code, AtNewline::Stop, |p| {
        p.assert(SyntaxKind::Hash);
        // The expression must follow the hash directly, on the same line.
        if p.had_trivia() || p.end() {
            p.expected("expression");
            return;
        }
        let stmt = p.at_set(set::STMT);
        let at = p.at_set(set::ATOMIC_CODE_EXPR);
        code_expr_prec(p, true, 0);
        // Consume error for things like `#12p` or `#"abc\"`.
        if !at {
            p.unexpected();
        }
        // Statements allow a terminating semicolon; other expressions only
        // consume one that follows directly.
        let semi = (stmt || p.directly_at(SyntaxKind::Semicolon))
            && p.eat_if(SyntaxKind::Semicolon);
        if stmt && !semi && !p.end() && !p.at(SyntaxKind::RightBracket) {
            p.expected("semicolon or line break");
        }
    });
}
/// Parses a single code expression.
fn code_expr(p: &mut Parser) {
    // Non-atomic, starting from the lowest precedence.
    code_expr_prec(p, false, 0)
}
/// Parses a code expression with at least the given precedence.
///
/// When `atomic` is set, only expressions that are valid directly after a
/// hash are parsed (no binary operators, no trailing closures, etc.).
fn code_expr_prec(p: &mut Parser, atomic: bool, min_prec: u8) {
    // Guard against stack overflow on deeply nested input.
    let Some(p) = &mut p.increase_depth() else { return };
    let m = p.marker();
    if !atomic && p.at_set(set::UNARY_OP) {
        let op = ast::UnOp::from_kind(p.current()).unwrap();
        p.eat();
        code_expr_prec(p, atomic, op.precedence());
        p.wrap(m, SyntaxKind::Unary);
    } else {
        code_primary(p, atomic);
    }
    // Postfix (calls, field accesses) and infix (binary) operators.
    loop {
        // A directly following paren or bracket starts a function call.
        if p.directly_at(SyntaxKind::LeftParen) || p.directly_at(SyntaxKind::LeftBracket)
        {
            args(p);
            p.wrap(m, SyntaxKind::FuncCall);
            continue;
        }
        // Field access / method call: a dot directly followed by an ident.
        let at_field_or_method = p.directly_at(SyntaxKind::Dot)
            && p.lexer.clone().next().0 == SyntaxKind::Ident;
        // Atomic expressions may only continue with fields and methods.
        if atomic && !at_field_or_method {
            break;
        }
        if p.eat_if(SyntaxKind::Dot) {
            p.expect(SyntaxKind::Ident);
            p.wrap(m, SyntaxKind::FieldAccess);
            continue;
        }
        // `not in` needs special handling since it spans two tokens.
        let binop = if p.at_set(set::BINARY_OP) {
            ast::BinOp::from_kind(p.current())
        } else if min_prec <= ast::BinOp::NotIn.precedence() && p.eat_if(SyntaxKind::Not)
        {
            if p.at(SyntaxKind::In) {
                Some(ast::BinOp::NotIn)
            } else {
                p.expected("keyword `in`");
                break;
            }
        } else {
            None
        };
        if let Some(op) = binop {
            let mut prec = op.precedence();
            if prec < min_prec {
                break;
            }
            // Left-associative operators require a strictly tighter operand.
            match op.assoc() {
                ast::Assoc::Left => prec += 1,
                ast::Assoc::Right => {}
            }
            p.eat();
            code_expr_prec(p, false, prec);
            p.wrap(m, SyntaxKind::Binary);
            continue;
        }
        break;
    }
}
/// Parses a primary in a code expression. These are the atoms that unary and
/// binary operations, functions calls, and field accesses start with / are
/// composed of.
fn code_primary(p: &mut Parser, atomic: bool) {
    let m = p.marker();
    match p.current() {
        SyntaxKind::Ident => {
            p.eat();
            // A lone identifier followed by `=>` is a single-parameter
            // closure (not allowed in atomic position).
            if !atomic && p.at(SyntaxKind::Arrow) {
                p.wrap(m, SyntaxKind::Params);
                p.assert(SyntaxKind::Arrow);
                code_expr(p);
                p.wrap(m, SyntaxKind::Closure);
            }
        }
        // A lone underscore is either a discard-parameter closure or the
        // left side of a destructuring assignment.
        SyntaxKind::Underscore if !atomic => {
            p.eat();
            if p.at(SyntaxKind::Arrow) {
                p.wrap(m, SyntaxKind::Params);
                p.eat();
                code_expr(p);
                p.wrap(m, SyntaxKind::Closure);
            } else if p.eat_if(SyntaxKind::Eq) {
                code_expr(p);
                p.wrap(m, SyntaxKind::DestructAssignment);
            } else {
                p[m].expected("expression");
            }
        }
        SyntaxKind::LeftBrace => code_block(p),
        SyntaxKind::LeftBracket => content_block(p),
        SyntaxKind::LeftParen => expr_with_paren(p, atomic),
        SyntaxKind::Dollar => equation(p),
        SyntaxKind::Let => let_binding(p),
        SyntaxKind::Set => set_rule(p),
        SyntaxKind::Show => show_rule(p),
        SyntaxKind::Context => contextual(p, atomic),
        SyntaxKind::If => conditional(p),
        SyntaxKind::While => while_loop(p),
        SyntaxKind::For => for_loop(p),
        SyntaxKind::Import => module_import(p),
        SyntaxKind::Include => module_include(p),
        SyntaxKind::Break => break_stmt(p),
        SyntaxKind::Continue => continue_stmt(p),
        SyntaxKind::Return => return_stmt(p),
        SyntaxKind::Raw => p.eat(), // Raw is handled entirely in the Lexer.
        // Simple literals are single tokens.
        SyntaxKind::None
        | SyntaxKind::Auto
        | SyntaxKind::Int
        | SyntaxKind::Float
        | SyntaxKind::Bool
        | SyntaxKind::Numeric
        | SyntaxKind::Str
        | SyntaxKind::Label => p.eat(),
        _ => p.expected("expression"),
    }
}
/// Reparses a full content or code block.
///
/// Used for incremental reparsing: returns `Some` only if the block parsed
/// cleanly over exactly the given range with balanced delimiters.
pub(super) fn reparse_block(text: &str, range: Range<usize>) -> Option<SyntaxNode> {
    let mut p = Parser::new(text, range.start, SyntaxMode::Code);
    assert!(p.at(SyntaxKind::LeftBracket) || p.at(SyntaxKind::LeftBrace));
    block(&mut p);
    // The single finished node is the block itself.
    (p.balanced && p.prev_end() == range.end)
        .then(|| p.finish().into_iter().next().unwrap())
}
/// Parses a content or code block.
fn block(p: &mut Parser) {
    // Dispatch on the opening delimiter.
    match p.current() {
        SyntaxKind::LeftBrace => code_block(p),
        SyntaxKind::LeftBracket => content_block(p),
        _ => p.expected("block"),
    }
}
/// Parses a code block: `{ let x = 1; x + 2 }`.
fn code_block(p: &mut Parser) {
    let open = p.marker();
    // Switch into code mode; newlines don't terminate expressions here.
    p.enter_modes(SyntaxMode::Code, AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftBrace);
        code(p, syntax_set!(RightBrace, RightBracket, RightParen, End));
        p.expect_closing_delimiter(open, SyntaxKind::RightBrace);
    });
    p.wrap(open, SyntaxKind::CodeBlock);
}
/// Parses a content block: `[*Hi* there!]`.
fn content_block(p: &mut Parser) {
    let m = p.marker();
    // Switch back into markup mode for the block's interior.
    p.enter_modes(SyntaxMode::Markup, AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftBracket);
        markup(p, true, true, syntax_set!(RightBracket, End));
        p.expect_closing_delimiter(m, SyntaxKind::RightBracket);
    });
    p.wrap(m, SyntaxKind::ContentBlock);
}
/// Parses a let binding: `let x = 1`.
fn let_binding(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Let);
    let m2 = p.marker();
    // Whether this binds a closure: `let f(x) = ...`.
    let mut closure = false;
    // Whether the left side is a (destructuring) pattern instead of a
    // plain identifier.
    let mut other = false;
    if p.eat_if(SyntaxKind::Ident) {
        if p.directly_at(SyntaxKind::LeftParen) {
            params(p);
            closure = true;
        }
    } else {
        pattern(p, false, &mut FxHashSet::default(), None);
        other = true;
    }
    // Closures and patterns require an initializer; a plain identifier
    // may be declared without one.
    let f = if closure || other { Parser::expect } else { Parser::eat_if };
    if f(p, SyntaxKind::Eq) {
        code_expr(p);
    }
    if closure {
        p.wrap(m2, SyntaxKind::Closure);
    }
    p.wrap(m, SyntaxKind::LetBinding);
}
/// Parses a set rule: `set text(...)`.
fn set_rule(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Set);
    let m2 = p.marker();
    p.expect(SyntaxKind::Ident);
    // The target may be a dotted path: `set module.func(...)`.
    while p.eat_if(SyntaxKind::Dot) {
        p.expect(SyntaxKind::Ident);
        p.wrap(m2, SyntaxKind::FieldAccess);
    }
    args(p);
    // An optional condition makes the rule apply conditionally.
    if p.eat_if(SyntaxKind::If) {
        code_expr(p);
    }
    p.wrap(m, SyntaxKind::SetRule);
}
/// Parses a show rule: `show heading: it => emph(it.body)`.
fn show_rule(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Show);
    let m2 = p.before_trivia();
    // The selector before the colon is optional (`show: template`).
    if !p.at(SyntaxKind::Colon) {
        code_expr(p);
    }
    if p.eat_if(SyntaxKind::Colon) {
        code_expr(p);
    } else {
        // Report the missing colon right after the selector.
        p.expected_at(m2, "colon");
    }
    p.wrap(m, SyntaxKind::ShowRule);
}
/// Parses a contextual expression: `context text.lang`.
fn contextual(p: &mut Parser, atomic: bool) {
    let start = p.marker();
    p.assert(SyntaxKind::Context);
    // The body inherits atomicity from the surrounding position.
    code_expr_prec(p, atomic, 0);
    p.wrap(start, SyntaxKind::Contextual);
}
/// Parses an if-else conditional: `if x { y } else { z }`.
fn conditional(p: &mut Parser) {
    let start = p.marker();
    p.assert(SyntaxKind::If);
    code_expr(p);
    block(p);
    if p.eat_if(SyntaxKind::Else) {
        // An `else if` chains by recursing; a plain `else` takes a block.
        if p.at(SyntaxKind::If) { conditional(p) } else { block(p) }
    }
    p.wrap(start, SyntaxKind::Conditional);
}
/// Parses a while loop: `while x { y }`.
fn while_loop(p: &mut Parser) {
    let start = p.marker();
    p.assert(SyntaxKind::While);
    // Condition, then body.
    code_expr(p);
    block(p);
    p.wrap(start, SyntaxKind::WhileLoop);
}
/// Parses a for loop: `for x in y { z }`.
fn for_loop(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::For);
    let mut seen = FxHashSet::default();
    pattern(p, false, &mut seen, None);
    // Recover from an unparenthesized destructuring pattern:
    // `for x, y in ...` instead of `for (x, y) in ...`.
    if p.at(SyntaxKind::Comma) {
        let node = p.eat_and_get();
        node.unexpected();
        node.hint("destructuring patterns must be wrapped in parentheses");
        if p.at_set(set::PATTERN) {
            pattern(p, false, &mut seen, None);
        }
    }
    p.expect(SyntaxKind::In);
    code_expr(p);
    block(p);
    p.wrap(m, SyntaxKind::ForLoop);
}
/// Parses a module import: `import "utils.typ": a, b, c`.
fn module_import(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Import);
    // The source: a path string or any expression evaluating to a module.
    code_expr(p);
    if p.eat_if(SyntaxKind::As) {
        // Allow renaming a full module import.
        // If items are included, both the full module and the items are
        // imported at the same time.
        p.expect(SyntaxKind::Ident);
    }
    if p.eat_if(SyntaxKind::Colon) {
        // Items may be parenthesized (allowing newlines), a wildcard `*`,
        // or a bare comma-separated list.
        if p.at(SyntaxKind::LeftParen) {
            p.with_nl_mode(AtNewline::Continue, |p| {
                let m2 = p.marker();
                p.assert(SyntaxKind::LeftParen);
                import_items(p);
                p.expect_closing_delimiter(m2, SyntaxKind::RightParen);
            });
        } else if !p.eat_if(SyntaxKind::Star) {
            import_items(p);
        }
    }
    p.wrap(m, SyntaxKind::ModuleImport);
}
/// Parses items to import from a module: `a, b, c`.
fn import_items(p: &mut Parser) {
    let m = p.marker();
    while !p.current().is_terminator() {
        let item_marker = p.marker();
        if !p.eat_if(SyntaxKind::Ident) {
            p.unexpected();
        }
        // Nested import path: `a.b.c`
        while p.eat_if(SyntaxKind::Dot) {
            p.expect(SyntaxKind::Ident);
        }
        p.wrap(item_marker, SyntaxKind::ImportItemPath);
        // Rename imported item.
        if p.eat_if(SyntaxKind::As) {
            p.expect(SyntaxKind::Ident);
            p.wrap(item_marker, SyntaxKind::RenamedImportItem);
        }
        // Items are comma-separated; the last comma is optional.
        if !p.current().is_terminator() {
            p.expect(SyntaxKind::Comma);
        }
    }
    p.wrap(m, SyntaxKind::ImportItems);
}
/// Parses a module include: `include "chapter1.typ"`.
fn module_include(p: &mut Parser) {
    let start = p.marker();
    p.assert(SyntaxKind::Include);
    // The source: a path string or any expression evaluating to a module.
    code_expr(p);
    p.wrap(start, SyntaxKind::ModuleInclude);
}
/// Parses a break from a loop: `break`.
fn break_stmt(p: &mut Parser) {
    let start = p.marker();
    p.assert(SyntaxKind::Break);
    p.wrap(start, SyntaxKind::LoopBreak);
}
/// Parses a continue in a loop: `continue`.
fn continue_stmt(p: &mut Parser) {
    let start = p.marker();
    p.assert(SyntaxKind::Continue);
    p.wrap(start, SyntaxKind::LoopContinue);
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | true |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/kind.rs | crates/typst-syntax/src/kind.rs | /// A syntactical building block of a Typst file.
///
/// Can be created by the lexer or by the parser.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
#[repr(u8)]
pub enum SyntaxKind {
/// The end of token stream.
End,
/// An invalid sequence of characters.
Error,
/// A shebang: `#! ...`
Shebang,
/// A line comment: `// ...`.
LineComment,
/// A block comment: `/* ... */`.
BlockComment,
/// The contents of a file or content block.
Markup,
/// Plain text without markup.
Text,
/// Whitespace. Contains at most one newline in markup, as more indicate a
/// paragraph break.
Space,
/// A forced line break: `\`.
Linebreak,
/// A paragraph break, indicated by one or multiple blank lines.
Parbreak,
/// An escape sequence: `\#`, `\u{1F5FA}`.
Escape,
/// A shorthand for a unicode codepoint. For example, `~` for non-breaking
/// space or `-?` for a soft hyphen.
Shorthand,
/// A smart quote: `'` or `"`.
SmartQuote,
/// Strong content: `*Strong*`.
Strong,
/// Emphasized content: `_Emphasized_`.
Emph,
/// Raw text with optional syntax highlighting: `` `...` ``.
Raw,
/// A language tag at the start of raw text: ``typ ``.
RawLang,
/// A raw delimiter consisting of 1 or 3+ backticks: `` ` ``.
RawDelim,
/// A sequence of whitespace to ignore in a raw text: ` `.
RawTrimmed,
/// A hyperlink: `https://typst.org`.
Link,
/// A label: `<intro>`.
Label,
/// A reference: `@target`, `@target[..]`.
Ref,
/// Introduces a reference: `@target`.
RefMarker,
/// A section heading: `= Introduction`.
Heading,
/// Introduces a section heading: `=`, `==`, ...
HeadingMarker,
/// An item in a bullet list: `- ...`.
ListItem,
/// Introduces a list item: `-`.
ListMarker,
/// An item in an enumeration (numbered list): `+ ...` or `1. ...`.
EnumItem,
/// Introduces an enumeration item: `+`, `1.`.
EnumMarker,
/// An item in a term list: `/ Term: Details`.
TermItem,
/// Introduces a term item: `/`.
TermMarker,
/// A mathematical equation: `$x$`, `$ x^2 $`.
Equation,
/// The contents of a mathematical equation: `x^2 + 1`.
Math,
/// A lone text fragment in math: `x`, `25`, `3.1415`, `=`, `|`, `[`.
MathText,
/// An identifier in math: `pi`.
MathIdent,
/// A shorthand for a unicode codepoint in math: `a <= b`.
MathShorthand,
/// An alignment point in math: `&`.
MathAlignPoint,
/// Matched delimiters in math: `[x + y]`.
MathDelimited,
/// A base with optional attachments in math: `a_1^2`.
MathAttach,
/// Grouped primes in math: `a'''`.
MathPrimes,
/// A fraction in math: `x/2`.
MathFrac,
/// A root in math: `√x`, `∛x` or `∜x`.
MathRoot,
/// A hash that switches into code mode: `#`.
Hash,
/// A left curly brace, starting a code block: `{`.
LeftBrace,
/// A right curly brace, terminating a code block: `}`.
RightBrace,
/// A left square bracket, starting a content block: `[`.
LeftBracket,
/// A right square bracket, terminating a content block: `]`.
RightBracket,
/// A left round parenthesis, starting a grouped expression, collection,
/// argument or parameter list: `(`.
LeftParen,
/// A right round parenthesis, terminating a grouped expression, collection,
/// argument or parameter list: `)`.
RightParen,
/// A comma separator in a sequence: `,`.
Comma,
/// A semicolon terminating an expression: `;`.
Semicolon,
/// A colon between name/key and value in a dictionary, argument or
/// parameter list, or between the term and body of a term list term: `:`.
Colon,
/// The strong text toggle, multiplication operator, and wildcard import
/// symbol: `*`.
Star,
/// Toggles emphasized text and indicates a subscript in math: `_`.
Underscore,
/// Starts and ends a mathematical equation: `$`.
Dollar,
/// The unary plus and binary addition operator: `+`.
Plus,
/// The unary negation and binary subtraction operator: `-`.
Minus,
/// The division operator and fraction operator in math: `/`.
Slash,
/// The superscript operator in math: `^`.
Hat,
/// The field access and method call operator: `.`.
Dot,
/// The assignment operator: `=`.
Eq,
/// The equality operator: `==`.
EqEq,
/// The inequality operator: `!=`.
ExclEq,
/// The less-than operator: `<`.
Lt,
/// The less-than or equal operator: `<=`.
LtEq,
/// The greater-than operator: `>`.
Gt,
/// The greater-than or equal operator: `>=`.
GtEq,
/// The add-assign operator: `+=`.
PlusEq,
/// The subtract-assign operator: `-=`.
HyphEq,
/// The multiply-assign operator: `*=`.
StarEq,
/// The divide-assign operator: `/=`.
SlashEq,
/// Indicates a spread or sink: `..`.
Dots,
/// An arrow between a closure's parameters and body: `=>`.
Arrow,
/// A root: `√`, `∛` or `∜`.
Root,
/// An exclamation mark; groups with directly preceding text in math: `!`.
Bang,
/// The `not` operator.
Not,
/// The `and` operator.
And,
/// The `or` operator.
Or,
/// The `none` literal.
None,
/// The `auto` literal.
Auto,
/// The `let` keyword.
Let,
/// The `set` keyword.
Set,
/// The `show` keyword.
Show,
/// The `context` keyword.
Context,
/// The `if` keyword.
If,
/// The `else` keyword.
Else,
/// The `for` keyword.
For,
/// The `in` keyword.
In,
/// The `while` keyword.
While,
/// The `break` keyword.
Break,
/// The `continue` keyword.
Continue,
/// The `return` keyword.
Return,
/// The `import` keyword.
Import,
/// The `include` keyword.
Include,
/// The `as` keyword.
As,
/// The contents of a code block.
Code,
/// An identifier: `it`.
Ident,
/// A boolean: `true`, `false`.
Bool,
/// An integer: `120`.
Int,
/// A floating-point number: `1.2`, `10e-4`.
Float,
/// A numeric value with a unit: `12pt`, `3cm`, `2em`, `90deg`, `50%`.
Numeric,
/// A quoted string: `"..."`.
Str,
/// A code block: `{ let x = 1; x + 2 }`.
CodeBlock,
/// A content block: `[*Hi* there!]`.
ContentBlock,
/// A grouped expression: `(1 + 2)`.
Parenthesized,
/// An array: `(1, "hi", 12cm)`.
Array,
/// A dictionary: `(thickness: 3pt, dash: "solid")`.
Dict,
/// A named pair: `thickness: 3pt`.
Named,
/// A keyed pair: `"spacy key": true`.
Keyed,
/// A unary operation: `-x`.
Unary,
/// A binary operation: `a + b`.
Binary,
/// A field access: `properties.age`.
FieldAccess,
/// An invocation of a function or method: `f(x, y)`.
FuncCall,
/// A function call's argument list: `(12pt, y)`.
Args,
/// Spread arguments or an argument sink: `..x`.
Spread,
/// A closure: `(x, y) => z`.
Closure,
/// A closure's parameters: `(x, y)`.
Params,
/// A let binding: `let x = 1`.
LetBinding,
/// A set rule: `set text(...)`.
SetRule,
/// A show rule: `show heading: it => emph(it.body)`.
ShowRule,
/// A contextual expression: `context text.lang`.
Contextual,
/// An if-else conditional: `if x { y } else { z }`.
Conditional,
/// A while loop: `while x { y }`.
WhileLoop,
/// A for loop: `for x in y { z }`.
ForLoop,
/// A module import: `import "utils.typ": a, b, c`.
ModuleImport,
/// Items to import from a module: `a, b, c`.
ImportItems,
/// A path to an imported name from a submodule: `a.b.c`.
ImportItemPath,
/// A renamed import item: `a as d`.
RenamedImportItem,
/// A module include: `include "chapter1.typ"`.
ModuleInclude,
/// A break from a loop: `break`.
LoopBreak,
/// A continue in a loop: `continue`.
LoopContinue,
/// A return from a function: `return`, `return x + 1`.
FuncReturn,
/// A destructuring pattern: `(x, _, ..y)`.
Destructuring,
/// A destructuring assignment expression: `(x, y) = (1, 2)`.
DestructAssignment,
}
impl SyntaxKind {
/// Is this a bracket, brace, or parenthesis?
pub fn is_grouping(self) -> bool {
matches!(
self,
Self::LeftBracket
| Self::LeftBrace
| Self::LeftParen
| Self::RightBracket
| Self::RightBrace
| Self::RightParen
)
}
    /// Does this node terminate a preceding expression?
    pub fn is_terminator(self) -> bool {
        // End of input, a semicolon, or any closing delimiter.
        matches!(
            self,
            Self::End
                | Self::Semicolon
                | Self::RightBrace
                | Self::RightParen
                | Self::RightBracket
        )
    }
/// Is this a code or content block.
pub fn is_block(self) -> bool {
matches!(self, Self::CodeBlock | Self::ContentBlock)
}
    /// Does this node need termination through a semicolon or linebreak?
    pub fn is_stmt(self) -> bool {
        matches!(
            self,
            Self::LetBinding
                | Self::SetRule
                | Self::ShowRule
                | Self::ModuleImport
                | Self::ModuleInclude
        )
    }
    /// Whether this node is a keyword.
    pub fn is_keyword(self) -> bool {
        matches!(
            self,
            Self::Not
                | Self::And
                | Self::Or
                | Self::None
                | Self::Auto
                | Self::Let
                | Self::Set
                | Self::Show
                | Self::Context
                | Self::If
                | Self::Else
                | Self::For
                | Self::In
                | Self::While
                | Self::Break
                | Self::Continue
                | Self::Return
                | Self::Import
                | Self::Include
                | Self::As
        )
    }
    /// Whether this kind of node is automatically skipped by the parser in
    /// code and math mode.
    pub fn is_trivia(self) -> bool {
        // Comments, whitespace, and the shebang carry no syntactic meaning.
        matches!(
            self,
            Self::Shebang
                | Self::LineComment
                | Self::BlockComment
                | Self::Space
                | Self::Parbreak
        )
    }
/// Whether this is an error.
pub fn is_error(self) -> bool {
self == Self::Error
}
/// A human-readable name for the kind.
pub fn name(self) -> &'static str {
match self {
Self::End => "end of tokens",
Self::Error => "syntax error",
Self::Shebang => "shebang",
Self::LineComment => "line comment",
Self::BlockComment => "block comment",
Self::Markup => "markup",
Self::Text => "text",
Self::Space => "space",
Self::Linebreak => "line break",
Self::Parbreak => "paragraph break",
Self::Escape => "escape sequence",
Self::Shorthand => "shorthand",
Self::SmartQuote => "smart quote",
Self::Strong => "strong content",
Self::Emph => "emphasized content",
Self::Raw => "raw block",
Self::RawLang => "raw language tag",
Self::RawTrimmed => "raw trimmed",
Self::RawDelim => "raw delimiter",
Self::Link => "link",
Self::Label => "label",
Self::Ref => "reference",
Self::RefMarker => "reference marker",
Self::Heading => "heading",
Self::HeadingMarker => "heading marker",
Self::ListItem => "list item",
Self::ListMarker => "list marker",
Self::EnumItem => "enum item",
Self::EnumMarker => "enum marker",
Self::TermItem => "term list item",
Self::TermMarker => "term marker",
Self::Equation => "equation",
Self::Math => "math",
Self::MathText => "math text",
Self::MathIdent => "math identifier",
Self::MathShorthand => "math shorthand",
Self::MathAlignPoint => "math alignment point",
Self::MathDelimited => "delimited math",
Self::MathAttach => "math attachments",
Self::MathFrac => "math fraction",
Self::MathRoot => "math root",
Self::MathPrimes => "math primes",
Self::Hash => "hash",
Self::LeftBrace => "opening brace",
Self::RightBrace => "closing brace",
Self::LeftBracket => "opening bracket",
Self::RightBracket => "closing bracket",
Self::LeftParen => "opening paren",
Self::RightParen => "closing paren",
Self::Comma => "comma",
Self::Semicolon => "semicolon",
Self::Colon => "colon",
Self::Star => "star",
Self::Underscore => "underscore",
Self::Dollar => "dollar sign",
Self::Plus => "plus",
Self::Minus => "minus",
Self::Slash => "slash",
Self::Hat => "hat",
Self::Dot => "dot",
Self::Eq => "equals sign",
Self::EqEq => "equality operator",
Self::ExclEq => "inequality operator",
Self::Lt => "less-than operator",
Self::LtEq => "less-than or equal operator",
Self::Gt => "greater-than operator",
Self::GtEq => "greater-than or equal operator",
Self::PlusEq => "add-assign operator",
Self::HyphEq => "subtract-assign operator",
Self::StarEq => "multiply-assign operator",
Self::SlashEq => "divide-assign operator",
Self::Dots => "dots",
Self::Arrow => "arrow",
Self::Root => "root",
Self::Bang => "exclamation mark",
Self::Not => "operator `not`",
Self::And => "operator `and`",
Self::Or => "operator `or`",
Self::None => "`none`",
Self::Auto => "`auto`",
Self::Let => "keyword `let`",
Self::Set => "keyword `set`",
Self::Show => "keyword `show`",
Self::Context => "keyword `context`",
Self::If => "keyword `if`",
Self::Else => "keyword `else`",
Self::For => "keyword `for`",
Self::In => "keyword `in`",
Self::While => "keyword `while`",
Self::Break => "keyword `break`",
Self::Continue => "keyword `continue`",
Self::Return => "keyword `return`",
Self::Import => "keyword `import`",
Self::Include => "keyword `include`",
Self::As => "keyword `as`",
Self::Code => "code",
Self::Ident => "identifier",
Self::Bool => "boolean",
Self::Int => "integer",
Self::Float => "float",
Self::Numeric => "numeric value",
Self::Str => "string",
Self::CodeBlock => "code block",
Self::ContentBlock => "content block",
Self::Parenthesized => "group",
Self::Array => "array",
Self::Dict => "dictionary",
Self::Named => "named pair",
Self::Keyed => "keyed pair",
Self::Unary => "unary expression",
Self::Binary => "binary expression",
Self::FieldAccess => "field access",
Self::FuncCall => "function call",
Self::Args => "call arguments",
Self::Spread => "spread",
Self::Closure => "closure",
Self::Params => "closure parameters",
Self::LetBinding => "`let` expression",
Self::SetRule => "`set` expression",
Self::ShowRule => "`show` expression",
Self::Contextual => "`context` expression",
Self::Conditional => "`if` expression",
Self::WhileLoop => "while-loop expression",
Self::ForLoop => "for-loop expression",
Self::ModuleImport => "`import` expression",
Self::ImportItems => "import items",
Self::ImportItemPath => "imported item path",
Self::RenamedImportItem => "renamed import item",
Self::ModuleInclude => "`include` expression",
Self::LoopBreak => "`break` expression",
Self::LoopContinue => "`continue` expression",
Self::FuncReturn => "`return` expression",
Self::Destructuring => "destructuring pattern",
Self::DestructAssignment => "destructuring assignment expression",
}
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/file.rs | crates/typst-syntax/src/file.rs | //! File and package management.
use std::fmt::{self, Debug, Formatter};
use std::num::NonZeroU16;
use std::sync::{LazyLock, RwLock};
use rustc_hash::FxHashMap;
use crate::VirtualPath;
use crate::package::PackageSpec;
/// The global package-path interner.
static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
    RwLock::new(Interner { to_id: FxHashMap::default(), from_id: Vec::new() })
});
/// A package-path interner.
struct Interner {
    /// Maps from an interned pair to its id.
    to_id: FxHashMap<Pair, FileId>,
    /// Maps from an id (minus one, ids are one-based) to its pair.
    from_id: Vec<Pair>,
}
/// An interned pair of a package specification and a path.
type Pair = &'static (Option<PackageSpec>, VirtualPath);
/// Identifies a file in a project or package.
///
/// This type is globally interned and thus cheap to copy, compare, and hash.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct FileId(NonZeroU16);
impl FileId {
    /// Create a new interned file specification.
    ///
    /// The path must start with a `/` or this function will panic.
    /// Note that the path is normalized before interning.
    #[track_caller]
    pub fn new(package: Option<PackageSpec>, path: VirtualPath) -> Self {
        // Try to find an existing entry that we can reuse.
        //
        // We could check with just a read lock, but if the pair is not yet
        // present, we would then need to recheck after acquiring a write lock,
        // which is probably not worth it.
        let pair = (package, path);
        let mut interner = INTERNER.write().unwrap();
        if let Some(&id) = interner.to_id.get(&pair) {
            return id;
        }
        // Create a new entry forever by leaking the pair. We can't leak more
        // than 2^16 pairs (and typically will leak a lot less), so it's not a
        // big deal.
        let num = u16::try_from(interner.from_id.len() + 1)
            .and_then(NonZeroU16::try_from)
            .expect("out of file ids");
        let id = FileId(num);
        let leaked = Box::leak(Box::new(pair));
        interner.to_id.insert(leaked, id);
        interner.from_id.push(leaked);
        id
    }
    /// Create a new unique ("fake") file specification, which is not
    /// accessible by path.
    ///
    /// Caution: the ID returned by this method is the *only* identifier of the
    /// file, constructing a file ID with a path will *not* reuse the ID even
    /// if the path is the same. This method should only be used for generating
    /// "virtual" file ids such as content read from stdin.
    #[track_caller]
    pub fn new_fake(path: VirtualPath) -> Self {
        let mut interner = INTERNER.write().unwrap();
        let num = u16::try_from(interner.from_id.len() + 1)
            .and_then(NonZeroU16::try_from)
            .expect("out of file ids");
        let id = FileId(num);
        // Unlike in `new`, the pair is deliberately not inserted into
        // `to_id`, so later path-based lookups will never find this id.
        let leaked = Box::leak(Box::new((None, path)));
        interner.from_id.push(leaked);
        id
    }
    /// The package the file resides in, if any.
    pub fn package(&self) -> Option<&'static PackageSpec> {
        self.pair().0.as_ref()
    }
    /// The absolute and normalized path to the file _within_ the project or
    /// package.
    pub fn vpath(&self) -> &'static VirtualPath {
        &self.pair().1
    }
    /// Resolve a file location relative to this file.
    pub fn join(self, path: &str) -> Self {
        Self::new(self.package().cloned(), self.vpath().join(path))
    }
    /// The same file location, but with a different extension.
    pub fn with_extension(&self, extension: &str) -> Self {
        Self::new(self.package().cloned(), self.vpath().with_extension(extension))
    }
    /// Construct from a raw number.
    ///
    /// Should only be used with numbers retrieved via
    /// [`into_raw`](Self::into_raw). Misuse may result in panics, but no
    /// unsafety.
    pub const fn from_raw(v: NonZeroU16) -> Self {
        Self(v)
    }
    /// Extract the raw underlying number.
    pub const fn into_raw(self) -> NonZeroU16 {
        self.0
    }
    /// Get the static pair.
    fn pair(&self) -> Pair {
        // Ids are one-based, hence the `- 1` when indexing.
        INTERNER.read().unwrap().from_id[usize::from(self.0.get() - 1)]
    }
}
impl Debug for FileId {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // Prefix the virtual path with the package spec, if there is one.
        if let Some(package) = self.package() {
            write!(f, "{package:?}")?;
        }
        write!(f, "{:?}", self.vpath())
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/span.rs | crates/typst-syntax/src/span.rs | use std::fmt::{self, Debug, Formatter};
use std::num::{NonZeroU16, NonZeroU64};
use std::ops::Range;
use ecow::EcoString;
use crate::FileId;
/// Defines a range in a file.
///
/// This is used throughout the compiler to track which source section an
/// element stems from or an error applies to.
///
/// - The [`.id()`](Self::id) function can be used to get the `FileId` for the
///   span and, by extension, its file system path.
/// - The `WorldExt::range` function can be used to map the span to a
///   `Range<usize>`.
///
/// This type takes up 8 bytes and is copyable and null-optimized (i.e.
/// `Option<Span>` also takes 8 bytes).
///
/// Spans come in two flavors: Numbered spans and raw range spans. The
/// `WorldExt::range` function automatically handles both cases, yielding a
/// `Range<usize>`.
///
/// # Numbered spans
/// Typst source files use _numbered spans._ Rather than using byte ranges,
/// which shift a lot as you type, each AST node gets a unique number.
///
/// During editing, the span numbers stay mostly stable, even for nodes behind
/// an insertion. This is not true for simple ranges as they would shift. Spans
/// can be used as inputs to memoized functions without hurting cache
/// performance when text is inserted somewhere in the document other than the
/// end.
///
/// Span ids are ordered in the syntax tree to enable quickly finding the node
/// with some id:
/// - The id of a parent is always smaller than the ids of any of its children.
/// - The id of a node is always greater than any id in the subtrees of any left
///   sibling and smaller than any id in the subtrees of any right sibling.
///
/// # Raw range spans
/// Non Typst-files use raw ranges instead of numbered spans. The maximum
/// encodable value for start and end is 2^23 - 1 (each part is stored in a
/// 23-bit field). Larger values will be saturated.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Span(NonZeroU64);
impl Span {
    /// The full range of numbers available for source file span numbering.
    pub(crate) const FULL: Range<u64> = 2..(1 << 47);
    /// The value reserved for the detached span.
    const DETACHED: u64 = 1;
    /// Data layout:
    /// | 16 bits file id | 48 bits number |
    ///
    /// Number =
    /// - 1 means detached
    /// - 2..2^47-1 is a numbered span
    /// - 2^47..2^48-1 is a raw range span. To retrieve it, you must subtract
    ///   `RANGE_BASE` and then use shifting/bitmasking to extract the
    ///   components.
    const NUMBER_BITS: usize = 48;
    const FILE_ID_SHIFT: usize = Self::NUMBER_BITS;
    const NUMBER_MASK: u64 = (1 << Self::NUMBER_BITS) - 1;
    const RANGE_BASE: u64 = Self::FULL.end;
    const RANGE_PART_BITS: usize = 23;
    const RANGE_PART_SHIFT: usize = Self::RANGE_PART_BITS;
    const RANGE_PART_MASK: u64 = (1 << Self::RANGE_PART_BITS) - 1;
    /// Create a span that does not point into any file.
    pub const fn detached() -> Self {
        Self(NonZeroU64::new(Self::DETACHED).unwrap())
    }
    /// Create a new span from a file id and a number.
    ///
    /// Returns `None` if `number` is not contained in `FULL`.
    pub(crate) const fn from_number(id: FileId, number: u64) -> Option<Self> {
        if number < Self::FULL.start || number >= Self::FULL.end {
            return None;
        }
        Some(Self::pack(id, number))
    }
    /// Create a new span from a raw byte range instead of a span number.
    ///
    /// If one of the range's parts exceeds the maximum encodable value
    /// (2^23 - 1), it is saturated.
    pub const fn from_range(id: FileId, range: Range<usize>) -> Self {
        // Saturate to the largest value that fits into a 23-bit field. A cap
        // of 2^23 itself would need 24 bits: the encoded `end` would spill
        // into the `start` field and `range()` would decode garbage.
        let max = Self::RANGE_PART_MASK as usize;
        let start = if range.start > max { max } else { range.start } as u64;
        let end = if range.end > max { max } else { range.end } as u64;
        let number = (start << Self::RANGE_PART_SHIFT) | end;
        Self::pack(id, Self::RANGE_BASE + number)
    }
    /// Construct from a raw number.
    ///
    /// Should only be used with numbers retrieved via
    /// [`into_raw`](Self::into_raw). Misuse may result in panics, but no
    /// unsafety.
    pub const fn from_raw(v: NonZeroU64) -> Self {
        Self(v)
    }
    /// Pack a file ID and the low bits into a span.
    const fn pack(id: FileId, low: u64) -> Self {
        let bits = ((id.into_raw().get() as u64) << Self::FILE_ID_SHIFT) | low;
        // The file ID is non-zero.
        Self(NonZeroU64::new(bits).unwrap())
    }
    /// Whether the span is detached.
    pub const fn is_detached(self) -> bool {
        self.0.get() == Self::DETACHED
    }
    /// The id of the file the span points into.
    ///
    /// Returns `None` if the span is detached.
    pub const fn id(self) -> Option<FileId> {
        // Detached span has only zero high bits, so it will trigger the
        // `None` case.
        match NonZeroU16::new((self.0.get() >> Self::FILE_ID_SHIFT) as u16) {
            Some(v) => Some(FileId::from_raw(v)),
            None => None,
        }
    }
    /// The unique number of the span within its [`Source`](crate::Source).
    pub(crate) const fn number(self) -> u64 {
        self.0.get() & Self::NUMBER_MASK
    }
    /// Extract a raw byte range from the span, if it is a raw range span.
    ///
    /// Typically, you should use `WorldExt::range` instead.
    pub const fn range(self) -> Option<Range<usize>> {
        // Numbered spans are below `RANGE_BASE`, so the subtraction fails
        // for them.
        let Some(number) = self.number().checked_sub(Self::RANGE_BASE) else {
            return None;
        };
        let start = (number >> Self::RANGE_PART_SHIFT) as usize;
        let end = (number & Self::RANGE_PART_MASK) as usize;
        Some(start..end)
    }
    /// Extract the raw underlying number.
    pub const fn into_raw(self) -> NonZeroU64 {
        self.0
    }
    /// Return `other` if `self` is detached and `self` otherwise.
    pub fn or(self, other: Self) -> Self {
        if self.is_detached() { other } else { self }
    }
    /// Find the first non-detached span in the iterator.
    pub fn find(iter: impl IntoIterator<Item = Self>) -> Self {
        iter.into_iter()
            .find(|span| !span.is_detached())
            .unwrap_or(Span::detached())
    }
    /// Resolve a file location relative to this span's source.
    pub fn resolve_path(self, path: &str) -> Result<FileId, EcoString> {
        let Some(file) = self.id() else {
            return Err("cannot access file system from here".into());
        };
        Ok(file.join(path))
    }
}
/// A value with a span locating it in the source code.
///
/// Both the value and the span take part in the derived equality and hashing.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Spanned<T> {
    /// The spanned value.
    pub v: T,
    /// The value's location in source code.
    pub span: Span,
}
impl<T> Spanned<T> {
    /// Combine a value with the span locating it in the source.
    pub const fn new(v: T, span: Span) -> Self {
        Spanned { v, span }
    }
    /// Wrap a value in a span that does not point into any file.
    pub const fn detached(v: T) -> Self {
        Spanned { v, span: Span::detached() }
    }
    /// Convert from `&Spanned<T>` to `Spanned<&T>`, borrowing the value while
    /// copying the span.
    pub const fn as_ref(&self) -> Spanned<&T> {
        Spanned { v: &self.v, span: self.span }
    }
    /// Transform the value with `f` while retaining its span.
    pub fn map<F, U>(self, f: F) -> Spanned<U>
    where
        F: FnOnce(T) -> U,
    {
        let Spanned { v, span } = self;
        Spanned { v: f(v), span }
    }
}
impl<T: Debug> Debug for Spanned<T> {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // The span is omitted from the debug output; only the value shows.
        Debug::fmt(&self.v, f)
    }
}
#[cfg(test)]
mod tests {
    use std::num::NonZeroU16;
    use std::ops::Range;
    use crate::{FileId, Span};
    #[test]
    fn test_span_detached() {
        let span = Span::detached();
        assert!(span.is_detached());
        assert_eq!(span.id(), None);
        assert_eq!(span.range(), None);
    }
    #[test]
    fn test_span_number_encoding() {
        let id = FileId::from_raw(NonZeroU16::new(5).unwrap());
        let span = Span::from_number(id, 10).unwrap();
        assert_eq!(span.id(), Some(id));
        assert_eq!(span.number(), 10);
        assert_eq!(span.range(), None);
    }
    #[test]
    fn test_span_range_encoding() {
        let id = FileId::from_raw(NonZeroU16::new(u16::MAX).unwrap());
        let roundtrip = |range: Range<usize>| {
            let span = Span::from_range(id, range.clone());
            assert_eq!(span.id(), Some(id));
            assert_eq!(span.range(), Some(range));
        };
        roundtrip(0..0);
        roundtrip(177..233);
        // 8388607 == 2^23 - 1: the largest endpoint a 23-bit field can hold.
        roundtrip(0..8388607);
        roundtrip(8388606..8388607);
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/lines.rs | crates/typst-syntax/src/lines.rs | use std::hash::{Hash, Hasher};
use std::iter::zip;
use std::ops::Range;
use std::sync::Arc;
use crate::is_newline;
/// A text buffer and metadata about lines.
///
/// This is internally reference-counted and thus cheap to clone.
#[derive(Clone)]
pub struct Lines<S>(Arc<LinesInner<S>>);
/// The internal representation of [`Lines`].
#[derive(Clone)]
struct LinesInner<T> {
    /// Start metadata for each line, in order; fully derived from `text`.
    lines: Vec<Line>,
    /// The underlying text buffer.
    text: T,
}
/// Metadata about a line.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Line {
    /// The UTF-8 byte offset where the line starts.
    byte_idx: usize,
    /// The UTF-16 codepoint offset where the line starts.
    utf16_idx: usize,
}
impl<T: AsRef<str>> Lines<T> {
    /// Create from the text buffer and compute the line metadata.
    pub fn new(text: T) -> Self {
        let lines = lines(text.as_ref());
        Lines(Arc::new(LinesInner { lines, text }))
    }
    /// The text as a string slice.
    pub fn text(&self) -> &str {
        self.0.text.as_ref()
    }
    /// Get the length of the file in UTF-8 encoded bytes.
    pub fn len_bytes(&self) -> usize {
        self.0.text.as_ref().len()
    }
    /// Get the length of the file in UTF-16 code units.
    pub fn len_utf16(&self) -> usize {
        // `lines` is never empty: the `lines` helper always emits a first
        // line starting at offset zero.
        let last = self.0.lines.last().unwrap();
        last.utf16_idx + len_utf16(&self.text()[last.byte_idx..])
    }
    /// Get the length of the file in lines.
    pub fn len_lines(&self) -> usize {
        self.0.lines.len()
    }
    /// Return the index of the UTF-16 code unit at the byte index.
    pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
        let line_idx = self.byte_to_line(byte_idx)?;
        let line = self.0.lines.get(line_idx)?;
        let head = self.text().get(line.byte_idx..byte_idx)?;
        Some(line.utf16_idx + len_utf16(head))
    }
    /// Return the index of the line that contains the given byte index.
    pub fn byte_to_line(&self, byte_idx: usize) -> Option<usize> {
        (byte_idx <= self.text().len()).then(|| {
            // `Err(i)` means the index lies within the line that starts
            // before the insertion point, hence `i - 1`.
            match self.0.lines.binary_search_by_key(&byte_idx, |line| line.byte_idx) {
                Ok(i) => i,
                Err(i) => i - 1,
            }
        })
    }
    /// Return the index of the column at the byte index.
    ///
    /// The column is defined as the number of characters in the line before the
    /// byte index.
    pub fn byte_to_column(&self, byte_idx: usize) -> Option<usize> {
        let line = self.byte_to_line(byte_idx)?;
        let start = self.line_to_byte(line)?;
        let head = self.text().get(start..byte_idx)?;
        Some(head.chars().count())
    }
    /// Return the index of the line and column at the byte index.
    pub fn byte_to_line_column(&self, byte_idx: usize) -> Option<(usize, usize)> {
        let line = self.byte_to_line(byte_idx)?;
        let start = self.line_to_byte(line)?;
        let head = self.text().get(start..byte_idx)?;
        let col = head.chars().count();
        Some((line, col))
    }
    /// Return the byte index at the UTF-16 code unit.
    pub fn utf16_to_byte(&self, utf16_idx: usize) -> Option<usize> {
        let line = self.0.lines.get(
            match self.0.lines.binary_search_by_key(&utf16_idx, |line| line.utf16_idx) {
                Ok(i) => i,
                Err(i) => i - 1,
            },
        )?;
        // Walk the line char by char, accumulating UTF-16 lengths, until the
        // target code unit offset is reached.
        let text = self.text();
        let mut k = line.utf16_idx;
        for (i, c) in text[line.byte_idx..].char_indices() {
            if k >= utf16_idx {
                return Some(line.byte_idx + i);
            }
            k += c.len_utf16();
        }
        // The target may point exactly at the end of the text.
        (k == utf16_idx).then_some(text.len())
    }
    /// Return the byte position at which the given line starts.
    pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> {
        self.0.lines.get(line_idx).map(|line| line.byte_idx)
    }
    /// Return the range which encloses the given line.
    pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> {
        let start = self.line_to_byte(line_idx)?;
        let end = self.line_to_byte(line_idx + 1).unwrap_or(self.text().len());
        Some(start..end)
    }
    /// Return the byte index of the given (line, column) pair.
    ///
    /// The column defines the number of characters to go beyond the start of
    /// the line.
    pub fn line_column_to_byte(
        &self,
        line_idx: usize,
        column_idx: usize,
    ) -> Option<usize> {
        let range = self.line_to_range(line_idx)?;
        let line = self.text().get(range.clone())?;
        // Advance `column_idx` chars into the line; if the column exceeds the
        // line length, this saturates at the end of the line.
        let mut chars = line.chars();
        for _ in 0..column_idx {
            chars.next();
        }
        Some(range.start + (line.len() - chars.as_str().len()))
    }
}
impl Lines<String> {
    /// Fully replace the source text.
    ///
    /// This performs a naive (suffix/prefix-based) diff of the old and new text
    /// to produce the smallest single edit that transforms old into new and
    /// then calls [`edit`](Self::edit) with it.
    ///
    /// Returns whether any changes were made.
    pub fn replace(&mut self, new: &str) -> bool {
        let Some((prefix, suffix)) = self.replacement_range(new) else {
            return false;
        };
        let old = self.text();
        // Replace the middle part between the shared prefix and suffix.
        let replace = prefix..old.len() - suffix;
        let with = &new[prefix..new.len() - suffix];
        self.edit(replace, with);
        true
    }
    /// Returns the common prefix and suffix lengths.
    /// Returns [`None`] if the old and new strings are equal.
    pub fn replacement_range(&self, new: &str) -> Option<(usize, usize)> {
        let old = self.text();
        // Length of the longest common byte prefix.
        let mut prefix =
            zip(old.bytes(), new.bytes()).take_while(|(x, y)| x == y).count();
        if prefix == old.len() && prefix == new.len() {
            return None;
        }
        // Retract the prefix until it ends on a char boundary in both strings.
        while !old.is_char_boundary(prefix) || !new.is_char_boundary(prefix) {
            prefix -= 1;
        }
        // Length of the longest common byte suffix of the remainders.
        let mut suffix = zip(old[prefix..].bytes().rev(), new[prefix..].bytes().rev())
            .take_while(|(x, y)| x == y)
            .count();
        // Shrink the suffix until it starts on a char boundary in both
        // strings. It must shrink (`-= 1`), never grow: growing would claim
        // bytes as common that were never compared equal and corrupt the
        // resulting edit. Terminates because `suffix == 0` is a boundary.
        while !old.is_char_boundary(old.len() - suffix)
            || !new.is_char_boundary(new.len() - suffix)
        {
            suffix -= 1;
        }
        Some((prefix, suffix))
    }
    /// Edit the source file by replacing the given range with the new text
    /// and incrementally updating the line metadata.
    ///
    /// The method panics if the `replace` range is out of bounds.
    #[track_caller]
    pub fn edit(&mut self, replace: Range<usize>, with: &str) {
        let start_byte = replace.start;
        let start_utf16 = self.byte_to_utf16(start_byte).unwrap();
        let line = self.byte_to_line(start_byte).unwrap();
        let inner = Arc::make_mut(&mut self.0);
        // Update the text itself.
        inner.text.replace_range(replace.clone(), with);
        // Remove invalidated line starts.
        inner.lines.truncate(line + 1);
        // Handle adjoining of \r and \n: if the edit fuses them into a single
        // CRLF line break, the previously recorded line start disappears.
        if inner.text[..start_byte].ends_with('\r') && with.starts_with('\n') {
            inner.lines.pop();
        }
        // Recalculate the line starts after the edit.
        inner.lines.extend(lines_from(
            start_byte,
            start_utf16,
            &inner.text[start_byte..],
        ));
    }
}
impl<S: Hash> Hash for Lines<S> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Only the text is hashed: the line metadata is fully derived from
        // it (see `Lines::new`), so hashing it too would be redundant.
        self.0.text.hash(state);
    }
}
impl<S: AsRef<str>> AsRef<str> for Lines<S> {
    fn as_ref(&self) -> &str {
        self.0.text.as_ref()
    }
}
/// Create a line vector for the full text.
fn lines(text: &str) -> Vec<Line> {
    // The first line always starts at the very beginning of the text.
    let mut all = vec![Line { byte_idx: 0, utf16_idx: 0 }];
    all.extend(lines_from(0, 0, text));
    all
}
/// Compute a line iterator from an offset.
fn lines_from(
    byte_offset: usize,
    utf16_offset: usize,
    text: &str,
) -> impl Iterator<Item = Line> + '_ {
    let mut s = unscanny::Scanner::new(text);
    let mut utf16_idx = utf16_offset;
    std::iter::from_fn(move || {
        // Scan up to the next newline. The closure is also evaluated on the
        // terminating newline char itself, so its UTF-16 length is already
        // accounted for here.
        s.eat_until(|c: char| {
            utf16_idx += c.len_utf16();
            is_newline(c)
        });
        if s.done() {
            return None;
        }
        // Consume the newline. A CRLF pair counts as one line break, but the
        // extra `\n` still contributes one UTF-16 code unit.
        if s.eat() == Some('\r') && s.eat_if('\n') {
            utf16_idx += 1;
        }
        Some(Line { byte_idx: byte_offset + s.cursor(), utf16_idx })
    })
}
/// The number of code units this string would use if it was encoded in
/// UTF16. This runs in linear time.
fn len_utf16(string: &str) -> usize {
    string.chars().fold(0, |sum, c| sum + c.len_utf16())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises multi-byte UTF-8 (`ä`), a surrogate-pair emoji (`💛`), and
    // all three newline variants (`\n`, `\r\n`, `\r`).
    const TEST: &str = "ä\tcde\nf💛g\r\nhi\rjkl";
    #[test]
    fn test_source_file_new() {
        let lines = Lines::new(TEST);
        assert_eq!(
            lines.0.lines,
            [
                Line { byte_idx: 0, utf16_idx: 0 },
                Line { byte_idx: 7, utf16_idx: 6 },
                Line { byte_idx: 15, utf16_idx: 12 },
                Line { byte_idx: 18, utf16_idx: 15 },
            ]
        );
    }
    #[test]
    fn test_source_file_pos_to_line() {
        let lines = Lines::new(TEST);
        assert_eq!(lines.byte_to_line(0), Some(0));
        assert_eq!(lines.byte_to_line(2), Some(0));
        assert_eq!(lines.byte_to_line(6), Some(0));
        assert_eq!(lines.byte_to_line(7), Some(1));
        assert_eq!(lines.byte_to_line(8), Some(1));
        assert_eq!(lines.byte_to_line(12), Some(1));
        assert_eq!(lines.byte_to_line(21), Some(3));
        assert_eq!(lines.byte_to_line(22), None);
    }
    #[test]
    fn test_source_file_pos_to_column() {
        let lines = Lines::new(TEST);
        assert_eq!(lines.byte_to_column(0), Some(0));
        assert_eq!(lines.byte_to_column(2), Some(1));
        assert_eq!(lines.byte_to_column(6), Some(5));
        assert_eq!(lines.byte_to_column(7), Some(0));
        assert_eq!(lines.byte_to_column(8), Some(1));
        assert_eq!(lines.byte_to_column(12), Some(2));
    }
    #[test]
    fn test_source_file_utf16() {
        #[track_caller]
        fn roundtrip(lines: &Lines<&str>, byte_idx: usize, utf16_idx: usize) {
            let middle = lines.byte_to_utf16(byte_idx).unwrap();
            let result = lines.utf16_to_byte(middle).unwrap();
            assert_eq!(middle, utf16_idx);
            assert_eq!(result, byte_idx);
        }
        let lines = Lines::new(TEST);
        roundtrip(&lines, 0, 0);
        roundtrip(&lines, 2, 1);
        roundtrip(&lines, 3, 2);
        roundtrip(&lines, 8, 7);
        roundtrip(&lines, 12, 9);
        roundtrip(&lines, 21, 18);
        assert_eq!(lines.byte_to_utf16(22), None);
        assert_eq!(lines.utf16_to_byte(19), None);
    }
    #[test]
    fn test_source_file_roundtrip() {
        #[track_caller]
        fn roundtrip(lines: &Lines<&str>, byte_idx: usize) {
            let line = lines.byte_to_line(byte_idx).unwrap();
            let column = lines.byte_to_column(byte_idx).unwrap();
            let result = lines.line_column_to_byte(line, column).unwrap();
            assert_eq!(result, byte_idx);
        }
        let lines = Lines::new(TEST);
        roundtrip(&lines, 0);
        roundtrip(&lines, 7);
        roundtrip(&lines, 12);
        roundtrip(&lines, 21);
    }
    #[test]
    fn test_source_file_edit() {
        // This tests only the non-parser parts. The reparsing itself is
        // tested separately.
        #[track_caller]
        fn test(prev: &str, range: Range<usize>, with: &str, after: &str) {
            let reference = Lines::new(after);
            let mut edited = Lines::new(prev.to_string());
            edited.edit(range.clone(), with);
            assert_eq!(edited.text(), reference.text());
            assert_eq!(edited.0.lines, reference.0.lines);
            let mut replaced = Lines::new(prev.to_string());
            replaced.replace(&{
                let mut s = prev.to_string();
                s.replace_range(range, with);
                s
            });
            assert_eq!(replaced.text(), reference.text());
            assert_eq!(replaced.0.lines, reference.0.lines);
        }
        // Test inserting at the beginning.
        test("abc\n", 0..0, "hi\n", "hi\nabc\n");
        test("\nabc", 0..0, "hi\r", "hi\r\nabc");
        // Test editing in the middle.
        test(TEST, 4..16, "❌", "ä\tc❌i\rjkl");
        // Test appending.
        test("abc\ndef", 7..7, "hi", "abc\ndefhi");
        test("abc\ndef\n", 8..8, "hi", "abc\ndef\nhi");
        // Test appending with adjoining \r and \n.
        test("abc\ndef\r", 8..8, "\nghi", "abc\ndef\r\nghi");
        // Test removing everything. (21 is the total byte length of `TEST`.)
        test(TEST, 0..21, "", "");
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/reparser.rs | crates/typst-syntax/src/reparser.rs | use std::ops::Range;
use crate::{
Span, SyntaxKind, SyntaxNode, is_newline, parse, reparse_block, reparse_markup,
};
/// Refresh the given syntax node with as little parsing as possible.
///
/// Takes the new text, the range in the old text that was replaced and the
/// length of the replacement and returns the range in the new text that was
/// ultimately reparsed.
///
/// The high-level API for this function is
/// [`Source::edit`](crate::Source::edit).
pub fn reparse(
    root: &mut SyntaxNode,
    text: &str,
    replaced: Range<usize>,
    replacement_len: usize,
) -> Range<usize> {
    // First, attempt a targeted incremental reparse within the tree.
    if let Some(range) = try_reparse(text, replaced, replacement_len, None, root, 0) {
        return range;
    }
    // Otherwise, fall back to a full reparse of the whole text, restoring
    // the old root's span numbering if it had a file id.
    let id = root.span().id();
    *root = parse(text);
    if let Some(id) = id {
        root.numberize(id, Span::FULL).unwrap();
    }
    0..text.len()
}
/// Try to reparse inside the given node.
fn try_reparse(
    text: &str,
    replaced: Range<usize>,
    replacement_len: usize,
    parent_kind: Option<SyntaxKind>,
    node: &mut SyntaxNode,
    offset: usize,
) -> Option<Range<usize>> {
    // The range of children which overlap with the edit.
    #[allow(clippy::reversed_empty_ranges)]
    let mut overlap = usize::MAX..0;
    let mut cursor = offset;
    let node_kind = node.kind();
    for (i, child) in node.children_mut().iter_mut().enumerate() {
        let prev_range = cursor..cursor + child.len();
        let prev_len = child.len();
        let prev_desc = child.descendants();
        // Does the child surround the edit?
        // If so, try to reparse within it or itself.
        if !child.is_leaf() && includes(&prev_range, &replaced) {
            let new_len = prev_len + replacement_len - replaced.len();
            let new_range = cursor..cursor + new_len;
            // Try to reparse within the child.
            if let Some(range) = try_reparse(
                text,
                replaced.clone(),
                replacement_len,
                Some(node_kind),
                child,
                cursor,
            ) {
                assert_eq!(child.len(), new_len);
                let new_desc = child.descendants();
                node.update_parent(prev_len, new_len, prev_desc, new_desc);
                return Some(range);
            }
            // If the child is a block, try to reparse the block.
            if child.kind().is_block()
                && let Some(newborn) = reparse_block(text, new_range.clone())
            {
                return node
                    .replace_children(i..i + 1, vec![newborn])
                    .is_ok()
                    .then_some(new_range);
            }
        }
        // Does the child overlap with the edit?
        if overlaps(&prev_range, &replaced) {
            overlap.start = overlap.start.min(i);
            overlap.end = i + 1;
        }
        // Is the child beyond the edit?
        if replaced.end < cursor {
            break;
        }
        cursor += child.len();
    }
    // Try to reparse a range of markup expressions within markup. This is only
    // possible if the markup is top-level or contained in a block, not if it is
    // contained in things like headings or lists because too much can go wrong
    // with indent and line breaks.
    if overlap.is_empty()
        || node.kind() != SyntaxKind::Markup
        || !matches!(parent_kind, None | Some(SyntaxKind::ContentBlock))
    {
        return None;
    }
    let children = node.children_mut();
    // Reparse a segment. Retries until it works, taking exponentially more
    // children into account.
    let mut expansion = 1;
    loop {
        // Add slack in both directions.
        let mut start = overlap.start.saturating_sub(expansion.max(2));
        let mut end = (overlap.end + expansion).min(children.len());
        // Expand to the left.
        while start > 0 && expand(&children[start]) {
            start -= 1;
        }
        // Expand to the right.
        while end < children.len() && expand(&children[end]) {
            end += 1;
        }
        // Also take hash.
        if start > 0 && children[start - 1].kind() == SyntaxKind::Hash {
            start -= 1;
        }
        // Synthesize what `at_start` and `nesting` would be at the start of the
        // reparse.
        let mut prefix_len = 0;
        let mut nesting = 0;
        let mut at_start = true;
        for child in &children[..start] {
            prefix_len += child.len();
            next_at_start(child, &mut at_start);
            next_nesting(child, &mut nesting);
        }
        // Determine what `at_start` will have to be at the end of the reparse.
        let mut prev_len = 0;
        let mut prev_at_start_after = at_start;
        let mut prev_nesting_after = nesting;
        for child in &children[start..end] {
            prev_len += child.len();
            next_at_start(child, &mut prev_at_start_after);
            next_nesting(child, &mut prev_nesting_after);
        }
        // Determine the range in the new text that we want to reparse.
        let shifted = offset + prefix_len;
        let new_len = prev_len + replacement_len - replaced.len();
        let new_range = shifted..shifted + new_len;
        let at_end = end == children.len();
        // Reparse!
        let reparsed = reparse_markup(
            text,
            new_range.clone(),
            &mut at_start,
            &mut nesting,
            parent_kind.is_none(),
        );
        if let Some(newborns) = reparsed {
            // If more children follow, at_start must match its previous value.
            // Similarly, if more children follow or we are not top-level, the
            // nesting must match its previous value.
            if (at_end || at_start == prev_at_start_after)
                && ((at_end && parent_kind.is_none()) || nesting == prev_nesting_after)
            {
                return node
                    .replace_children(start..end, newborns)
                    .is_ok()
                    .then_some(new_range);
            }
        }
        // If it didn't even work with all children, we give up.
        if start == 0 && at_end {
            break;
        }
        // Exponential expansion to both sides.
        expansion *= 2;
    }
    None
}
/// Whether `inner` lies strictly inside `outer` (sharing an endpoint does
/// not count as containment).
fn includes(outer: &Range<usize>, inner: &Range<usize>) -> bool {
    inner.start > outer.start && inner.end < outer.end
}
/// Whether the two ranges overlap or at least touch each other.
fn overlaps(first: &Range<usize>, second: &Range<usize>) -> bool {
    // Either range may be the one that reaches into the other.
    let first_reaches_second = first.start <= second.start && second.start <= first.end;
    let second_reaches_first = second.start <= first.start && first.start <= second.end;
    first_reaches_second || second_reaches_first
}
/// Whether the selection should be expanded beyond a node of this kind.
fn expand(node: &SyntaxNode) -> bool {
    let kind = node.kind();
    // Trivia, errors, and semicolons are always absorbed into the segment.
    if kind.is_trivia() || kind.is_error() || kind == SyntaxKind::Semicolon {
        return true;
    }
    // Slashes and colons can combine with neighboring text (e.g. in links
    // and term lists), so expand over them, too.
    matches!(node.text().as_str(), "/" | ":")
}
/// Whether `at_start` would still be true after this node given the
/// previous value of the property.
fn next_at_start(node: &SyntaxNode, at_start: &mut bool) {
    let kind = node.kind();
    if !kind.is_trivia() {
        // Any non-trivia node means we are no longer at a line start.
        *at_start = false;
    } else if kind == SyntaxKind::Parbreak
        || (kind == SyntaxKind::Space && node.text().chars().any(is_newline))
    {
        // A parbreak or a space that contains a newline re-establishes the
        // line start; other trivia leaves the value untouched.
        *at_start = true;
    }
}
/// Update `nesting` based on the node.
fn next_nesting(node: &SyntaxNode, nesting: &mut usize) {
    // Only plain text tokens can open or close content nesting.
    if node.kind() != SyntaxKind::Text {
        return;
    }
    match node.text().as_str() {
        "[" => *nesting += 1,
        // Never underflow: a stray closing bracket at depth zero is ignored.
        "]" => *nesting = nesting.saturating_sub(1),
        _ => {}
    }
}
#[cfg(test)]
mod tests {
    use std::ops::Range;
    use crate::{Source, Span, parse};
    /// Replaces `range` in `prev` with `with`, then checks that the
    /// incrementally reparsed tree matches a from-scratch parse of the edited
    /// text. `incremental` asserts whether the edit was expected to be
    /// handled incrementally (the invalidated range returned by `edit` is
    /// smaller than the whole text) or had to fall back to a full reparse.
    #[track_caller]
    fn test(prev: &str, range: Range<usize>, with: &str, incremental: bool) {
        let mut source = Source::detached(prev);
        let prev = source.root().clone();
        let range = source.edit(range, with);
        let mut found = source.root().clone();
        let mut expected = parse(source.text());
        // Erase spans on both trees: they legitimately differ between an
        // incremental and a full parse and would fail the comparison below.
        found.synthesize(Span::detached());
        expected.synthesize(Span::detached());
        if found != expected {
            eprintln!("source: {:?}", source.text());
            eprintln!("previous: {prev:#?}");
            eprintln!("expected: {expected:#?}");
            eprintln!("found: {found:#?}");
            panic!("test failed");
        }
        if incremental {
            assert_ne!(source.text().len(), range.len(), "should have been incremental");
        } else {
            assert_eq!(
                source.text().len(),
                range.len(),
                "shouldn't have been incremental"
            );
        }
    }
    /// Edits inside top-level markup.
    #[test]
    fn test_reparse_markup() {
        test("abc~def~gh~", 5..6, "+", true);
        test("~~~~~~~", 3..4, "A", true);
        test("abc~~", 1..2, "", true);
        test("#var. hello", 5..6, " ", false);
        test("#var;hello", 9..10, "a", false);
        test("https:/world", 7..7, "/", false);
        test("hello world", 7..12, "walkers", false);
        test("some content", 0..12, "", false);
        test("", 0..0, "do it", false);
        test("a d e", 1..3, " b c d", false);
        test("~*~*~", 2..2, "*", false);
        test("::1\n2. a\n3", 7..7, "4", true);
        test("* #{1+2} *", 6..7, "3", true);
        test("#{(0, 1, 2)}", 6..7, "11pt", true);
        test("\n= A heading", 4..4, "n evocative", false);
        test("#call() abc~d", 7..7, "[]", true);
        test("a your thing a", 6..7, "a", false);
        test("#grid(columns: (auto, 1fr, 40%))", 16..20, "4pt", false);
        test("abc\n= a heading\njoke", 3..4, "\nmore\n\n", true);
        test("#show f: a => b..", 16..16, "c", false);
        test("#for", 4..4, "//", false);
        test("a\n#let \nb", 7..7, "i", true);
        test(r"#{{let x = z}; a = 1} b", 7..7, "//", false);
        test(r#"a ```typst hello```"#, 16..17, "", false);
        test("a{b}c", 1..1, "#", false);
        test("a#{b}c", 1..2, "", false);
    }
    /// Edits inside code and content blocks.
    #[test]
    fn test_reparse_block() {
        test("Hello #{ x + 1 }!", 9..10, "abc", true);
        test("A#{}!", 3..3, "\"", false);
        test("#{ [= x] }!", 5..5, "=", true);
        test("#[[]]", 3..3, "\\", true);
        test("#[[ab]]", 4..5, "\\", true);
        test("#{}}", 2..2, "{", false);
        test("A: #[BC]", 6..6, "{", true);
        test("A: #[BC]", 6..6, "#{", true);
        test("A: #[BC]", 6..6, "#{}", true);
        test("#{\"ab\"}A", 5..5, "c", true);
        test("#{\"ab\"}A", 5..6, "c", false);
        test("a#[]b", 3..3, "#{", true);
        test("a#{call(); abc}b", 8..8, "[]", true);
        test("a #while x {\n g(x) \n} b", 12..12, "//", true);
        test("a#[]b", 3..3, "[hey]", true);
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/package.rs | crates/typst-syntax/src/package.rs | //! Package manifest parsing.
use std::collections::BTreeMap;
use std::fmt::{self, Debug, Display, Formatter};
use std::str::FromStr;
use ecow::{EcoString, eco_format};
use serde::de::IgnoredAny;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use unscanny::Scanner;
use crate::is_ident;
/// A type alias for a map of key-value pairs used to collect unknown fields
/// where values are completely discarded.
pub type UnknownFields = BTreeMap<EcoString, IgnoredAny>;
/// A parsed package manifest.
///
/// The `unknown_fields` contains fields which were found but not expected.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PackageManifest {
    /// Details about the package itself.
    pub package: PackageInfo,
    /// Details about the template, if the package is one.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub template: Option<TemplateInfo>,
    /// The tools section for third-party configuration.
    #[serde(default)]
    pub tool: ToolInfo,
    /// All parsed but unknown fields, this can be used for validation.
    /// Collected via `serde(flatten)` and never serialized back.
    #[serde(flatten, skip_serializing)]
    pub unknown_fields: UnknownFields,
}
/// The `[tool]` key in the manifest. This field can be used to retrieve
/// 3rd-party tool configuration.
///
/// # Examples
/// ```
/// # use serde::{Deserialize, Serialize};
/// # use ecow::EcoString;
/// # use typst_syntax::package::PackageManifest;
/// #[derive(Debug, PartialEq, Serialize, Deserialize)]
/// struct MyTool {
///     key: EcoString,
/// }
///
/// let mut manifest: PackageManifest = toml::from_str(r#"
/// [package]
/// name = "package"
/// version = "0.1.0"
/// entrypoint = "src/lib.typ"
///
/// [tool.my-tool]
/// key = "value"
/// "#)?;
///
/// let my_tool = manifest
///     .tool
///     .sections
///     .remove("my-tool")
///     .ok_or("tool.my-tool section missing")?;
/// let my_tool = MyTool::deserialize(my_tool)?;
///
/// assert_eq!(my_tool, MyTool { key: "value".into() });
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct ToolInfo {
    /// Any fields parsed in the tool section. Keys are tool names, values are
    /// the raw TOML tables to be deserialized by the respective tools.
    #[serde(flatten)]
    pub sections: BTreeMap<EcoString, toml::Table>,
}
/// The `[template]` key in the manifest.
///
/// The `unknown_fields` contains fields which were found but not expected.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TemplateInfo {
    /// The directory within the package that contains the files that should be
    /// copied into the user's new project directory.
    pub path: EcoString,
    /// A path relative to the template's path that points to the file serving
    /// as the compilation target.
    pub entrypoint: EcoString,
    /// A path relative to the package's root that points to a PNG or lossless
    /// WebP thumbnail for the template.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thumbnail: Option<EcoString>,
    /// All parsed but unknown fields, this can be used for validation.
    #[serde(flatten, skip_serializing)]
    pub unknown_fields: UnknownFields,
}
/// The `[package]` key in the manifest.
///
/// The `unknown_fields` contains fields which were found but not expected.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PackageInfo {
    /// The name of the package within its namespace.
    pub name: EcoString,
    /// The package's version.
    pub version: PackageVersion,
    /// The path of the entrypoint into the package.
    pub entrypoint: EcoString,
    /// A list of the package's authors.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub authors: Vec<EcoString>,
    /// The package's license.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub license: Option<EcoString>,
    /// A short description of the package.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<EcoString>,
    /// A link to the package's web presence.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub homepage: Option<EcoString>,
    /// A link to the repository where this package is developed.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub repository: Option<EcoString>,
    /// An array of search keywords for the package.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub keywords: Vec<EcoString>,
    /// An array with up to three of the predefined categories to help users
    /// discover the package.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub categories: Vec<EcoString>,
    /// An array of disciplines defining the target audience for which the
    /// package is useful.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub disciplines: Vec<EcoString>,
    /// The minimum required compiler version for the package. Checked against
    /// the current compiler in [`PackageManifest::validate`].
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub compiler: Option<VersionBound>,
    /// An array of globs specifying files that should not be part of the
    /// published bundle.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub exclude: Vec<EcoString>,
    /// All parsed but unknown fields, this can be used for validation.
    #[serde(flatten, skip_serializing)]
    pub unknown_fields: UnknownFields,
}
impl PackageManifest {
/// Create a new package manifest with the given package info.
pub fn new(package: PackageInfo) -> Self {
PackageManifest {
package,
template: None,
tool: ToolInfo::default(),
unknown_fields: UnknownFields::new(),
}
}
/// Ensure that this manifest is indeed for the specified package.
pub fn validate(&self, spec: &PackageSpec) -> Result<(), EcoString> {
if self.package.name != spec.name {
return Err(eco_format!(
"package manifest contains mismatched name `{}`",
self.package.name
));
}
if self.package.version != spec.version {
return Err(eco_format!(
"package manifest contains mismatched version {}",
self.package.version
));
}
if let Some(required) = self.package.compiler {
let current = PackageVersion::compiler();
if !current.matches_ge(&required) {
return Err(eco_format!(
"package requires Typst {required} or newer \
(current version is {current})"
));
}
}
Ok(())
}
}
impl TemplateInfo {
    /// Create a new template info with only required fields; the thumbnail
    /// and unknown fields start out empty.
    pub fn new(path: impl Into<EcoString>, entrypoint: impl Into<EcoString>) -> Self {
        Self {
            path: path.into(),
            entrypoint: entrypoint.into(),
            thumbnail: None,
            unknown_fields: UnknownFields::new(),
        }
    }
}
impl PackageInfo {
    /// Create a new package info with only required fields; all optional
    /// metadata starts out empty.
    pub fn new(
        name: impl Into<EcoString>,
        version: PackageVersion,
        entrypoint: impl Into<EcoString>,
    ) -> Self {
        Self {
            name: name.into(),
            version,
            entrypoint: entrypoint.into(),
            authors: Vec::new(),
            categories: Vec::new(),
            compiler: None,
            description: None,
            disciplines: Vec::new(),
            exclude: Vec::new(),
            homepage: None,
            keywords: Vec::new(),
            license: None,
            repository: None,
            unknown_fields: UnknownFields::new(),
        }
    }
}
/// Identifies a package.
///
/// Displayed as `@namespace/name:version`.
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct PackageSpec {
    /// The namespace the package lives in.
    pub namespace: EcoString,
    /// The name of the package within its namespace.
    pub name: EcoString,
    /// The package's version.
    pub version: PackageVersion,
}
impl PackageSpec {
    /// Strip the version, keeping only namespace and name.
    pub fn versionless(&self) -> VersionlessPackageSpec {
        let Self { namespace, name, .. } = self;
        VersionlessPackageSpec { namespace: namespace.clone(), name: name.clone() }
    }
}
impl FromStr for PackageSpec {
    type Err = EcoString;
    /// Parses a full `@namespace/name:version` specification.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut scanner = unscanny::Scanner::new(s);
        Ok(Self {
            namespace: parse_namespace(&mut scanner)?.into(),
            name: parse_name(&mut scanner)?.into(),
            version: parse_version(&mut scanner)?,
        })
    }
}
impl Debug for PackageSpec {
    // Debug output reuses the canonical `Display` form.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Display::fmt(self, f)
    }
}
impl Display for PackageSpec {
    // Canonical form: `@namespace/name:version`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "@{}/{}:{}", self.namespace, self.name, self.version)
    }
}
/// Identifies a package, but not a specific version of it.
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct VersionlessPackageSpec {
    /// The namespace the package lives in.
    pub namespace: EcoString,
    /// The name of the package within its namespace.
    pub name: EcoString,
}
impl VersionlessPackageSpec {
    /// Fill in the `version` to get a complete [`PackageSpec`].
    pub fn at(self, version: PackageVersion) -> PackageSpec {
        let Self { namespace, name } = self;
        PackageSpec { namespace, name, version }
    }
}
impl FromStr for VersionlessPackageSpec {
    type Err = EcoString;
    /// Parses an `@namespace/name` specification and rejects any trailing
    /// version part.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut scanner = unscanny::Scanner::new(s);
        let namespace = parse_namespace(&mut scanner)?.into();
        let name = parse_name(&mut scanner)?.into();
        match scanner.done() {
            true => Ok(Self { namespace, name }),
            false => {
                Err("unexpected version in versionless package specification".into())
            }
        }
    }
}
impl Debug for VersionlessPackageSpec {
    // Debug output reuses the canonical `Display` form.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Display::fmt(self, f)
    }
}
impl Display for VersionlessPackageSpec {
    // Canonical form: `@namespace/name`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "@{}/{}", self.namespace, self.name)
    }
}
/// Consumes and validates the leading `@namespace` part of a specification.
fn parse_namespace<'s>(s: &mut Scanner<'s>) -> Result<&'s str, EcoString> {
    if !s.eat_if('@') {
        return Err("package specification must start with '@'".into());
    }
    let namespace = s.eat_until('/');
    if namespace.is_empty() {
        return Err("package specification is missing namespace".into());
    }
    if !is_ident(namespace) {
        return Err(eco_format!("`{namespace}` is not a valid package namespace"));
    }
    Ok(namespace)
}
/// Consumes and validates the `/name` part of a specification.
fn parse_name<'s>(s: &mut Scanner<'s>) -> Result<&'s str, EcoString> {
    s.eat_if('/');
    let name = s.eat_until(':');
    if name.is_empty() {
        return Err("package specification is missing name".into());
    }
    if !is_ident(name) {
        return Err(eco_format!("`{name}` is not a valid package name"));
    }
    Ok(name)
}
/// Consumes and parses the trailing `:version` part of a specification.
fn parse_version(s: &mut Scanner) -> Result<PackageVersion, EcoString> {
    s.eat_if(':');
    let version = s.after();
    if version.is_empty() {
        return Err("package specification is missing version".into());
    }
    version.parse()
}
/// A package's version, following a `major.minor.patch` scheme.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PackageVersion {
    /// The package's major version.
    pub major: u32,
    /// The package's minor version.
    pub minor: u32,
    /// The package's patch version.
    pub patch: u32,
}
impl PackageVersion {
/// The current compiler version.
pub fn compiler() -> Self {
let typst_version = typst_utils::version();
Self {
major: typst_version.major(),
minor: typst_version.minor(),
patch: typst_version.patch(),
}
}
/// Performs an `==` match with the given version bound. Version elements
/// missing in the bound are ignored.
pub fn matches_eq(&self, bound: &VersionBound) -> bool {
self.major == bound.major
&& bound.minor.is_none_or(|minor| self.minor == minor)
&& bound.patch.is_none_or(|patch| self.patch == patch)
}
/// Performs a `>` match with the given version bound. The match only
/// succeeds if some version element in the bound is actually greater than
/// that of the version.
pub fn matches_gt(&self, bound: &VersionBound) -> bool {
if self.major != bound.major {
return self.major > bound.major;
}
let Some(minor) = bound.minor else { return false };
if self.minor != minor {
return self.minor > minor;
}
let Some(patch) = bound.patch else { return false };
if self.patch != patch {
return self.patch > patch;
}
false
}
/// Performs a `<` match with the given version bound. The match only
/// succeeds if some version element in the bound is actually less than that
/// of the version.
pub fn matches_lt(&self, bound: &VersionBound) -> bool {
if self.major != bound.major {
return self.major < bound.major;
}
let Some(minor) = bound.minor else { return false };
if self.minor != minor {
return self.minor < minor;
}
let Some(patch) = bound.patch else { return false };
if self.patch != patch {
return self.patch < patch;
}
false
}
/// Performs a `>=` match with the given versions. The match succeeds when
/// either a `==` or `>` match does.
pub fn matches_ge(&self, bound: &VersionBound) -> bool {
self.matches_eq(bound) || self.matches_gt(bound)
}
/// Performs a `<=` match with the given versions. The match succeeds when
/// either a `==` or `<` match does.
pub fn matches_le(&self, bound: &VersionBound) -> bool {
self.matches_eq(bound) || self.matches_lt(bound)
}
}
impl FromStr for PackageVersion {
    type Err = EcoString;
    /// Parses a version with exactly three dot-separated numeric components.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('.');
        // Pulls the next component, reporting both missing and malformed ones.
        let mut component = |kind: &str| -> Result<u32, EcoString> {
            match parts.next() {
                None | Some("") => {
                    Err(eco_format!("version number is missing {kind} version"))
                }
                Some(part) => part
                    .parse()
                    .map_err(|_| eco_format!("`{part}` is not a valid {kind} version")),
            }
        };
        let major = component("major")?;
        let minor = component("minor")?;
        let patch = component("patch")?;
        match parts.next() {
            Some(rest) => Err(eco_format!(
                "version number has unexpected fourth component: `{rest}`"
            )),
            None => Ok(Self { major, minor, patch }),
        }
    }
}
impl Debug for PackageVersion {
    // Debug output reuses the canonical `Display` form.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Display::fmt(self, f)
    }
}
impl Display for PackageVersion {
    // Canonical form: `major.minor.patch`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}
impl Serialize for PackageVersion {
    // Serialized as the display string, e.g. `"0.1.0"`.
    fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        s.collect_str(self)
    }
}
impl<'de> Deserialize<'de> for PackageVersion {
    // Deserialized by parsing the string form via `FromStr`.
    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        let string = EcoString::deserialize(d)?;
        string.parse().map_err(serde::de::Error::custom)
    }
}
/// A version bound for compatibility specification.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct VersionBound {
    /// The bound's major version.
    pub major: u32,
    /// The bound's minor version.
    pub minor: Option<u32>,
    /// The bound's patch version. Can only be present if minor is too.
    pub patch: Option<u32>,
}
impl FromStr for VersionBound {
    type Err = EcoString;
    /// Parses a bound with one to three components: `1`, `1.2`, or `1.2.3`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('.');
        // Pulls the next component if present; absence is not an error here.
        let mut component = |kind: &str| -> Result<Option<u32>, EcoString> {
            let Some(part) = parts.next() else { return Ok(None) };
            part.parse().map(Some).map_err(|_| {
                eco_format!("`{part}` is not a valid {kind} version bound")
            })
        };
        let Some(major) = component("major")? else {
            return Err(eco_format!("version bound is missing major version"));
        };
        let minor = component("minor")?;
        let patch = component("patch")?;
        if let Some(rest) = parts.next() {
            return Err(eco_format!(
                "version bound has unexpected fourth component: `{rest}`"
            ));
        }
        Ok(Self { major, minor, patch })
    }
}
impl Debug for VersionBound {
    // Debug output reuses the canonical `Display` form.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Display::fmt(self, f)
    }
}
impl Display for VersionBound {
    // Writes `major`, then `.minor` and `.patch` only if present.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.major)?;
        if let Some(minor) = self.minor {
            write!(f, ".{minor}")?;
        }
        if let Some(patch) = self.patch {
            write!(f, ".{patch}")?;
        }
        Ok(())
    }
}
impl Serialize for VersionBound {
    // Serialized as the display string, e.g. `"0.1"`.
    fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        s.collect_str(self)
    }
}
impl<'de> Deserialize<'de> for VersionBound {
    // Deserialized by parsing the string form via `FromStr`.
    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        let string = EcoString::deserialize(d)?;
        string.parse().map_err(serde::de::Error::custom)
    }
}
#[cfg(test)]
mod tests {
    use std::str::FromStr;
    use super::*;
    /// Exercises `matches_{eq,gt,lt}` bound matching on a fixed version.
    #[test]
    fn version_version_match() {
        let v1_1_1 = PackageVersion::from_str("1.1.1").unwrap();
        assert!(v1_1_1.matches_eq(&VersionBound::from_str("1").unwrap()));
        assert!(v1_1_1.matches_eq(&VersionBound::from_str("1.1").unwrap()));
        assert!(!v1_1_1.matches_eq(&VersionBound::from_str("1.2").unwrap()));
        assert!(!v1_1_1.matches_gt(&VersionBound::from_str("1").unwrap()));
        assert!(v1_1_1.matches_gt(&VersionBound::from_str("1.0").unwrap()));
        assert!(!v1_1_1.matches_gt(&VersionBound::from_str("1.1").unwrap()));
        assert!(!v1_1_1.matches_lt(&VersionBound::from_str("1").unwrap()));
        assert!(!v1_1_1.matches_lt(&VersionBound::from_str("1.1").unwrap()));
        assert!(v1_1_1.matches_lt(&VersionBound::from_str("1.2").unwrap()));
    }
    /// A manifest with only the required `[package]` keys parses successfully.
    #[test]
    fn minimal_manifest() {
        assert_eq!(
            toml::from_str::<PackageManifest>(
                r#"
                [package]
                name = "package"
                version = "0.1.0"
                entrypoint = "src/lib.typ"
                "#
            ),
            Ok(PackageManifest {
                package: PackageInfo::new(
                    "package",
                    PackageVersion { major: 0, minor: 1, patch: 0 },
                    "src/lib.typ"
                ),
                template: None,
                tool: ToolInfo { sections: BTreeMap::new() },
                unknown_fields: BTreeMap::new(),
            })
        );
    }
    /// `[tool]` entries must be tables and are deserializable by their tools.
    #[test]
    fn tool_section() {
        // NOTE: tool section must be table of tables, but we can't easily
        // compare the error structurally
        assert!(
            toml::from_str::<PackageManifest>(
                r#"
                [package]
                name = "package"
                version = "0.1.0"
                entrypoint = "src/lib.typ"
                [tool]
                not-table = "str"
                "#
            )
            .is_err()
        );
        #[derive(Debug, PartialEq, Serialize, Deserialize)]
        struct MyTool {
            key: EcoString,
        }
        let mut manifest: PackageManifest = toml::from_str(
            r#"
            [package]
            name = "package"
            version = "0.1.0"
            entrypoint = "src/lib.typ"
            [tool.my-tool]
            key = "value"
            "#,
        )
        .unwrap();
        let my_tool = manifest.tool.sections.remove("my-tool").unwrap();
        let my_tool = MyTool::deserialize(my_tool).unwrap();
        assert_eq!(my_tool, MyTool { key: "value".into() });
    }
    /// Unknown top-level sections are collected rather than rejected.
    #[test]
    fn unknown_keys() {
        let manifest: PackageManifest = toml::from_str(
            r#"
            [package]
            name = "package"
            version = "0.1.0"
            entrypoint = "src/lib.typ"
            [unknown]
            "#,
        )
        .unwrap();
        assert!(manifest.unknown_fields.contains_key("unknown"));
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-syntax/src/set.rs | crates/typst-syntax/src/set.rs | // Acknowledgement:
// Based on rust-analyzer's `TokenSet`.
// https://github.com/rust-lang/rust-analyzer/blob/master/crates/parser/src/token_set.rs
use crate::SyntaxKind;
/// A set of syntax kinds, stored as a 128-bit bitmask (one bit per kind).
#[derive(Default, Copy, Clone)]
pub struct SyntaxSet(u128);
impl SyntaxSet {
    /// Create a new empty set.
    pub const fn new() -> Self {
        Self(0)
    }
    /// Insert a syntax kind into the set.
    ///
    /// You can only add kinds with discriminator < 128.
    pub const fn add(self, kind: SyntaxKind) -> Self {
        assert!((kind as u8) < BITS);
        let mask = bit(kind);
        Self(self.0 | mask)
    }
    /// Remove a syntax kind from the set. Does nothing if not present.
    ///
    /// You can only remove kinds with discriminator < 128.
    pub const fn remove(self, kind: SyntaxKind) -> Self {
        assert!((kind as u8) < BITS);
        let mask = bit(kind);
        Self(self.0 & !mask)
    }
    /// Combine two syntax sets.
    pub const fn union(self, other: Self) -> Self {
        Self(self.0 | other.0)
    }
    /// Whether the set contains the given syntax kind.
    pub const fn contains(&self, kind: SyntaxKind) -> bool {
        // Kinds beyond the backing width can never be members.
        if (kind as u8) >= BITS {
            return false;
        }
        self.0 & bit(kind) != 0
    }
}
/// The width of the backing integer; kinds with larger discriminants cannot
/// be stored.
const BITS: u8 = 128;
/// The bitmask with only the bit for `kind` set.
const fn bit(kind: SyntaxKind) -> u128 {
    1 << (kind as usize)
}
/// Generate a compile-time constant `SyntaxSet` of the given kinds.
macro_rules! syntax_set {
    ($($kind:ident),* $(,)?) => {{
        // Building the set in a `const` forces evaluation at compile time.
        const SET: crate::set::SyntaxSet = crate::set::SyntaxSet::new()
            $(.add(crate::SyntaxKind:: $kind))*;
        SET
    }}
}
// Export so other modules can import as: `use set::syntax_set`
pub(crate) use syntax_set;
/// Syntax kinds that can start a statement.
pub const STMT: SyntaxSet = syntax_set!(Let, Set, Show, Import, Include, Return);
/// Syntax kinds that can start a math expression.
pub const MATH_EXPR: SyntaxSet = syntax_set!(
    Hash,
    MathIdent,
    FieldAccess,
    Dot,
    Comma,
    Semicolon,
    // Parens and braces are converted to `MathText` unless they're parsed as a
    // function call.
    LeftBrace,
    RightBrace,
    LeftParen,
    RightParen,
    MathText,
    MathShorthand,
    Linebreak,
    MathAlignPoint,
    MathPrimes,
    Escape,
    Str,
    Root,
    // `Bang` is converted to `MathText` when parsing.
    Bang,
);
/// Syntax kinds that can start a code expression.
pub const CODE_EXPR: SyntaxSet = CODE_PRIMARY.union(UNARY_OP);
/// Syntax kinds that can start an atomic code expression.
pub const ATOMIC_CODE_EXPR: SyntaxSet = ATOMIC_CODE_PRIMARY;
/// Syntax kinds that can start a code primary.
pub const CODE_PRIMARY: SyntaxSet = ATOMIC_CODE_PRIMARY.add(SyntaxKind::Underscore);
/// Syntax kinds that can start an atomic code primary.
pub const ATOMIC_CODE_PRIMARY: SyntaxSet = syntax_set!(
    Ident,
    LeftBrace,
    LeftBracket,
    LeftParen,
    Dollar,
    Let,
    Set,
    Show,
    Context,
    If,
    While,
    For,
    Import,
    Include,
    Break,
    Continue,
    Return,
    None,
    Auto,
    Int,
    Float,
    Bool,
    Numeric,
    Str,
    Label,
    Raw,
);
/// Syntax kinds that are unary operators.
pub const UNARY_OP: SyntaxSet = syntax_set!(Plus, Minus, Not);
/// Syntax kinds that are binary operators.
pub const BINARY_OP: SyntaxSet = syntax_set!(
    Plus, Minus, Star, Slash, And, Or, EqEq, ExclEq, Lt, LtEq, Gt, GtEq, Eq, In, PlusEq,
    HyphEq, StarEq, SlashEq,
);
/// Syntax kinds that can start an array or dictionary item.
pub const ARRAY_OR_DICT_ITEM: SyntaxSet = CODE_EXPR.add(SyntaxKind::Dots);
/// Syntax kinds that can start an argument in a function call.
pub const ARG: SyntaxSet = CODE_EXPR.add(SyntaxKind::Dots);
/// Syntax kinds that can start a parameter in a parameter list.
pub const PARAM: SyntaxSet = PATTERN.add(SyntaxKind::Dots);
/// Syntax kinds that can start a destructuring item.
pub const DESTRUCTURING_ITEM: SyntaxSet = PATTERN.add(SyntaxKind::Dots);
/// Syntax kinds that can start a pattern.
pub const PATTERN: SyntaxSet =
    PATTERN_LEAF.add(SyntaxKind::LeftParen).add(SyntaxKind::Underscore);
/// Syntax kinds that can start a pattern leaf.
pub const PATTERN_LEAF: SyntaxSet = ATOMIC_CODE_EXPR;
#[cfg(test)]
mod tests {
    use super::*;
    /// Inserted kinds are members; kinds never added are not.
    #[test]
    fn test_set() {
        let set = SyntaxSet::new().add(SyntaxKind::And).add(SyntaxKind::Or);
        assert!(set.contains(SyntaxKind::And));
        assert!(set.contains(SyntaxKind::Or));
        assert!(!set.contains(SyntaxKind::Not));
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/charsets.rs | crates/typst-html/src/charsets.rs | //! Defines syntactical properties of HTML tags, attributes, and text.
/// Check whether a character is in a tag name.
///
/// Tag names are restricted to ASCII letters and digits.
pub const fn is_valid_in_tag_name(c: char) -> bool {
    c.is_ascii_lowercase() || c.is_ascii_uppercase() || c.is_ascii_digit()
}
/// Check whether a character is valid in an attribute name.
pub const fn is_valid_in_attribute_name(c: char) -> bool {
    // A handful of characters are explicitly forbidden, as are control
    // characters and non-characters. _Everything_ else is allowed, including
    // U+2029 paragraph separator. Go wild.
    !matches!(c, '\0' | ' ' | '"' | '\'' | '>' | '/' | '=')
        && !is_whatwg_control_char(c)
        && !is_whatwg_non_char(c)
}
/// Check whether a character can be used in an attribute value without
/// escaping.
///
/// See <https://html.spec.whatwg.org/multipage/syntax.html#attributes-2>
pub const fn is_valid_in_attribute_value(c: char) -> bool {
    // Ampersands are sometimes legal (i.e. when they are not _ambiguous
    // ampersands_) but it is not worth the trouble to check for that, and
    // quotation marks would end a double-quote-delimited value.
    if c == '&' || c == '"' {
        return false;
    }
    // All other text characters are allowed.
    is_w3c_text_char(c)
}
/// Check whether a character can be used in normal text without
/// escaping.
pub const fn is_valid_in_normal_element_text(c: char) -> bool {
    // Ampersands start character references (only sometimes illegal, but not
    // worth checking precisely) and less-than signs start tags, so both must
    // be escaped in text content.
    if c == '&' || c == '<' {
        return false;
    }
    // All other text characters are allowed.
    is_w3c_text_char(c)
}
/// Check if something is valid text in HTML.
pub const fn is_w3c_text_char(c: char) -> bool {
    if is_whatwg_non_char(c) {
        // Non-characters are obviously not text characters.
        false
    } else if is_whatwg_control_char(c) {
        // Control characters are disallowed, except for ASCII whitespace.
        c.is_ascii_whitespace()
    } else {
        // Everything else is allowed.
        true
    }
}
/// Whether the character is a WHATWG "noncharacter" code point.
const fn is_whatwg_non_char(c: char) -> bool {
    let cp = c as u32;
    // U+FDD0..=U+FDEF, plus every code point whose low 16 bits are FFFE or
    // FFFF up to U+10FFFF (the maximum scalar value), inclusive.
    (cp >= 0xfdd0 && cp <= 0xfdef) || (cp & 0xfffe == 0xfffe && cp <= 0x10ffff)
}
/// Whether the character is a C0/C1 control character (including DEL).
const fn is_whatwg_control_char(c: char) -> bool {
    let cp = c as u32;
    // C0 controls (U+0000..=U+001F) and DEL plus C1 (U+007F..=U+009F).
    cp <= 0x1f || (cp >= 0x7f && cp <= 0x9f)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/css.rs | crates/typst-html/src/css.rs | //! Conversion from Typst data types into CSS data types.
use std::fmt::{self, Display, Write};
use ecow::EcoString;
use typst_library::layout::{Length, Rel};
use typst_library::visualize::{Color, Hsl, LinearRgb, Oklab, Oklch, Rgb};
use typst_utils::Numeric;
/// A list of CSS properties with values.
#[derive(Debug, Default)]
pub struct Properties(EcoString);
impl Properties {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self::default()
    }
    /// Adds a new property to the list.
    pub fn push(&mut self, property: &str, value: impl Display) {
        // Entries are separated by "; ", so a separator is needed before
        // every entry except the first.
        if !self.0.is_empty() {
            self.0.push_str("; ");
        }
        write!(&mut self.0, "{property}: {value}").unwrap();
    }
    /// Adds a new property in builder-style.
    pub fn with(mut self, property: &str, value: impl Display) -> Self {
        self.push(property, value);
        self
    }
    /// Turns this into a string suitable for use as an inline `style`
    /// attribute. Returns `None` if no properties were added.
    pub fn into_inline_styles(self) -> Option<EcoString> {
        if self.0.is_empty() { None } else { Some(self.0) }
    }
}
/// Formats a relative length as CSS, combining the absolute and the
/// percentage part with `calc()` when both are present.
pub fn rel(rel: Rel) -> impl Display {
    typst_utils::display(move |f| {
        let has_abs = !rel.abs.is_zero();
        let has_rel = !rel.rel.is_zero();
        match (has_abs, has_rel) {
            (true, true) => {
                write!(f, "calc({} + {})", percent(rel.rel.get() as f32), length(rel.abs))
            }
            (false, true) => write!(f, "{}", percent(rel.rel.get() as f32)),
            // If the relative part is zero, emit only the absolute part
            // (also when both are zero, yielding "0pt").
            (_, false) => write!(f, "{}", length(rel.abs)),
        }
    })
}
/// Formats a length as CSS, combining the `pt` and `em` parts with `calc()`
/// when both are present.
pub fn length(length: Length) -> impl Display {
    typst_utils::display(move |f| {
        let has_abs = !length.abs.is_zero();
        let has_em = !length.em.is_zero();
        match (has_abs, has_em) {
            (true, true) => {
                write!(f, "calc({}pt + {}em)", length.abs.to_pt(), length.em.get())
            }
            (false, true) => write!(f, "{}em", length.em.get()),
            // If the em part is zero, emit only the point part (also when
            // both are zero, yielding "0pt").
            (_, false) => write!(f, "{}pt", length.abs.to_pt()),
        }
    })
}
/// Formats a color as CSS, dispatching on the color space.
pub fn color(color: Color) -> impl Display {
    typst_utils::display(move |f| match color {
        // Spaces without a dedicated CSS function are converted to RGB/HSL.
        Color::Rgb(_) | Color::Cmyk(_) | Color::Luma(_) => rgb(f, color.to_rgb()),
        Color::Oklab(v) => oklab(f, v),
        Color::Oklch(v) => oklch(f, v),
        Color::LinearRgb(v) => linear_rgb(f, v),
        Color::Hsl(_) | Color::Hsv(_) => hsl(f, color.to_hsl()),
    })
}
/// Writes an `oklab(...)` CSS color.
fn oklab(f: &mut fmt::Formatter<'_>, v: Oklab) -> fmt::Result {
    write!(f, "oklab({} {} {}{})", percent(v.l), number(v.a), number(v.b), alpha(v.alpha))
}
/// Writes an `oklch(...)` CSS color.
fn oklch(f: &mut fmt::Formatter<'_>, v: Oklch) -> fmt::Result {
    write!(
        f,
        "oklch({} {} {}deg{})",
        percent(v.l),
        number(v.chroma),
        number(v.hue.into_degrees()),
        alpha(v.alpha)
    )
}
/// Writes an RGB(A) color, preferring compact hex notation when it can
/// represent the color losslessly.
fn rgb(f: &mut fmt::Formatter<'_>, v: Rgb) -> fmt::Result {
    match rgb_to_8_bit_lossless(v) {
        Some(eight_bit) => {
            let (r, g, b, a) = eight_bit.into_components();
            write!(f, "#{r:02x}{g:02x}{b:02x}")?;
            // Only emit the alpha byte when not fully opaque.
            if a != u8::MAX {
                write!(f, "{a:02x}")?;
            }
            Ok(())
        }
        None => write!(
            f,
            "rgb({} {} {}{})",
            percent(v.red),
            percent(v.green),
            percent(v.blue),
            alpha(v.alpha)
        ),
    }
}
/// Converts an f32 RGBA color to its 8-bit representation if the result is
/// [very close](is_very_close) to the original.
fn rgb_to_8_bit_lossless(
    v: Rgb,
) -> Option<palette::rgb::Rgba<palette::encoding::Srgb, u8>> {
    let quantized = v.into_format::<u8, u8>();
    // Round-trip back to f32 to measure the quantization error.
    let roundtrip = quantized.into_format::<f32, f32>();
    let lossless = is_very_close(v.red, roundtrip.red)
        && is_very_close(v.blue, roundtrip.blue)
        && is_very_close(v.green, roundtrip.green)
        && is_very_close(v.alpha, roundtrip.alpha);
    if lossless { Some(quantized) } else { None }
}
/// Writes a `color(srgb-linear ...)` CSS color.
fn linear_rgb(f: &mut fmt::Formatter<'_>, v: LinearRgb) -> fmt::Result {
    write!(
        f,
        "color(srgb-linear {} {} {}{})",
        percent(v.red),
        percent(v.green),
        percent(v.blue),
        alpha(v.alpha),
    )
}
/// Writes an `hsl(...)` CSS color.
fn hsl(f: &mut fmt::Formatter<'_>, v: Hsl) -> fmt::Result {
    write!(
        f,
        "hsl({}deg {} {}{})",
        number(v.hue.into_degrees()),
        percent(v.saturation),
        percent(v.lightness),
        alpha(v.alpha),
    )
}
/// Displays an alpha component if it not 1.
fn alpha(value: f32) -> impl Display {
typst_utils::display(move |f| {
if !is_very_close(value, 1.0) {
write!(f, " / {}", percent(value))?;
}
Ok(())
})
}
/// Displays a rounded percentage.
///
/// For a percentage, two significant digits after the comma gives us a
/// precision of 1/10_000, which is more than 12 bits (see `is_very_close`).
fn percent(ratio: f32) -> impl Display {
typst_utils::display(move |f| {
write!(f, "{}%", typst_utils::round_with_precision(ratio as f64 * 100.0, 2))
})
}
/// Rounds a number for display.
///
/// For a number between 0 and 1, four significant digits give us a
/// precision of 1/10_000, which is more than 12 bits (see `is_very_close`).
fn number(value: f32) -> impl Display {
typst_utils::round_with_precision(value as f64, 4)
}
/// Whether two component values are close enough that there is no
/// difference when encoding them with 12-bit. 12 bit is the highest
/// reasonable color bit depth found in the industry.
fn is_very_close(a: f32, b: f32) -> bool {
const MAX_BIT_DEPTH: u32 = 12;
const EPS: f32 = 0.5 / 2_i32.pow(MAX_BIT_DEPTH) as f32;
(a - b).abs() < EPS
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/link.rs | crates/typst-html/src/link.rs | use std::collections::VecDeque;
use ecow::{EcoString, EcoVec, eco_format, eco_vec};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_library::foundations::{Label, NativeElement};
use typst_library::introspection::{
DocumentPosition, InnerHtmlPosition, Introspector, Location, Tag,
};
use typst_library::layout::{Frame, FrameItem, Point};
use typst_library::model::{Destination, LinkElem};
use typst_utils::PicoStr;
use crate::{HtmlElement, HtmlNode, attr, tag};
/// Searches for links within a frame.
///
/// If all links are created via `LinkElem` in the future, this can be removed
/// in favor of the query in `identify_link_targets`. For the time being, some
/// links are created without existence of a `LinkElem`, so this is
/// unfortunately necessary.
pub fn introspect_frame_links(frame: &Frame, targets: &mut FxHashSet<Location>) {
for (_, item) in frame.items() {
match item {
FrameItem::Link(Destination::Location(loc), _) => {
targets.insert(*loc);
}
FrameItem::Group(group) => introspect_frame_links(&group.frame, targets),
_ => {}
}
}
}
/// Attaches IDs to nodes produced by link targets to make them linkable.
///
/// May produce `<span>`s for link targets that turned into text nodes or no
/// nodes at all. See the [`LinkElem`] documentation for more details.
pub fn identify_link_targets(
root: &mut HtmlElement,
introspector: &mut Introspector,
mut targets: FxHashSet<Location>,
) {
// Query for all links with an intra-doc (i.e. `Location`) destination to
// know what needs IDs.
targets.extend(
introspector
.query(&LinkElem::ELEM.select())
.iter()
.map(|elem| elem.to_packed::<LinkElem>().unwrap())
.filter_map(|elem| match elem.dest.resolve_with_introspector(introspector) {
Ok(Destination::Location(loc)) => Some(loc),
_ => None,
}),
);
if targets.is_empty() {
// Nothing to do.
return;
}
// Assign IDs to all link targets.
let mut work = Work::new();
traverse(
&mut work,
&targets,
&mut Identificator::new(introspector),
&mut root.children,
);
// Add the mapping from locations to IDs to the introspector to make it
// available to links in the next iteration.
introspector.set_html_ids(work.ids);
}
/// Traverses a list of nodes.
fn traverse(
work: &mut Work,
targets: &FxHashSet<Location>,
identificator: &mut Identificator<'_>,
nodes: &mut EcoVec<HtmlNode>,
) {
let mut i = 0;
while i < nodes.len() {
let node = &mut nodes.make_mut()[i];
match node {
// When visiting a start tag, we check whether the element needs an
// ID and if so, add it to the queue, so that its first child node
// receives an ID.
HtmlNode::Tag(Tag::Start(elem, _)) => {
let loc = elem.location().unwrap();
if targets.contains(&loc) {
work.enqueue(loc, elem.label());
}
}
// When we reach an end tag, we check whether it closes an element
// that is still in our queue. If so, that means the element
// produced no nodes and we need to insert an empty span.
HtmlNode::Tag(Tag::End(loc, _, _)) => {
work.remove(*loc, |label| {
let mut element = HtmlElement::new(tag::span);
let id = identificator.assign(&mut element, label);
nodes.insert(i + 1, HtmlNode::Element(element));
id
});
}
// When visiting an element and the queue is non-empty, we assign an
// ID. Then, we traverse its children.
HtmlNode::Element(element) => {
work.drain(|label| identificator.assign(element, label));
traverse(work, targets, identificator, &mut element.children);
}
// When visiting text and the queue is non-empty, we generate a span
// and assign an ID.
HtmlNode::Text(..) => {
work.drain(|label| {
let mut element =
HtmlElement::new(tag::span).with_children(eco_vec![node.clone()]);
let id = identificator.assign(&mut element, label);
*node = HtmlNode::Element(element);
id
});
}
// When visiting a frame and the queue is non-empty, we assign an
// ID to it (will be added to the resulting SVG element).
HtmlNode::Frame(frame) => {
work.drain(|label| {
frame.id.get_or_insert_with(|| identificator.identify(label)).clone()
});
traverse_frame(
work,
targets,
identificator,
&frame.inner,
&mut frame.link_points,
);
}
}
i += 1;
}
}
/// Traverses a frame embedded in HTML.
fn traverse_frame(
work: &mut Work,
targets: &FxHashSet<Location>,
identificator: &mut Identificator<'_>,
frame: &Frame,
link_points: &mut EcoVec<(Point, EcoString)>,
) {
for (_, item) in frame.items() {
match item {
FrameItem::Tag(Tag::Start(elem, _)) => {
let loc = elem.location().unwrap();
if targets.contains(&loc)
&& let DocumentPosition::Html(position) =
identificator.introspector.position(loc)
&& let Some(InnerHtmlPosition::Frame(point)) = position.details()
{
let id = identificator.identify(elem.label());
work.ids.insert(loc, id.clone());
link_points.push((*point, id));
}
}
FrameItem::Group(group) => {
traverse_frame(work, targets, identificator, &group.frame, link_points);
}
_ => {}
}
}
}
/// Keeps track of the work to be done during ID generation.
struct Work {
/// The locations and labels of elements we need to assign an ID to right
/// now.
queue: VecDeque<(Location, Option<Label>)>,
/// The resulting mapping from element location's to HTML IDs.
ids: FxHashMap<Location, EcoString>,
}
impl Work {
/// Sets up.
fn new() -> Self {
Self { queue: VecDeque::new(), ids: FxHashMap::default() }
}
/// Marks the element with the given location and label as in need of an
/// ID. A subsequent call to `drain` will call `f`.
fn enqueue(&mut self, loc: Location, label: Option<Label>) {
self.queue.push_back((loc, label))
}
/// If one or multiple elements are in need of an ID, calls `f` to generate
/// an ID and apply it to the current node with `f`, and then establishes a
/// mapping from the elements' locations to that ID.
fn drain(&mut self, f: impl FnOnce(Option<Label>) -> EcoString) {
if let Some(&(_, label)) = self.queue.front() {
let id = f(label);
for (loc, _) in self.queue.drain(..) {
self.ids.insert(loc, id.clone());
}
}
}
/// Similar to `drain`, but only for a specific given location.
fn remove(&mut self, loc: Location, f: impl FnOnce(Option<Label>) -> EcoString) {
if let Some(i) = self.queue.iter().position(|&(l, _)| l == loc) {
let (_, label) = self.queue.remove(i).unwrap();
let id = f(label);
self.ids.insert(loc, id.clone());
}
}
}
/// Creates unique IDs for elements.
struct Identificator<'a> {
introspector: &'a Introspector,
loc_counter: usize,
label_counter: FxHashMap<Label, usize>,
}
impl<'a> Identificator<'a> {
/// Creates a new identificator.
fn new(introspector: &'a Introspector) -> Self {
Self {
introspector,
loc_counter: 0,
label_counter: FxHashMap::default(),
}
}
/// Assigns an ID to an element or reuses an existing ID.
fn assign(&mut self, element: &mut HtmlElement, label: Option<Label>) -> EcoString {
element.attrs.get(attr::id).cloned().unwrap_or_else(|| {
let id = self.identify(label);
element.attrs.push_front(attr::id, id.clone());
id
})
}
/// Generates an ID, potentially based on a label.
fn identify(&mut self, label: Option<Label>) -> EcoString {
if let Some(label) = label {
let resolved = label.resolve();
let text = resolved.as_str();
if can_use_label_as_id(text) {
if self.introspector.label_count(label) == 1 {
return text.into();
}
let counter = self.label_counter.entry(label).or_insert(0);
*counter += 1;
return disambiguate(self.introspector, text, counter);
}
}
self.loc_counter += 1;
disambiguate(self.introspector, "loc", &mut self.loc_counter)
}
}
/// Whether the label is both a valid CSS identifier and a valid URL fragment
/// for linking.
///
/// This is slightly more restrictive than HTML and CSS, but easier to
/// understand and explain.
fn can_use_label_as_id(label: &str) -> bool {
!label.is_empty()
&& label.chars().all(|c| c.is_alphanumeric() || matches!(c, '-' | '_'))
&& !label.starts_with(|c: char| c.is_numeric() || c == '-')
}
/// Disambiguates `text` with the suffix `-{counter}`, while ensuring that this
/// does not result in a collision with an existing label.
fn disambiguate(
introspector: &Introspector,
text: &str,
counter: &mut usize,
) -> EcoString {
loop {
let disambiguated = eco_format!("{text}-{counter}");
if PicoStr::get(&disambiguated)
.and_then(Label::new)
.is_some_and(|label| introspector.label_count(label) > 0)
{
*counter += 1;
} else {
break disambiguated;
}
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/attr.rs | crates/typst-html/src/attr.rs |
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
use crate::HtmlAttr;
pub const abbr: HtmlAttr = HtmlAttr::constant("abbr");
pub const accept: HtmlAttr = HtmlAttr::constant("accept");
pub const accept_charset: HtmlAttr = HtmlAttr::constant("accept-charset");
pub const accesskey: HtmlAttr = HtmlAttr::constant("accesskey");
pub const action: HtmlAttr = HtmlAttr::constant("action");
pub const allow: HtmlAttr = HtmlAttr::constant("allow");
pub const allowfullscreen: HtmlAttr = HtmlAttr::constant("allowfullscreen");
pub const alpha: HtmlAttr = HtmlAttr::constant("alpha");
pub const alt: HtmlAttr = HtmlAttr::constant("alt");
pub const aria_activedescendant: HtmlAttr = HtmlAttr::constant("aria-activedescendant");
pub const aria_atomic: HtmlAttr = HtmlAttr::constant("aria-atomic");
pub const aria_autocomplete: HtmlAttr = HtmlAttr::constant("aria-autocomplete");
pub const aria_busy: HtmlAttr = HtmlAttr::constant("aria-busy");
pub const aria_checked: HtmlAttr = HtmlAttr::constant("aria-checked");
pub const aria_colcount: HtmlAttr = HtmlAttr::constant("aria-colcount");
pub const aria_colindex: HtmlAttr = HtmlAttr::constant("aria-colindex");
pub const aria_colspan: HtmlAttr = HtmlAttr::constant("aria-colspan");
pub const aria_controls: HtmlAttr = HtmlAttr::constant("aria-controls");
pub const aria_current: HtmlAttr = HtmlAttr::constant("aria-current");
pub const aria_describedby: HtmlAttr = HtmlAttr::constant("aria-describedby");
pub const aria_details: HtmlAttr = HtmlAttr::constant("aria-details");
pub const aria_disabled: HtmlAttr = HtmlAttr::constant("aria-disabled");
pub const aria_errormessage: HtmlAttr = HtmlAttr::constant("aria-errormessage");
pub const aria_expanded: HtmlAttr = HtmlAttr::constant("aria-expanded");
pub const aria_flowto: HtmlAttr = HtmlAttr::constant("aria-flowto");
pub const aria_haspopup: HtmlAttr = HtmlAttr::constant("aria-haspopup");
pub const aria_hidden: HtmlAttr = HtmlAttr::constant("aria-hidden");
pub const aria_invalid: HtmlAttr = HtmlAttr::constant("aria-invalid");
pub const aria_keyshortcuts: HtmlAttr = HtmlAttr::constant("aria-keyshortcuts");
pub const aria_label: HtmlAttr = HtmlAttr::constant("aria-label");
pub const aria_labelledby: HtmlAttr = HtmlAttr::constant("aria-labelledby");
pub const aria_level: HtmlAttr = HtmlAttr::constant("aria-level");
pub const aria_live: HtmlAttr = HtmlAttr::constant("aria-live");
pub const aria_modal: HtmlAttr = HtmlAttr::constant("aria-modal");
pub const aria_multiline: HtmlAttr = HtmlAttr::constant("aria-multiline");
pub const aria_multiselectable: HtmlAttr = HtmlAttr::constant("aria-multiselectable");
pub const aria_orientation: HtmlAttr = HtmlAttr::constant("aria-orientation");
pub const aria_owns: HtmlAttr = HtmlAttr::constant("aria-owns");
pub const aria_placeholder: HtmlAttr = HtmlAttr::constant("aria-placeholder");
pub const aria_posinset: HtmlAttr = HtmlAttr::constant("aria-posinset");
pub const aria_pressed: HtmlAttr = HtmlAttr::constant("aria-pressed");
pub const aria_readonly: HtmlAttr = HtmlAttr::constant("aria-readonly");
pub const aria_relevant: HtmlAttr = HtmlAttr::constant("aria-relevant");
pub const aria_required: HtmlAttr = HtmlAttr::constant("aria-required");
pub const aria_roledescription: HtmlAttr = HtmlAttr::constant("aria-roledescription");
pub const aria_rowcount: HtmlAttr = HtmlAttr::constant("aria-rowcount");
pub const aria_rowindex: HtmlAttr = HtmlAttr::constant("aria-rowindex");
pub const aria_rowspan: HtmlAttr = HtmlAttr::constant("aria-rowspan");
pub const aria_selected: HtmlAttr = HtmlAttr::constant("aria-selected");
pub const aria_setsize: HtmlAttr = HtmlAttr::constant("aria-setsize");
pub const aria_sort: HtmlAttr = HtmlAttr::constant("aria-sort");
pub const aria_valuemax: HtmlAttr = HtmlAttr::constant("aria-valuemax");
pub const aria_valuemin: HtmlAttr = HtmlAttr::constant("aria-valuemin");
pub const aria_valuenow: HtmlAttr = HtmlAttr::constant("aria-valuenow");
pub const aria_valuetext: HtmlAttr = HtmlAttr::constant("aria-valuetext");
pub const r#as: HtmlAttr = HtmlAttr::constant("as");
pub const r#async: HtmlAttr = HtmlAttr::constant("async");
pub const autocapitalize: HtmlAttr = HtmlAttr::constant("autocapitalize");
pub const autocomplete: HtmlAttr = HtmlAttr::constant("autocomplete");
pub const autocorrect: HtmlAttr = HtmlAttr::constant("autocorrect");
pub const autofocus: HtmlAttr = HtmlAttr::constant("autofocus");
pub const autoplay: HtmlAttr = HtmlAttr::constant("autoplay");
pub const blocking: HtmlAttr = HtmlAttr::constant("blocking");
pub const charset: HtmlAttr = HtmlAttr::constant("charset");
pub const checked: HtmlAttr = HtmlAttr::constant("checked");
pub const cite: HtmlAttr = HtmlAttr::constant("cite");
pub const class: HtmlAttr = HtmlAttr::constant("class");
pub const closedby: HtmlAttr = HtmlAttr::constant("closedby");
pub const color: HtmlAttr = HtmlAttr::constant("color");
pub const colorspace: HtmlAttr = HtmlAttr::constant("colorspace");
pub const cols: HtmlAttr = HtmlAttr::constant("cols");
pub const colspan: HtmlAttr = HtmlAttr::constant("colspan");
pub const command: HtmlAttr = HtmlAttr::constant("command");
pub const commandfor: HtmlAttr = HtmlAttr::constant("commandfor");
pub const content: HtmlAttr = HtmlAttr::constant("content");
pub const contenteditable: HtmlAttr = HtmlAttr::constant("contenteditable");
pub const controls: HtmlAttr = HtmlAttr::constant("controls");
pub const coords: HtmlAttr = HtmlAttr::constant("coords");
pub const crossorigin: HtmlAttr = HtmlAttr::constant("crossorigin");
pub const data: HtmlAttr = HtmlAttr::constant("data");
pub const datetime: HtmlAttr = HtmlAttr::constant("datetime");
pub const decoding: HtmlAttr = HtmlAttr::constant("decoding");
pub const default: HtmlAttr = HtmlAttr::constant("default");
pub const defer: HtmlAttr = HtmlAttr::constant("defer");
pub const dir: HtmlAttr = HtmlAttr::constant("dir");
pub const dirname: HtmlAttr = HtmlAttr::constant("dirname");
pub const disabled: HtmlAttr = HtmlAttr::constant("disabled");
pub const download: HtmlAttr = HtmlAttr::constant("download");
pub const draggable: HtmlAttr = HtmlAttr::constant("draggable");
pub const enctype: HtmlAttr = HtmlAttr::constant("enctype");
pub const enterkeyhint: HtmlAttr = HtmlAttr::constant("enterkeyhint");
pub const fetchpriority: HtmlAttr = HtmlAttr::constant("fetchpriority");
pub const r#for: HtmlAttr = HtmlAttr::constant("for");
pub const form: HtmlAttr = HtmlAttr::constant("form");
pub const formaction: HtmlAttr = HtmlAttr::constant("formaction");
pub const formenctype: HtmlAttr = HtmlAttr::constant("formenctype");
pub const formmethod: HtmlAttr = HtmlAttr::constant("formmethod");
pub const formnovalidate: HtmlAttr = HtmlAttr::constant("formnovalidate");
pub const formtarget: HtmlAttr = HtmlAttr::constant("formtarget");
pub const headers: HtmlAttr = HtmlAttr::constant("headers");
pub const height: HtmlAttr = HtmlAttr::constant("height");
pub const hidden: HtmlAttr = HtmlAttr::constant("hidden");
pub const high: HtmlAttr = HtmlAttr::constant("high");
pub const href: HtmlAttr = HtmlAttr::constant("href");
pub const hreflang: HtmlAttr = HtmlAttr::constant("hreflang");
pub const http_equiv: HtmlAttr = HtmlAttr::constant("http-equiv");
pub const id: HtmlAttr = HtmlAttr::constant("id");
pub const imagesizes: HtmlAttr = HtmlAttr::constant("imagesizes");
pub const imagesrcset: HtmlAttr = HtmlAttr::constant("imagesrcset");
pub const inert: HtmlAttr = HtmlAttr::constant("inert");
pub const inputmode: HtmlAttr = HtmlAttr::constant("inputmode");
pub const integrity: HtmlAttr = HtmlAttr::constant("integrity");
pub const is: HtmlAttr = HtmlAttr::constant("is");
pub const ismap: HtmlAttr = HtmlAttr::constant("ismap");
pub const itemid: HtmlAttr = HtmlAttr::constant("itemid");
pub const itemprop: HtmlAttr = HtmlAttr::constant("itemprop");
pub const itemref: HtmlAttr = HtmlAttr::constant("itemref");
pub const itemscope: HtmlAttr = HtmlAttr::constant("itemscope");
pub const itemtype: HtmlAttr = HtmlAttr::constant("itemtype");
pub const kind: HtmlAttr = HtmlAttr::constant("kind");
pub const label: HtmlAttr = HtmlAttr::constant("label");
pub const lang: HtmlAttr = HtmlAttr::constant("lang");
pub const list: HtmlAttr = HtmlAttr::constant("list");
pub const loading: HtmlAttr = HtmlAttr::constant("loading");
pub const r#loop: HtmlAttr = HtmlAttr::constant("loop");
pub const low: HtmlAttr = HtmlAttr::constant("low");
pub const max: HtmlAttr = HtmlAttr::constant("max");
pub const maxlength: HtmlAttr = HtmlAttr::constant("maxlength");
pub const media: HtmlAttr = HtmlAttr::constant("media");
pub const method: HtmlAttr = HtmlAttr::constant("method");
pub const min: HtmlAttr = HtmlAttr::constant("min");
pub const minlength: HtmlAttr = HtmlAttr::constant("minlength");
pub const multiple: HtmlAttr = HtmlAttr::constant("multiple");
pub const muted: HtmlAttr = HtmlAttr::constant("muted");
pub const name: HtmlAttr = HtmlAttr::constant("name");
pub const nomodule: HtmlAttr = HtmlAttr::constant("nomodule");
pub const nonce: HtmlAttr = HtmlAttr::constant("nonce");
pub const novalidate: HtmlAttr = HtmlAttr::constant("novalidate");
pub const open: HtmlAttr = HtmlAttr::constant("open");
pub const optimum: HtmlAttr = HtmlAttr::constant("optimum");
pub const pattern: HtmlAttr = HtmlAttr::constant("pattern");
pub const ping: HtmlAttr = HtmlAttr::constant("ping");
pub const placeholder: HtmlAttr = HtmlAttr::constant("placeholder");
pub const playsinline: HtmlAttr = HtmlAttr::constant("playsinline");
pub const popover: HtmlAttr = HtmlAttr::constant("popover");
pub const popovertarget: HtmlAttr = HtmlAttr::constant("popovertarget");
pub const popovertargetaction: HtmlAttr = HtmlAttr::constant("popovertargetaction");
pub const poster: HtmlAttr = HtmlAttr::constant("poster");
pub const preload: HtmlAttr = HtmlAttr::constant("preload");
pub const readonly: HtmlAttr = HtmlAttr::constant("readonly");
pub const referrerpolicy: HtmlAttr = HtmlAttr::constant("referrerpolicy");
pub const rel: HtmlAttr = HtmlAttr::constant("rel");
pub const required: HtmlAttr = HtmlAttr::constant("required");
pub const reversed: HtmlAttr = HtmlAttr::constant("reversed");
pub const role: HtmlAttr = HtmlAttr::constant("role");
pub const rows: HtmlAttr = HtmlAttr::constant("rows");
pub const rowspan: HtmlAttr = HtmlAttr::constant("rowspan");
pub const sandbox: HtmlAttr = HtmlAttr::constant("sandbox");
pub const scope: HtmlAttr = HtmlAttr::constant("scope");
pub const selected: HtmlAttr = HtmlAttr::constant("selected");
pub const shadowrootclonable: HtmlAttr = HtmlAttr::constant("shadowrootclonable");
pub const shadowrootcustomelementregistry: HtmlAttr = HtmlAttr::constant("shadowrootcustomelementregistry");
pub const shadowrootdelegatesfocus: HtmlAttr = HtmlAttr::constant("shadowrootdelegatesfocus");
pub const shadowrootmode: HtmlAttr = HtmlAttr::constant("shadowrootmode");
pub const shadowrootserializable: HtmlAttr = HtmlAttr::constant("shadowrootserializable");
pub const shape: HtmlAttr = HtmlAttr::constant("shape");
pub const size: HtmlAttr = HtmlAttr::constant("size");
pub const sizes: HtmlAttr = HtmlAttr::constant("sizes");
pub const slot: HtmlAttr = HtmlAttr::constant("slot");
pub const span: HtmlAttr = HtmlAttr::constant("span");
pub const spellcheck: HtmlAttr = HtmlAttr::constant("spellcheck");
pub const src: HtmlAttr = HtmlAttr::constant("src");
pub const srcdoc: HtmlAttr = HtmlAttr::constant("srcdoc");
pub const srclang: HtmlAttr = HtmlAttr::constant("srclang");
pub const srcset: HtmlAttr = HtmlAttr::constant("srcset");
pub const start: HtmlAttr = HtmlAttr::constant("start");
pub const step: HtmlAttr = HtmlAttr::constant("step");
pub const style: HtmlAttr = HtmlAttr::constant("style");
pub const tabindex: HtmlAttr = HtmlAttr::constant("tabindex");
pub const target: HtmlAttr = HtmlAttr::constant("target");
pub const title: HtmlAttr = HtmlAttr::constant("title");
pub const translate: HtmlAttr = HtmlAttr::constant("translate");
pub const r#type: HtmlAttr = HtmlAttr::constant("type");
pub const usemap: HtmlAttr = HtmlAttr::constant("usemap");
pub const value: HtmlAttr = HtmlAttr::constant("value");
pub const width: HtmlAttr = HtmlAttr::constant("width");
pub const wrap: HtmlAttr = HtmlAttr::constant("wrap");
pub const writingsuggestions: HtmlAttr = HtmlAttr::constant("writingsuggestions");
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/lib.rs | crates/typst-html/src/lib.rs | //! Typst's HTML exporter.
mod attr;
mod charsets;
mod convert;
mod css;
mod document;
mod dom;
mod encode;
mod fragment;
mod link;
mod rules;
mod tag;
mod typed;
pub use self::document::html_document;
pub use self::dom::*;
pub use self::encode::html;
pub use self::rules::{html_span_filled, register};
use ecow::EcoString;
use typst_library::Category;
use typst_library::foundations::{Content, Module, Scope};
use typst_library::introspection::Location;
use typst_macros::elem;
/// Creates the module with all HTML definitions.
pub fn module() -> Module {
let mut html = Scope::deduplicating();
html.start_category(Category::Html);
html.define_elem::<HtmlElem>();
html.define_elem::<FrameElem>();
crate::typed::define(&mut html);
Module::new("html", html)
}
/// An HTML element that can contain Typst content.
///
/// Typst's HTML export automatically generates the appropriate tags for most
/// elements. However, sometimes, it is desirable to retain more control. For
/// example, when using Typst to generate your blog, you could use this function
/// to wrap each article in an `<article>` tag.
///
/// Typst is aware of what is valid HTML. A tag and its attributes must form
/// syntactically valid HTML. Some tags, like `meta` do not accept content.
/// Hence, you must not provide a body for them. We may add more checks in the
/// future, so be sure that you are generating valid HTML when using this
/// function.
///
/// Normally, Typst will generate `html`, `head`, and `body` tags for you. If
/// you instead create them with this function, Typst will omit its own tags.
///
/// ```typ
/// #html.elem("div", attrs: (style: "background: aqua"))[
/// A div with _Typst content_ inside!
/// ]
/// ```
#[elem(name = "elem")]
pub struct HtmlElem {
/// The element's tag.
#[required]
pub tag: HtmlTag,
/// The element's HTML attributes.
pub attrs: HtmlAttrs,
/// The contents of the HTML element.
///
/// The body can be arbitrary Typst content.
#[positional]
pub body: Option<Content>,
/// The element's logical parent, if any.
#[internal]
#[synthesized]
pub parent: Location,
/// A role that should be applied to the top-level styled HTML element, but
/// not its descendants. If we ever get set rules that apply to a specific
/// element instead of a subtree, they could supplant this. If we need the
/// same mechanism for things like `class`, this could potentially also be
/// extended to arbitrary attributes. It's minimal for now.
///
/// This is ignored for `<p>` elements as it otherwise tends to
/// unintentionally attach to paragraphs resulting from grouping of a single
/// element instead of attaching to that element. This is a bit of a hack,
/// but good enough for now as the `role` property is purely internal and
/// we control what it is used for.
#[internal]
#[ghost]
pub role: Option<EcoString>,
}
impl HtmlElem {
/// Add an attribute to the element.
pub fn with_attr(mut self, attr: HtmlAttr, value: impl Into<EcoString>) -> Self {
self.attrs
.as_option_mut()
.get_or_insert_with(Default::default)
.push(attr, value);
self
}
/// Adds the attribute to the element if value is not `None`.
pub fn with_optional_attr(
self,
attr: HtmlAttr,
value: Option<impl Into<EcoString>>,
) -> Self {
if let Some(value) = value { self.with_attr(attr, value) } else { self }
}
/// Adds CSS styles to an element.
fn with_styles(self, properties: css::Properties) -> Self {
if let Some(value) = properties.into_inline_styles() {
self.with_attr(attr::style, value)
} else {
self
}
}
/// Checks whether the given element is an inline-level HTML element.
fn is_inline(elem: &Content) -> bool {
elem.to_packed::<HtmlElem>()
.is_some_and(|elem| tag::is_inline_by_default(elem.tag))
}
}
/// An element that lays out its content as an inline SVG.
///
/// Sometimes, converting Typst content to HTML is not desirable. This can be
/// the case for plots and other content that relies on positioning and styling
/// to convey its message.
///
/// This function allows you to use the Typst layout engine that would also be
/// used for PDF, SVG, and PNG export to render a part of your document exactly
/// how it would appear when exported in one of these formats. It embeds the
/// content as an inline SVG.
#[elem]
pub struct FrameElem {
/// The content that shall be laid out.
#[positional]
#[required]
pub body: Content,
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/convert.rs | crates/typst-html/src/convert.rs | use ecow::{EcoString, EcoVec, eco_vec};
use typst_library::diag::{SourceResult, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{Content, Packed, StyleChain, Target, TargetElem};
use typst_library::introspection::{SplitLocator, TagElem};
use typst_library::layout::{Abs, Axes, HElem, Region, Size};
use typst_library::routines::Pair;
use typst_library::text::{
LinebreakElem, SmartQuoteElem, SmartQuoter, SmartQuotes, SpaceElem, TextElem,
is_default_ignorable,
};
use typst_syntax::Span;
use crate::fragment::{html_block_fragment, html_inline_fragment};
use crate::{FrameElem, HtmlElem, HtmlElement, HtmlFrame, HtmlNode, attr, css, tag};
/// What and how to convert.
pub enum ConversionLevel<'a> {
/// Converts the top-level nodes or children of a block-level element. The
/// conversion has its own local smart quoting state and space protection.
Block,
/// Converts the children of an inline-level HTML element as part of a
/// larger context with shared smart quoting state and shared space
/// protection.
Inline(&'a mut SmartQuoter),
}
/// How to emit whitespace.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Whitespace {
/// Ensures that whitespace that would otherwise be collapsed by HTML
/// rendering engines[^1] is protected by spans with `white-space:
/// pre-wrap`. The affected by whitespace are ASCII spaces and ASCII tabs.
///
/// Tries to emit spans only when necessary.
/// - ASCII tabs and consecutive sequences of spaces and/or tabs are always
/// wrapped in spans in this mode. This happens directly during
/// conversion.
/// - Single ASCII spaces are only wrapped if they aren't supported by
/// normal elements on both sides. This happens in a separate pass that
/// runs for the whole block-level context as doing this properly needs
/// lookahead and lookbehind across different levels of the element
/// hierarchy.
///
/// [^1]: https://www.w3.org/TR/css-text-3/#white-space-rules
Normal,
/// The whitespace is emitted as-is. This happens in
/// - `<pre>` elements as they already have `white-space: pre`,
/// - raw and escapable raw text elements as normal white space rules do not
/// apply to them.
Pre,
}
/// Converts realized content into HTML nodes.
pub fn convert_to_nodes<'a>(
engine: &mut Engine,
locator: &mut SplitLocator,
children: impl IntoIterator<Item = Pair<'a>>,
level: ConversionLevel,
whitespace: Whitespace,
) -> SourceResult<EcoVec<HtmlNode>> {
let block = matches!(level, ConversionLevel::Block);
let mut converter = Converter {
engine,
locator,
quoter: match level {
ConversionLevel::Inline(quoter) => quoter,
ConversionLevel::Block => &mut SmartQuoter::new(),
},
whitespace,
output: EcoVec::new(),
trailing: None,
};
for (child, styles) in children {
handle(&mut converter, child, styles)?;
}
let mut nodes = converter.finish();
if block && whitespace == Whitespace::Normal {
protect_spaces(&mut nodes);
}
Ok(nodes)
}
/// Converts one element into HTML node(s).
fn handle(
converter: &mut Converter,
child: &Content,
styles: StyleChain,
) -> SourceResult<()> {
if let Some(elem) = child.to_packed::<TagElem>() {
converter.push(elem.tag.clone());
} else if let Some(elem) = child.to_packed::<HtmlElem>() {
handle_html_elem(converter, elem, styles)?;
} else if child.is::<SpaceElem>() {
converter.push(HtmlNode::text(' ', child.span()));
} else if let Some(elem) = child.to_packed::<TextElem>() {
let text = if let Some(case) = styles.get(TextElem::case) {
case.apply(&elem.text).into()
} else {
elem.text.clone()
};
handle_text(converter, text, elem.span());
} else if let Some(elem) = child.to_packed::<HElem>()
&& elem.amount.is_zero()
{
// Nothing to do for zero-sized spacing. This is sometimes used to
// destruct spaces, e.g. in footnotes. See [`HElem::hole`].
} else if let Some(elem) = child.to_packed::<LinebreakElem>() {
converter.push(HtmlElement::new(tag::br).spanned(elem.span()));
} else if let Some(elem) = child.to_packed::<SmartQuoteElem>() {
let double = elem.double.get(styles);
let quote = if elem.enabled.get(styles) {
let before = last_char(&converter.output);
let quotes = SmartQuotes::get(
elem.quotes.get_ref(styles),
styles.get(TextElem::lang),
styles.get(TextElem::region),
elem.alternative.get(styles),
);
converter.quoter.quote(before, "es, double)
} else {
SmartQuotes::fallback(double)
};
handle_text(converter, quote.into(), child.span());
} else if let Some(elem) = child.to_packed::<FrameElem>() {
let locator = converter.locator.next(&elem.span());
let style = TargetElem::target.set(Target::Paged).wrap();
let frame = (converter.engine.routines.layout_frame)(
converter.engine,
&elem.body,
locator,
styles.chain(&style),
Region::new(Size::splat(Abs::inf()), Axes::splat(false)),
)?;
converter.push(HtmlFrame::new(frame, styles, elem.span()));
} else {
converter.engine.sink.warn(warning!(
child.span(),
"{} was ignored during HTML export",
child.elem().name(),
));
}
Ok(())
}
/// Handles an HTML element.
///
/// Determines the whitespace mode for the element's body, converts the body
/// (if present) as block- or inline-level content depending on the tag, and
/// then emits the element with its attributes.
fn handle_html_elem(
    converter: &mut Converter,
    elem: &Packed<HtmlElem>,
    styles: StyleChain,
) -> SourceResult<()> {
    // See the docs of `HtmlElem::role` for why we filter out roles for `<p>`
    // elements.
    let role = styles.get_cloned(HtmlElem::role).filter(|_| elem.tag != tag::p);

    let mut children = EcoVec::new();
    if let Some(body) = elem.body.get_ref(styles) {
        // Preformatted mode is sticky: once active it stays active for
        // nested content; it is also activated by `<pre>` and raw tags.
        let whitespace = if converter.whitespace == Whitespace::Pre
            || elem.tag == tag::pre
            || tag::is_raw(elem.tag)
            || tag::is_escapable_raw(elem.tag)
        {
            Whitespace::Pre
        } else {
            Whitespace::Normal
        };

        // The `role` attribute should only apply to the first element in the
        // hierarchy. Thus, we unset it for children if it is currently set.
        let unset;
        let styles = if role.is_some() {
            unset = HtmlElem::role.set(None).wrap();
            styles.chain(&unset)
        } else {
            styles
        };

        if tag::is_block_by_default(elem.tag) {
            children = html_block_fragment(
                converter.engine,
                body,
                converter.locator.next(&elem.span()),
                styles,
                whitespace,
            )?;

            // Block-level elements reset the inline state. This part is
            // unfortunately untested as it's currently not possible to
            // create inline-level content next to block-level content
            // without a paragraph automatically appearing.
            *converter.quoter = SmartQuoter::new();
        } else {
            children = html_inline_fragment(
                converter.engine,
                body,
                converter.locator,
                converter.quoter,
                styles,
                whitespace,
            )?;
        }
    }

    let mut attrs = elem.attrs.get_cloned(styles);
    if let Some(role) = role {
        attrs.push(attr::role, role);
    }

    converter.push(HtmlElement {
        tag: elem.tag,
        attrs,
        children,
        parent: elem.parent,
        span: elem.span(),
        pre_span: false,
    });

    Ok(())
}
/// Handles arbitrary text while taking care that no whitespace within will be
/// collapsed by browsers.
///
/// In `Whitespace::Pre` mode, the text is forwarded verbatim. Otherwise, runs
/// of unspecial characters are emitted as-is while special characters
/// (spaces, tabs, newlines, default-ignorables) get individual treatment.
fn handle_text(converter: &mut Converter, text: EcoString, span: Span) {
    /// Special kinds of characters.
    #[derive(Debug, Copy, Clone, Eq, PartialEq)]
    enum Kind {
        /// ASCII space.
        Space,
        /// ASCII tab.
        Tab,
        /// CR, LF, or CR + LF.
        Newline,
        /// A Unicode default-ignorable. Does not protect spaces from
        /// collapsing.
        Ignorable,
    }

    impl Kind {
        /// Classifies a character, returning `None` for unspecial ones.
        fn of(c: char) -> Option<Kind> {
            match c {
                ' ' => Some(Kind::Space),
                '\t' => Some(Kind::Tab),
                '\r' | '\n' => Some(Kind::Newline),
                c if is_default_ignorable(c) => Some(Kind::Ignorable),
                _ => None,
            }
        }
    }

    if converter.whitespace == Whitespace::Pre {
        converter.push(HtmlNode::Text(text, span));
        return;
    }

    // Byte index up to which the text has already been written out.
    let mut emitted = 0;
    // Classification of the previously visited character, if any.
    let mut prev_kind = None;
    for (i, c) in text.char_indices() {
        let kind = Kind::of(c);
        let prev_kind = prev_kind.replace(kind);
        let Some(kind) = kind else { continue };

        // A space that is surrounded by normal (i.e. not special) characters is
        // already protected and doesn't need further treatment.
        if kind == Kind::Space
            && let Some(None) = prev_kind
            && let Some(after) = text[i + 1..].chars().next()
            && Kind::of(after).is_none()
        {
            continue;
        }

        // Emit the unspecial text up to the special character.
        if emitted < i {
            converter.push_text(&text[emitted..i], span);
            emitted = i;
        }

        // Process the special character.
        match kind {
            Kind::Space => converter.push_text(' ', span),
            Kind::Tab => converter.push_text('\t', span),
            Kind::Newline => {
                if c == '\r' && text[i + 1..].starts_with('\n') {
                    // Skip the CR because the LF will already turn into
                    // a `<br>`.
                    emitted += 1;
                    continue;
                }
                converter.push(HtmlElement::new(tag::br).spanned(span));
            }
            Kind::Ignorable => converter.push_text(c, span),
        }

        emitted += c.len_utf8();
    }

    // Push the remaining unspecial text.
    if emitted < text.len() {
        converter.push_text(
            // Try to reuse the `EcoString` if possible.
            if emitted == 0 { text } else { text[emitted..].into() },
            span,
        );
    }
}
/// State during conversion.
struct Converter<'a, 'y, 'z> {
    /// The engine, used for warnings and nested layout.
    engine: &'a mut Engine<'y>,
    /// Assigns stable locations to nested content.
    locator: &'a mut SplitLocator<'z>,
    /// Smart-quoting state shared with surrounding inline content.
    quoter: &'a mut SmartQuoter,
    /// The whitespace handling mode currently in effect.
    whitespace: Whitespace,
    /// The nodes produced so far.
    output: EcoVec<HtmlNode>,
    /// Pending trailing whitespace that may still need protection.
    trailing: Option<TrailingWhitespace>,
}

/// Keeps track of a trailing whitespace in the output.
struct TrailingWhitespace {
    /// If `true`, the trailing whitespace consists of exactly one ASCII space.
    single: bool,
    /// The trailing whitespace starts at `output[from..]`.
    from: usize,
}

impl Converter<'_, '_, '_> {
    /// Returns the converted nodes.
    fn finish(mut self) -> EcoVec<HtmlNode> {
        self.flush_whitespace();
        self.output
    }

    /// Pushes a node, taking care to protect consecutive whitespace.
    fn push(&mut self, node: impl Into<HtmlNode>) {
        let node = node.into();
        if let HtmlNode::Text(text, _) = &node
            && (text == " " || text == "\t")
        {
            if let Some(ws) = &mut self.trailing {
                // A second whitespace node in a row: no longer a single space.
                ws.single = false;
            } else {
                self.trailing = Some(TrailingWhitespace {
                    single: text == " ",
                    from: self.output.len(),
                });
            }
        } else if !matches!(node, HtmlNode::Tag(_)) {
            // Tags are excluded: they neither end nor interrupt a
            // whitespace run.
            self.flush_whitespace();
        }
        self.output.push(node);
    }

    /// Shorthand for pushing a text node.
    fn push_text(&mut self, text: impl Into<EcoString>, span: Span) {
        self.push(HtmlNode::text(text.into(), span));
    }

    /// If there is trailing whitespace in need of protection, protects it.
    ///
    /// Does not protect single ASCII spaces. Those are handled in a separate
    /// pass as they are more complex and require lookahead. See the
    /// documentation of [`Whitespace`] for more information.
    fn flush_whitespace(&mut self) {
        if self.whitespace == Whitespace::Normal
            && let Some(TrailingWhitespace { single: false, from }) = self.trailing.take()
        {
            // Re-wrap the whitespace run in a protective span.
            let nodes: EcoVec<_> = self.output[from..].iter().cloned().collect();
            self.output.truncate(from);
            self.output.push(HtmlNode::Element(pre_wrap(nodes)));
        }
    }
}
/// Protects all spaces in the given block-level `nodes` against collapsing.
///
/// Does not recurse into block-level elements as those are separate contexts
/// with their own space protection.
fn protect_spaces(nodes: &mut EcoVec<HtmlNode>) {
    let mut protector = Protector::new();
    protector.visit_nodes(nodes);
    // Finish up: a trailing single space has nothing following to support
    // it, so it is treated as if followed by collapsing content.
    protector.collapsing();
}
/// A state machine for whitespace protection.
enum Protector<'a> {
    /// The next single space would collapse and must be protected.
    Collapsing,
    /// The preceding content supports an adjacent single space.
    Supportive,
    /// A single space was seen; whether it needs protection depends on what
    /// follows.
    Space(&'a mut HtmlNode),
}

impl<'a> Protector<'a> {
    /// Creates a new protector.
    fn new() -> Self {
        // At the very start of a block, a leading space would collapse.
        Self::Collapsing
    }

    /// Visits the given nodes and protects single spaces that need to be saved
    /// from collapsing.
    fn visit_nodes(&mut self, nodes: &'a mut EcoVec<HtmlNode>) {
        for node in nodes.make_mut().iter_mut() {
            match node {
                // Invisible; does not affect the state.
                HtmlNode::Tag(_) => {}
                HtmlNode::Text(text, _) => {
                    if text == " " {
                        match self {
                            Self::Collapsing => {
                                protect_space(node);
                                *self = Self::Supportive;
                            }
                            Self::Supportive => {
                                *self = Self::Space(node);
                            }
                            Self::Space(prev) => {
                                // Two single spaces in a row: protect the
                                // earlier one.
                                protect_space(prev);
                                *self = Self::Space(node);
                            }
                        }
                    } else if text.chars().any(|c| !is_default_ignorable(c)) {
                        self.supportive();
                    }
                }
                HtmlNode::Element(element) => {
                    if tag::is_block_by_default(element.tag) || element.tag == tag::br {
                        self.collapsing();
                    } else if !element.pre_span {
                        // Recursively visit the children of inline-level
                        // elements while making sure to not revisit pre-wrapped
                        // spans that we've generated ourselves.
                        self.visit_nodes(&mut element.children);
                    }
                }
                HtmlNode::Frame(_) => self.supportive(),
            }
        }
    }

    /// Called when visiting an element that would collapse adjacent single
    /// spaces. A preceding, if any, and succeeding, if any, single space will
    /// then be protected.
    fn collapsing(&mut self) {
        if let Self::Space(node) = std::mem::replace(self, Self::Collapsing) {
            protect_space(node);
        }
    }

    /// Called when visiting an element that supports adjacent single spaces.
    fn supportive(&mut self) {
        *self = Self::Supportive;
    }
}
/// Protects a single space against collapsing by wrapping it in a
/// pre-wrapped span.
fn protect_space(node: &mut HtmlNode) {
    *node = pre_wrap(eco_vec![node.clone()]).into();
}
/// Wraps a collection of whitespace nodes in a
/// `<span style="white-space: pre-wrap">..</span>` to avoid them being
/// collapsed by HTML rendering engines.
fn pre_wrap(nodes: EcoVec<HtmlNode>) -> HtmlElement {
    // Derive the span from the wrapped nodes for accurate source mapping.
    let span = Span::find(nodes.iter().map(|c| c.span()));
    let mut elem = HtmlElement::new(tag::span)
        .with_styles(css::Properties::new().with("white-space", "pre-wrap"))
        .with_children(nodes)
        .spanned(span);
    // Mark the span as generated so the protector doesn't recurse into it.
    elem.pre_span = true;
    elem
}
/// Returns the last non-default ignorable character from the passed nodes.
///
/// Searches the nodes from back to front, descending recursively into
/// element children.
fn last_char(nodes: &[HtmlNode]) -> Option<char> {
    nodes.iter().rev().find_map(|node| match node {
        HtmlNode::Text(s, _) => s.chars().rev().find(|&c| !is_default_ignorable(c)),
        HtmlNode::Element(e) => last_char(&e.children),
        _ => None,
    })
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/encode.rs | crates/typst-html/src/encode.rs | use std::fmt::Write;
use ecow::{EcoString, eco_format};
use typst_library::diag::{At, SourceResult, StrResult, bail};
use typst_library::foundations::Repr;
use typst_library::introspection::Introspector;
use typst_syntax::Span;
use crate::{
HtmlDocument, HtmlElement, HtmlFrame, HtmlNode, HtmlTag, attr, charsets, tag,
};
/// Encodes an HTML document into a string.
///
/// Emits the doctype declaration followed by the root element and, when
/// pretty printing, a final trailing newline.
pub fn html(document: &HtmlDocument) -> SourceResult<String> {
    let mut writer = Writer::new(&document.introspector, true);
    writer.buf.push_str("<!DOCTYPE html>");
    write_indent(&mut writer);
    write_element(&mut writer, &document.root)?;
    if writer.pretty {
        writer.buf.push('\n');
    }
    Ok(writer.buf)
}
/// Encodes HTML.
struct Writer<'a> {
    /// The output buffer.
    buf: String,
    /// The current indentation level.
    level: usize,
    /// The document's introspector.
    introspector: &'a Introspector,
    /// Whether pretty printing is enabled.
    pretty: bool,
}

impl<'a> Writer<'a> {
    /// Creates a new writer with an empty buffer at indentation level zero.
    fn new(introspector: &'a Introspector, pretty: bool) -> Self {
        Self { buf: String::new(), level: 0, introspector, pretty }
    }
}
/// Writes a newline and indent, if pretty printing is enabled.
fn write_indent(w: &mut Writer) {
    if w.pretty {
        w.buf.push('\n');
        // One indentation step per nesting level.
        for _ in 0..w.level {
            w.buf.push_str(" ");
        }
    }
}
/// Encodes an HTML node into the writer.
///
/// `escape_text` forces text to be fully escaped; callers enable it inside
/// generated pre-wrapped spans (see `write_children`).
fn write_node(w: &mut Writer, node: &HtmlNode, escape_text: bool) -> SourceResult<()> {
    match node {
        // Introspection tags produce no output.
        HtmlNode::Tag(_) => {}
        HtmlNode::Text(text, span) => write_text(w, text, *span, escape_text)?,
        HtmlNode::Element(element) => write_element(w, element)?,
        HtmlNode::Frame(frame) => write_frame(w, frame),
    }
    Ok(())
}
/// Encodes plain text into the writer.
///
/// Characters that may not appear literally in normal element text — or all
/// characters, if `escape` is set — are written as character references.
fn write_text(w: &mut Writer, text: &str, span: Span, escape: bool) -> SourceResult<()> {
    for c in text.chars() {
        if !escape && charsets::is_valid_in_normal_element_text(c) {
            w.buf.push(c);
        } else {
            write_escape(w, c).at(span)?;
        }
    }
    Ok(())
}
/// Encodes one element into the writer.
///
/// Writes the opening tag with its attributes, then the children (raw,
/// escapable-raw, or normal), and finally the closing tag. Void elements get
/// no closing tag and must be childless.
fn write_element(w: &mut Writer, element: &HtmlElement) -> SourceResult<()> {
    w.buf.push('<');
    w.buf.push_str(&element.tag.resolve());

    for (attr, value) in &element.attrs.0 {
        w.buf.push(' ');
        w.buf.push_str(&attr.resolve());

        // If the string is empty, we can use shorthand syntax.
        // `<elem attr="">..</elem>` is equivalent to `<elem attr>..</elem>`.
        if !value.is_empty() {
            w.buf.push('=');
            w.buf.push('"');
            for c in value.chars() {
                if charsets::is_valid_in_attribute_value(c) {
                    w.buf.push(c);
                } else {
                    write_escape(w, c).at(element.span)?;
                }
            }
            w.buf.push('"');
        }
    }

    w.buf.push('>');

    if tag::is_void(element.tag) {
        if !element.children.is_empty() {
            bail!(element.span, "HTML void elements must not have children");
        }
        return Ok(());
    }

    // A leading newline in `<pre>`/`<textarea>` would be stripped by the
    // parser, so emit an extra one. See HTML spec § 13.1.2.5.
    if matches!(element.tag, tag::pre | tag::textarea) && starts_with_newline(element) {
        w.buf.push('\n');
    }

    if tag::is_raw(element.tag) {
        write_raw(w, element)?;
    } else if tag::is_escapable_raw(element.tag) {
        write_escapable_raw(w, element)?;
    } else if !element.children.is_empty() {
        write_children(w, element)?;
    }

    w.buf.push_str("</");
    w.buf.push_str(&element.tag.resolve());
    w.buf.push('>');

    Ok(())
}
/// Encodes the children of an element.
///
/// Pretty printing is only active inside this element if the element allows
/// it and at least one child actually wants newlines around it.
fn write_children(w: &mut Writer, element: &HtmlElement) -> SourceResult<()> {
    // Remember the outer pretty state to restore it afterwards.
    let pretty = w.pretty;
    let pretty_inside = allows_pretty_inside(element.tag)
        && element.children.iter().any(|node| match node {
            HtmlNode::Element(child) => wants_pretty_around(child.tag),
            HtmlNode::Frame(_) => true,
            _ => false,
        });

    w.pretty &= pretty_inside;
    // Whether the next child needs an indent before it.
    let mut indent = w.pretty;

    w.level += 1;
    for c in &element.children {
        let pretty_around = match c {
            HtmlNode::Tag(_) => continue,
            HtmlNode::Element(child) => w.pretty && wants_pretty_around(child.tag),
            HtmlNode::Text(..) | HtmlNode::Frame(_) => false,
        };

        if core::mem::take(&mut indent) || pretty_around {
            write_indent(w);
        }
        // Text inside generated pre-wrapped spans is fully escaped.
        write_node(w, c, element.pre_span)?;
        indent = pretty_around;
    }
    w.level -= 1;

    write_indent(w);
    w.pretty = pretty;

    Ok(())
}
/// Whether the first character in the element is a newline.
///
/// Skips over invisible introspection tags; any non-text node means "no".
fn starts_with_newline(element: &HtmlElement) -> bool {
    element
        .children
        .iter()
        .find(|child| !matches!(child, HtmlNode::Tag(_)))
        .is_some_and(|child| match child {
            HtmlNode::Text(text, _) => text.starts_with(['\n', '\r']),
            _ => false,
        })
}
/// Encodes the contents of a raw text element.
///
/// Raw text is written without escaping, so it must not contain its own
/// closing tag. Pretty printing may wrap or indent the text depending on
/// the chosen [`RawMode`].
fn write_raw(w: &mut Writer, element: &HtmlElement) -> SourceResult<()> {
    let text = collect_raw_text(element)?;

    if let Some(closing) = find_closing_tag(&text, element.tag) {
        bail!(
            element.span,
            "HTML raw text element cannot contain its own closing tag";
            hint: "the sequence `{closing}` appears in the raw text";
        )
    }

    let mode = if w.pretty { RawMode::of(element, &text) } else { RawMode::Keep };
    match mode {
        RawMode::Keep => {
            w.buf.push_str(&text);
        }
        RawMode::Wrap => {
            w.buf.push('\n');
            w.buf.push_str(&text);
            write_indent(w);
        }
        RawMode::Indent => {
            // Re-emit the text line by line, indented one level deeper.
            w.level += 1;
            for line in text.lines() {
                write_indent(w);
                w.buf.push_str(line);
            }
            w.level -= 1;
            write_indent(w);
        }
    }

    Ok(())
}
/// Encodes the contents of an escapable raw text element.
///
/// Unlike plain raw text, escapable raw text goes through the regular text
/// encoding path (without forced escaping).
fn write_escapable_raw(w: &mut Writer, element: &HtmlElement) -> SourceResult<()> {
    walk_raw_text(element, |text, span| write_text(w, text, span, false))
}
/// Collects the textual contents of a raw text element.
///
/// Fails if any piece contains a character that cannot be encoded at all.
fn collect_raw_text(element: &HtmlElement) -> SourceResult<String> {
    let mut collected = String::new();
    walk_raw_text(element, |piece, span| {
        match piece.chars().find(|&c| !charsets::is_w3c_text_char(c)) {
            Some(bad) => Err(unencodable(bad)).at(span),
            None => {
                collected.push_str(piece);
                Ok(())
            }
        }
    })?;
    Ok(collected)
}
/// Iterates over the textual contents of a raw text element.
///
/// Calls `f` for each text child. Element and frame children are an error
/// because raw text cannot represent nested markup.
fn walk_raw_text(
    element: &HtmlElement,
    mut f: impl FnMut(&str, Span) -> SourceResult<()>,
) -> SourceResult<()> {
    for c in &element.children {
        match c {
            // Introspection tags are invisible and skipped.
            HtmlNode::Tag(_) => continue,
            HtmlNode::Text(text, span) => f(text, *span)?,
            HtmlNode::Element(HtmlElement { span, .. })
            | HtmlNode::Frame(HtmlFrame { span, .. }) => {
                bail!(*span, "HTML raw text element cannot have non-text children")
            }
        }
    }
    Ok(())
}
/// Finds a closing sequence for the given tag in the text, if it exists.
///
/// See HTML spec § 13.1.2.6.
fn find_closing_tag(text: &str, tag: HtmlTag) -> Option<&str> {
    let s = tag.resolve();
    let len = s.len();
    text.match_indices("</").find_map(|(i, _)| {
        let rest = &text[i + 2..];
        // A closing sequence is `</`, the tag name (matched
        // case-insensitively), followed by a character that terminates the
        // tag name.
        let disallowed = rest.len() >= len
            && rest[..len].eq_ignore_ascii_case(&s)
            && rest[len..].starts_with(['\t', '\n', '\u{c}', '\r', ' ', '>', '/']);
        disallowed.then(|| &text[i..i + 2 + len])
    })
}
/// How to format the contents of a raw text element.
enum RawMode {
    /// Just don't touch it.
    Keep,
    /// Newline after the opening and newline + indent before the closing tag.
    Wrap,
    /// Newlines after opening and before closing tag and each line indented.
    Indent,
}

impl RawMode {
    /// Chooses a mode based on the element's tag, attributes, and contents.
    fn of(element: &HtmlElement, text: &str) -> Self {
        match element.tag {
            // Only scripts without a non-JavaScript `type` attribute are
            // reformatted.
            tag::script
                if !element.attrs.0.iter().any(|(attr, value)| {
                    *attr == attr::r#type && value != "text/javascript"
                }) =>
            {
                // Template literals can be multi-line, so indent may change
                // the semantics of the JavaScript.
                if text.contains('`') { Self::Wrap } else { Self::Indent }
            }
            tag::style => Self::Indent,
            _ => Self::Keep,
        }
    }
}
/// Whether we are allowed to add an extra newline at the start and end of the
/// element's contents.
///
/// Technically, users can change CSS `display` properties such that the
/// insertion of whitespace may actually impact the visual output. For example,
/// <https://www.w3.org/TR/css-text-3/#example-af2745cd> shows how adding CSS
/// rules to `<p>` can make it sensitive to whitespace. For this reason, we
/// should also respect the `style` tag in the future.
fn allows_pretty_inside(tag: HtmlTag) -> bool {
    if tag == tag::li || tag::is_tabular_by_default(tag) {
        return true;
    }
    tag::is_block_by_default(tag) && tag != tag::pre
}
/// Whether newlines should be added before and after the element if the parent
/// allows it.
///
/// In contrast to `allows_pretty_inside`, which is purely spec-driven, this is
/// more subjective and depends on preference.
fn wants_pretty_around(tag: HtmlTag) -> bool {
    tag == tag::pre || tag::is_metadata_content(tag) || allows_pretty_inside(tag)
}
/// Escape a character.
fn write_escape(w: &mut Writer, c: char) -> StrResult<()> {
// See <https://html.spec.whatwg.org/multipage/syntax.html#syntax-charref>
match c {
'&' => w.buf.push_str("&"),
'<' => w.buf.push_str("<"),
'>' => w.buf.push_str(">"),
'"' => w.buf.push_str("""),
'\'' => w.buf.push_str("'"),
c if charsets::is_w3c_text_char(c) && c != '\r' => {
write!(w.buf, "&#x{:x};", c as u32).unwrap()
}
_ => return Err(unencodable(c)),
}
Ok(())
}
/// The error message for a character that cannot be encoded.
///
/// Marked `#[cold]` since it is only reached on error paths.
#[cold]
fn unencodable(c: char) -> EcoString {
    eco_format!("the character `{}` cannot be encoded in HTML", c.repr())
}
/// Encode a laid out frame into the writer.
///
/// The frame is rendered as inline SVG markup.
fn write_frame(w: &mut Writer, frame: &HtmlFrame) {
    let svg = typst_svg::svg_html_frame(
        &frame.inner,
        frame.text_size,
        frame.id.as_deref(),
        &frame.link_points,
        w.introspector,
    );
    w.buf.push_str(&svg);
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/document.rs | crates/typst-html/src/document.rs | use comemo::{Tracked, TrackedMut};
use ecow::{EcoVec, eco_vec};
use rustc_hash::FxHashSet;
use typst_library::World;
use typst_library::diag::{SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Content, StyleChain, Styles};
use typst_library::introspection::{
DocumentPosition, HtmlPosition, Introspector, IntrospectorBuilder, Location, Locator,
};
use typst_library::layout::Transform;
use typst_library::model::DocumentInfo;
use typst_library::routines::{Arenas, RealizationKind, Routines};
use typst_syntax::Span;
use typst_utils::Protected;
use crate::convert::{ConversionLevel, Whitespace};
use crate::rules::FootnoteContainer;
use crate::{HtmlDocument, HtmlElem, HtmlElement, HtmlNode, HtmlSliceExt, attr, tag};
/// Produce an HTML document from content.
///
/// This first performs root-level realization and then turns the resulting
/// elements into HTML.
#[typst_macros::time(name = "html document")]
pub fn html_document(
    engine: &mut Engine,
    content: &Content,
    styles: StyleChain,
) -> SourceResult<HtmlDocument> {
    // Unpack the engine into individually trackable parts so that the inner
    // function can be memoized.
    html_document_impl(
        engine.routines,
        engine.world,
        engine.introspector.into_raw(),
        engine.traced,
        TrackedMut::reborrow_mut(&mut engine.sink),
        engine.route.track(),
        content,
        styles,
    )
}
/// The internal implementation of `html_document`.
///
/// Realizes the content, converts it to HTML nodes, finalizes the DOM
/// structure, and builds the document's introspector.
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
fn html_document_impl(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    content: &Content,
    styles: StyleChain,
) -> SourceResult<HtmlDocument> {
    let introspector = Protected::from_raw(introspector);
    let mut locator = Locator::root().split();
    let mut engine = Engine {
        routines,
        world,
        introspector,
        traced,
        sink,
        route: Route::extend(route).unnested(),
    };

    // Create this upfront to make it as stable as possible.
    let footnote_locator = locator.next(&());

    // Mark the external styles as "outside" so that they are valid at the
    // document level.
    let styles = styles.to_map().outside();
    let styles = StyleChain::new(&styles);

    let arenas = Arenas::default();
    let mut info = DocumentInfo::default();
    let children = (engine.routines.realize)(
        RealizationKind::HtmlDocument { info: &mut info, is_inline: HtmlElem::is_inline },
        &mut engine,
        &mut locator,
        &arenas,
        content,
        styles,
    )?;

    let nodes = crate::convert::convert_to_nodes(
        &mut engine,
        &mut locator,
        children.iter().copied(),
        ConversionLevel::Block,
        Whitespace::Normal,
    )?;

    // Wrap the converted nodes in `<html>`/`<body>` as needed and append the
    // footnote container.
    let (mut tags_and_root, root_index) = finalize_dom(
        &mut engine,
        nodes,
        &info,
        footnote_locator,
        StyleChain::new(&Styles::root(&children, styles)),
    )?;

    // Introspect the finished DOM and collect link targets.
    let mut link_targets = FxHashSet::default();
    let mut introspector = introspect_html(&tags_and_root, &mut link_targets);

    let HtmlNode::Element(mut root) = tags_and_root.remove(root_index) else {
        panic!("expected HTML element")
    };
    crate::link::identify_link_targets(&mut root, &mut introspector, link_targets);

    Ok(HtmlDocument { info, root, introspector })
}
/// Introspects HTML nodes.
///
/// Walks the DOM, recording introspectable content together with its DOM
/// position, and builds the resulting [`Introspector`].
#[typst_macros::time(name = "introspect html")]
fn introspect_html(
    output: &[HtmlNode],
    link_targets: &mut FxHashSet<Location>,
) -> Introspector {
    /// Recursively discovers introspectable items in `nodes`, extending
    /// `current_position` with DOM indices as it descends.
    fn discover(
        builder: &mut IntrospectorBuilder,
        sink: &mut Vec<(Content, DocumentPosition)>,
        link_targets: &mut FxHashSet<Location>,
        nodes: &[HtmlNode],
        current_position: &mut EcoVec<usize>,
    ) {
        for (node, dom_index) in nodes.iter_with_dom_indices() {
            match node {
                HtmlNode::Tag(tag) => {
                    current_position.push(dom_index);
                    builder.discover_in_tag(
                        sink,
                        tag,
                        DocumentPosition::Html(HtmlPosition::new(
                            current_position.clone(),
                        )),
                    );
                    current_position.pop();
                }
                HtmlNode::Text(_, _) => {}
                HtmlNode::Element(elem) => {
                    // The root `<html>` element does not contribute to the
                    // position path.
                    let is_root = elem.tag == tag::html;
                    if !is_root {
                        current_position.push(dom_index);
                    }
                    if let Some(parent) = elem.parent {
                        // Elements with an explicit parent are registered as
                        // insertions under that parent instead of being
                        // collected into the surrounding sink.
                        let mut nested = vec![];
                        discover(
                            builder,
                            &mut nested,
                            link_targets,
                            &elem.children,
                            current_position,
                        );
                        builder.register_insertion(parent, nested);
                    } else {
                        discover(
                            builder,
                            sink,
                            link_targets,
                            &elem.children,
                            current_position,
                        );
                    }
                    if !is_root {
                        current_position.pop();
                    }
                }
                HtmlNode::Frame(frame) => {
                    current_position.push(dom_index);
                    builder.discover_in_frame(
                        sink,
                        &frame.inner,
                        Transform::identity(),
                        &mut |point| {
                            DocumentPosition::Html(
                                HtmlPosition::new(current_position.clone())
                                    .in_frame(point),
                            )
                        },
                    );
                    crate::link::introspect_frame_links(&frame.inner, link_targets);
                    current_position.pop();
                }
            }
        }
    }

    let mut elems = Vec::new();
    let mut builder = IntrospectorBuilder::new();
    let mut current_position = EcoVec::new();
    discover(&mut builder, &mut elems, link_targets, output, &mut current_position);
    builder.finalize(elems)
}
/// Wrap the user generated HTML in <html>, <body> or both if needed.
///
/// Returns a vector containing outer introspection tags and the HTML root element.
/// A direct reference to the root element is also returned.
fn finalize_dom(
    engine: &mut Engine,
    output: EcoVec<HtmlNode>,
    info: &DocumentInfo,
    footnote_locator: Locator<'_>,
    footnote_styles: StyleChain<'_>,
) -> SourceResult<(EcoVec<HtmlNode>, usize)> {
    // Number of visible (non-tag) top-level nodes.
    let count = output.iter().filter(|node| !matches!(node, HtmlNode::Tag(_))).count();
    let mut needs_body = true;
    for (idx, node) in output.iter().enumerate() {
        let HtmlNode::Element(elem) = node else { continue };
        let tag = elem.tag;
        match (tag, count) {
            (tag::html, 1) => {
                // The user provided a complete DOM; use it as-is.
                FootnoteContainer::unsupported_with_custom_dom(engine)?;
                return Ok((output, idx));
            }
            (tag::body, 1) => {
                FootnoteContainer::unsupported_with_custom_dom(engine)?;
                needs_body = false;
            }
            // A custom `<html>`/`<body>` must be the sole element.
            (tag::html | tag::body, _) => bail!(
                elem.span,
                "`{}` element must be the only element in the document",
                elem.tag,
            ),
            _ => {}
        }
    }

    let body = if needs_body {
        // Wrap everything in a `<body>` and append the footnotes at its end.
        let mut body = HtmlElement::new(tag::body).with_children(output);
        let footnotes = crate::fragment::html_block_fragment(
            engine,
            FootnoteContainer::shared(),
            footnote_locator,
            footnote_styles,
            Whitespace::Normal,
        )?;
        body.children.extend(footnotes);
        eco_vec![body.into()]
    } else {
        output
    };

    let mut html = HtmlElement::new(tag::html)
        .with_attr(attr::lang, info.locale.unwrap_or_default().rfc_3066());

    let head = head_element(info);
    html.children.push(head.into());
    html.children.extend(body);

    Ok((eco_vec![html.into()], 0))
}
/// Generate a `<head>` element.
///
/// Emits the charset and viewport meta tags, followed by title, description,
/// authors, and keywords taken from the document info, where present.
fn head_element(info: &DocumentInfo) -> HtmlElement {
    let mut nodes = EcoVec::new();

    nodes.push(HtmlElement::new(tag::meta).with_attr(attr::charset, "utf-8").into());
    nodes.push(
        HtmlElement::new(tag::meta)
            .with_attr(attr::name, "viewport")
            .with_attr(attr::content, "width=device-width, initial-scale=1")
            .into(),
    );

    if let Some(title) = &info.title {
        let text = eco_vec![HtmlNode::Text(title.clone(), Span::detached())];
        nodes.push(HtmlElement::new(tag::title).with_children(text).into());
    }

    if let Some(description) = &info.description {
        nodes.push(
            HtmlElement::new(tag::meta)
                .with_attr(attr::name, "description")
                .with_attr(attr::content, description.clone())
                .into(),
        );
    }

    if !info.author.is_empty() {
        nodes.push(
            HtmlElement::new(tag::meta)
                .with_attr(attr::name, "authors")
                .with_attr(attr::content, info.author.join(", "))
                .into(),
        );
    }

    if !info.keywords.is_empty() {
        nodes.push(
            HtmlElement::new(tag::meta)
                .with_attr(attr::name, "keywords")
                .with_attr(attr::content, info.keywords.join(", "))
                .into(),
        );
    }

    HtmlElement::new(tag::head).with_children(nodes)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/fragment.rs | crates/typst-html/src/fragment.rs | use comemo::{Track, Tracked, TrackedMut};
use ecow::EcoVec;
use typst_library::World;
use typst_library::diag::{At, SourceResult};
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Content, StyleChain};
use typst_library::introspection::{Introspector, Locator, LocatorLink, SplitLocator};
use typst_library::routines::{Arenas, FragmentKind, Pair, RealizationKind, Routines};
use typst_library::text::SmartQuoter;
use typst_utils::Protected;
use crate::convert::{ConversionLevel, Whitespace};
use crate::{HtmlElem, HtmlNode};
/// Produces HTML nodes from content contained in an HTML element that is
/// block-level by default.
#[typst_macros::time(name = "html block fragment")]
pub fn html_block_fragment(
    engine: &mut Engine,
    content: &Content,
    locator: Locator,
    styles: StyleChain,
    whitespace: Whitespace,
) -> SourceResult<EcoVec<HtmlNode>> {
    // Unpack the engine into individually trackable parts so that the inner
    // function can be memoized.
    html_block_fragment_impl(
        engine.routines,
        engine.world,
        engine.introspector.into_raw(),
        engine.traced,
        TrackedMut::reborrow_mut(&mut engine.sink),
        engine.route.track(),
        content,
        locator.track(),
        styles,
        whitespace,
    )
}
/// The cached, internal implementation of [`html_block_fragment`].
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
fn html_block_fragment_impl(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    content: &Content,
    locator: Tracked<Locator>,
    styles: StyleChain,
    whitespace: Whitespace,
) -> SourceResult<EcoVec<HtmlNode>> {
    let introspector = Protected::from_raw(introspector);
    let link = LocatorLink::new(locator);
    let mut locator = Locator::link(&link).split();
    let mut engine = Engine {
        routines,
        world,
        introspector,
        traced,
        sink,
        route: Route::extend(route),
    };

    // Guard against unboundedly deep nesting of HTML fragments.
    engine.route.check_html_depth().at(content.span())?;

    let arenas = Arenas::default();
    let children = realize_fragment(&mut engine, &mut locator, &arenas, content, styles)?;

    crate::convert::convert_to_nodes(
        &mut engine,
        &mut locator,
        children.iter().copied(),
        ConversionLevel::Block,
        whitespace,
    )
}
/// Produces HTML nodes from content contained in an HTML element that is
/// inline-level by default.
///
/// The difference to block-level content is that inline-level content has
/// shared smartquoting state with surrounding inline-level content. This
/// requires mutable state, which is at odds with memoization. However, the
/// caching granularity would be unnecessarily high anyway if every single
/// fragment was cached, so this works out pretty well together.
#[typst_macros::time(name = "html inline fragment")]
pub fn html_inline_fragment(
    engine: &mut Engine,
    content: &Content,
    locator: &mut SplitLocator,
    quoter: &mut SmartQuoter,
    styles: StyleChain,
    whitespace: Whitespace,
) -> SourceResult<EcoVec<HtmlNode>> {
    // Manually manage the route depth for the recursion check; it is
    // decreased again before returning.
    engine.route.increase();
    engine.route.check_html_depth().at(content.span())?;

    let arenas = Arenas::default();
    let children = realize_fragment(engine, locator, &arenas, content, styles)?;

    let result = crate::convert::convert_to_nodes(
        engine,
        locator,
        children.iter().copied(),
        ConversionLevel::Inline(quoter),
        whitespace,
    );

    engine.route.decrease();
    result
}
/// Realizes the body of an HTML fragment.
///
/// Shared by the block- and inline-level fragment entry points.
fn realize_fragment<'a>(
    engine: &mut Engine,
    locator: &mut SplitLocator,
    arenas: &'a Arenas,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<Vec<Pair<'a>>> {
    (engine.routines.realize)(
        RealizationKind::HtmlFragment {
            // We ignore the `FragmentKind` because we handle both uniformly.
            kind: &mut FragmentKind::Block,
            is_inline: HtmlElem::is_inline,
        },
        engine,
        locator,
        arenas,
        content,
        styles,
    )
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/typed.rs | crates/typst-html/src/typed.rs | //! The typed HTML element API (e.g. `html.div`).
//!
//! The typed API is backed by generated data derived from the HTML
//! specification. See [generated] and `tools/codegen`.
use std::fmt::Write;
use std::num::{NonZeroI64, NonZeroU64};
use std::sync::LazyLock;
use bumpalo::Bump;
use comemo::Tracked;
use ecow::{EcoString, eco_format, eco_vec};
use typst_assets::html as data;
use typst_library::diag::{At, Hint, HintedStrResult, SourceResult, bail};
use typst_library::engine::Engine;
use typst_library::foundations::{
Args, Array, AutoValue, CastInfo, Content, Context, Datetime, Dict, Duration,
FromValue, IntoValue, NativeFuncData, NativeFuncPtr, NoneValue, ParamInfo,
PositiveF64, Reflect, Scope, Str, Type, Value,
};
use typst_library::layout::{Axes, Axis, Dir, Length};
use typst_library::visualize::Color;
use typst_macros::cast;
use crate::{HtmlAttr, HtmlAttrs, HtmlElem, HtmlTag, css, tag};
/// Hook up all typed HTML definitions.
///
/// Registers one constructor function per element from [`FUNCS`] in the
/// given scope.
pub(super) fn define(html: &mut Scope) {
    FUNCS.iter().for_each(|func| {
        html.define_func_with_data(func);
    });
}
/// Lazily created functions for all typed HTML constructors.
///
/// One entry per element in [`data::ELEMS`].
static FUNCS: LazyLock<Vec<NativeFuncData>> = LazyLock::new(|| {
    // Leaking is okay here. It's not meaningfully different from having
    // memory-managed values as `FUNCS` is a static.
    let bump = Box::leak(Box::new(Bump::new()));
    data::ELEMS.iter().map(|info| create_func_data(info, bump)).collect()
});
/// Creates metadata for a native HTML element constructor function.
///
/// Closures and derived strings are allocated in the leaked `bump` arena so
/// they can live for `'static`.
fn create_func_data(
    element: &'static data::ElemInfo,
    bump: &'static Bump,
) -> NativeFuncData {
    NativeFuncData {
        function: NativeFuncPtr(bump.alloc(
            move |_: &mut Engine, _: Tracked<Context>, args: &mut Args| {
                construct(element, args)
            },
        )),
        name: element.name,
        title: {
            // ASCII-uppercase the first letter of the element name for the
            // title, e.g. `div` -> `Div`.
            let title = bump.alloc_str(element.name);
            title[0..1].make_ascii_uppercase();
            title
        },
        docs: element.docs,
        keywords: &["typed-html"],
        contextual: false,
        scope: LazyLock::new(&|| Scope::new()),
        params: LazyLock::new(bump.alloc(move || create_param_info(element))),
        returns: LazyLock::new(&|| CastInfo::Type(Type::of::<Content>())),
    }
}
/// Creates parameter signature metadata for an element.
///
/// Every attribute becomes an optional named parameter; non-void elements
/// additionally accept an optional positional `body`.
fn create_param_info(element: &'static data::ElemInfo) -> Vec<ParamInfo> {
    let mut infos = vec![];
    for attr in element.attributes() {
        let input = AttrType::convert(attr.ty).input();
        infos.push(ParamInfo {
            name: attr.name,
            docs: attr.docs,
            input,
            default: None,
            positional: false,
            named: true,
            variadic: false,
            required: false,
            settable: false,
        });
    }
    if !tag::is_void(HtmlTag::constant(element.name)) {
        infos.push(ParamInfo {
            name: "body",
            docs: "The contents of the HTML element.",
            input: CastInfo::Type(Type::of::<Content>()),
            default: None,
            positional: true,
            named: false,
            variadic: false,
            required: false,
            settable: false,
        });
    }
    infos
}
/// The native constructor function shared by all HTML elements.
///
/// Consumes named arguments matching the element's attributes and, for
/// non-void elements, an optional positional body.
fn construct(element: &'static data::ElemInfo, args: &mut Args) -> SourceResult<Value> {
    let mut attrs = HtmlAttrs::default();
    let mut errors = eco_vec![];
    // Remove every named argument that matches one of the element's known
    // attributes from the argument list, casting it into its attribute
    // string; unnamed or unrecognized arguments are retained for `args.eat`
    // and standard unused-argument handling.
    args.items.retain(|item| {
        let Some(name) = &item.name else { return true };
        let Some(attr) = element.get_attr(name) else { return true };
        let span = item.value.span;
        let value = std::mem::take(&mut item.value.v);
        let ty = AttrType::convert(attr.ty);
        match ty.cast(value).at(span) {
            Ok(Some(string)) => attrs.push(HtmlAttr::constant(attr.name), string),
            // `None` means "valid, but omit the attribute entirely"
            // (e.g. a `false` presence attribute).
            Ok(None) => {}
            // Collect errors instead of bailing so that all faulty
            // attributes are reported at once.
            Err(diags) => errors.extend(diags),
        }
        false
    });
    if !errors.is_empty() {
        return Err(errors);
    }
    let tag = HtmlTag::constant(element.name);
    let mut elem = HtmlElem::new(tag);
    if !attrs.0.is_empty() {
        elem.attrs.set(attrs);
    }
    // Void elements may not have a body.
    if !tag::is_void(tag) {
        let body = args.eat::<Content>()?;
        elem.body.set(body);
    }
    Ok(elem.into_value())
}
/// A dynamic representation of an attribute's type.
///
/// See the documentation of [`data::Type`] for more details on variants.
enum AttrType {
    /// A boolean attribute encoded by presence or absence.
    Presence,
    /// Backed by a native Rust type implementing [`IntoAttr`].
    Native(NativeType),
    /// A fixed set of accepted strings.
    Strings(StringsType),
    /// Accepts any one of multiple contained types.
    Union(UnionType),
    /// A separator-delimited list of items.
    List(ListType),
}
impl AttrType {
    /// Converts the type definition into a representation suitable for casting
    /// and reflection.
    const fn convert(ty: data::Type) -> AttrType {
        use data::Type;
        match ty {
            Type::Presence => Self::Presence,
            // Simple types are backed by native Rust types that know how to
            // encode themselves (see the `IntoAttr` impls below).
            Type::None => Self::of::<NoneValue>(),
            Type::NoneEmpty => Self::of::<NoneEmpty>(),
            Type::NoneUndefined => Self::of::<NoneUndefined>(),
            Type::Auto => Self::of::<AutoValue>(),
            Type::TrueFalse => Self::of::<TrueFalseBool>(),
            Type::YesNo => Self::of::<YesNoBool>(),
            Type::OnOff => Self::of::<OnOffBool>(),
            Type::Int => Self::of::<i64>(),
            Type::NonNegativeInt => Self::of::<u64>(),
            Type::PositiveInt => Self::of::<NonZeroU64>(),
            Type::Float => Self::of::<f64>(),
            Type::PositiveFloat => Self::of::<PositiveF64>(),
            Type::Str => Self::of::<Str>(),
            Type::Char => Self::of::<char>(),
            Type::Datetime => Self::of::<Datetime>(),
            Type::Duration => Self::of::<Duration>(),
            Type::Color => Self::of::<Color>(),
            Type::HorizontalDir => Self::of::<HorizontalDir>(),
            Type::IconSize => Self::of::<IconSize>(),
            Type::ImageCandidate => Self::of::<ImageCandidate>(),
            Type::SourceSize => Self::of::<SourceSize>(),
            // Composite types carry their configuration along.
            Type::Strings(start, end) => Self::Strings(StringsType { start, end }),
            Type::Union(variants) => Self::Union(UnionType(variants)),
            Type::List(inner, separator, shorthand) => {
                Self::List(ListType { inner, separator, shorthand })
            }
        }
    }
    /// Produces the dynamic representation of an attribute type backed by a
    /// native Rust type.
    const fn of<T: IntoAttr>() -> Self {
        Self::Native(NativeType::of::<T>())
    }
    /// See [`Reflect::input`].
    fn input(&self) -> CastInfo {
        match self {
            // Presence attributes reflect as plain booleans.
            Self::Presence => bool::input(),
            Self::Native(ty) => (ty.input)(),
            Self::Union(ty) => ty.input(),
            Self::Strings(ty) => ty.input(),
            Self::List(ty) => ty.input(),
        }
    }
    /// See [`Reflect::castable`].
    fn castable(&self, value: &Value) -> bool {
        match self {
            Self::Presence => bool::castable(value),
            Self::Native(ty) => (ty.castable)(value),
            Self::Union(ty) => ty.castable(value),
            Self::Strings(ty) => ty.castable(value),
            Self::List(ty) => ty.castable(value),
        }
    }
    /// Tries to cast the value into this attribute's type and serialize it into
    /// an HTML attribute string.
    ///
    /// Returns `Ok(None)` when the value is valid, but the attribute should be
    /// omitted entirely.
    fn cast(&self, value: Value) -> HintedStrResult<Option<EcoString>> {
        match self {
            // `true` encodes as the empty string, `false` omits the attribute.
            Self::Presence => value.cast::<bool>().map(|b| b.then(EcoString::new)),
            Self::Native(ty) => (ty.cast)(value),
            Self::Union(ty) => ty.cast(value),
            Self::Strings(ty) => ty.cast(value),
            Self::List(ty) => ty.cast(value),
        }
    }
}
/// An enumeration with generated string variants.
///
/// `start` and `end` are used to index into `data::ATTR_STRINGS`.
struct StringsType {
    /// Start index (inclusive) into `data::ATTR_STRINGS`.
    start: usize,
    /// End index (exclusive) into `data::ATTR_STRINGS`.
    end: usize,
}
impl StringsType {
    /// Reflects the accepted strings as a union of documented values.
    fn input(&self) -> CastInfo {
        let infos = self
            .strings()
            .iter()
            .map(|&(val, desc)| CastInfo::Value(val.into_value(), desc))
            .collect();
        CastInfo::Union(infos)
    }

    /// Whether the value is one of the accepted strings.
    fn castable(&self, value: &Value) -> bool {
        let Value::Str(s) = value else { return false };
        self.strings().iter().any(|&(v, _)| v == s.as_str())
    }

    /// Casts the value into an attribute string if it is accepted.
    fn cast(&self, value: Value) -> HintedStrResult<Option<EcoString>> {
        if !self.castable(&value) {
            return Err(self.input().error(&value));
        }
        value.cast().map(Some)
    }

    /// The accepted strings with their documentation.
    fn strings(&self) -> &'static [(&'static str, &'static str)] {
        &data::ATTR_STRINGS[self.start..self.end]
    }
}
/// A type that accepts any of the contained types.
struct UnionType(&'static [data::Type]);

impl UnionType {
    /// Reflects the union of all variant inputs.
    fn input(&self) -> CastInfo {
        CastInfo::Union(self.iter().map(|ty| ty.input()).collect())
    }

    /// Whether any of the variants accepts the value.
    fn castable(&self, value: &Value) -> bool {
        self.iter().any(|ty| ty.castable(value))
    }

    /// Casts with the first variant that accepts the value.
    fn cast(&self, value: Value) -> HintedStrResult<Option<EcoString>> {
        match self.iter().find(|ty| ty.castable(&value)) {
            Some(ty) => ty.cast(value),
            None => Err(self.input().error(&value)),
        }
    }

    /// Iterates over the dynamic representations of the variant types.
    fn iter(&self) -> impl Iterator<Item = AttrType> {
        self.0.iter().map(|&ty| AttrType::convert(ty))
    }
}
/// A list of items separated by a specific separator char.
///
/// - <https://html.spec.whatwg.org/#space-separated-tokens>
/// - <https://html.spec.whatwg.org/#comma-separated-tokens>
struct ListType {
    /// The type of the individual list items.
    inner: &'static data::Type,
    /// The character inserted between encoded items.
    separator: char,
    /// Whether a single item may be given without wrapping it in an array.
    shorthand: bool,
}
impl ListType {
    /// See [`Reflect::input`].
    fn input(&self) -> CastInfo {
        if self.shorthand {
            Array::input() + self.inner().input()
        } else {
            Array::input()
        }
    }
    /// See [`Reflect::castable`].
    fn castable(&self, value: &Value) -> bool {
        Array::castable(value) || (self.shorthand && self.inner().castable(value))
    }
    /// Casts an array (or, with shorthand enabled, a single item) into a
    /// separator-joined attribute string.
    fn cast(&self, value: Value) -> HintedStrResult<Option<EcoString>> {
        let ty = self.inner();
        if Array::castable(&value) {
            let array = value.cast::<Array>()?;
            let mut out = EcoString::new();
            for (i, item) in array.into_iter().enumerate() {
                // The unwrap assumes list item types never cast to an omitted
                // (`None`) attribute value — presumably guaranteed by the
                // data model; TODO confirm.
                let item = ty.cast(item)?.unwrap();
                // An item containing the separator would be indistinguishable
                // from two items after encoding, so reject it.
                if item.as_str().contains(self.separator) {
                    // `buf` lives outside the match so the formatted name can
                    // be borrowed after the match ends.
                    let buf;
                    let name = match self.separator {
                        ' ' => "space",
                        ',' => "comma",
                        _ => {
                            buf = eco_format!("'{}'", self.separator);
                            buf.as_str()
                        }
                    };
                    bail!(
                        "array item may not contain a {name}";
                        hint: "the array attribute will be encoded as a \
                               {name}-separated string";
                    );
                }
                if i > 0 {
                    out.push(self.separator);
                    // Comma-separated lists get a space for readability.
                    if self.separator == ',' {
                        out.push(' ');
                    }
                }
                out.push_str(&item);
            }
            Ok(Some(out))
        } else if self.shorthand && ty.castable(&value) {
            // Shorthand: a bare item is encoded as a one-element list.
            let item = ty.cast(value)?.unwrap();
            Ok(Some(item))
        } else {
            Err(self.input().error(&value))
        }
    }
    /// The dynamic representation of the item type.
    fn inner(&self) -> AttrType {
        AttrType::convert(*self.inner)
    }
}
/// A dynamic representation of attribute backed by a native type implementing
/// - the standard `Reflect` and `FromValue` traits for casting from a value,
/// - the special `IntoAttr` trait for conversion into an attribute string.
#[derive(Copy, Clone)]
struct NativeType {
    /// See [`Reflect::input`].
    input: fn() -> CastInfo,
    /// Casts a value and encodes it as an attribute string.
    cast: fn(Value) -> HintedStrResult<Option<EcoString>>,
    /// See [`Reflect::castable`].
    castable: fn(&Value) -> bool,
}
impl NativeType {
    /// Creates a dynamic native type from a native Rust type.
    const fn of<T: IntoAttr>() -> Self {
        Self {
            // Monomorphized per `T` and stored as a plain function pointer.
            cast: |value| {
                let this = value.cast::<T>()?;
                Ok(Some(this.into_attr()))
            },
            input: T::input,
            castable: T::castable,
        }
    }
}
/// Casts a native type into an HTML attribute.
pub trait IntoAttr: FromValue {
    /// Turn the value into an attribute string.
    fn into_attr(self) -> EcoString;
}
impl IntoAttr for Str {
    fn into_attr(self) -> EcoString {
        // Strings are used verbatim.
        self.into()
    }
}
/// A boolean that is encoded as a string:
/// - `false` is encoded as `"false"`
/// - `true` is encoded as `"true"`
pub struct TrueFalseBool(pub bool);

cast! {
    TrueFalseBool,
    v: bool => Self(v),
}

impl IntoAttr for TrueFalseBool {
    fn into_attr(self) -> EcoString {
        match self.0 {
            true => "true".into(),
            false => "false".into(),
        }
    }
}
/// A boolean that is encoded as a string:
/// - `false` is encoded as `"no"`
/// - `true` is encoded as `"yes"`
pub struct YesNoBool(pub bool);

cast! {
    YesNoBool,
    v: bool => Self(v),
}

impl IntoAttr for YesNoBool {
    fn into_attr(self) -> EcoString {
        match self.0 {
            true => "yes".into(),
            false => "no".into(),
        }
    }
}
/// A boolean that is encoded as a string:
/// - `false` is encoded as `"off"`
/// - `true` is encoded as `"on"`
pub struct OnOffBool(pub bool);

cast! {
    OnOffBool,
    v: bool => Self(v),
}

impl IntoAttr for OnOffBool {
    fn into_attr(self) -> EcoString {
        match self.0 {
            true => "on".into(),
            false => "off".into(),
        }
    }
}
/// `auto` is encoded as the literal string `"auto"`.
impl IntoAttr for AutoValue {
    fn into_attr(self) -> EcoString {
        "auto".into()
    }
}
/// `none` is encoded as the literal string `"none"`.
impl IntoAttr for NoneValue {
    fn into_attr(self) -> EcoString {
        "none".into()
    }
}
/// A `none` value that turns into an empty string attribute.
struct NoneEmpty;
cast! {
    NoneEmpty,
    _: NoneValue => NoneEmpty,
}
impl IntoAttr for NoneEmpty {
    fn into_attr(self) -> EcoString {
        // The attribute is present, but with an empty value.
        "".into()
    }
}
/// A `none` value that turns into the string `"undefined"`.
struct NoneUndefined;
cast! {
    NoneUndefined,
    _: NoneValue => NoneUndefined,
}
impl IntoAttr for NoneUndefined {
    fn into_attr(self) -> EcoString {
        "undefined".into()
    }
}
// Characters and numbers are encoded via their standard `Display` impls.
impl IntoAttr for char {
    fn into_attr(self) -> EcoString {
        eco_format!("{self}")
    }
}
impl IntoAttr for i64 {
    fn into_attr(self) -> EcoString {
        eco_format!("{self}")
    }
}
impl IntoAttr for u64 {
    fn into_attr(self) -> EcoString {
        eco_format!("{self}")
    }
}
impl IntoAttr for NonZeroI64 {
    fn into_attr(self) -> EcoString {
        eco_format!("{self}")
    }
}
impl IntoAttr for NonZeroU64 {
    fn into_attr(self) -> EcoString {
        eco_format!("{self}")
    }
}
impl IntoAttr for f64 {
    fn into_attr(self) -> EcoString {
        // HTML float literal allows all the things that Rust's float `Display`
        // impl produces.
        eco_format!("{self}")
    }
}
impl IntoAttr for PositiveF64 {
    fn into_attr(self) -> EcoString {
        // Delegates to the `f64` impl above.
        self.get().into_attr()
    }
}
impl IntoAttr for Color {
    fn into_attr(self) -> EcoString {
        // Encoded as a CSS color string.
        eco_format!("{}", css::color(self))
    }
}
impl IntoAttr for Duration {
    fn into_attr(self) -> EcoString {
        // https://html.spec.whatwg.org/#valid-duration-string
        let mut out = EcoString::new();
        // Appends one duration component, space-separating it from any
        // previously written components.
        macro_rules! part {
            ($s:literal) => {
                if !out.is_empty() {
                    out.push(' ');
                }
                write!(out, $s).unwrap();
            };
        }
        let [weeks, days, hours, minutes, seconds] = self.decompose();
        if weeks > 0 {
            part!("{weeks}w");
        }
        if days > 0 {
            part!("{days}d");
        }
        if hours > 0 {
            part!("{hours}h");
        }
        if minutes > 0 {
            part!("{minutes}m");
        }
        // Also emit seconds when everything is zero so that the resulting
        // string is never empty.
        if seconds > 0 || out.is_empty() {
            part!("{seconds}s");
        }
        out
    }
}
impl IntoAttr for Datetime {
    fn into_attr(self) -> EcoString {
        // Dispatch to the matching formatter in the `datetime` module below.
        let fmt = typst_utils::display(|f| match self {
            Self::Date(date) => datetime::date(f, date),
            Self::Time(time) => datetime::time(f, time),
            Self::Datetime(datetime) => datetime::datetime(f, datetime),
        });
        eco_format!("{fmt}")
    }
}
mod datetime {
    use std::fmt::{self, Formatter, Write};

    /// Formats a combined date and time.
    pub fn datetime(f: &mut Formatter, datetime: time::PrimitiveDateTime) -> fmt::Result {
        // https://html.spec.whatwg.org/#valid-global-date-and-time-string
        date(f, datetime.date())?;
        f.write_char('T')?;
        time(f, datetime.time())
    }

    /// Formats a date as `YYYY-MM-DD`.
    pub fn date(f: &mut Formatter, date: time::Date) -> fmt::Result {
        // https://html.spec.whatwg.org/#valid-date-string
        let (year, month, day) = (date.year(), date.month() as u8, date.day());
        write!(f, "{year:04}-{month:02}-{day:02}")
    }

    /// Formats a time as `HH:MM`, appending `:SS` only for non-zero seconds.
    pub fn time(f: &mut Formatter, time: time::Time) -> fmt::Result {
        // https://html.spec.whatwg.org/#valid-time-string
        write!(f, "{:02}:{:02}", time.hour(), time.minute())?;
        match time.second() {
            0 => Ok(()),
            second => write!(f, ":{second:02}"),
        }
    }
}
/// A direction on the X axis: `ltr` or `rtl`.
pub struct HorizontalDir(Dir);
cast! {
    HorizontalDir,
    v: Dir => {
        // Only horizontal directions are representable here.
        if v.axis() == Axis::Y {
            bail!("direction must be horizontal");
        }
        Self(v)
    },
}
impl IntoAttr for HorizontalDir {
    fn into_attr(self) -> EcoString {
        // Delegates to the `Dir` impl; only `ltr`/`rtl` can occur due to the
        // cast above.
        self.0.into_attr()
    }
}
impl IntoAttr for Dir {
    fn into_attr(self) -> EcoString {
        let name = match self {
            Self::LTR => "ltr",
            Self::RTL => "rtl",
            Self::TTB => "ttb",
            Self::BTT => "btt",
        };
        name.into()
    }
}
/// A width/height pair for `<link rel="icon" sizes="..." />`.
pub struct IconSize(Axes<u64>);
cast! {
    IconSize,
    v: Axes<u64> => Self(v),
}
impl IntoAttr for IconSize {
    fn into_attr(self) -> EcoString {
        // Encoded as `{width}x{height}`.
        eco_format!("{}x{}", self.0.x, self.0.y)
    }
}
/// <https://html.spec.whatwg.org/#image-candidate-string>
///
/// Built from a dictionary with a required `src` and an optional `width`
/// (encoded as a `w` width descriptor) or `density` (encoded as an `x`
/// pixel density descriptor).
pub struct ImageCandidate(EcoString);
cast! {
    ImageCandidate,
    mut v: Dict => {
        let src = v.take("src")?.cast::<EcoString>()?;
        let width: Option<NonZeroU64> =
            v.take("width").ok().map(Value::cast).transpose()?;
        let density: Option<PositiveF64> =
            v.take("density").ok().map(Value::cast).transpose()?;
        // Reject any further, unknown dictionary keys.
        v.finish(&["src", "width", "density"])?;
        // Commas at the edges would corrupt the comma-separated `srcset`
        // list the candidate ends up in.
        if src.is_empty() {
            bail!("`src` must not be empty");
        } else if src.starts_with(',') || src.ends_with(',') {
            bail!("`src` must not start or end with a comma");
        }
        let mut out = src;
        // A candidate may carry either a width or a density descriptor, but
        // not both.
        match (width, density) {
            (None, None) => {}
            (Some(width), None) => write!(out, " {width}w").unwrap(),
            // Fix: the pixel density descriptor uses the suffix `x` (e.g.
            // `2x`), not `d`, per the WHATWG image-candidate grammar; a `d`
            // suffix would make the candidate invalid.
            (None, Some(density)) => write!(out, " {}x", density.get()).unwrap(),
            (Some(_), Some(_)) => bail!("cannot specify both `width` and `density`"),
        }
        Self(out)
    },
}
impl IntoAttr for ImageCandidate {
    fn into_attr(self) -> EcoString {
        self.0
    }
}
/// <https://html.spec.whatwg.org/multipage/images.html#valid-source-size-list>
pub struct SourceSize(EcoString);
cast! {
    SourceSize,
    mut v: Dict => {
        let condition = v.take("condition")?.cast::<EcoString>()?;
        let size = v
            .take("size")?
            .cast::<Length>()
            .hint("CSS lengths that are not expressible as Typst lengths are not yet supported")
            .hint("you can use `html.elem` to create a raw attribute")?;
        // NOTE(review): unlike `ImageCandidate`, extra dictionary keys are
        // not rejected via `v.finish` here — confirm whether intentional.
        // Encoded as `(media-condition) length`.
        Self(eco_format!("({condition}) {}", css::length(size)))
    },
}
impl IntoAttr for SourceSize {
    fn into_attr(self) -> EcoString {
        self.0
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Ensures every generated element and attribute name can be interned
    /// via the `constant` constructors without panicking.
    #[test]
    fn test_tags_and_attr_const_internible() {
        for elem in data::ELEMS {
            let _ = HtmlTag::constant(elem.name);
        }
        for attr in data::ATTRS {
            let _ = HtmlAttr::constant(attr.name);
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/dom.rs | crates/typst-html/src/dom.rs | use std::fmt::{self, Debug, Display, Formatter};
use ecow::{EcoString, EcoVec};
use typst_library::diag::{HintedStrResult, StrResult, bail};
use typst_library::foundations::{Dict, Repr, Str, StyleChain, cast};
use typst_library::introspection::{Introspector, Location, Tag};
use typst_library::layout::{Abs, Frame, Point};
use typst_library::model::DocumentInfo;
use typst_library::text::TextElem;
use typst_syntax::Span;
use typst_utils::{PicoStr, ResolvedPicoStr};
use crate::{attr, charsets, css};
/// An HTML document.
#[derive(Debug, Clone)]
pub struct HtmlDocument {
/// The document's root HTML element.
pub root: HtmlElement,
/// Details about the document.
pub info: DocumentInfo,
/// Provides the ability to execute queries on the document.
pub introspector: Introspector,
}
/// A child of an HTML element.
#[derive(Debug, Clone, Hash)]
pub enum HtmlNode {
/// An introspectable element that produced something within this node.
Tag(Tag),
/// Plain text.
Text(EcoString, Span),
/// Another element.
Element(HtmlElement),
/// Layouted content that will be embedded into HTML as an SVG.
Frame(HtmlFrame),
}
impl HtmlNode {
    /// Create a plain text node.
    pub fn text(text: impl Into<EcoString>, span: Span) -> Self {
        Self::Text(text.into(), span)
    }

    /// Returns the span, if any.
    pub fn span(&self) -> Span {
        match self {
            Self::Text(_, span) => *span,
            Self::Element(element) => element.span,
            Self::Frame(frame) => frame.span,
            // Tags carry no span of their own.
            Self::Tag(_) => Span::detached(),
        }
    }
}
impl From<Tag> for HtmlNode {
fn from(tag: Tag) -> Self {
Self::Tag(tag)
}
}
impl From<HtmlElement> for HtmlNode {
fn from(element: HtmlElement) -> Self {
Self::Element(element)
}
}
impl From<HtmlFrame> for HtmlNode {
fn from(frame: HtmlFrame) -> Self {
Self::Frame(frame)
}
}
/// An extension trait for `[HtmlNode]`.
pub trait HtmlSliceExt {
/// Iterates over nodes alongside the indices as they would be observed in
/// the final DOM.
///
/// - Tags receive the index of the preceding node and don't advance the
/// cursor.
///
/// - For indexing purposes, consecutive text nodes are considered as
/// groups. They receive the same index as they are not distinguishable on
/// the DOM level.
fn iter_with_dom_indices(&self) -> impl Iterator<Item = (&HtmlNode, usize)>;
}
impl HtmlSliceExt for [HtmlNode] {
    fn iter_with_dom_indices(&self) -> impl Iterator<Item = (&HtmlNode, usize)> {
        // `cursor` is the index the next indexed node would receive;
        // `was_text` tracks whether an open run of consecutive text nodes
        // precedes the current position.
        let mut cursor = 0;
        let mut was_text = false;
        self.iter().map(move |child| {
            let mut i = cursor;
            match child {
                // Tags keep the index of the preceding node and don't
                // advance the cursor.
                HtmlNode::Tag(_) => {}
                // Consecutive text nodes all share the current cursor value;
                // the run is only closed by a non-text, non-tag node.
                HtmlNode::Text(..) => was_text = true,
                _ => {
                    // Close a preceding text run, then claim a fresh index.
                    cursor += usize::from(was_text);
                    i = cursor;
                    cursor += 1;
                    was_text = false;
                }
            }
            (child, i)
        })
    }
}
/// An HTML element.
#[derive(Debug, Clone, Hash)]
pub struct HtmlElement {
/// The HTML tag.
pub tag: HtmlTag,
/// The element's attributes.
pub attrs: HtmlAttrs,
/// The element's children.
pub children: EcoVec<HtmlNode>,
/// The element's logical parent. For introspection purposes, this element
/// is logically ordered immediately after the parent's start location.
pub parent: Option<Location>,
/// The span from which the element originated, if any.
pub span: Span,
/// Whether this is a span with `white-space: pre-wrap` generated by the
/// compiler to prevent whitespace from being collapsed.
///
/// For such spans, spaces and tabs in the element are emitted as escape
/// sequences. While this does not matter for browser engine rendering (as
/// the `white-space` CSS property is enough), it ensures that formatters
/// won't mess up the output.
pub pre_span: bool,
}
impl HtmlElement {
/// Create a new, blank element without attributes or children.
pub fn new(tag: HtmlTag) -> Self {
Self {
tag,
attrs: HtmlAttrs::default(),
children: EcoVec::new(),
parent: None,
span: Span::detached(),
pre_span: false,
}
}
/// Attach children to the element.
///
/// Note: This overwrites potential previous children.
pub fn with_children(mut self, children: EcoVec<HtmlNode>) -> Self {
self.children = children;
self
}
/// Add an attribute to the element.
pub fn with_attr(mut self, key: HtmlAttr, value: impl Into<EcoString>) -> Self {
self.attrs.push(key, value);
self
}
/// Adds CSS styles to an element.
pub(crate) fn with_styles(self, properties: css::Properties) -> Self {
if let Some(value) = properties.into_inline_styles() {
self.with_attr(attr::style, value)
} else {
self
}
}
/// Attach a span to the element.
pub fn spanned(mut self, span: Span) -> Self {
self.span = span;
self
}
}
/// The tag of an HTML element.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct HtmlTag(PicoStr);
impl HtmlTag {
/// Intern an HTML tag string at runtime.
pub fn intern(string: &str) -> StrResult<Self> {
if string.is_empty() {
bail!("tag name must not be empty");
}
let mut has_hyphen = false;
let mut has_uppercase = false;
for c in string.chars() {
if c == '-' {
has_hyphen = true;
} else if !charsets::is_valid_in_tag_name(c) {
bail!("the character {} is not valid in a tag name", c.repr());
} else {
has_uppercase |= c.is_ascii_uppercase();
}
}
// If we encounter a hyphen, we are dealing with a custom element rather
// than a standard HTML element.
//
// A valid custom element name must:
// - Contain at least one hyphen (U+002D)
// - Start with an ASCII lowercase letter (a-z)
// - Not contain any ASCII uppercase letters (A-Z)
// - Not be one of the reserved names
// - Only contain valid characters (ASCII alphanumeric and hyphens)
//
// See https://html.spec.whatwg.org/multipage/custom-elements.html#valid-custom-element-name
if has_hyphen {
if !string.starts_with(|c: char| c.is_ascii_lowercase()) {
bail!("custom element name must start with a lowercase letter");
}
if has_uppercase {
bail!("custom element name must not contain uppercase letters");
}
// These names are used in SVG and MathML. Since `html.elem` only
// supports creation of _HTML_ elements, they are forbidden.
if matches!(
string,
"annotation-xml"
| "color-profile"
| "font-face"
| "font-face-src"
| "font-face-uri"
| "font-face-format"
| "font-face-name"
| "missing-glyph"
) {
bail!("name is reserved and not valid for a custom element");
}
}
Ok(Self(PicoStr::intern(string)))
}
/// Creates a compile-time constant `HtmlTag`.
///
/// Should only be used in const contexts because it can panic.
#[track_caller]
pub const fn constant(string: &'static str) -> Self {
if string.is_empty() {
panic!("tag name must not be empty");
}
let bytes = string.as_bytes();
let mut i = 0;
while i < bytes.len() {
if !bytes[i].is_ascii() || !charsets::is_valid_in_tag_name(bytes[i] as char) {
panic!("not all characters are valid in a tag name");
}
i += 1;
}
Self(PicoStr::constant(string))
}
/// Resolves the tag to a string.
pub fn resolve(self) -> ResolvedPicoStr {
self.0.resolve()
}
/// Turns the tag into its inner interned string.
pub const fn into_inner(self) -> PicoStr {
self.0
}
}
impl Debug for HtmlTag {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
impl Display for HtmlTag {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "<{}>", self.resolve())
}
}
cast! {
HtmlTag,
self => self.0.resolve().as_str().into_value(),
v: Str => Self::intern(&v)?,
}
/// Attributes of an HTML element.
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash)]
pub struct HtmlAttrs(pub EcoVec<(HtmlAttr, EcoString)>);
impl HtmlAttrs {
    /// Creates an empty attribute list.
    pub fn new() -> Self {
        Self::default()
    }

    /// Adds an attribute.
    pub fn push(&mut self, attr: HtmlAttr, value: impl Into<EcoString>) {
        self.0.push((attr, value.into()));
    }

    /// Adds an attribute to the start of the list.
    pub fn push_front(&mut self, attr: HtmlAttr, value: impl Into<EcoString>) {
        self.0.insert(0, (attr, value.into()));
    }

    /// Finds an attribute value.
    pub fn get(&self, attr: HtmlAttr) -> Option<&EcoString> {
        self.0
            .iter()
            .find_map(|(k, v)| if *k == attr { Some(v) } else { None })
    }
}
cast! {
HtmlAttrs,
self => self.0
.into_iter()
.map(|(key, value)| (key.resolve().as_str().into(), value.into_value()))
.collect::<Dict>()
.into_value(),
values: Dict => Self(values
.into_iter()
.map(|(k, v)| {
let attr = HtmlAttr::intern(&k)?;
let value = v.cast::<EcoString>()?;
Ok((attr, value))
})
.collect::<HintedStrResult<_>>()?),
}
/// An attribute of an HTML element.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct HtmlAttr(PicoStr);
impl HtmlAttr {
/// Intern an HTML attribute string at runtime.
pub fn intern(string: &str) -> StrResult<Self> {
if string.is_empty() {
bail!("attribute name must not be empty");
}
if let Some(c) =
string.chars().find(|&c| !charsets::is_valid_in_attribute_name(c))
{
bail!("the character {} is not valid in an attribute name", c.repr());
}
Ok(Self(PicoStr::intern(string)))
}
/// Creates a compile-time constant `HtmlAttr`.
///
/// Must only be used in const contexts (in a constant definition or
/// explicit `const { .. }` block) because otherwise a panic for a malformed
/// attribute or not auto-internible constant will only be caught at
/// runtime.
#[track_caller]
pub const fn constant(string: &'static str) -> Self {
if string.is_empty() {
panic!("attribute name must not be empty");
}
let bytes = string.as_bytes();
let mut i = 0;
while i < bytes.len() {
if !bytes[i].is_ascii()
|| !charsets::is_valid_in_attribute_name(bytes[i] as char)
{
panic!("not all characters are valid in an attribute name");
}
i += 1;
}
Self(PicoStr::constant(string))
}
/// Resolves the attribute to a string.
pub fn resolve(self) -> ResolvedPicoStr {
self.0.resolve()
}
/// Turns the attribute into its inner interned string.
pub const fn into_inner(self) -> PicoStr {
self.0
}
}
impl Debug for HtmlAttr {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
impl Display for HtmlAttr {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.resolve())
}
}
cast! {
HtmlAttr,
self => self.0.resolve().as_str().into_value(),
v: Str => Self::intern(&v)?,
}
/// Layouted content that will be embedded into HTML as an SVG.
#[derive(Debug, Clone, Hash)]
pub struct HtmlFrame {
/// The frame that will be displayed as an SVG.
pub inner: Frame,
/// The text size where the frame was defined. This is used to size the
/// frame with em units to make text in and outside of the frame sized
/// consistently.
pub text_size: Abs,
/// An ID to assign to the SVG itself.
pub id: Option<EcoString>,
/// IDs to assign to destination jump points within the SVG.
pub link_points: EcoVec<(Point, EcoString)>,
/// The span from which the frame originated.
pub span: Span,
}
impl HtmlFrame {
    /// Wraps a laid-out frame.
    pub fn new(inner: Frame, styles: StyleChain, span: Span) -> Self {
        Self {
            inner,
            // Capture the surrounding text size so the frame can be sized
            // consistently with text outside of it (see the field docs).
            text_size: styles.resolve(TextElem::size),
            id: None,
            link_points: EcoVec::new(),
            span,
        }
    }
}
#[cfg(test)]
mod tests {
use typst_library::foundations::Content;
use typst_library::introspection::TagFlags;
use super::*;
use crate::tag;
#[test]
fn test_iter_with_dom_indices() {
let text = |s| HtmlNode::text(s, Span::detached());
let nodes = [
text("A"),
HtmlElement::new(tag::span).into(),
text("hi"),
text(" you"),
HtmlNode::Tag(Tag::Start(
Content::default(),
TagFlags { introspectable: true, tagged: true },
)),
text(" there"),
HtmlElement::new(tag::span).into(),
text(" my"),
text(" friend!"),
];
assert_eq!(
nodes.iter_with_dom_indices().map(|(_, i)| i).collect::<Vec<_>>(),
[0, 1, 2, 2, 2, 2, 3, 4, 4]
);
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/rules.rs | crates/typst-html/src/rules.rs | use std::num::NonZeroUsize;
use comemo::{Track, Tracked};
use ecow::{EcoString, EcoVec, eco_format};
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{
Content, Context, NativeElement, NativeRuleMap, Selector, ShowFn, Smart, StyleChain,
Target,
};
use typst_library::introspection::{
Counter, History, Introspect, Introspector, Location, QueryIntrospection,
};
use typst_library::layout::resolve::{Cell, CellGrid, Entry, Header};
use typst_library::layout::{
BlockBody, BlockElem, BoxElem, HElem, OuterVAlignment, Sizing,
};
use typst_library::model::{
Attribution, BibliographyElem, CiteElem, CiteGroup, CslIndentElem, CslLightElem,
Destination, DirectLinkElem, EmphElem, EnumElem, FigureCaption, FigureElem,
FootnoteElem, FootnoteEntry, FootnoteMarker, HeadingElem, LinkElem, LinkTarget,
ListElem, OutlineElem, OutlineEntry, OutlineNode, ParElem, ParbreakElem, QuoteElem,
RefElem, StrongElem, TableCell, TableElem, TermsElem, TitleElem, Works,
};
use typst_library::text::{
HighlightElem, LinebreakElem, OverlineElem, RawElem, RawLine, SmallcapsElem,
SpaceElem, StrikeElem, SubElem, SuperElem, UnderlineElem,
};
use typst_library::visualize::{Color, ImageElem};
use typst_macros::elem;
use typst_syntax::Span;
use typst_utils::singleton;
use crate::{FrameElem, HtmlAttr, HtmlAttrs, HtmlElem, HtmlTag, attr, css, tag};
/// Registers show rules for the [HTML target](Target::Html).
pub fn register(rules: &mut NativeRuleMap) {
use Target::{Html, Paged};
// Model.
rules.register(Html, PAR_RULE);
rules.register(Html, STRONG_RULE);
rules.register(Html, EMPH_RULE);
rules.register(Html, LIST_RULE);
rules.register(Html, ENUM_RULE);
rules.register(Html, TERMS_RULE);
rules.register(Html, LINK_RULE);
rules.register(Html, DIRECT_LINK_RULE);
rules.register(Html, TITLE_RULE);
rules.register(Html, HEADING_RULE);
rules.register(Html, FIGURE_RULE);
rules.register(Html, FIGURE_CAPTION_RULE);
rules.register(Html, QUOTE_RULE);
rules.register(Html, FOOTNOTE_RULE);
rules.register(Html, FOOTNOTE_MARKER_RULE);
rules.register(Html, FOOTNOTE_CONTAINER_RULE);
rules.register(Html, FOOTNOTE_ENTRY_RULE);
rules.register(Html, OUTLINE_RULE);
rules.register(Html, OUTLINE_ENTRY_RULE);
rules.register(Html, REF_RULE);
rules.register(Html, CITE_GROUP_RULE);
rules.register(Html, BIBLIOGRAPHY_RULE);
rules.register(Html, CSL_LIGHT_RULE);
rules.register(Html, CSL_INDENT_RULE);
rules.register(Html, TABLE_RULE);
// Text.
rules.register(Html, SUB_RULE);
rules.register(Html, SUPER_RULE);
rules.register(Html, UNDERLINE_RULE);
rules.register(Html, OVERLINE_RULE);
rules.register(Html, STRIKE_RULE);
rules.register(Html, HIGHLIGHT_RULE);
rules.register(Html, SMALLCAPS_RULE);
rules.register(Html, RAW_RULE);
rules.register(Html, RAW_LINE_RULE);
// Layout.
rules.register(Html, BLOCK_RULE);
rules.register(Html, BOX_RULE);
// Visualize.
rules.register(Html, IMAGE_RULE);
// For the HTML target, `html.frame` is a primitive. In the laid-out target,
// it should be a no-op so that nested frames don't break (things like `show
// math.equation: html.frame` can result in nested ones).
rules.register::<FrameElem>(Paged, |elem, _, _| Ok(elem.body.clone()));
}
const PAR_RULE: ShowFn<ParElem> =
|elem, _, _| Ok(HtmlElem::new(tag::p).with_body(Some(elem.body.clone())).pack());
const STRONG_RULE: ShowFn<StrongElem> =
|elem, _, _| Ok(HtmlElem::new(tag::strong).with_body(Some(elem.body.clone())).pack());
const EMPH_RULE: ShowFn<EmphElem> =
|elem, _, _| Ok(HtmlElem::new(tag::em).with_body(Some(elem.body.clone())).pack());
const LIST_RULE: ShowFn<ListElem> = |elem, _, styles| {
Ok(HtmlElem::new(tag::ul)
.with_body(Some(Content::sequence(elem.children.iter().map(|item| {
// Text in wide lists shall always turn into paragraphs.
let mut body = item.body.clone();
if !elem.tight.get(styles) {
body += ParbreakElem::shared();
}
HtmlElem::new(tag::li)
.with_body(Some(body))
.pack()
.spanned(item.span())
}))))
.pack())
};
const ENUM_RULE: ShowFn<EnumElem> = |elem, _, styles| {
    let mut ol = HtmlElem::new(tag::ol);
    // Map the `reversed` option to the boolean `reversed` attribute.
    if elem.reversed.get(styles) {
        ol = ol.with_attr(attr::reversed, "reversed");
    }
    // An explicit start number maps to the `start` attribute.
    if let Some(n) = elem.start.get(styles).custom() {
        ol = ol.with_attr(attr::start, eco_format!("{n}"));
    }
    let body = Content::sequence(elem.children.iter().map(|item| {
        let mut li = HtmlElem::new(tag::li);
        // A per-item explicit number maps to the `value` attribute.
        if let Smart::Custom(nr) = item.number.get(styles) {
            li = li.with_attr(attr::value, eco_format!("{nr}"));
        }
        // Text in wide enums shall always turn into paragraphs.
        let mut body = item.body.clone();
        if !elem.tight.get(styles) {
            body += ParbreakElem::shared();
        }
        li.with_body(Some(body)).pack().spanned(item.span())
    }));
    Ok(ol.with_body(Some(body)).pack())
};
const TERMS_RULE: ShowFn<TermsElem> = |elem, _, styles| {
Ok(HtmlElem::new(tag::dl)
.with_body(Some(Content::sequence(elem.children.iter().flat_map(|item| {
// Text in wide term lists shall always turn into paragraphs.
let mut description = item.description.clone();
if !elem.tight.get(styles) {
description += ParbreakElem::shared();
}
[
HtmlElem::new(tag::dt)
.with_body(Some(item.term.clone()))
.pack()
.spanned(item.term.span()),
HtmlElem::new(tag::dd)
.with_body(Some(description))
.pack()
.spanned(item.description.span()),
]
}))))
.pack())
};
const LINK_RULE: ShowFn<LinkElem> = |elem, engine, _| {
    let span = elem.span();
    let dest = elem.dest.resolve(engine, span)?;
    // Determine the `href` for the anchor, if any.
    let href = match dest {
        Destination::Url(url) => Some(url.clone().into_inner()),
        Destination::Location(location) => {
            // Look up the DOM ID assigned to the linked-to element via
            // introspection and link to it as a fragment.
            let id = engine
                .introspect(HtmlIdIntrospection(location, span))
                .ok_or("failed to determine link anchor")
                .at(span)?;
            Some(eco_format!("#{id}"))
        }
        // Physical page positions have no equivalent in HTML output, so the
        // link is dropped with a warning.
        Destination::Position(_) => {
            engine
                .sink
                .warn(warning!(span, "positional link was ignored during HTML export"));
            None
        }
    };
    Ok(HtmlElem::new(tag::a)
        .with_optional_attr(attr::href, href)
        .with_body(Some(elem.body.clone()))
        .pack())
};
/// Resolves the DOM element ID assigned to the linked-to element with the given
/// location. The second field is the span of the link, used for diagnostics.
#[derive(Debug, Clone, PartialEq, Hash)]
struct HtmlIdIntrospection(Location, Span);
impl Introspect for HtmlIdIntrospection {
    /// The ID, if one was assigned to the destination element.
    type Output = Option<EcoString>;
    fn introspect(
        &self,
        _: &mut Engine,
        introspector: Tracked<Introspector>,
    ) -> Self::Output {
        introspector.html_id(self.0).cloned()
    }
    /// Produces a warning when the introspected ID did not converge.
    fn diagnose(&self, history: &History<Self::Output>) -> SourceDiagnostic {
        let introspector = history.final_introspector();
        // Name the kind of element that was linked to, if it still exists in
        // the final document.
        let what = match introspector.query_first(&Selector::Location(self.0)) {
            Some(content) => content.elem().name(),
            None => "element",
        };
        warning!(
            self.1,
            "HTML element ID assigned to the destination {what} did not stabilize",
        )
        .with_hint(history.hint("IDs", |id| match id {
            Some(id) => id.clone(),
            None => "(no ID)".into(),
        }))
    }
}
/// Shows a direct link by lowering it to a regular link element that points
/// at the stored location.
const DIRECT_LINK_RULE: ShowFn<DirectLinkElem> = |elem, _, _| {
    let target = LinkTarget::Dest(Destination::Location(elem.loc));
    let link = LinkElem::new(target, elem.body.clone());
    Ok(link.pack())
};
/// Shows the document title as a top-level `<h1>` element.
const TITLE_RULE: ShowFn<TitleElem> = |elem, _, styles| {
    let body = elem.resolve_body(styles).at(elem.span())?;
    Ok(HtmlElem::new(tag::h1).with_body(Some(body)).pack())
};
/// Shows a heading as an HTML heading element (`<h2>` to `<h6>`).
///
/// Headings that are too deep for HTML fall back to a `<div>` with ARIA
/// heading attributes, accompanied by a warning.
const HEADING_RULE: ShowFn<HeadingElem> = |elem, engine, styles| {
    let span = elem.span();
    let mut realized = elem.body.clone();
    // Prepend the resolved numbering, separated by a space.
    if let Some(numbering) = elem.numbering.get_ref(styles).as_ref() {
        let location = elem.location().unwrap();
        let numbering = Counter::of(HeadingElem::ELEM)
            .display_at(engine, location, styles, numbering, span)?
            .spanned(span);
        realized = numbering + SpaceElem::shared().clone() + realized;
    }
    // HTML's h1 is closer to a title element. There should only be one.
    // Meanwhile, a level 1 Typst heading is a section heading. For this
    // reason, levels are offset by one: A Typst level 1 heading becomes
    // a `<h2>`.
    let level = elem.resolve_level(styles).get();
    Ok(if level >= 6 {
        engine.sink.warn(warning!(
            span,
            "heading of level {} was transformed to \
             <div role=\"heading\" aria-level=\"{}\">, which is not \
             supported by all assistive technology",
            level, level + 1;
            hint: "HTML only supports <h1> to <h6>, not <h{}>", level + 1;
            hint: "you may want to restructure your document so that \
                   it doesn't contain deep headings";
        ));
        HtmlElem::new(tag::div)
            .with_body(Some(realized))
            .with_attr(attr::role, "heading")
            .with_attr(attr::aria_level, eco_format!("{}", level + 1))
            .pack()
    } else {
        // `level` is at least 1 (it comes from a `NonZeroUsize`) and at most
        // 5 here, so the index is in bounds.
        let t = [tag::h2, tag::h3, tag::h4, tag::h5, tag::h6][level - 1];
        HtmlElem::new(t).with_body(Some(realized)).pack()
    })
};
/// Shows a figure as an HTML `<figure>` element, placing the caption above
/// or below the body depending on its configured position.
const FIGURE_RULE: ShowFn<FigureElem> = |elem, _, styles| {
    let span = elem.span();
    let mut content = elem.body.clone();
    // Attach the caption on the configured side, if there is one.
    if let Some(caption) = elem.caption.get_cloned(styles) {
        content = match caption.position.get(styles) {
            OuterVAlignment::Top => caption.pack() + content,
            OuterVAlignment::Bottom => content + caption.pack(),
        };
    }
    // Ensure that the body is considered a paragraph.
    content += ParbreakElem::shared().clone().spanned(span);
    Ok(HtmlElem::new(tag::figure).with_body(Some(content)).pack())
};
/// Shows a figure's caption as an HTML `<figcaption>` element.
const FIGURE_CAPTION_RULE: ShowFn<FigureCaption> = |elem, engine, styles| {
    let body = elem.realize(engine, styles)?;
    Ok(HtmlElem::new(tag::figcaption).with_body(Some(body)).pack())
};
/// Shows a quote either inline (with surrounding quotation marks) or as a
/// `<blockquote>` element with an optional attribution.
const QUOTE_RULE: ShowFn<QuoteElem> = |elem, _, styles| {
    let span = elem.span();
    let block = elem.block.get(styles);
    let mut realized = elem.body.clone();
    // By default, only inline quotes receive quotation marks.
    if elem.quotes.get(styles).unwrap_or(!block) {
        realized = QuoteElem::quoted(realized, styles);
    }
    let attribution = elem.attribution.get_ref(styles);
    if block {
        let mut blockquote = HtmlElem::new(tag::blockquote).with_body(Some(realized));
        // If the attribution is a link to a URL, also expose that URL through
        // the blockquote's `cite` attribute.
        if let Some(Attribution::Content(attribution)) = attribution
            && let Some(link) = attribution.to_packed::<LinkElem>()
            && let LinkTarget::Dest(Destination::Url(url)) = &link.dest
        {
            blockquote = blockquote.with_attr(attr::cite, url.clone().into_inner());
        }
        realized = blockquote.pack().spanned(span);
        // The visible attribution follows the blockquote.
        if let Some(attribution) = attribution.as_ref() {
            realized += attribution.realize(span);
        }
    } else if let Some(Attribution::Label(label)) = attribution {
        // Inline quotes render a label attribution as a citation.
        realized += SpaceElem::shared().clone();
        realized += CiteElem::new(*label).pack().spanned(span);
    }
    Ok(realized)
};
/// Shows a footnote reference as a superscript link (with the ARIA role
/// `doc-noteref`) to the corresponding footnote entry.
const FOOTNOTE_RULE: ShowFn<FootnoteElem> = |elem, engine, styles| {
    let span = elem.span();
    let (dest, num) = elem.realize(engine, styles)?;
    let sup = SuperElem::new(num).pack().spanned(span);
    // Link to the footnote entry.
    let link = LinkElem::new(dest.into(), sup)
        .pack()
        .styled(HtmlElem::role.set(Some("doc-noteref".into())));
    // Indicates the presence of a default footnote rule to emit an error when
    // no footnote container is available.
    let marker = FootnoteMarker::new().pack().spanned(span);
    Ok(HElem::hole().clone() + link + marker)
};
/// This is inserted at the end of the body to display footnotes. In the future,
/// we can expose this to allow customizing where the footnotes appear. It could
/// also be exposed for paged export.
#[elem]
pub struct FootnoteContainer {}
impl FootnoteContainer {
    /// Get the globally shared footnote container element.
    pub fn shared() -> &'static Content {
        singleton!(Content, FootnoteContainer::new().pack())
    }
    /// Fails with an error if there are footnotes.
    ///
    /// Queries for footnote markers and emits one error per marker, so that
    /// every offending footnote is reported at its own span.
    pub fn unsupported_with_custom_dom(engine: &mut Engine) -> SourceResult<()> {
        let markers = engine.introspect(QueryIntrospection(
            FootnoteMarker::ELEM.select(),
            Span::detached(),
        ));
        if markers.is_empty() {
            return Ok(());
        }
        Err(markers
            .iter()
            .map(|marker| {
                error!(
                    marker.span(),
                    "footnotes are not currently supported in combination \
                     with a custom `<html>` or `<body>` element";
                    hint: "you can still use footnotes with a custom footnote show rule";
                )
            })
            .collect())
    }
}
/// Footnote markers only serve for error reporting and produce no output.
const FOOTNOTE_MARKER_RULE: ShowFn<FootnoteMarker> = |_, _, _| Ok(Content::empty());
/// Shows the footnote container: an ordered list with one entry per footnote
/// in the document, wrapped in a section with the ARIA role `doc-endnotes`.
const FOOTNOTE_CONTAINER_RULE: ShowFn<FootnoteContainer> = |elem, engine, _| {
    let notes =
        engine.introspect(QueryIntrospection(FootnoteElem::ELEM.select(), elem.span()));
    if notes.is_empty() {
        return Ok(Content::empty());
    }
    // Create entries for all footnotes in the document.
    let items = notes.into_iter().filter_map(|note| {
        let note = note.into_packed::<FootnoteElem>().unwrap();
        // Footnote references to existing footnotes don't get their own entry.
        if note.is_ref() {
            return None;
        }
        let loc = note.location().unwrap();
        let span = note.span();
        Some(
            HtmlElem::new(tag::li)
                .with_body(Some(FootnoteEntry::new(note).pack().spanned(span)))
                .with_parent(loc)
                .pack()
                .located(loc.variant(1))
                .spanned(span),
        )
    });
    // There can be multiple footnotes in a container, so they semantically
    // represent an ordered list. However, the list is already numbered with the
    // footnote superscripts in the DOM, so we turn off CSS' list enumeration.
    let list = HtmlElem::new(tag::ol)
        .with_styles(css::Properties::new().with("list-style-type", "none"))
        .with_body(Some(Content::sequence(items)))
        .pack();
    // The user may want to style the whole footnote element so we wrap it in an
    // additional selectable container. This is also how it's done in the ARIA
    // spec (although there, the section also contains an additional heading).
    Ok(HtmlElem::new(tag::section)
        .with_attr(attr::role, "doc-endnotes")
        .with_body(Some(list))
        .pack())
};
/// Shows a footnote entry: a backlink prefix followed by the note's body.
const FOOTNOTE_ENTRY_RULE: ShowFn<FootnoteEntry> = |elem, engine, styles| {
    let (prefix, body) = elem.realize(engine, styles)?;
    // The prefix is a link back to the first footnote reference, so
    // `doc-backlink` is the appropriate ARIA role.
    let backlink = prefix.styled(HtmlElem::role.set(Some("doc-backlink".into())));
    // We do not use the ARIA role `doc-footnote` because it "is only for
    // representing individual notes that occur within the body of a work" (see
    // <https://www.w3.org/TR/dpub-aria-1.1/#doc-footnote>). Our footnotes are
    // more appropriately modelled as ARIA endnotes. This is also in line with
    // how Pandoc handles footnotes.
    Ok(backlink + body)
};
/// Shows the outline as a `<nav role="doc-toc">` containing nested ordered
/// lists.
const OUTLINE_RULE: ShowFn<OutlineElem> = |elem, engine, styles| {
    /// Converts one level of outline nodes into an unnumbered `<ol>`.
    fn convert_list(list: Vec<OutlineNode>) -> Content {
        // The Digital Publishing ARIA spec also proposed to add
        // `role="directory"` to the `<ol>` element, but this role is
        // deprecated, so we don't do that. The elements are already easily
        // selectable via `nav[role="doc-toc"] ol`.
        HtmlElem::new(tag::ol)
            .with_styles(css::Properties::new().with("list-style-type", "none"))
            .with_body(Some(Content::sequence(list.into_iter().map(convert_node))))
            .pack()
    }
    /// Converts a single outline node (entry plus children) into an `<li>`.
    fn convert_node(node: OutlineNode) -> Content {
        let body = if !node.children.is_empty() {
            // The `<div>` is not technically necessary, but otherwise it
            // auto-wraps in a `<p>`, which results in bad spacing. Perhaps, we
            // can remove this in the future. See also:
            // <https://github.com/typst/typst/issues/5907>
            HtmlElem::new(tag::div).with_body(Some(node.entry.pack())).pack()
                + convert_list(node.children)
        } else {
            node.entry.pack()
        };
        HtmlElem::new(tag::li).with_body(Some(body)).pack()
    }
    let title = elem.realize_title(styles);
    let tree = elem.realize_tree(engine, styles)?;
    let list = convert_list(tree);
    Ok(HtmlElem::new(tag::nav)
        .with_attr(attr::role, "doc-toc")
        .with_body(Some(title.unwrap_or_default() + list))
        .pack())
};
/// Shows an outline entry as a link to the referenced element, optionally
/// preceded by its numbering prefix wrapped in a `span.prefix`.
const OUTLINE_ENTRY_RULE: ShowFn<OutlineEntry> = |elem, engine, styles| {
    let span = elem.span();
    let context = Context::new(None, Some(styles));
    let mut realized = elem.body().at(span)?;
    if let Some(prefix) = elem.prefix(engine, context.track(), span)? {
        // Wrap the prefix in a classed span so users can style it with CSS.
        let wrapped = HtmlElem::new(tag::span)
            .with_attr(attr::class, "prefix")
            .with_body(Some(prefix))
            .pack()
            .spanned(span);
        // Figure entries may configure their own prefix separator.
        let separator = match elem.element.to_packed::<FigureElem>() {
            Some(elem) => elem.resolve_separator(styles),
            None => SpaceElem::shared().clone(),
        };
        realized = Content::sequence([wrapped, separator, realized]);
    }
    let loc = elem.element_location().at(span)?;
    let dest = Destination::Location(loc);
    Ok(LinkElem::new(dest.into(), realized).pack())
};
/// Shows a reference by delegating fully to its realization logic.
const REF_RULE: ShowFn<RefElem> = |elem, engine, styles| elem.realize(engine, styles);
/// Shows a citation group, tagging it with the ARIA role `doc-biblioref`.
const CITE_GROUP_RULE: ShowFn<CiteGroup> = |elem, engine, _| {
    let realized = elem.realize(engine)?;
    Ok(realized.styled(HtmlElem::role.set(Some("doc-biblioref".into()))))
};
// For the bibliography, we have a few elements that should be styled (e.g.
// indent), but inline styles are not appropriate because they couldn't be
// properly overridden. For those, we currently emit classes so that a user can
// style them with CSS, but do not emit any styles ourselves.
/// Shows the bibliography as a section with the ARIA role `doc-bibliography`
/// containing an unnumbered list with one item per reference.
const BIBLIOGRAPHY_RULE: ShowFn<BibliographyElem> = |elem, engine, styles| {
    let span = elem.span();
    let works = Works::with_bibliography(engine, elem.clone())?;
    let references = works.references(elem, styles)?;
    let items = references.iter().map(|(prefix, reference, loc)| {
        let mut realized = reference.clone();
        if let Some(mut prefix) = prefix.clone() {
            // If we have a link back to the first citation referencing this
            // entry, attach the appropriate role.
            if prefix.is::<DirectLinkElem>() {
                prefix = prefix.set(HtmlElem::role, Some("doc-backlink".into()));
            }
            // Wrap the prefix in a classed span so users can style it via CSS.
            let wrapped = HtmlElem::new(tag::span)
                .with_attr(attr::class, "prefix")
                .with_body(Some(prefix))
                .pack()
                .spanned(span);
            let separator = SpaceElem::shared().clone();
            realized = Content::sequence([wrapped, separator, realized]);
        }
        HtmlElem::new(tag::li)
            .with_body(Some(realized))
            .pack()
            .located(*loc)
            .spanned(span)
    });
    let title = elem.realize_title(styles);
    // CSS list enumeration is disabled; prefixes, if any, are emitted as part
    // of the items themselves.
    let list = HtmlElem::new(tag::ul)
        .with_styles(css::Properties::new().with("list-style-type", "none"))
        .with_body(Some(Content::sequence(items)))
        .pack()
        .spanned(span);
    Ok(HtmlElem::new(tag::section)
        .with_attr(attr::role, "doc-bibliography")
        .with_optional_attr(attr::class, works.hanging_indent.then_some("hanging-indent"))
        .with_body(Some(title.unwrap_or_default() + list))
        .pack())
};
/// Shows light CSL text as a span carrying the `light` class for CSS styling.
const CSL_LIGHT_RULE: ShowFn<CslLightElem> = |elem, _, _| {
    let span = HtmlElem::new(tag::span).with_attr(attr::class, "light");
    Ok(span.with_body(Some(elem.body.clone())).pack())
};
/// Shows indented CSL text as a div carrying the `indent` class for CSS
/// styling.
const CSL_INDENT_RULE: ShowFn<CslIndentElem> = |elem, _, _| {
    let div = HtmlElem::new(tag::div).with_attr(attr::class, "indent");
    Ok(div.with_body(Some(elem.body.clone())).pack())
};
/// Shows a table by converting its resolved cell grid to HTML.
const TABLE_RULE: ShowFn<TableElem> = |elem, _, styles| {
    // The unwrap relies on the grid having been resolved before the show rule
    // runs — presumably during synthesis; TODO(review): confirm.
    let grid = elem.grid.as_ref().unwrap();
    Ok(show_cellgrid(grid, styles))
};
/// Converts a resolved cell grid into an HTML `<table>`, splitting leading
/// consecutive headers into a `<thead>` and the footer into a `<tfoot>`.
fn show_cellgrid(grid: &CellGrid, styles: StyleChain) -> Content {
    // Small helper for building an HTML element with a body.
    let elem = |tag, body| HtmlElem::new(tag).with_body(Some(body)).pack();
    let mut rows: Vec<_> = grid.entries.chunks(grid.non_gutter_column_count()).collect();
    // Builds a `<tr>` whose cells all carry the given tag (`th` or `td`).
    let tr = |tag, row: &[Entry]| {
        let row = row
            .iter()
            .flat_map(|entry| entry.as_cell())
            .map(|cell| show_cell(tag, cell, styles));
        elem(tag::tr, Content::sequence(row))
    };
    // TODO(subfooters): similarly to headers, take consecutive footers from
    // the end for 'tfoot'.
    let footer = grid.footer.as_ref().map(|ft| {
        // Convert from gutter to non-gutter coordinates. Use ceil as it might
        // include the previous gutter row
        // (cf. typst-library/layout/grid/resolve.rs).
        let footer_start = if grid.has_gutter { ft.start.div_ceil(2) } else { ft.start };
        // Remove the footer rows from the main row list.
        let rows = rows.drain(footer_start..);
        elem(tag::tfoot, Content::sequence(rows.map(|row| tr(tag::td, row))))
    });
    // Header range converting from gutter (doubled) to non-gutter coordinates.
    let header_range = |hd: &Header| {
        if grid.has_gutter {
            // Use ceil as it might be `2 * row_amount - 1` if the header is at
            // the end (cf. typst-library/layout/grid/resolve.rs).
            hd.range.start / 2..hd.range.end.div_ceil(2)
        } else {
            hd.range.clone()
        }
    };
    // Store all consecutive headers at the start in 'thead'. All remaining
    // headers are just 'th' rows across the table body.
    let mut consecutive_header_end = 0;
    let first_mid_table_header = grid
        .headers
        .iter()
        .take_while(|hd| {
            let range = header_range(hd);
            let is_consecutive = range.start == consecutive_header_end;
            consecutive_header_end = range.end;
            is_consecutive
        })
        .count();
    let (y_offset, header) = if first_mid_table_header > 0 {
        // Remove the leading header rows from the main row list and move them
        // into the 'thead'. The remaining rows are offset by that amount.
        let removed_header_rows =
            header_range(grid.headers.get(first_mid_table_header - 1).unwrap()).end;
        let rows = rows.drain(..removed_header_rows);
        (
            removed_header_rows,
            Some(elem(tag::thead, Content::sequence(rows.map(|row| tr(tag::th, row))))),
        )
    } else {
        (0, None)
    };
    // TODO: Consider improving accessibility properties of multi-level headers
    // inside tables in the future, e.g. indicating which columns they are
    // relative to and so on. See also:
    // https://www.w3.org/WAI/tutorials/tables/multi-level/
    let mut next_header = first_mid_table_header;
    let mut body =
        Content::sequence(rows.into_iter().enumerate().map(|(relative_y, row)| {
            let y = relative_y + y_offset;
            // Rows covered by a mid-table header become 'th' rows.
            if let Some(current_header_range) =
                grid.headers.get(next_header).map(|h| header_range(h))
                && current_header_range.contains(&y)
            {
                if y + 1 == current_header_range.end {
                    next_header += 1;
                }
                tr(tag::th, row)
            } else {
                tr(tag::td, row)
            }
        }));
    // A 'tbody' wrapper is only needed when a 'thead' or 'tfoot' exists.
    if header.is_some() || footer.is_some() {
        body = elem(tag::tbody, body);
    }
    let content = header.into_iter().chain(core::iter::once(body)).chain(footer);
    elem(tag::table, Content::sequence(content))
}
/// Converts a single grid cell into an HTML cell element with the given tag,
/// forwarding non-trivial column and row spans as attributes.
fn show_cell(tag: HtmlTag, cell: &Cell, styles: StyleChain) -> Content {
    let body = cell.body.clone();
    let Some(packed) = body.to_packed::<TableCell>() else { return body };
    // Only emit span attributes when they differ from the default of 1.
    let nontrivial = |n: NonZeroUsize| (n != NonZeroUsize::MIN).then(|| n.to_string());
    let mut attrs = HtmlAttrs::new();
    if let Some(value) = nontrivial(packed.colspan.get(styles)) {
        attrs.push(attr::colspan, value);
    }
    if let Some(value) = nontrivial(packed.rowspan.get(styles)) {
        attrs.push(attr::rowspan, value);
    }
    HtmlElem::new(tag)
        .with_body(Some(packed.body.clone()))
        .with_attrs(attrs)
        .pack()
        .spanned(packed.span())
}
/// Shows subscript text as an HTML `<sub>` element.
const SUB_RULE: ShowFn<SubElem> =
    |elem, _, _| Ok(HtmlElem::new(tag::sub).with_body(Some(elem.body.clone())).pack());
/// Shows superscript text as an HTML `<sup>` element.
const SUPER_RULE: ShowFn<SuperElem> =
    |elem, _, _| Ok(HtmlElem::new(tag::sup).with_body(Some(elem.body.clone())).pack());
/// Shows underlined text via an inline `text-decoration` style.
///
/// Note: In modern HTML, `<u>` is not the underline element, but
/// rather an "Unarticulated Annotation" element (see HTML spec
/// 4.5.22). Using `text-decoration` instead is recommended by MDN.
const UNDERLINE_RULE: ShowFn<UnderlineElem> = |elem, _, _| {
    let styled = HtmlElem::new(tag::span)
        .with_attr(attr::style, "text-decoration: underline")
        .with_body(Some(elem.body.clone()));
    Ok(styled.pack())
};
/// Shows overlined text via an inline `text-decoration` style, analogously
/// to the underline rule.
const OVERLINE_RULE: ShowFn<OverlineElem> = |elem, _, _| {
    let styled = HtmlElem::new(tag::span)
        .with_attr(attr::style, "text-decoration: overline")
        .with_body(Some(elem.body.clone()));
    Ok(styled.pack())
};
/// Shows struck-through text as an HTML `<s>` element.
const STRIKE_RULE: ShowFn<StrikeElem> =
    |elem, _, _| Ok(HtmlElem::new(tag::s).with_body(Some(elem.body.clone())).pack());
/// Shows highlighted text as an HTML `<mark>` element.
const HIGHLIGHT_RULE: ShowFn<HighlightElem> =
    |elem, _, _| Ok(HtmlElem::new(tag::mark).with_body(Some(elem.body.clone())).pack());
/// Shows smallcaps text via the CSS `font-variant-caps` property.
const SMALLCAPS_RULE: ShowFn<SmallcapsElem> = |elem, _, styles| {
    // `all` additionally converts uppercase letters to small capitals.
    let variant = if elem.all.get(styles) {
        "font-variant-caps: all-small-caps"
    } else {
        "font-variant-caps: small-caps"
    };
    Ok(HtmlElem::new(tag::span)
        .with_attr(attr::style, variant)
        .with_body(Some(elem.body.clone()))
        .pack())
};
/// Shows raw text as a `<code>` element — wrapped in a `<pre>` when it is a
/// block — with the language exposed via a `data-lang` attribute.
const RAW_RULE: ShowFn<RawElem> = |elem, _, styles| {
    let lines = elem.lines.as_deref().unwrap_or_default();
    // Interleave the highlighted lines with linebreaks: n lines need n - 1
    // separators, hence the capacity of 2n - 1.
    let mut seq = EcoVec::with_capacity((2 * lines.len()).saturating_sub(1));
    for (i, line) in lines.iter().enumerate() {
        if i != 0 {
            seq.push(LinebreakElem::shared().clone());
        }
        seq.push(line.clone().pack());
    }
    let lang = elem.lang.get_ref(styles);
    let code = HtmlElem::new(tag::code)
        .with_optional_attr(const { HtmlAttr::constant("data-lang") }, lang.clone())
        .with_body(Some(Content::sequence(seq)))
        .pack()
        .spanned(elem.span());
    Ok(if elem.block.get(styles) {
        HtmlElem::new(tag::pre).with_body(Some(code)).pack()
    } else {
        code
    })
};
/// This is used by `RawElem::synthesize` through a routine.
///
/// It's a temporary workaround until `TextElem::fill` is supported in HTML
/// export.
#[doc(hidden)]
pub fn html_span_filled(content: Content, color: Color) -> Content {
    let span = content.span();
    let properties = css::Properties::new().with("color", css::color(color));
    HtmlElem::new(tag::span)
        .with_styles(properties)
        .with_body(Some(content))
        .pack()
        .spanned(span)
}
/// Shows a single line of raw text by emitting its body unchanged.
const RAW_LINE_RULE: ShowFn<RawLine> = |elem, _, _| Ok(elem.body.clone());
// TODO: This is rather incomplete.
/// Shows a block as a plain `<div>`.
const BLOCK_RULE: ShowFn<BlockElem> = |elem, _, styles| {
    let body = match elem.body.get_cloned(styles) {
        None => None,
        Some(BlockBody::Content(body)) => Some(body),
        // These are only generated by native `typst-layout` show rules.
        Some(BlockBody::SingleLayouter(_) | BlockBody::MultiLayouter(_)) => {
            bail!(
                elem.span(),
                "blocks with layout routines should not occur in \
                 HTML export – this is a bug";
            )
        }
    };
    Ok(HtmlElem::new(tag::div).with_body(body).pack())
};
// TODO: This is rather incomplete.
/// Shows a box as an inline-block `<span>`.
const BOX_RULE: ShowFn<BoxElem> = |elem, _, styles| {
    Ok(HtmlElem::new(tag::span)
        .with_styles(css::Properties::new().with("display", "inline-block"))
        .with_body(elem.body.get_cloned(styles))
        .pack())
};
/// Shows an image as an `<img>` element with a base64-encoded data URL as its
/// source, plus optional alt text and sizing styles.
const IMAGE_RULE: ShowFn<ImageElem> = |elem, engine, styles| {
    let image = elem.decode(engine, styles)?;
    let mut attrs = HtmlAttrs::new();
    attrs.push(attr::src, typst_svg::convert_image_to_base64_url(&image));
    if let Some(alt) = elem.alt.get_cloned(styles) {
        attrs.push(attr::alt, alt);
    }
    let mut inline = css::Properties::new();
    // TODO: Exclude in semantic profile.
    if let Some(value) = typst_svg::convert_image_scaling(image.scaling()) {
        inline.push("image-rendering", value);
    }
    // TODO: Exclude in semantic profile?
    match elem.width.get(styles) {
        Smart::Auto => {}
        Smart::Custom(rel) => inline.push("width", css::rel(rel)),
    }
    // TODO: Exclude in semantic profile?
    match elem.height.get(styles) {
        Sizing::Auto => {}
        Sizing::Rel(rel) => inline.push("height", css::rel(rel)),
        // Fractional heights have no HTML equivalent.
        Sizing::Fr(_) => {}
    }
    Ok(HtmlElem::new(tag::img).with_attrs(attrs).with_styles(inline).pack())
};
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-html/src/tag.rs | crates/typst-html/src/tag.rs | //! Predefined constants for HTML tags.
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
use crate::HtmlTag;
// One constant per HTML element tag, in alphabetical order.
pub const a: HtmlTag = HtmlTag::constant("a");
pub const abbr: HtmlTag = HtmlTag::constant("abbr");
pub const address: HtmlTag = HtmlTag::constant("address");
pub const area: HtmlTag = HtmlTag::constant("area");
pub const article: HtmlTag = HtmlTag::constant("article");
pub const aside: HtmlTag = HtmlTag::constant("aside");
pub const audio: HtmlTag = HtmlTag::constant("audio");
pub const b: HtmlTag = HtmlTag::constant("b");
pub const base: HtmlTag = HtmlTag::constant("base");
pub const bdi: HtmlTag = HtmlTag::constant("bdi");
pub const bdo: HtmlTag = HtmlTag::constant("bdo");
pub const blockquote: HtmlTag = HtmlTag::constant("blockquote");
pub const body: HtmlTag = HtmlTag::constant("body");
pub const br: HtmlTag = HtmlTag::constant("br");
pub const button: HtmlTag = HtmlTag::constant("button");
pub const canvas: HtmlTag = HtmlTag::constant("canvas");
pub const caption: HtmlTag = HtmlTag::constant("caption");
pub const cite: HtmlTag = HtmlTag::constant("cite");
pub const code: HtmlTag = HtmlTag::constant("code");
pub const col: HtmlTag = HtmlTag::constant("col");
pub const colgroup: HtmlTag = HtmlTag::constant("colgroup");
pub const data: HtmlTag = HtmlTag::constant("data");
pub const datalist: HtmlTag = HtmlTag::constant("datalist");
pub const dd: HtmlTag = HtmlTag::constant("dd");
pub const del: HtmlTag = HtmlTag::constant("del");
pub const details: HtmlTag = HtmlTag::constant("details");
pub const dfn: HtmlTag = HtmlTag::constant("dfn");
pub const dialog: HtmlTag = HtmlTag::constant("dialog");
pub const div: HtmlTag = HtmlTag::constant("div");
pub const dl: HtmlTag = HtmlTag::constant("dl");
pub const dt: HtmlTag = HtmlTag::constant("dt");
pub const em: HtmlTag = HtmlTag::constant("em");
pub const embed: HtmlTag = HtmlTag::constant("embed");
pub const fieldset: HtmlTag = HtmlTag::constant("fieldset");
pub const figcaption: HtmlTag = HtmlTag::constant("figcaption");
pub const figure: HtmlTag = HtmlTag::constant("figure");
pub const footer: HtmlTag = HtmlTag::constant("footer");
pub const form: HtmlTag = HtmlTag::constant("form");
pub const h1: HtmlTag = HtmlTag::constant("h1");
pub const h2: HtmlTag = HtmlTag::constant("h2");
pub const h3: HtmlTag = HtmlTag::constant("h3");
pub const h4: HtmlTag = HtmlTag::constant("h4");
pub const h5: HtmlTag = HtmlTag::constant("h5");
pub const h6: HtmlTag = HtmlTag::constant("h6");
pub const head: HtmlTag = HtmlTag::constant("head");
pub const header: HtmlTag = HtmlTag::constant("header");
pub const hgroup: HtmlTag = HtmlTag::constant("hgroup");
pub const hr: HtmlTag = HtmlTag::constant("hr");
pub const html: HtmlTag = HtmlTag::constant("html");
pub const i: HtmlTag = HtmlTag::constant("i");
pub const iframe: HtmlTag = HtmlTag::constant("iframe");
pub const img: HtmlTag = HtmlTag::constant("img");
pub const input: HtmlTag = HtmlTag::constant("input");
pub const ins: HtmlTag = HtmlTag::constant("ins");
pub const kbd: HtmlTag = HtmlTag::constant("kbd");
pub const label: HtmlTag = HtmlTag::constant("label");
pub const legend: HtmlTag = HtmlTag::constant("legend");
pub const li: HtmlTag = HtmlTag::constant("li");
pub const link: HtmlTag = HtmlTag::constant("link");
pub const main: HtmlTag = HtmlTag::constant("main");
pub const map: HtmlTag = HtmlTag::constant("map");
pub const mark: HtmlTag = HtmlTag::constant("mark");
pub const menu: HtmlTag = HtmlTag::constant("menu");
pub const meta: HtmlTag = HtmlTag::constant("meta");
pub const meter: HtmlTag = HtmlTag::constant("meter");
pub const nav: HtmlTag = HtmlTag::constant("nav");
pub const noscript: HtmlTag = HtmlTag::constant("noscript");
pub const object: HtmlTag = HtmlTag::constant("object");
pub const ol: HtmlTag = HtmlTag::constant("ol");
pub const optgroup: HtmlTag = HtmlTag::constant("optgroup");
pub const option: HtmlTag = HtmlTag::constant("option");
pub const output: HtmlTag = HtmlTag::constant("output");
pub const p: HtmlTag = HtmlTag::constant("p");
pub const picture: HtmlTag = HtmlTag::constant("picture");
pub const pre: HtmlTag = HtmlTag::constant("pre");
pub const progress: HtmlTag = HtmlTag::constant("progress");
pub const q: HtmlTag = HtmlTag::constant("q");
pub const rp: HtmlTag = HtmlTag::constant("rp");
pub const rt: HtmlTag = HtmlTag::constant("rt");
pub const ruby: HtmlTag = HtmlTag::constant("ruby");
pub const s: HtmlTag = HtmlTag::constant("s");
pub const samp: HtmlTag = HtmlTag::constant("samp");
pub const script: HtmlTag = HtmlTag::constant("script");
pub const search: HtmlTag = HtmlTag::constant("search");
pub const section: HtmlTag = HtmlTag::constant("section");
pub const select: HtmlTag = HtmlTag::constant("select");
pub const slot: HtmlTag = HtmlTag::constant("slot");
pub const small: HtmlTag = HtmlTag::constant("small");
pub const source: HtmlTag = HtmlTag::constant("source");
pub const span: HtmlTag = HtmlTag::constant("span");
pub const strong: HtmlTag = HtmlTag::constant("strong");
pub const style: HtmlTag = HtmlTag::constant("style");
pub const sub: HtmlTag = HtmlTag::constant("sub");
pub const summary: HtmlTag = HtmlTag::constant("summary");
pub const sup: HtmlTag = HtmlTag::constant("sup");
pub const table: HtmlTag = HtmlTag::constant("table");
pub const tbody: HtmlTag = HtmlTag::constant("tbody");
pub const td: HtmlTag = HtmlTag::constant("td");
pub const template: HtmlTag = HtmlTag::constant("template");
pub const textarea: HtmlTag = HtmlTag::constant("textarea");
pub const tfoot: HtmlTag = HtmlTag::constant("tfoot");
pub const th: HtmlTag = HtmlTag::constant("th");
pub const thead: HtmlTag = HtmlTag::constant("thead");
pub const time: HtmlTag = HtmlTag::constant("time");
pub const title: HtmlTag = HtmlTag::constant("title");
pub const tr: HtmlTag = HtmlTag::constant("tr");
pub const track: HtmlTag = HtmlTag::constant("track");
pub const u: HtmlTag = HtmlTag::constant("u");
pub const ul: HtmlTag = HtmlTag::constant("ul");
pub const var: HtmlTag = HtmlTag::constant("var");
pub const video: HtmlTag = HtmlTag::constant("video");
pub const wbr: HtmlTag = HtmlTag::constant("wbr");
// HTML spec § 13.1.2 Elements
/// Whether this is a void tag whose associated element may not have
/// children.
///
/// See <https://html.spec.whatwg.org/multipage/syntax.html#void-elements>.
pub fn is_void(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::area
            | self::base
            | self::br
            | self::col
            | self::embed
            | self::hr
            | self::img
            | self::input
            | self::link
            | self::meta
            | self::source
            | self::track
            | self::wbr
    )
}
/// Whether this is a tag containing raw text.
///
/// The contents of raw text elements are not escaped or parsed as markup.
pub fn is_raw(tag: HtmlTag) -> bool {
    matches!(tag, self::script | self::style)
}
/// Whether this is a tag containing escapable raw text.
///
/// Escapable raw text may contain character references, but no other markup.
pub fn is_escapable_raw(tag: HtmlTag) -> bool {
    matches!(tag, self::textarea | self::title)
}
// HTML spec § 3.2.5.2 Kinds of content
/// Whether an element is considered metadata content.
pub fn is_metadata_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::base
            | self::link
            | self::meta
            | self::noscript
            | self::script
            | self::style
            | self::template
            | self::title
    )
}
/// Whether an element is considered flow content.
pub fn is_flow_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::a
            | self::abbr
            | self::address
            | self::area
            | self::article
            | self::aside
            | self::audio
            | self::b
            | self::bdi
            | self::bdo
            | self::blockquote
            | self::br
            | self::button
            | self::canvas
            | self::cite
            | self::code
            | self::data
            | self::datalist
            | self::del
            | self::details
            | self::dfn
            | self::dialog
            | self::div
            | self::dl
            | self::em
            | self::embed
            | self::fieldset
            | self::figure
            | self::footer
            | self::form
            | self::h1
            | self::h2
            | self::h3
            | self::h4
            | self::h5
            | self::h6
            | self::header
            | self::hgroup
            | self::hr
            | self::i
            | self::iframe
            | self::img
            | self::input
            | self::ins
            | self::kbd
            | self::label
            | self::link
            | self::main
            | self::map
            | self::mark
            | self::menu
            | self::meta
            | self::meter
            | self::nav
            | self::noscript
            | self::object
            | self::ol
            | self::output
            | self::p
            | self::picture
            | self::pre
            | self::progress
            | self::q
            | self::ruby
            | self::s
            | self::samp
            | self::script
            | self::search
            | self::section
            | self::select
            | self::slot
            | self::small
            | self::span
            | self::strong
            | self::sub
            | self::sup
            | self::table
            | self::template
            | self::textarea
            | self::time
            | self::u
            | self::ul
            | self::var
            | self::video
            | self::wbr
    )
}
/// Whether an element is considered sectioning content.
pub fn is_sectioning_content(tag: HtmlTag) -> bool {
    matches!(tag, self::article | self::aside | self::nav | self::section)
}
/// Whether an element is considered heading content.
pub fn is_heading_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::h1 | self::h2 | self::h3 | self::h4 | self::h5 | self::h6 | self::hgroup
    )
}
/// Whether an element is considered phrasing content.
pub fn is_phrasing_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::a
            | self::abbr
            | self::area
            | self::audio
            | self::b
            | self::bdi
            | self::bdo
            | self::br
            | self::button
            | self::canvas
            | self::cite
            | self::code
            | self::data
            | self::datalist
            | self::del
            | self::dfn
            | self::em
            | self::embed
            | self::i
            | self::iframe
            | self::img
            | self::input
            | self::ins
            | self::kbd
            | self::label
            | self::link
            | self::map
            | self::mark
            | self::meta
            | self::meter
            | self::noscript
            | self::object
            | self::output
            | self::picture
            | self::progress
            | self::q
            | self::ruby
            | self::s
            | self::samp
            | self::script
            | self::select
            | self::slot
            | self::small
            | self::span
            | self::strong
            | self::sub
            | self::sup
            | self::template
            | self::textarea
            | self::time
            | self::u
            | self::var
            | self::video
            | self::wbr
    )
}
/// Whether an element is considered embedded content.
pub fn is_embedded_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::audio
            | self::canvas
            | self::embed
            | self::iframe
            | self::img
            | self::object
            | self::picture
            | self::video
    )
}
/// Whether an element is considered interactive content.
///
/// Note that for some of these (e.g. `a`, `input`, `img`), the HTML spec only
/// considers them interactive under certain attribute conditions, which this
/// coarse check does not model.
pub fn is_interactive_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::a
            | self::audio
            | self::button
            | self::details
            | self::embed
            | self::iframe
            | self::img
            | self::input
            | self::label
            | self::select
            | self::textarea
            | self::video
    )
}
/// Whether an element is considered palpable content.
pub fn is_palpable_content(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::a
            | self::abbr
            | self::address
            | self::article
            | self::aside
            | self::audio
            | self::b
            | self::bdi
            | self::bdo
            | self::blockquote
            | self::button
            | self::canvas
            | self::cite
            | self::code
            | self::data
            | self::del
            | self::details
            | self::dfn
            | self::div
            | self::dl
            | self::em
            | self::embed
            | self::fieldset
            | self::figure
            | self::footer
            | self::form
            | self::h1
            | self::h2
            | self::h3
            | self::h4
            | self::h5
            | self::h6
            | self::header
            | self::hgroup
            | self::i
            | self::iframe
            | self::img
            | self::input
            | self::ins
            | self::kbd
            | self::label
            | self::main
            | self::map
            | self::mark
            | self::menu
            | self::meter
            | self::nav
            | self::object
            | self::ol
            | self::output
            | self::p
            | self::picture
            | self::pre
            | self::progress
            | self::q
            | self::ruby
            | self::s
            | self::samp
            | self::search
            | self::section
            | self::select
            | self::small
            | self::span
            | self::strong
            | self::sub
            | self::sup
            | self::table
            | self::textarea
            | self::time
            | self::u
            | self::ul
            | self::var
            | self::video
    )
}
/// Whether an element is considered a script-supporting element.
pub fn is_script_supporting_element(tag: HtmlTag) -> bool {
    matches!(tag, self::script | self::template)
}
// Defaults of the CSS `display` property.
/// Whether nodes with the tag have the CSS property `display: block` by
/// default.
pub fn is_block_by_default(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::html
            | self::head
            | self::body
            | self::article
            | self::aside
            | self::h1
            | self::h2
            | self::h3
            | self::h4
            | self::h5
            | self::h6
            | self::hgroup
            | self::nav
            | self::section
            | self::dd
            | self::dl
            | self::dt
            | self::menu
            | self::ol
            | self::ul
            | self::address
            | self::blockquote
            | self::dialog
            | self::div
            | self::fieldset
            | self::figure
            | self::figcaption
            | self::footer
            | self::form
            | self::header
            | self::hr
            | self::legend
            | self::main
            | self::p
            | self::pre
            | self::search
    )
}
/// Whether the element is inline-level as opposed to being block-level.
///
/// Not sure whether this distinction really makes sense. But we somehow
/// need to decide what to put into automatic paragraphs. A `<strong>`
/// should be merged into a paragraph created by realization, but a `<div>`
/// shouldn't.
///
/// <https://www.w3.org/TR/html401/struct/global.html#block-inline>
/// <https://developer.mozilla.org/en-US/docs/Glossary/Inline-level_content>
/// <https://github.com/orgs/mdn/discussions/353>
pub fn is_inline_by_default(tag: HtmlTag) -> bool {
    matches!(
        tag,
        self::abbr
            | self::a
            | self::bdi
            | self::b
            | self::br
            | self::bdo
            | self::code
            | self::cite
            | self::dfn
            | self::data
            | self::i
            | self::em
            | self::mark
            | self::kbd
            | self::rp
            | self::q
            | self::ruby
            | self::rt
            | self::samp
            | self::s
            | self::span
            | self::small
            | self::sub
            | self::strong
            | self::time
            | self::sup
            | self::var
            | self::u
    )
}
/// Whether nodes with the tag have the CSS property `display: table(-.*)?`
/// by default.
pub fn is_tabular_by_default(tag: HtmlTag) -> bool {
matches!(
tag,
self::table
| self::thead
| self::tbody
| self::tfoot
| self::tr
| self::th
| self::td
| self::caption
| self::col
| self::colgroup
)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-kit/src/lib.rs | crates/typst-kit/src/lib.rs | //! Typst-kit contains various default implementations of functionality used in
//! typst-cli. It is intended as a single source of truth for things like font
//! searching, package downloads and more. Each component of typst-kit is
//! optional, but enabled by default.
//!
//! # Components
//! - [fonts] contains a default implementation for searching local and system
//! installed fonts. It is enabled by the `fonts` feature flag, additionally
//! the `embed-fonts` feature can be used to embed the Typst default fonts.
//! - For text: Libertinus Serif, New Computer Modern
//! - For math: New Computer Modern Math
//! - For code: DejaVu Sans Mono
//! - [download] contains functionality for making simple web requests with
//! status reporting, useful for downloading packages from package registries.
//! It is enabled by the `downloads` feature flag, additionally the
//! `vendor-openssl` can be used on operating systems other than macOS and
//! Windows to vendor OpenSSL when building.
//! - [package] contains package storage and downloading functionality based on
//! [download]. It is enabled by the `packages` feature flag and implies the
//! `downloads` feature flag.
// Web-request helpers with progress reporting (package registry access).
#[cfg(feature = "downloads")]
pub mod download;
// Local and system font discovery plus embedded default fonts.
#[cfg(feature = "fonts")]
pub mod fonts;
// Package storage and downloading; the `packages` feature implies `downloads`.
#[cfg(feature = "packages")]
pub mod package;
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-kit/src/download.rs | crates/typst-kit/src/download.rs | // Acknowledgement:
// Closely modelled after rustup's `DownloadTracker`.
// https://github.com/rust-lang/rustup/blob/master/src/cli/download_tracker.rs
//! Helpers for making various web requests with status reporting. These are
//! primarily used for communicating with package registries.
use std::collections::VecDeque;
use std::fmt::Debug;
use std::io::{self, ErrorKind, Read};
use std::path::PathBuf;
use std::sync::Arc;
use std::time::{Duration, Instant};
use ecow::EcoString;
use native_tls::{Certificate, TlsConnector};
use once_cell::sync::OnceCell;
use ureq::Response;
/// Manages progress reporting for downloads.
///
/// Implementors receive lifecycle callbacks from the download machinery and
/// may render them however they like (progress bar, log lines, nothing).
pub trait Progress {
/// Invoked when a download is started.
fn print_start(&mut self);
/// Invoked repeatedly while a download is ongoing.
///
/// The download loop throttles this to roughly once per second.
fn print_progress(&mut self, state: &DownloadState);
/// Invoked when a download is finished.
fn print_finish(&mut self, state: &DownloadState);
}
/// An implementation of [`Progress`] with no-op reporting, i.e., reporting
/// events are swallowed.
///
/// Useful when no user-facing status output is desired.
pub struct ProgressSink;
impl Progress for ProgressSink {
fn print_start(&mut self) {}
fn print_progress(&mut self, _: &DownloadState) {}
fn print_finish(&mut self, _: &DownloadState) {}
}
/// The current state of an in progress or finished download.
#[derive(Debug)]
pub struct DownloadState {
/// The expected amount of bytes to download, `None` if the response header
/// was not set.
pub content_len: Option<usize>,
/// The total amount of downloaded bytes until now.
pub total_downloaded: usize,
/// A backlog of the amount of downloaded bytes each second.
///
/// The most recent sample is at the front; only a bounded number of
/// samples is retained.
pub bytes_per_second: VecDeque<usize>,
/// The download starting instant.
pub start_time: Instant,
}
/// A minimal https client for downloading various resources.
pub struct Downloader {
/// The user agent sent with every request.
user_agent: EcoString,
/// Path to a custom CA certificate, if one should be loaded lazily.
cert_path: Option<PathBuf>,
/// The custom CA certificate: either provided up front or loaded from
/// `cert_path` on first use.
cert: OnceCell<Certificate>,
}
impl Downloader {
/// Creates a new downloader with the given user agent and no certificate.
pub fn new(user_agent: impl Into<EcoString>) -> Self {
Self {
user_agent: user_agent.into(),
cert_path: None,
cert: OnceCell::new(),
}
}
/// Creates a new downloader with the given user agent and certificate path.
///
/// The certificate is loaded lazily on first access via [`Self::cert`];
/// loading errors surface there as `Some(Err(..))`.
pub fn with_path(user_agent: impl Into<EcoString>, cert_path: PathBuf) -> Self {
Self {
user_agent: user_agent.into(),
cert_path: Some(cert_path),
cert: OnceCell::new(),
}
}
/// Creates a new downloader with the given user agent and certificate.
pub fn with_cert(user_agent: impl Into<EcoString>, cert: Certificate) -> Self {
Self {
user_agent: user_agent.into(),
cert_path: None,
cert: OnceCell::with_value(cert),
}
}
/// Returns the certificate this client is using, if a custom certificate
/// is used it is loaded on first access.
///
/// - Returns `None` if no custom certificate was configured.
/// - Returns `Some(Ok(cert))` if the certificate was provided directly or
///   loaded successfully.
/// - Returns `Some(Err(err))` if an error occurred while loading the certificate.
pub fn cert(&self) -> Option<io::Result<&Certificate>> {
// A certificate handed in directly via `with_cert` is already in the
// cell while `cert_path` is `None`; it must be honored too. Previously
// this only consulted `cert_path`, so a `with_cert` certificate was
// never applied in `download`.
if let Some(cert) = self.cert.get() {
return Some(Ok(cert));
}
self.cert_path.as_ref().map(|path| {
self.cert.get_or_try_init(|| {
let pem = std::fs::read(path)?;
Certificate::from_pem(&pem).map_err(io::Error::other)
})
})
}
/// Download binary data from the given url.
#[allow(clippy::result_large_err)]
pub fn download(&self, url: &str) -> Result<ureq::Response, ureq::Error> {
let mut builder = ureq::AgentBuilder::new();
let mut tls = TlsConnector::builder();
// Set user agent.
builder = builder.user_agent(&self.user_agent);
// Get the network proxy config from the environment and apply it.
if let Some(proxy) = env_proxy::for_url_str(url)
.to_url()
.and_then(|url| ureq::Proxy::new(url).ok())
{
builder = builder.proxy(proxy);
}
// Apply a custom CA certificate if present.
if let Some(cert) = self.cert() {
tls.add_root_certificate(cert?.clone());
}
// Configure native TLS.
let connector = tls.build().map_err(io::Error::other)?;
builder = builder.tls_connector(Arc::new(connector));
builder.build().get(url).call()
}
/// Download binary data from the given url and report its progress.
#[allow(clippy::result_large_err)]
pub fn download_with_progress(
&self,
url: &str,
progress: &mut dyn Progress,
) -> Result<Vec<u8>, ureq::Error> {
progress.print_start();
let response = self.download(url)?;
Ok(RemoteReader::from_response(response, progress).download()?)
}
}
impl Debug for Downloader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Downloader")
.field("user_agent", &self.user_agent)
.field("cert_path", &self.cert_path)
.field(
"cert",
// Don't dump the certificate's contents; only show whether one
// is loaded.
&self
.cert
.get()
.map(|_| typst_utils::debug(|f| write!(f, "Certificate(..)"))),
)
.finish()
}
}
/// Keep track of this many download speed samples.
const SAMPLES: usize = 5;
/// A wrapper around [`ureq::Response`] that reads the response body in chunks
/// and reports its progress while downloading.
struct RemoteReader<'p> {
/// The reader returned by the ureq::Response.
reader: Box<dyn Read + Send + Sync + 'static>,
/// The download state, holding download metadata for progress reporting.
state: DownloadState,
/// The instant at which progress was last reported.
last_progress: Option<Instant>,
/// A trait object used to report download progress.
progress: &'p mut dyn Progress,
}
impl<'p> RemoteReader<'p> {
/// Wraps a [`ureq::Response`] and prepares it for downloading.
///
/// The 'Content-Length' header is used as a size hint for read
/// optimization, if present.
fn from_response(response: Response, progress: &'p mut dyn Progress) -> Self {
let content_len: Option<usize> = response
.header("Content-Length")
.and_then(|header| header.parse().ok());
Self {
reader: response.into_reader(),
last_progress: None,
state: DownloadState {
content_len,
total_downloaded: 0,
bytes_per_second: VecDeque::with_capacity(SAMPLES),
start_time: Instant::now(),
},
progress,
}
}
/// Download the body's content as raw bytes while reporting download
/// progress.
///
/// Progress is reported at most about once per second; the per-second
/// byte counts are kept as a bounded backlog of `SAMPLES` entries.
fn download(mut self) -> io::Result<Vec<u8>> {
let mut buffer = vec![0; 8192];
// Preallocate based on the Content-Length hint when available.
let mut data = match self.state.content_len {
Some(content_len) => Vec::with_capacity(content_len),
None => Vec::with_capacity(8192),
};
let mut downloaded_this_sec = 0;
loop {
let read = match self.reader.read(&mut buffer) {
// `Ok(0)` signals end of the response body.
Ok(0) => break,
Ok(n) => n,
// If the data is not yet ready but will be available eventually
// keep trying until we either get an actual error, receive data
// or an Ok(0).
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return Err(e),
};
data.extend(&buffer[..read]);
// Lazily initialize the progress timestamp on the first read.
let last_printed = match self.last_progress {
Some(prev) => prev,
None => {
let current_time = Instant::now();
self.last_progress = Some(current_time);
current_time
}
};
let elapsed = Instant::now().saturating_duration_since(last_printed);
downloaded_this_sec += read;
self.state.total_downloaded += read;
// Rotate the speed samples and report once a full second passed.
if elapsed >= Duration::from_secs(1) {
if self.state.bytes_per_second.len() == SAMPLES {
self.state.bytes_per_second.pop_back();
}
self.state.bytes_per_second.push_front(downloaded_this_sec);
downloaded_this_sec = 0;
self.progress.print_progress(&self.state);
self.last_progress = Some(Instant::now());
}
}
self.progress.print_finish(&self.state);
Ok(data)
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-kit/src/fonts.rs | crates/typst-kit/src/fonts.rs | //! Default implementation for searching local and system installed fonts as
//! well as loading embedded default fonts.
//!
//! # Embedded fonts
//! The following fonts are available as embedded fonts via the `embed-fonts`
//! feature flag:
//! - For text: Libertinus Serif, New Computer Modern
//! - For math: New Computer Modern Math
//! - For code: DejaVu Sans Mono
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::OnceLock;
use fontdb::{Database, Source};
use typst_library::foundations::Bytes;
use typst_library::text::{Font, FontBook, FontInfo};
use typst_timing::TimingScope;
use typst_utils::LazyHash;
/// Holds details about the location of a font and lazily the font itself.
#[derive(Debug)]
pub struct FontSlot {
/// The path at which the font can be found on the system.
///
/// `None` if the font was embedded in the binary.
path: Option<PathBuf>,
/// The index of the font in its collection. Zero if the path does not point
/// to a collection.
index: u32,
/// The lazily loaded font.
font: OnceLock<Option<Font>>,
}
impl FontSlot {
    /// The path at which the font can be found on the system, or `None` if
    /// the font was embedded.
    pub fn path(&self) -> Option<&Path> {
        self.path.as_deref()
    }

    /// The index of the font in its collection. Zero if the path does not
    /// point to a collection.
    pub fn index(&self) -> u32 {
        self.index
    }

    /// Get the font for this slot, reading it from disk on first access.
    ///
    /// The result (including a failed load) is cached for later calls.
    pub fn get(&self) -> Option<Font> {
        let loaded = self.font.get_or_init(|| {
            let _scope = TimingScope::new("load font");
            let path = self
                .path
                .as_ref()
                .expect("`path` is not `None` if `font` is uninitialized");
            let data = fs::read(path).ok()?;
            Font::new(Bytes::new(data), self.index)
        });
        loaded.clone()
    }
}
/// The result of a font search, created by calling [`FontSearcher::search`].
#[derive(Debug)]
pub struct Fonts {
/// Metadata about all discovered fonts.
///
/// Can directly be used in [`World::book`](typst_library::World::book).
pub book: LazyHash<FontBook>,
/// Slots that the fonts are loaded into.
///
/// Slots are pushed in lockstep with the entries in `book`, so a slot's
/// position matches the font's index in the book.
///
/// Assuming your world implementation has a field `fonts: Fonts`, this can
/// be used in [`World::font`](typst_library::World::font) as such:
/// ```ignore
/// fn font(&self, index: usize) -> Option<Font> {
///     self.fonts.slots.get(index)?.get()
/// }
/// ```
pub slots: Vec<FontSlot>,
}
impl Fonts {
/// Creates a new font searcher with the default settings.
pub fn searcher() -> FontSearcher {
FontSearcher::new()
}
}
/// Searches for fonts.
///
/// Fonts are added in the following order (descending priority):
/// 1. Font directories
/// 2. System fonts (if included & enabled)
/// 3. Embedded fonts (if enabled)
#[derive(Debug)]
pub struct FontSearcher {
/// Font database used for discovering font files on disk.
db: Database,
/// Whether to also load fonts installed on the system.
include_system_fonts: bool,
/// Whether to also load the fonts embedded in the binary.
#[cfg(feature = "embed-fonts")]
include_embedded_fonts: bool,
/// Metadata of all fonts discovered so far.
book: FontBook,
/// Slots pushed in lockstep with the entries in `book`.
fonts: Vec<FontSlot>,
}
impl FontSearcher {
/// Create a new, empty system searcher. The searcher is created with the
/// default configuration, it will include embedded fonts and system fonts.
pub fn new() -> Self {
Self {
db: Database::new(),
include_system_fonts: true,
#[cfg(feature = "embed-fonts")]
include_embedded_fonts: true,
book: FontBook::new(),
fonts: vec![],
}
}
/// Whether to search for and load system fonts, defaults to `true`.
pub fn include_system_fonts(&mut self, value: bool) -> &mut Self {
self.include_system_fonts = value;
self
}
/// Whether to load embedded fonts, defaults to `true`.
#[cfg(feature = "embed-fonts")]
pub fn include_embedded_fonts(&mut self, value: bool) -> &mut Self {
self.include_embedded_fonts = value;
self
}
/// Start searching for and loading fonts. To additionally load fonts
/// from specific directories, use [`search_with`][Self::search_with].
///
/// # Examples
/// ```no_run
/// # use typst_kit::fonts::FontSearcher;
/// let fonts = FontSearcher::new()
/// .include_system_fonts(true)
/// .search();
/// ```
pub fn search(&mut self) -> Fonts {
self.search_with::<_, &str>([])
}
/// Start searching for and loading fonts, with additional directories.
///
/// Note: this takes the accumulated state out of the searcher (via
/// `mem::take` below), leaving it empty for reuse.
///
/// # Examples
/// ```no_run
/// # use typst_kit::fonts::FontSearcher;
/// let fonts = FontSearcher::new()
/// .include_system_fonts(true)
/// .search_with(["./assets/fonts/"]);
/// ```
pub fn search_with<I, P>(&mut self, font_dirs: I) -> Fonts
where
I: IntoIterator<Item = P>,
P: AsRef<Path>,
{
// Font paths have highest priority.
for path in font_dirs {
self.db.load_fonts_dir(path);
}
if self.include_system_fonts {
// System fonts have second priority.
self.db.load_system_fonts();
}
for face in self.db.faces() {
let path = match &face.source {
Source::File(path) | Source::SharedFile(path, _) => path,
// We never add binary sources to the database, so there
// shouldn't be any.
Source::Binary(_) => continue,
};
// Parse font metadata without keeping the face data in memory;
// the font itself is loaded lazily via `FontSlot::get`.
let info = self
.db
.with_face_data(face.id, FontInfo::new)
.expect("database must contain this font");
if let Some(info) = info {
self.book.push(info);
self.fonts.push(FontSlot {
path: Some(path.clone()),
index: face.index,
font: OnceLock::new(),
});
}
}
// Embedded fonts have lowest priority.
#[cfg(feature = "embed-fonts")]
if self.include_embedded_fonts {
self.add_embedded();
}
Fonts {
book: LazyHash::new(std::mem::take(&mut self.book)),
slots: std::mem::take(&mut self.fonts),
}
}
/// Add fonts that are embedded in the binary.
#[cfg(feature = "embed-fonts")]
fn add_embedded(&mut self) {
for data in typst_assets::fonts() {
let buffer = Bytes::new(data);
for (i, font) in Font::iter(buffer).enumerate() {
self.book.push(font.info().clone());
// Embedded fonts are already decoded, so the slot is created
// pre-filled and has no path.
self.fonts.push(FontSlot {
path: None,
index: i as u32,
font: OnceLock::from(Some(font)),
});
}
}
}
}
impl Default for FontSearcher {
// Equivalent to `FontSearcher::new()`.
fn default() -> Self {
Self::new()
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-kit/src/package.rs | crates/typst-kit/src/package.rs | //! Download and unpack packages and package indices.
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use ecow::eco_format;
use once_cell::sync::OnceCell;
use serde::Deserialize;
use typst_library::diag::{PackageError, PackageResult, StrResult, bail};
use typst_syntax::package::{PackageSpec, PackageVersion, VersionlessPackageSpec};
use crate::download::{Downloader, Progress};
/// The default Typst registry.
pub const DEFAULT_REGISTRY: &str = "https://packages.typst.org";
/// The public namespace in the default Typst registry.
///
/// Only packages in this namespace are downloaded automatically.
pub const DEFAULT_NAMESPACE: &str = "preview";
/// The default packages sub directory within the package and package cache paths.
pub const DEFAULT_PACKAGES_SUBDIR: &str = "typst/packages";
/// Attempts to infer the default package cache directory from the current
/// environment.
///
/// This simply joins [`DEFAULT_PACKAGES_SUBDIR`] to the output of
/// [`dirs::cache_dir`].
pub fn default_package_cache_path() -> Option<PathBuf> {
    let base = dirs::cache_dir()?;
    Some(base.join(DEFAULT_PACKAGES_SUBDIR))
}
/// Attempts to infer the default package directory from the current
/// environment.
///
/// This simply joins [`DEFAULT_PACKAGES_SUBDIR`] to the output of
/// [`dirs::data_dir`].
pub fn default_package_path() -> Option<PathBuf> {
    let base = dirs::data_dir()?;
    Some(base.join(DEFAULT_PACKAGES_SUBDIR))
}
/// Holds information about where packages should be stored and downloads them
/// on demand, if possible.
#[derive(Debug)]
pub struct PackageStorage {
/// The path at which non-local packages should be stored when downloaded.
package_cache_path: Option<PathBuf>,
/// The path at which local packages are stored.
package_path: Option<PathBuf>,
/// The downloader used for fetching the index and packages.
downloader: Downloader,
/// The cached index of the default namespace.
///
/// Fetched at most once per storage instance (see `download_index`).
index: OnceCell<Vec<serde_json::Value>>,
}
impl PackageStorage {
/// Creates a new package storage for the given package paths. Falls back to
/// the recommended XDG directories if they are `None`.
pub fn new(
package_cache_path: Option<PathBuf>,
package_path: Option<PathBuf>,
downloader: Downloader,
) -> Self {
Self::with_index(package_cache_path, package_path, downloader, OnceCell::new())
}
/// Creates a new package storage with a pre-defined index.
///
/// Useful for testing.
fn with_index(
package_cache_path: Option<PathBuf>,
package_path: Option<PathBuf>,
downloader: Downloader,
index: OnceCell<Vec<serde_json::Value>>,
) -> Self {
Self {
package_cache_path: package_cache_path.or_else(default_package_cache_path),
package_path: package_path.or_else(default_package_path),
downloader,
index,
}
}
/// Returns the path at which non-local packages should be stored when
/// downloaded.
pub fn package_cache_path(&self) -> Option<&Path> {
self.package_cache_path.as_deref()
}
/// Returns the path at which local packages are stored.
pub fn package_path(&self) -> Option<&Path> {
self.package_path.as_deref()
}
/// Makes a package available on-disk and returns the path at which it is
/// located (will be either in the cache or package directory).
pub fn prepare_package(
&self,
spec: &PackageSpec,
progress: &mut dyn Progress,
) -> PackageResult<PathBuf> {
let subdir = format!("{}/{}/{}", spec.namespace, spec.name, spec.version);
// Local packages (data directory) take precedence over cached ones.
if let Some(packages_dir) = &self.package_path {
let dir = packages_dir.join(&subdir);
if dir.exists() {
return Ok(dir);
}
}
if let Some(cache_dir) = &self.package_cache_path {
let dir = cache_dir.join(&subdir);
if dir.exists() {
return Ok(dir);
}
// Download from network if it doesn't exist yet. Only packages in
// the default namespace can be fetched automatically.
if spec.namespace == DEFAULT_NAMESPACE {
self.download_package(spec, cache_dir, progress)?;
if dir.exists() {
return Ok(dir);
}
}
}
Err(PackageError::NotFound(spec.clone()))
}
/// Tries to determine the latest version of a package.
pub fn determine_latest_version(
&self,
spec: &VersionlessPackageSpec,
) -> StrResult<PackageVersion> {
if spec.namespace == DEFAULT_NAMESPACE {
// For `DEFAULT_NAMESPACE`, download the package index and find the latest
// version. Entries that fail to deserialize are skipped.
self.download_index()?
.iter()
.filter_map(|value| MinimalPackageInfo::deserialize(value).ok())
.filter(|package| package.name == spec.name)
.map(|package| package.version)
.max()
.ok_or_else(|| eco_format!("failed to find package {spec}"))
} else {
// For other namespaces, search locally. We only search in the data
// directory and not the cache directory, because the latter is not
// intended for storage of local packages.
let subdir = format!("{}/{}", spec.namespace, spec.name);
self.package_path
.iter()
.flat_map(|dir| std::fs::read_dir(dir.join(&subdir)).ok())
.flatten()
.filter_map(|entry| entry.ok())
.map(|entry| entry.path())
.filter_map(|path| path.file_name()?.to_string_lossy().parse().ok())
.max()
.ok_or_else(|| eco_format!("please specify the desired version"))
}
}
/// Download the package index. The result of this is cached for efficiency.
fn download_index(&self) -> StrResult<&[serde_json::Value]> {
self.index
.get_or_try_init(|| {
let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
match self.downloader.download(&url) {
Ok(response) => response.into_json().map_err(|err| {
eco_format!("failed to parse package index: {err}")
}),
Err(ureq::Error::Status(404, _)) => {
bail!("failed to fetch package index (not found)")
}
Err(err) => bail!("failed to fetch package index ({err})"),
}
})
.map(AsRef::as_ref)
}
/// Download a package over the network.
///
/// # Panics
/// Panics if the package spec namespace isn't `DEFAULT_NAMESPACE`.
fn download_package(
&self,
spec: &PackageSpec,
cache_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
assert_eq!(spec.namespace, DEFAULT_NAMESPACE);
let url = format!(
"{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/{}-{}.tar.gz",
spec.name, spec.version
);
let data = match self.downloader.download_with_progress(&url, progress) {
Ok(data) => data,
Err(ureq::Error::Status(404, _)) => {
// A 404 can mean the package exists but not in this version;
// distinguish the two for a better error message.
if let Ok(version) = self.determine_latest_version(&spec.versionless()) {
return Err(PackageError::VersionNotFound(spec.clone(), version));
} else {
return Err(PackageError::NotFound(spec.clone()));
}
}
Err(err) => {
return Err(PackageError::NetworkFailed(Some(eco_format!("{err}"))));
}
};
// The directory in which the package's version lives.
let base_dir = cache_dir.join(format!("{}/{}", spec.namespace, spec.name));
// The place at which the specific package version will live in the end.
let package_dir = base_dir.join(format!("{}", spec.version));
// To prevent multiple Typst instances from interfering, we download
// into a temporary directory first and then move this directory to
// its final destination.
//
// In the `rename` function's documentation it is stated:
// > This will not work if the new name is on a different mount point.
//
// By locating the temporary directory directly next to where the
// package directory will live, we are (trying our best) making sure
// that `tempdir` and `package_dir` are on the same mount point.
let tempdir = Tempdir::create(base_dir.join(format!(
".tmp-{}-{}",
spec.version,
fastrand::u32(..),
)))
.map_err(|err| error("failed to create temporary package directory", err))?;
// Decompress the archive into the temporary directory.
let decompressed = flate2::read::GzDecoder::new(data.as_slice());
tar::Archive::new(decompressed)
.unpack(&tempdir)
.map_err(|err| PackageError::MalformedArchive(Some(eco_format!("{err}"))))?;
// When trying to move (i.e., `rename`) the directory from one place to
// another and the target/destination directory is empty, then the
// operation will succeed (if it's atomic, or hardware doesn't fail, or
// power doesn't go off, etc.). If however the target directory is not
// empty, i.e., another instance already successfully moved the package,
// then we can safely ignore the `DirectoryNotEmpty` error.
//
// This means that we do not check the integrity of an existing moved
// package, just like we don't check the integrity if the package
// directory already existed in the first place. If situations with
// broken packages still occur even with the rename safeguard, we might
// consider more complex solutions like file locking or checksums.
match fs::rename(&tempdir, &package_dir) {
Ok(()) => Ok(()),
Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()),
Err(err) => Err(error("failed to move downloaded package directory", err)),
}
}
}
/// Minimal information required about a package to determine its latest
/// version.
#[derive(Deserialize)]
struct MinimalPackageInfo {
/// The package's name (without namespace).
name: String,
/// The package's version.
version: PackageVersion,
}
/// A temporary directory that is automatically cleaned up.
///
/// The directory and its contents are removed when this value is dropped.
struct Tempdir(PathBuf);
impl Tempdir {
/// Creates a directory at the path and auto-cleans it.
fn create(path: PathBuf) -> io::Result<Self> {
std::fs::create_dir_all(&path)?;
Ok(Self(path))
}
}
impl Drop for Tempdir {
fn drop(&mut self) {
// Best-effort cleanup: a failure to remove is deliberately ignored.
_ = fs::remove_dir_all(&self.0);
}
}
impl AsRef<Path> for Tempdir {
fn as_ref(&self) -> &Path {
&self.0
}
}
/// Enriches an I/O error with a message and turns it into a
/// `PackageError::Other`.
#[cold]
fn error(message: &str, err: io::Error) -> PackageError {
    let detail = eco_format!("{message}: {err}");
    PackageError::Other(Some(detail))
}
#[cfg(test)]
mod tests {
use super::*;
// The index is deserialized lazily, entry by entry, so a single malformed
// entry must not break version resolution for other packages.
#[test]
fn lazy_deser_index() {
let storage = PackageStorage::with_index(
None,
None,
Downloader::new("typst/test"),
OnceCell::with_value(vec![
serde_json::json!({
"name": "charged-ieee",
"version": "0.1.0",
"entrypoint": "lib.typ",
}),
serde_json::json!({
"name": "unequivocal-ams",
// This version number is currently not valid, so this package
// can't be parsed.
"version": "0.2.0-dev",
"entrypoint": "lib.typ",
}),
]),
);
let ieee_version = storage.determine_latest_version(&VersionlessPackageSpec {
namespace: "preview".into(),
name: "charged-ieee".into(),
});
assert_eq!(ieee_version, Ok(PackageVersion { major: 0, minor: 1, patch: 0 }));
let ams_version = storage.determine_latest_version(&VersionlessPackageSpec {
namespace: "preview".into(),
name: "unequivocal-ams".into(),
});
assert_eq!(
ams_version,
Err("failed to find package @preview/unequivocal-ams".into())
)
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/vm.rs | crates/typst-eval/src/vm.rs | use comemo::Tracked;
use ecow::eco_format;
use typst_library::World;
use typst_library::diag::{HintedString, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{Binding, Context, IntoValue, Scopes, Value};
use typst_syntax::Span;
use typst_syntax::ast::{self, AstNode};
use crate::FlowEvent;
/// A virtual machine.
///
/// Holds the state needed to [evaluate](crate::eval()) Typst sources. A
/// new virtual machine is created for each module evaluation and function call.
pub struct Vm<'a> {
/// The underlying virtual typesetter.
pub engine: Engine<'a>,
/// A control flow event that is currently happening.
pub flow: Option<FlowEvent>,
/// The stack of scopes.
pub scopes: Scopes<'a>,
/// A span that is currently under inspection.
///
/// Values bound at this span are traced (see [`Vm::bind`] and
/// [`Vm::trace`]), assisting IDE functionality.
pub inspected: Option<Span>,
/// Data that is contextually made accessible to code behind the scenes.
pub context: Tracked<'a, Context<'a>>,
}
impl<'a> Vm<'a> {
/// Create a new virtual machine.
pub fn new(
engine: Engine<'a>,
context: Tracked<'a, Context<'a>>,
scopes: Scopes<'a>,
target: Span,
) -> Self {
// Look up whether this evaluation target is currently traced.
let inspected = target.id().and_then(|id| engine.traced.get(id));
Self { engine, context, flow: None, scopes, inspected }
}
/// Access the underlying world.
pub fn world(&self) -> Tracked<'a, dyn World + 'a> {
self.engine.world
}
/// Bind a value to an identifier.
///
/// This will create a [`Binding`] with the value and the identifier's span.
pub fn define(&mut self, var: ast::Ident, value: impl IntoValue) {
self.bind(var, Binding::new(value, var.span()));
}
/// Insert a binding into the current scope.
///
/// This will insert the value into the top-most scope and make it available
/// for dynamic tracing, assisting IDE functionality.
pub fn bind(&mut self, var: ast::Ident, binding: Binding) {
if self.inspected == Some(var.span()) {
self.trace(binding.read().clone());
}
// This will become an error in the parser if `is` becomes a keyword.
if var.get() == "is" {
self.engine.sink.warn(warning!(
var.span(),
"`is` will likely become a keyword in future versions and will \
not be allowed as an identifier";
hint: "rename this variable to avoid future errors";
hint: "try `is_` instead";
));
}
self.scopes.top.bind(var.get().clone(), binding);
}
/// Trace a value.
#[cold]
pub fn trace(&mut self, value: Value) {
self.engine
.sink
.value(value.clone(), self.context.styles().ok().map(|s| s.to_map()));
}
}
/// Provide a hint if the callee is a shadowed standard library function.
pub fn hint_if_shadowed_std(
    vm: &mut Vm,
    callee: &ast::Expr,
    mut err: HintedString,
) -> HintedString {
    // Only a bare identifier can shadow a standard library binding.
    let ast::Expr::Ident(ident) = callee else { return err };
    let ident = ident.get();
    if vm.scopes.check_std_shadowed(ident) {
        err.hint(eco_format!(
            "use `std.{ident}` to access the shadowed standard library function",
        ));
    }
    err
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/markup.rs | crates/typst-eval/src/markup.rs | use typst_library::diag::{At, SourceResult, warning};
use typst_library::foundations::{
Content, Label, NativeElement, Repr, Smart, Symbol, Unlabellable, Value,
};
use typst_library::model::{
EmphElem, EnumItem, HeadingElem, LinkElem, ListItem, ParbreakElem, RefElem,
StrongElem, Supplement, TermItem, Url,
};
use typst_library::text::{
LinebreakElem, RawContent, RawElem, SmartQuoteElem, SpaceElem, TextElem,
};
use typst_syntax::ast::{self, AstNode};
use typst_utils::PicoStr;
use crate::{Eval, Vm};
impl Eval for ast::Markup<'_> {
type Output = Content;
// Delegate to `eval_markup`, which consumes the expression stream.
fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
eval_markup(vm, &mut self.exprs())
}
}
/// Evaluate a stream of markup.
///
/// Set and show rules recurse into the remaining expressions so that their
/// styles apply only to the content that follows them. Label values attach to
/// the closest preceding labellable element instead of producing content.
fn eval_markup<'a>(
vm: &mut Vm,
exprs: &mut impl Iterator<Item = ast::Expr<'a>>,
) -> SourceResult<Content> {
// Stash any in-flight control flow event so nested evaluation starts
// clean; it is restored before returning.
let flow = vm.flow.take();
let mut seq = Vec::with_capacity(exprs.size_hint().1.unwrap_or_default());
while let Some(expr) = exprs.next() {
match expr {
ast::Expr::SetRule(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
}
// Evaluate the rest of the markup and apply the styles to it.
seq.push(eval_markup(vm, exprs)?.styled_with_map(styles))
}
ast::Expr::ShowRule(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;
}
// Evaluate the rest of the markup and apply the recipe to it.
let tail = eval_markup(vm, exprs)?;
seq.push(tail.styled_with_recipe(&mut vm.engine, vm.context, recipe)?)
}
expr => match expr.eval(vm)? {
Value::Label(label) => {
// Attach the label to the closest preceding element that
// can carry one.
if let Some(elem) =
seq.iter_mut().rev().find(|node| !node.can::<dyn Unlabellable>())
{
if elem.label().is_some() {
vm.engine.sink.warn(warning!(
elem.span(), "content labelled multiple times";
hint: "only the last label is used, the rest are ignored";
));
}
*elem = std::mem::take(elem).labelled(label);
} else {
vm.engine.sink.warn(warning!(
expr.span(),
"label `{}` is not attached to anything",
label.repr(),
));
}
}
value => seq.push(value.display().spanned(expr.span())),
},
}
if vm.flow.is_some() {
break;
}
}
if flow.is_some() {
vm.flow = flow;
}
Ok(Content::sequence(seq))
}
// Plain text becomes a text element.
impl Eval for ast::Text<'_> {
type Output = Content;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(TextElem::packed(self.get().clone()))
}
}
// Whitespace becomes a shared space element.
impl Eval for ast::Space<'_> {
type Output = Content;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(SpaceElem::shared().clone())
}
}
// A forced line break (`\`).
impl Eval for ast::Linebreak<'_> {
type Output = Content;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(LinebreakElem::shared().clone())
}
}
// A paragraph break (blank line).
impl Eval for ast::Parbreak<'_> {
type Output = Content;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(ParbreakElem::shared().clone())
}
}
// An escape sequence yields the escaped character as a symbol value.
impl Eval for ast::Escape<'_> {
type Output = Value;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(Value::Symbol(Symbol::runtime_char(self.get())))
}
}
// A shorthand (e.g. `--`) yields its replacement character as a symbol value.
impl Eval for ast::Shorthand<'_> {
type Output = Value;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(Value::Symbol(Symbol::runtime_char(self.get())))
}
}
// A quote character, marked as single or double for smart-quote handling.
impl Eval for ast::SmartQuote<'_> {
type Output = Content;
fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
Ok(SmartQuoteElem::new().with_double(self.double()).pack())
}
}
impl Eval for ast::Strong<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let body = self.body();

        // An empty body (as produced by `**`) is almost certainly a mistake,
        // so emit a warning before evaluating.
        let is_empty = body.exprs().next().is_none();
        if is_empty {
            vm.engine.sink.warn(warning!(
                self.span(), "no text within stars";
                hint: "using multiple consecutive stars (e.g. **) has no additional effect";
            ));
        }

        let inner = body.eval(vm)?;
        Ok(StrongElem::new(inner).pack())
    }
}
impl Eval for ast::Emph<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let body = self.body();

        // An empty body (as produced by `__`) is almost certainly a mistake,
        // so emit a warning before evaluating.
        let is_empty = body.exprs().next().is_none();
        if is_empty {
            vm.engine.sink.warn(warning!(
                self.span(), "no text within underscores";
                hint: "using multiple consecutive underscores (e.g. __) has no additional effect";
            ));
        }

        let inner = body.eval(vm)?;
        Ok(EmphElem::new(inner).pack())
    }
}
impl Eval for ast::Raw<'_> {
    type Output = Content;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        // Keep each line's span around for precise error reporting later.
        let lines = self
            .lines()
            .map(|line| (line.get().clone(), line.span()))
            .collect();

        let block = self.block();
        let mut elem = RawElem::new(RawContent::Lines(lines)).with_block(block);

        // Only set a language if a tag was actually written.
        if let Some(tag) = self.lang() {
            elem.lang.set(Some(tag.get().clone()));
        }

        Ok(elem.pack())
    }
}
impl Eval for ast::Link<'_> {
    type Output = Content;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        // Validate the URL, reporting any error at the link's span.
        let raw = self.get().clone();
        let url = Url::new(raw).at(self.span())?;
        Ok(LinkElem::from_url(url).pack())
    }
}
impl Eval for ast::Label<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        // The parser guarantees a non-empty label, so construction can't fail.
        let interned = PicoStr::intern(self.get());
        let label = Label::new(interned).expect("unexpected empty label");
        Ok(Value::Label(label))
    }
}
impl Eval for ast::Ref<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // The parser guarantees a non-empty reference target.
        let interned = PicoStr::intern(self.target());
        let target = Label::new(interned).expect("unexpected empty reference");

        let mut elem = RefElem::new(target);

        // An explicitly written supplement overrides the default one.
        if let Some(supplement) = self.supplement() {
            let content = supplement.eval(vm)?;
            elem.supplement.set(Smart::Custom(Some(Supplement::Content(content))));
        }

        Ok(elem.pack())
    }
}
impl Eval for ast::Heading<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // The syntactic depth (number of `=` markers) becomes the heading depth.
        let body = self.body().eval(vm)?;
        Ok(HeadingElem::new(body).with_depth(self.depth()).pack())
    }
}
impl Eval for ast::ListItem<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // A list item simply wraps its evaluated body.
        let body = self.body().eval(vm)?;
        Ok(ListItem::new(body).pack())
    }
}
impl Eval for ast::EnumItem<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let mut elem = EnumItem::new(self.body().eval(vm)?);

        // An explicitly written number overrides automatic numbering.
        if let Some(number) = self.number() {
            elem.number.set(Smart::Custom(number));
        }

        Ok(elem.pack())
    }
}
impl Eval for ast::TermItem<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Evaluate the term first, then its description, mirroring source order.
        let term = self.term().eval(vm)?;
        let desc = self.description().eval(vm)?;
        Ok(TermItem::new(term, desc).pack())
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/lib.rs | crates/typst-eval/src/lib.rs | //! Typst's code interpreter.
pub(crate) mod ops;
mod access;
mod binding;
mod call;
mod code;
mod flow;
mod import;
mod markup;
mod math;
mod methods;
mod rules;
mod vm;
pub use self::call::{CapturesVisitor, eval_closure};
pub use self::flow::FlowEvent;
pub use self::import::import;
pub use self::vm::{Vm, hint_if_shadowed_std};
use self::access::*;
use self::binding::*;
use self::methods::*;
use comemo::{Track, Tracked, TrackedMut};
use typst_library::World;
use typst_library::diag::{SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Context, Module, NativeElement, Scope, Scopes, Value};
use typst_library::introspection::Introspector;
use typst_library::math::EquationElem;
use typst_library::routines::Routines;
use typst_syntax::{Source, Span, SyntaxMode, ast, parse, parse_code, parse_math};
use typst_utils::Protected;
/// Evaluate a source file and return the resulting module.
///
/// The result is memoized based on the tracked inputs, so re-evaluating an
/// unchanged source is cheap.
///
/// # Panics
/// Panics if `source` is already being evaluated further up the `route`,
/// i.e. on cyclic evaluation. Callers are expected to rule cycles out
/// before calling this.
#[comemo::memoize]
#[typst_macros::time(name = "eval", span = source.root().span())]
pub fn eval(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    source: &Source,
) -> SourceResult<Module> {
    // Prevent cyclic evaluation.
    let id = source.id();
    if route.contains(id) {
        panic!("Tried to cyclicly evaluate {:?}", id.vpath());
    }
    // Prepare the engine.
    let introspector = Introspector::default();
    let engine = Engine {
        routines,
        world,
        introspector: Protected::new(introspector.track()),
        traced,
        sink,
        // Record this file on the route so nested evaluations can detect cycles.
        route: Route::extend(route).with_id(id),
    };
    // Prepare VM.
    let context = Context::none();
    let scopes = Scopes::new(Some(world.library()));
    let root = source.root();
    let mut vm = Vm::new(engine, context.track(), scopes, root.span());
    // Check for well-formedness unless we are in trace mode.
    // NOTE(review): when `inspected` is set, evaluation proceeds despite parse
    // errors so that a traced value can still be produced — confirm intent.
    let errors = root.errors();
    if !errors.is_empty() && vm.inspected.is_none() {
        return Err(errors.into_iter().map(Into::into).collect());
    }
    // Evaluate the module.
    let markup = root.cast::<ast::Markup>().unwrap();
    let output = markup.eval(&mut vm)?;
    // Handle control flow.
    if let Some(flow) = vm.flow {
        bail!(flow.forbidden());
    }
    // Assemble the module. The module is named after the file's stem.
    let name = id
        .vpath()
        .as_rootless_path()
        .file_stem()
        .unwrap_or_default()
        .to_string_lossy();
    Ok(Module::new(name, vm.scopes.top).with_content(output).with_file_id(id))
}
/// Evaluate a string as code and return the resulting value.
///
/// Everything in the output is associated with the given `span`.
///
/// The extra `scope` is layered on top of the standard library, making its
/// bindings visible to the evaluated string.
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
pub fn eval_string(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    sink: TrackedMut<Sink>,
    introspector: Tracked<Introspector>,
    context: Tracked<Context>,
    string: &str,
    span: Span,
    mode: SyntaxMode,
    scope: Scope,
) -> SourceResult<Value> {
    // Parse according to the requested syntax mode.
    let mut root = match mode {
        SyntaxMode::Code => parse_code(string),
        SyntaxMode::Markup => parse(string),
        SyntaxMode::Math => parse_math(string),
    };
    // Attribute all nodes in the parsed string to the caller-provided span.
    root.synthesize(span);
    // Check for well-formedness.
    let errors = root.errors();
    if !errors.is_empty() {
        return Err(errors.into_iter().map(Into::into).collect());
    }
    // Prepare the engine.
    let traced = Traced::default();
    let engine = Engine {
        routines,
        world,
        introspector: Protected::new(introspector),
        traced: traced.track(),
        sink,
        route: Route::default(),
    };
    // Prepare VM.
    let scopes = Scopes::new(Some(world.library()));
    let mut vm = Vm::new(engine, context, scopes, root.span());
    vm.scopes.scopes.push(scope);
    // Evaluate the code.
    let output = match mode {
        SyntaxMode::Code => root.cast::<ast::Code>().unwrap().eval(&mut vm)?,
        SyntaxMode::Markup => {
            Value::Content(root.cast::<ast::Markup>().unwrap().eval(&mut vm)?)
        }
        // Math is wrapped into an inline (non-block) equation.
        SyntaxMode::Math => Value::Content(
            EquationElem::new(root.cast::<ast::Math>().unwrap().eval(&mut vm)?)
                .with_block(false)
                .pack()
                .spanned(span),
        ),
    };
    // Handle control flow.
    if let Some(flow) = vm.flow {
        bail!(flow.forbidden());
    }
    Ok(output)
}
/// Evaluate an expression.
///
/// Implemented by syntax-tree node types that can be evaluated. Evaluation
/// consumes the node and may mutate the virtual machine's state (scopes,
/// flow events, warnings).
pub trait Eval {
    /// The output of evaluating the expression.
    type Output;
    /// Evaluate the expression to the output value.
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output>;
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/flow.rs | crates/typst-eval/src/flow.rs | use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error};
use typst_library::foundations::{IntoValue, Value, ops};
use typst_syntax::ast::{self, AstNode};
use typst_syntax::{Span, SyntaxKind, SyntaxNode};
use unicode_segmentation::UnicodeSegmentation;
use crate::{Eval, Vm, destructure};
/// The maximum number of loop iterations.
///
/// A while loop exceeding this count is aborted with a "loop seems to be
/// infinite" error.
const MAX_ITERATIONS: usize = 10_000;
/// A control flow event that occurred during evaluation.
///
/// While such an event is pending in the VM, enclosing constructs stop
/// evaluating further expressions until a loop or function call handles it.
#[derive(Debug, Clone, PartialEq)]
pub enum FlowEvent {
    /// Stop iteration in a loop.
    Break(Span),
    /// Skip the remainder of the current iteration in a loop.
    Continue(Span),
    /// Stop execution of a function early, optionally returning an explicit
    /// value. The final boolean indicates whether the return was conditional.
    Return(Span, Option<Value>, bool),
}
impl FlowEvent {
/// Return an error stating that this control flow is forbidden.
pub fn forbidden(&self) -> SourceDiagnostic {
match *self {
Self::Break(span) => {
error!(span, "cannot break outside of loop")
}
Self::Continue(span) => {
error!(span, "cannot continue outside of loop")
}
Self::Return(span, _, _) => {
error!(span, "cannot return outside of function")
}
}
}
}
impl Eval for ast::Conditional<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Evaluate the condition and pick the matching branch; a missing
        // else branch yields `none`.
        let condition = self.condition();
        let chosen = condition.eval(vm)?.cast::<bool>().at(condition.span())?;

        let output = match (chosen, self.else_body()) {
            (true, _) => self.if_body().eval(vm)?,
            (false, Some(else_body)) => else_body.eval(vm)?,
            (false, None) => Value::None,
        };

        // Mark the return as conditional.
        if let Some(FlowEvent::Return(_, _, conditional)) = &mut vm.flow {
            *conditional = true;
        }

        Ok(output)
    }
}
impl Eval for ast::WhileLoop<'_> {
    type Output = Value;
    #[typst_macros::time(name = "while loop", span = self.span())]
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Stash any pending flow event so this loop doesn't consume it; it is
        // restored after the loop finishes.
        let flow = vm.flow.take();
        let mut output = Value::None;
        let mut i = 0;
        let condition = self.condition();
        let body = self.body();
        while condition.eval(vm)?.cast::<bool>().at(condition.span())? {
            // Reject obviously infinite loops up front: a condition that can
            // never change combined with a body that contains no break or
            // return will never terminate.
            if i == 0
                && is_invariant(condition.to_untyped())
                && !can_diverge(body.to_untyped())
            {
                bail!(condition.span(), "condition is always true");
            } else if i >= MAX_ITERATIONS {
                bail!(self.span(), "loop seems to be infinite");
            }
            let value = body.eval(vm)?;
            // Successive iteration results are joined, like in blocks.
            output = ops::join(output, value).at(body.span())?;
            match vm.flow {
                // `break` terminates the loop and is consumed here.
                Some(FlowEvent::Break(_)) => {
                    vm.flow = None;
                    break;
                }
                // `continue` only skips the rest of this iteration.
                Some(FlowEvent::Continue(_)) => vm.flow = None,
                // A return is left pending for the enclosing function.
                Some(FlowEvent::Return(..)) => break,
                None => {}
            }
            i += 1;
        }
        if flow.is_some() {
            vm.flow = flow;
        }
        // Mark the return as conditional.
        if let Some(FlowEvent::Return(_, _, conditional)) = &mut vm.flow {
            *conditional = true;
        }
        Ok(output)
    }
}
impl Eval for ast::ForLoop<'_> {
    type Output = Value;
    #[typst_macros::time(name = "for loop", span = self.span())]
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Stash any pending flow event so this loop doesn't consume it; it is
        // restored after the loop finishes.
        let flow = vm.flow.take();
        let mut output = Value::None;
        // Shared loop body for every kind of iterable, expanded inline so that
        // `?` propagates errors from the enclosing function.
        macro_rules! iter {
            (for $pat:ident in $iterable:expr) => {{
                vm.scopes.enter();
                #[allow(unused_parens)]
                for value in $iterable {
                    destructure(vm, $pat, value.into_value())?;
                    let body = self.body();
                    let value = body.eval(vm)?;
                    // Successive iteration results are joined, like in blocks.
                    output = ops::join(output, value).at(body.span())?;
                    match vm.flow {
                        Some(FlowEvent::Break(_)) => {
                            vm.flow = None;
                            break;
                        }
                        Some(FlowEvent::Continue(_)) => vm.flow = None,
                        Some(FlowEvent::Return(..)) => break,
                        None => {}
                    }
                }
                vm.scopes.exit();
            }};
        }
        let pattern = self.pattern();
        let iterable = self.iterable().eval(vm)?;
        let iterable_type = iterable.ty();
        use ast::Pattern;
        match (pattern, iterable) {
            (_, Value::Array(array)) => {
                // Iterate over values of array.
                iter!(for pattern in array);
            }
            (_, Value::Dict(dict)) => {
                // Iterate over key-value pairs of dict.
                iter!(for pattern in dict.iter());
            }
            (Pattern::Normal(_) | Pattern::Placeholder(_), Value::Str(str)) => {
                // Iterate over graphemes of string.
                iter!(for pattern in str.as_str().graphemes(true));
            }
            (Pattern::Normal(_) | Pattern::Placeholder(_), Value::Bytes(bytes)) => {
                // Iterate over the integers of bytes.
                iter!(for pattern in bytes.as_slice());
            }
            (Pattern::Destructuring(_), Value::Str(_) | Value::Bytes(_)) => {
                bail!(pattern.span(), "cannot destructure values of {}", iterable_type);
            }
            _ => {
                bail!(self.iterable().span(), "cannot loop over {}", iterable_type);
            }
        }
        if flow.is_some() {
            vm.flow = flow;
        }
        // Mark the return as conditional.
        if let Some(FlowEvent::Return(_, _, conditional)) = &mut vm.flow {
            *conditional = true;
        }
        Ok(output)
    }
}
impl Eval for ast::LoopBreak<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // The first flow event wins; do not overwrite a pending one.
        let pending = vm.flow.is_some();
        if !pending {
            vm.flow = Some(FlowEvent::Break(self.span()));
        }
        Ok(Value::None)
    }
}
impl Eval for ast::LoopContinue<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // The first flow event wins; do not overwrite a pending one.
        let pending = vm.flow.is_some();
        if !pending {
            vm.flow = Some(FlowEvent::Continue(self.span()));
        }
        Ok(Value::None)
    }
}
impl Eval for ast::FuncReturn<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Evaluate the returned expression (if any) before flagging the flow
        // event — its side effects must happen either way.
        let value = match self.body() {
            Some(body) => Some(body.eval(vm)?),
            None => None,
        };
        if vm.flow.is_none() {
            // Initially unconditional; conditionals/loops flip the flag.
            vm.flow = Some(FlowEvent::Return(self.span(), value, false));
        }
        Ok(Value::None)
    }
}
/// Whether the expression always evaluates to the same value.
fn is_invariant(expr: &SyntaxNode) -> bool {
match expr.cast() {
Some(ast::Expr::Ident(_)) => false,
Some(ast::Expr::MathIdent(_)) => false,
Some(ast::Expr::FieldAccess(access)) => {
is_invariant(access.target().to_untyped())
}
Some(ast::Expr::FuncCall(call)) => {
is_invariant(call.callee().to_untyped())
&& is_invariant(call.args().to_untyped())
}
_ => expr.children().all(is_invariant),
}
}
/// Whether the expression contains a break or return.
fn can_diverge(expr: &SyntaxNode) -> bool {
    // Check this node itself first, then recurse into the children.
    if matches!(expr.kind(), SyntaxKind::Break | SyntaxKind::Return) {
        return true;
    }
    expr.children().any(can_diverge)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/call.rs | crates/typst-eval/src/call.rs | use comemo::{Tracked, TrackedMut};
use ecow::{EcoString, EcoVec, eco_format};
use typst_library::World;
use typst_library::diag::{
At, HintedStrResult, SourceDiagnostic, SourceResult, Trace, Tracepoint, bail, error,
};
use typst_library::engine::{Engine, Sink, Traced};
use typst_library::foundations::{
Arg, Args, Binding, Capturer, Closure, ClosureNode, Content, Context, Func,
NativeElement, Scope, Scopes, SymbolElem, Value,
};
use typst_library::introspection::Introspector;
use typst_library::math::LrElem;
use typst_library::routines::Routines;
use typst_syntax::ast::{self, AstNode, Ident};
use typst_syntax::{Span, Spanned, SyntaxNode};
use typst_utils::{LazyHash, Protected};
use crate::{
Access, Eval, FlowEvent, Route, Vm, call_method_mut, hint_if_shadowed_std,
is_mutating_method,
};
impl Eval for ast::FuncCall<'_> {
    type Output = Value;
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let span = self.span();
        let callee = self.callee();
        let callee_span = callee.span();
        let args = self.args();
        // Guard against unbounded recursion through user-level function calls.
        vm.engine.route.check_call_depth().at(span)?;
        // Try to evaluate as a call to an associated function or field.
        let (callee_value, args_value) = if let ast::Expr::FieldAccess(access) = callee {
            let target = access.target();
            let field = access.field();
            match eval_field_call(target, field, args, span, vm)? {
                FieldCall::Normal(callee, args) => {
                    if vm.inspected == Some(callee_span) {
                        vm.trace(callee.clone());
                    }
                    (callee, args)
                }
                // The call was already fully performed (e.g. a mutating
                // method); just forward its result.
                FieldCall::Resolved(value) => return Ok(value),
            }
        } else {
            // Function call order: we evaluate the callee before the arguments.
            (callee.eval(vm)?, args.eval(vm)?.spanned(span))
        };
        let func_result = callee_value.clone().cast::<Func>();
        // In math, a non-function callee is not an error: render the value
        // followed by its parenthesized arguments instead.
        if func_result.is_err() && in_math(callee) {
            return wrap_args_in_math(
                callee_value,
                callee_span,
                args_value,
                args.trailing_comma(),
            );
        }
        let func = func_result
            .map_err(|err| hint_if_shadowed_std(vm, &self.callee(), err))
            .at(callee_span)?;
        // Attach a tracepoint so errors inside the call point back here.
        let point = || Tracepoint::Call(func.name().map(Into::into));
        let f = || {
            func.call(&mut vm.engine, vm.context, args_value).trace(
                vm.world(),
                point,
                span,
            )
        };
        // Grow the stack on demand to support deeply recursive user code.
        // Stacker is broken on WASM.
        #[cfg(target_arch = "wasm32")]
        return f();
        #[cfg(not(target_arch = "wasm32"))]
        stacker::maybe_grow(32 * 1024, 2 * 1024 * 1024, f)
    }
}
impl Eval for ast::Args<'_> {
    type Output = Args;
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let mut items = EcoVec::with_capacity(self.items().count());
        for arg in self.items() {
            let span = arg.span();
            match arg {
                // A plain positional argument.
                ast::Arg::Pos(expr) => {
                    items.push(Arg {
                        span,
                        name: None,
                        value: Spanned::new(expr.eval(vm)?, expr.span()),
                    });
                }
                // A `name: value` argument.
                ast::Arg::Named(named) => {
                    let expr = named.expr();
                    items.push(Arg {
                        span,
                        name: Some(named.name().get().clone().into()),
                        value: Spanned::new(expr.eval(vm)?, expr.span()),
                    });
                }
                // A `..spread` argument: arrays spread into positional
                // arguments, dictionaries into named ones, and `arguments`
                // values verbatim. Spreading `none` is a no-op.
                ast::Arg::Spread(spread) => match spread.expr().eval(vm)? {
                    Value::None => {}
                    Value::Array(array) => {
                        items.extend(array.into_iter().map(|value| Arg {
                            span,
                            name: None,
                            value: Spanned::new(value, span),
                        }));
                    }
                    Value::Dict(dict) => {
                        items.extend(dict.into_iter().map(|(key, value)| Arg {
                            span,
                            name: Some(key),
                            value: Spanned::new(value, span),
                        }));
                    }
                    Value::Args(args) => items.extend(args.items),
                    v => bail!(spread.span(), "cannot spread {}", v.ty()),
                },
            }
        }
        // We do *not* use the `self.span()` here because we want the callsite
        // span to be one level higher (the whole function call).
        Ok(Args { span: Span::detached(), items })
    }
}
impl Eval for ast::Closure<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Evaluate the default values of named parameters eagerly, in
        // declaration order, so they observe the definition-site scope.
        let defaults = self
            .params()
            .children()
            .filter_map(|param| match param {
                ast::Param::Named(named) => Some(named.expr().eval(vm)),
                _ => None,
            })
            .collect::<SourceResult<Vec<_>>>()?;

        // Determine which outer variables the closure body references and
        // record them for capture.
        let mut visitor = CapturesVisitor::new(Some(&vm.scopes), Capturer::Function);
        visitor.visit(self.to_untyped());
        let captured = visitor.finish();

        // The number of positional parameters, needed for arity handling at
        // call time.
        let num_pos_params = self
            .params()
            .children()
            .filter(|p| matches!(p, ast::Param::Pos(_)))
            .count();

        // Define the closure.
        let closure = Closure {
            node: ClosureNode::Closure(self.to_untyped().clone()),
            defaults,
            captured,
            num_pos_params,
        };
        Ok(Value::Func(Func::from(closure).spanned(self.params().span())))
    }
}
/// Call the function in the context with the arguments.
///
/// Memoized on the tracked inputs and the concrete arguments.
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
pub fn eval_closure(
    func: &Func,
    closure: &LazyHash<Closure>,
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    context: Tracked<Context>,
    mut args: Args,
) -> SourceResult<Value> {
    // A `Closure` node carries a full closure with name and parameters,
    // while a `Context` node carries only a body with no parameters.
    let (name, params, body) = match closure.node {
        ClosureNode::Closure(ref node) => {
            let closure =
                node.cast::<ast::Closure>().expect("node to be an `ast::Closure`");
            (closure.name(), closure.params(), closure.body())
        }
        ClosureNode::Context(ref node) => {
            (None, ast::Params::default(), node.cast().unwrap())
        }
    };
    // Don't leak the scopes from the call site. Instead, we use the scope
    // of captured variables we collected earlier.
    let mut scopes = Scopes::new(None);
    scopes.top = closure.captured.clone();
    // Prepare the engine.
    let introspector = Protected::from_raw(introspector);
    let engine = Engine {
        routines,
        world,
        introspector,
        traced,
        sink,
        route: Route::extend(route),
    };
    // Prepare VM.
    let mut vm = Vm::new(engine, context, scopes, body.span());
    // Provide the closure itself for recursive calls.
    if let Some(name) = name {
        vm.define(name, func.clone());
    }
    // Positional arguments beyond the number of positional parameters
    // overflow into the argument sink (`..name`), if there is one.
    let num_pos_args = args.to_pos().len();
    let sink_size = num_pos_args.checked_sub(closure.num_pos_params);
    let mut sink = None;
    let mut sink_pos_values = None;
    // Defaults are stored in declaration order of the named parameters.
    let mut defaults = closure.defaults.iter();
    for p in params.children() {
        match p {
            // A simple identifier binds directly; any other pattern is
            // destructured.
            ast::Param::Pos(pattern) => match pattern {
                ast::Pattern::Normal(ast::Expr::Ident(ident)) => {
                    vm.define(ident, args.expect::<Value>(&ident)?)
                }
                pattern => {
                    crate::destructure(
                        &mut vm,
                        pattern,
                        args.expect::<Value>("pattern parameter")?,
                    )?;
                }
            },
            ast::Param::Spread(spread) => {
                sink = Some(spread.sink_ident());
                if let Some(sink_size) = sink_size {
                    sink_pos_values = Some(args.consume(sink_size)?);
                }
            }
            // Named parameters fall back to their evaluated default value.
            ast::Param::Named(named) => {
                let name = named.name();
                let default = defaults.next().unwrap();
                let value =
                    args.named::<Value>(&name)?.unwrap_or_else(|| default.clone());
                vm.define(name, value);
            }
        }
    }
    if let Some(sink) = sink {
        // Remaining args are captured regardless of whether the sink is named.
        let mut remaining_args = args.take();
        if let Some(sink_name) = sink {
            if let Some(sink_pos_values) = sink_pos_values {
                remaining_args.items.extend(sink_pos_values);
            }
            vm.define(sink_name, remaining_args);
        }
    }
    // Ensure all arguments have been used.
    args.finish()?;
    // Handle control flow.
    let output = body.eval(&mut vm)?;
    match vm.flow {
        // An explicit `return value` wins over the body's joined output.
        Some(FlowEvent::Return(_, Some(explicit), _)) => return Ok(explicit),
        Some(FlowEvent::Return(_, None, _)) => {}
        Some(flow) => bail!(flow.forbidden()),
        None => {}
    }
    Ok(output)
}
/// This used only as the return value of `eval_field_call`.
/// - `Normal` means that we have a function to call and the arguments to call it with.
/// - `Resolved` means that we have already resolved the call and have the value.
enum FieldCall {
    /// A callable value together with its already-evaluated arguments.
    Normal(Value, Args),
    /// The call was already performed; this is its result.
    Resolved(Value),
}
/// Evaluate a field call's callee and arguments.
///
/// This follows the normal function call order: we evaluate the callee before the
/// arguments.
///
/// Prioritize associated functions on the value's type (e.g., methods) over its fields.
/// A function call on a field is only allowed for functions, types, modules (because
/// they are scopes), and symbols (because they have modifiers or associated functions).
///
/// For dictionaries, it is not allowed because it would be ambiguous - prioritizing
/// associated functions would make an addition of a new associated function a breaking
/// change and prioritizing fields would break associated functions for certain
/// dictionaries.
fn eval_field_call(
    target_expr: ast::Expr,
    field: Ident,
    args: ast::Args,
    span: Span,
    vm: &mut Vm,
) -> SourceResult<FieldCall> {
    // Evaluate the field-call's target and overall arguments.
    let (target, mut args) = if is_mutating_method(&field) {
        // If `field` looks like a mutating method, we evaluate the arguments first,
        // because `target_expr.access(vm)` mutably borrows the `vm`, so that we can't
        // evaluate the arguments after it.
        let args = args.eval(vm)?.spanned(span);
        // However, this difference from the normal call order is not observable because
        // expressions like `(1, arr.len(), 2, 3).push(arr.pop())` evaluate the target to
        // a temporary which we disallow mutation on (returning an error).
        // Theoretically this could be observed if a method matching `is_mutating_method`
        // was added to some type in the future and we didn't update this function.
        match target_expr.access(vm)? {
            // Only arrays and dictionaries have mutable methods.
            target @ (Value::Array(_) | Value::Dict(_)) => {
                let value = call_method_mut(target, &field, args, span);
                let point = || Tracepoint::Call(Some(field.get().clone()));
                return Ok(FieldCall::Resolved(value.trace(vm.world(), point, span)?));
            }
            target => (target.clone(), args),
        }
    } else {
        let target = target_expr.eval(vm)?;
        let args = args.eval(vm)?.spanned(span);
        (target, args)
    };
    let field_span = field.span();
    // NOTE(review): `read_checked` presumably reports usage warnings into this
    // engine/span sink — confirm against `Binding::read_checked`.
    let sink = (&mut vm.engine, field_span);
    if let Some(callee) = target.ty().scope().get(&field) {
        // An associated function on the value's type: the target becomes the
        // first (receiver) argument.
        args.insert(0, target_expr.span(), target);
        Ok(FieldCall::Normal(callee.read_checked(sink).clone(), args))
    } else if let Value::Content(content) = &target {
        if let Some(callee) = content.elem().scope().get(&field) {
            args.insert(0, target_expr.span(), target);
            Ok(FieldCall::Normal(callee.read_checked(sink).clone(), args))
        } else {
            bail!(missing_field_call_error(target, field))
        }
    } else if matches!(
        target,
        Value::Symbol(_) | Value::Func(_) | Value::Type(_) | Value::Module(_)
    ) {
        // Certain value types may have their own ways to access method fields.
        // e.g. `$arrow.r(v)$`, `table.cell[..]`
        let value = target.field(&field, sink).at(field_span)?;
        Ok(FieldCall::Normal(value, args))
    } else {
        // Otherwise we cannot call this field.
        bail!(missing_field_call_error(target, field))
    }
}
/// Produce an error when we cannot call the field.
///
/// Adds a targeted hint when a non-callable field of that name does exist on
/// the target.
fn missing_field_call_error(target: Value, field: Ident) -> SourceDiagnostic {
    let mut error = match &target {
        Value::Content(content) => error!(
            field.span(),
            "element {} has no method `{}`",
            content.elem().name(),
            field.as_str(),
        ),
        _ => error!(
            field.span(),
            "type {} has no method `{}`",
            target.ty(),
            field.as_str(),
        ),
    };
    match target {
        // The dictionary stores a function under this key — the user most
        // likely wanted to call it and needs parentheses to do so.
        Value::Dict(ref dict) if matches!(dict.get(&field), Ok(Value::Func(_))) => {
            error.hint(eco_format!(
                "to call the function stored in the dictionary, surround \
                the field access with parentheses, e.g. `(dict.{})(..)`",
                field.as_str(),
            ));
        }
        // A plain (non-callable) field of this name exists on the target.
        _ if target.field(&field, ()).is_ok() => {
            error.hint(eco_format!(
                "did you mean to access the field `{}`?",
                field.as_str(),
            ));
        }
        _ => {}
    }
    error
}
/// Check if the expression is in a math context.
fn in_math(expr: ast::Expr) -> bool {
    // Walk down through field accesses to the innermost target; only a math
    // identifier marks a math context.
    let mut current = expr;
    loop {
        match current {
            ast::Expr::MathIdent(_) => return true,
            ast::Expr::FieldAccess(access) => current = access.target(),
            _ => return false,
        }
    }
}
/// For non-functions in math, we wrap the arguments in parentheses.
///
/// E.g. `f(x, y)` where `f` evaluates to a non-function renders as the
/// display of `f` directly followed by `(x, y)`.
fn wrap_args_in_math(
    callee: Value,
    callee_span: Span,
    mut args: Args,
    trailing_comma: bool,
) -> SourceResult<Value> {
    // Re-join the positional arguments with comma symbols.
    let mut body = Content::empty();
    for (i, arg) in args.all::<Content>()?.into_iter().enumerate() {
        if i > 0 {
            body += SymbolElem::packed(',');
        }
        body += arg;
    }
    // Preserve a trailing comma that the user wrote.
    if trailing_comma {
        body += SymbolElem::packed(',');
    }
    // Append the arguments, wrapped in matched parentheses, to the callee's
    // displayed form.
    let formatted = callee.display().spanned(callee_span)
        + LrElem::new(SymbolElem::packed('(') + body + SymbolElem::packed(')'))
            .pack()
            .spanned(args.span);
    // Error out if any arguments (e.g. named ones) were left unconsumed.
    args.finish()?;
    Ok(Value::Content(formatted))
}
/// A visitor that determines which variables to capture for a closure.
pub struct CapturesVisitor<'a> {
    // The scopes at the closure's definition site; `None` during IDE capture
    // analysis (see `capture`).
    external: Option<&'a Scopes<'a>>,
    // Names bound within the closure itself; these shadow external bindings
    // and are therefore not captured.
    internal: Scopes<'a>,
    // The captured bindings collected so far.
    captures: Scope,
    // How bindings are captured (passed through to `Binding::capture`).
    capturer: Capturer,
}
impl<'a> CapturesVisitor<'a> {
    /// Create a new visitor for the given external scopes.
    pub fn new(external: Option<&'a Scopes<'a>>, capturer: Capturer) -> Self {
        Self {
            external,
            internal: Scopes::new(None),
            captures: Scope::new(),
            capturer,
        }
    }
    /// Return the scope of captured variables.
    pub fn finish(self) -> Scope {
        self.captures
    }
    /// Visit any node and collect all captured variables.
    ///
    /// Recursively walks `node`, tracking locally bound names in `internal`
    /// and recording references to external names in `captures`.
    pub fn visit(&mut self, node: &SyntaxNode) {
        match node.cast() {
            // Every identifier is a potential variable that we need to capture.
            // Identifiers that shouldn't count as captures because they
            // actually bind a new name are handled below (individually through
            // the expressions that contain them).
            Some(ast::Expr::Ident(ident)) => self.capture(ident.get(), Scopes::get),
            Some(ast::Expr::MathIdent(ident)) => {
                self.capture(ident.get(), Scopes::get_in_math)
            }
            // Code and content blocks create a scope.
            Some(ast::Expr::CodeBlock(_) | ast::Expr::ContentBlock(_)) => {
                self.internal.enter();
                for child in node.children() {
                    self.visit(child);
                }
                self.internal.exit();
            }
            // Don't capture the field of a field access.
            Some(ast::Expr::FieldAccess(access)) => {
                self.visit(access.target().to_untyped());
            }
            // A closure contains parameter bindings, which are bound before the
            // body is evaluated. Care must be taken so that the default values
            // of named parameters cannot access previous parameter bindings.
            Some(ast::Expr::Closure(expr)) => {
                for param in expr.params().children() {
                    if let ast::Param::Named(named) = param {
                        self.visit(named.expr().to_untyped());
                    }
                }
                self.internal.enter();
                if let Some(name) = expr.name() {
                    self.bind(name);
                }
                for param in expr.params().children() {
                    match param {
                        ast::Param::Pos(pattern) => {
                            for ident in pattern.bindings() {
                                self.bind(ident);
                            }
                        }
                        ast::Param::Named(named) => self.bind(named.name()),
                        ast::Param::Spread(spread) => {
                            if let Some(ident) = spread.sink_ident() {
                                self.bind(ident);
                            }
                        }
                    }
                }
                self.visit(expr.body().to_untyped());
                self.internal.exit();
            }
            // A let expression contains a binding, but that binding is only
            // active after the body is evaluated.
            Some(ast::Expr::LetBinding(expr)) => {
                if let Some(init) = expr.init() {
                    self.visit(init.to_untyped());
                }
                for ident in expr.kind().bindings() {
                    self.bind(ident);
                }
            }
            // A for loop contains one or two bindings in its pattern. These are
            // active after the iterable is evaluated but before the body is
            // evaluated.
            Some(ast::Expr::ForLoop(expr)) => {
                self.visit(expr.iterable().to_untyped());
                self.internal.enter();
                let pattern = expr.pattern();
                for ident in pattern.bindings() {
                    self.bind(ident);
                }
                self.visit(expr.body().to_untyped());
                self.internal.exit();
            }
            // An import contains items, but these are active only after the
            // path is evaluated.
            Some(ast::Expr::ModuleImport(expr)) => {
                self.visit(expr.source().to_untyped());
                if let Some(ast::Imports::Items(items)) = expr.imports() {
                    for item in items.iter() {
                        self.bind(item.bound_name());
                    }
                }
            }
            _ => {
                // Never capture the name part of a named pair.
                if let Some(named) = node.cast::<ast::Named>() {
                    self.visit(named.expr().to_untyped());
                    return;
                }
                // Everything else is traversed from left to right.
                for child in node.children() {
                    self.visit(child);
                }
            }
        }
    }
    /// Bind a new internal variable.
    fn bind(&mut self, ident: ast::Ident) {
        // The concrete value does not matter as we only use the scoping
        // mechanism of `Scopes`, not the values themselves.
        self.internal
            .top
            .bind(ident.get().clone(), Binding::detached(Value::None));
    }
    /// Capture a variable if it isn't internal.
    ///
    /// `getter` decides how the name is looked up in the external scopes
    /// (plain `get` vs. the math-specific `get_in_math`).
    fn capture(
        &mut self,
        ident: &EcoString,
        getter: impl FnOnce(&'a Scopes<'a>, &str) -> HintedStrResult<&'a Binding>,
    ) {
        if self.internal.get(ident).is_ok() {
            return;
        }
        let binding = match self.external {
            Some(external) => match getter(external, ident) {
                Ok(binding) => binding.capture(self.capturer),
                // Names that resolve nowhere are left for evaluation to
                // report; they are simply not captured.
                Err(_) => return,
            },
            // The external scopes are only `None` when we are doing IDE capture
            // analysis, in which case the concrete value doesn't matter.
            None => Binding::detached(Value::None),
        };
        self.captures.bind(ident.clone(), binding);
    }
}
#[cfg(test)]
mod tests {
    use typst_syntax::parse;
    use super::*;
    /// Parse `text`, run capture analysis against `scopes`, and assert that
    /// exactly the names in `result` (sorted) were captured.
    #[track_caller]
    fn test(scopes: &Scopes, text: &str, result: &[&str]) {
        let mut visitor = CapturesVisitor::new(Some(scopes), Capturer::Function);
        let root = parse(text);
        visitor.visit(&root);
        let captures = visitor.finish();
        let mut names: Vec<_> = captures.iter().map(|(k, ..)| k).collect();
        names.sort();
        assert_eq!(names, result);
    }
    #[test]
    fn test_captures() {
        let mut scopes = Scopes::new(None);
        scopes.top.define("f", 0);
        scopes.top.define("x", 0);
        scopes.top.define("y", 0);
        scopes.top.define("z", 0);
        let s = &scopes;
        // Let binding and function definition.
        test(s, "#let x = x", &["x"]);
        test(s, "#let x; #(x + y)", &["y"]);
        test(s, "#let f(x, y) = x + y", &[]);
        test(s, "#let f(x, y) = f", &[]);
        test(s, "#let f = (x, y) => f", &["f"]);
        // Closure with different kinds of params.
        test(s, "#((x, y) => x + z)", &["z"]);
        test(s, "#((x: y, z) => x + z)", &["y"]);
        test(s, "#((..x) => x + y)", &["y"]);
        test(s, "#((x, y: x + z) => x + y)", &["x", "z"]);
        test(s, "#{x => x; x}", &["x"]);
        // Show rule.
        test(s, "#show y: x => x", &["y"]);
        test(s, "#show y: x => x + z", &["y", "z"]);
        test(s, "#show x: x => x", &["x"]);
        // For loop.
        test(s, "#for x in y { x + z }", &["y", "z"]);
        test(s, "#for (x, y) in y { x + y }", &["y"]);
        test(s, "#for x in y {} #x", &["x", "y"]);
        // Import.
        test(s, "#import z: x, y", &["z"]);
        test(s, "#import x + y: x, y, z", &["x", "y"]);
        // Blocks.
        test(s, "#{ let x = 1; { let y = 2; y }; x + y }", &["y"]);
        test(s, "#[#let x = 1]#x", &["x"]);
        // Field access.
        test(s, "#x.y.f(z)", &["x", "z"]);
        // Parenthesized expressions.
        test(s, "#f(x: 1)", &["f"]);
        test(s, "#(x: 1)", &[]);
        test(s, "#(x = 1)", &["x"]);
        test(s, "#(x += y)", &["x", "y"]);
        test(s, "#{ (x, z) = (y, 1) }", &["x", "y", "z"]);
        test(s, "#(x.at(y) = 5)", &["x", "y"]);
    }
    #[test]
    fn test_captures_in_math() {
        let mut scopes = Scopes::new(None);
        scopes.top.define("f", 0);
        scopes.top.define("x", 0);
        scopes.top.define("y", 0);
        scopes.top.define("z", 0);
        // Multi-letter variables are required for math.
        scopes.top.define("foo", 0);
        scopes.top.define("bar", 0);
        scopes.top.define("x-bar", 0);
        scopes.top.define("x_bar", 0);
        let s = &scopes;
        // Basic math identifier differences.
        test(s, "$ x f(z) $", &[]); // single letters not captured.
        test(s, "$ #x #f(z) $", &["f", "x", "z"]);
        test(s, "$ foo f(bar) $", &["bar", "foo"]);
        test(s, "$ #foo[#$bar$] $", &["bar", "foo"]);
        test(s, "$ #let foo = x; foo $", &["x"]);
        // Math idents don't have dashes/underscores
        test(s, "$ x-y x_y foo-x x_bar $", &["bar", "foo"]);
        test(s, "$ #x-bar #x_bar $", &["x-bar", "x_bar"]);
        // Named-params.
        test(s, "$ foo(bar: y) $", &["foo"]);
        test(s, "$ foo(x-y: 1, bar-z: 2) $", &["foo"]);
        // Field access in math.
        test(s, "$ foo.bar $", &["foo"]);
        test(s, "$ foo.x $", &["foo"]);
        test(s, "$ x.foo $", &["foo"]);
        test(s, "$ foo . bar $", &["bar", "foo"]);
        test(s, "$ foo.x.y.bar(z) $", &["foo"]);
        test(s, "$ foo.x-bar $", &["bar", "foo"]);
        test(s, "$ foo.x_bar $", &["bar", "foo"]);
        test(s, "$ #x_bar.x-bar $", &["x_bar"]);
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/math.rs | crates/typst-eval/src/math.rs | use ecow::eco_format;
use typst_library::diag::{At, SourceResult};
use typst_library::foundations::{Content, NativeElement, Symbol, SymbolElem, Value};
use typst_library::math::{
AlignPointElem, AttachElem, EquationElem, FracElem, LrElem, PrimesElem, RootElem,
};
use typst_library::text::TextElem;
use typst_syntax::ast::{self, AstNode, MathTextKind};
use crate::{Eval, Vm};
impl Eval for ast::Equation<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Evaluate the equation body and wrap it in an `EquationElem`,
        // carrying over whether the AST node reports the equation as
        // block-level.
        let body = self.body().eval(vm)?;
        let block = self.block();
        Ok(EquationElem::new(body).with_block(block).pack())
    }
}
impl Eval for ast::Math<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Evaluate every child expression in display form, short-circuiting
        // on the first error, then join the pieces into one content sequence.
        let mut pieces = Vec::new();
        for expr in self.exprs() {
            pieces.push(expr.eval_display(vm)?);
        }
        Ok(Content::sequence(pieces))
    }
}
impl Eval for ast::MathText<'_> {
    type Output = Content;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        // Single graphemes become symbol elements, number runs become text.
        match self.get() {
            MathTextKind::Grapheme(text) => Ok(SymbolElem::packed(text.clone())),
            MathTextKind::Number(text) => Ok(TextElem::packed(text.clone())),
        }
    }
}

impl Eval for ast::MathIdent<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Math identifiers resolve through the math-specific scope lookup
        // (`get_in_math`) rather than the plain `get`.
        let span = self.span();
        Ok(vm
            .scopes
            .get_in_math(&self)
            .at(span)?
            .read_checked((&mut vm.engine, span))
            .clone())
    }
}

impl Eval for ast::MathShorthand<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        // A shorthand evaluates to the symbol for its replacement character.
        Ok(Value::Symbol(Symbol::runtime_char(self.get())))
    }
}

impl Eval for ast::MathAlignPoint<'_> {
    type Output = Content;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(AlignPointElem::shared().clone())
    }
}

impl Eval for ast::MathDelimited<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Join opening delimiter, body, and closing delimiter into one
        // left/right element.
        let open = self.open().eval_display(vm)?;
        let body = self.body().eval(vm)?;
        let close = self.close().eval_display(vm)?;
        Ok(LrElem::new(open + body + close).pack())
    }
}

impl Eval for ast::MathAttach<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Attachments are evaluated in source order: top, primes, bottom.
        let base = self.base().eval_display(vm)?;
        let mut elem = AttachElem::new(base);

        if let Some(expr) = self.top() {
            elem.t.set(Some(expr.eval_display(vm)?));
        }

        // Always attach primes in scripts style (not limits style),
        // i.e. at the top-right corner.
        if let Some(primes) = self.primes() {
            elem.tr.set(Some(primes.eval(vm)?));
        }

        if let Some(expr) = self.bottom() {
            elem.b.set(Some(expr.eval_display(vm)?));
        }

        Ok(elem.pack())
    }
}

impl Eval for ast::MathPrimes<'_> {
    type Output = Content;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(PrimesElem::new(self.count()).pack())
    }
}

impl Eval for ast::MathFrac<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let num_expr = self.num();
        let num = num_expr.eval_display(vm)?;
        let denom_expr = self.denom();
        let denom = denom_expr.eval_display(vm)?;

        // Record whether the parser removed parentheses around the
        // numerator/denominator and carry that flag on the element.
        let num_depar =
            matches!(num_expr, ast::Expr::Math(math) if math.was_deparenthesized());
        let denom_depar =
            matches!(denom_expr, ast::Expr::Math(math) if math.was_deparenthesized());

        Ok(FracElem::new(num, denom)
            .with_num_deparenthesized(num_depar)
            .with_denom_deparenthesized(denom_depar)
            .pack())
    }
}

impl Eval for ast::MathRoot<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Use `TextElem` to match `MathTextKind::Number` above.
        let index = self.index().map(|i| TextElem::packed(eco_format!("{i}")));
        let radicand = self.radicand().eval_display(vm)?;
        Ok(RootElem::new(radicand).with_index(index).pack())
    }
}

/// Evaluates an expression and displays the resulting value as content,
/// attaching the expression's span.
trait ExprExt {
    fn eval_display(&self, vm: &mut Vm) -> SourceResult<Content>;
}

impl ExprExt for ast::Expr<'_> {
    fn eval_display(&self, vm: &mut Vm) -> SourceResult<Content> {
        Ok(self.eval(vm)?.display().spanned(self.span()))
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/methods.rs | crates/typst-eval/src/methods.rs | //! Handles special built-in methods on values.
use ecow::{EcoString, eco_format};
use typst_library::diag::{At, SourceResult};
use typst_library::foundations::{Args, Str, Type, Value};
use typst_syntax::Span;
/// Whether the given method name mutates the receiver
/// (handled by `call_method_mut`).
pub(crate) fn is_mutating_method(method: &str) -> bool {
    ["push", "pop", "insert", "remove"].contains(&method)
}
/// Whether the given method name yields a mutable slot into the receiver
/// (handled by `call_method_access`).
pub(crate) fn is_accessor_method(method: &str) -> bool {
    ["first", "last", "at"].contains(&method)
}
/// Call a mutating method on a value.
pub(crate) fn call_method_mut(
    value: &mut Value,
    method: &str,
    mut args: Args,
    span: Span,
) -> SourceResult<Value> {
    let ty = value.ty();
    let missing = || Err(missing_method(ty, method)).at(span);

    // Most mutating methods return nothing; `pop` and `remove` yield the
    // removed value.
    let mut output = Value::None;

    match value {
        Value::Array(array) => match method {
            "push" => array.push(args.expect("value")?),
            "pop" => output = array.pop().at(span)?,
            "insert" => {
                array.insert(args.expect("index")?, args.expect("value")?).at(span)?
            }
            "remove" => {
                output = array
                    .remove(args.expect("index")?, args.named("default")?)
                    .at(span)?
            }
            _ => return missing(),
        },
        Value::Dict(dict) => match method {
            "insert" => dict.insert(args.expect::<Str>("key")?, args.expect("value")?),
            "remove" => {
                output =
                    dict.remove(args.expect("key")?, args.named("default")?).at(span)?
            }
            _ => return missing(),
        },
        _ => return missing(),
    }

    // Error on any leftover (unexpected) arguments.
    args.finish()?;
    Ok(output)
}

/// Call an accessor method on a value, returning a mutable reference into it.
pub(crate) fn call_method_access<'a>(
    value: &'a mut Value,
    method: &str,
    mut args: Args,
    span: Span,
) -> SourceResult<&'a mut Value> {
    let ty = value.ty();
    let missing = || Err(missing_method(ty, method)).at(span);

    let slot = match value {
        Value::Array(array) => match method {
            "first" => array.first_mut().at(span)?,
            "last" => array.last_mut().at(span)?,
            "at" => array.at_mut(args.expect("index")?).at(span)?,
            _ => return missing(),
        },
        Value::Dict(dict) => match method {
            "at" => dict.at_mut(&args.expect::<Str>("key")?).at(span)?,
            _ => return missing(),
        },
        _ => return missing(),
    };

    // Error on any leftover (unexpected) arguments.
    args.finish()?;
    Ok(slot)
}

/// The missing method error message.
#[cold]
fn missing_method(ty: Type, method: &str) -> EcoString {
    eco_format!("type {ty} has no method `{method}`")
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/code.rs | crates/typst-eval/src/code.rs | use ecow::{EcoVec, eco_vec};
use typst_library::diag::{At, SourceResult, bail, error, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{
Array, Capturer, Closure, ClosureNode, Content, ContextElem, Dict, Func,
NativeElement, Selector, Str, Value, ops,
};
use typst_library::introspection::{Counter, State};
use typst_syntax::ast::{self, AstNode};
use typst_utils::singleton;
use crate::{CapturesVisitor, Eval, FlowEvent, Vm};
impl Eval for ast::Code<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        eval_code(vm, &mut self.exprs())
    }
}

/// Evaluate a stream of expressions.
fn eval_code<'a>(
    vm: &mut Vm,
    exprs: &mut impl Iterator<Item = ast::Expr<'a>>,
) -> SourceResult<Value> {
    // Stash any pre-existing flow event; it is restored after the stream has
    // been evaluated.
    let flow = vm.flow.take();
    let mut output = Value::None;

    while let Some(expr) = exprs.next() {
        let span = expr.span();
        let value = match expr {
            ast::Expr::SetRule(set) => {
                let styles = set.eval(vm)?;
                if vm.flow.is_some() {
                    break;
                }

                // A set rule applies to everything that follows it, so the
                // rest of the stream is evaluated recursively and styled.
                let tail = eval_code(vm, exprs)?.display();
                Value::Content(tail.styled_with_map(styles))
            }
            ast::Expr::ShowRule(show) => {
                let recipe = show.eval(vm)?;
                if vm.flow.is_some() {
                    break;
                }

                // Same tail treatment as for set rules, with a show recipe.
                let tail = eval_code(vm, exprs)?.display();
                Value::Content(tail.styled_with_recipe(
                    &mut vm.engine,
                    vm.context,
                    recipe,
                )?)
            }
            _ => expr.eval(vm)?,
        };

        // Join the freshly evaluated value onto the output accumulated so far.
        output = ops::join(output, value).at(span)?;

        // A flow event (e.g. a `return`) stops evaluation of the stream.
        if let Some(event) = &vm.flow {
            warn_for_discarded_content(&mut vm.engine, event, &output);
            break;
        }
    }

    if flow.is_some() {
        vm.flow = flow;
    }

    Ok(output)
}
impl Eval for ast::Expr<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let span = self.span();
        // Set and show rules are handled by `eval_code`/markup evaluation
        // and are rejected here as standalone expressions.
        let forbidden = |name| {
            error!(span, "{} is only allowed directly in code and content blocks", name)
        };

        // Dispatch to the node-specific `Eval` impl, lifting content-producing
        // nodes into `Value::Content`.
        let v = match self {
            Self::Text(v) => v.eval(vm).map(Value::Content),
            Self::Space(v) => v.eval(vm).map(Value::Content),
            Self::Linebreak(v) => v.eval(vm).map(Value::Content),
            Self::Parbreak(v) => v.eval(vm).map(Value::Content),
            Self::Escape(v) => v.eval(vm),
            Self::Shorthand(v) => v.eval(vm),
            Self::SmartQuote(v) => v.eval(vm).map(Value::Content),
            Self::Strong(v) => v.eval(vm).map(Value::Content),
            Self::Emph(v) => v.eval(vm).map(Value::Content),
            Self::Raw(v) => v.eval(vm).map(Value::Content),
            Self::Link(v) => v.eval(vm).map(Value::Content),
            Self::Label(v) => v.eval(vm),
            Self::Ref(v) => v.eval(vm).map(Value::Content),
            Self::Heading(v) => v.eval(vm).map(Value::Content),
            Self::ListItem(v) => v.eval(vm).map(Value::Content),
            Self::EnumItem(v) => v.eval(vm).map(Value::Content),
            Self::TermItem(v) => v.eval(vm).map(Value::Content),
            Self::Equation(v) => v.eval(vm).map(Value::Content),
            Self::Math(v) => v.eval(vm).map(Value::Content),
            Self::MathText(v) => v.eval(vm).map(Value::Content),
            Self::MathIdent(v) => v.eval(vm),
            Self::MathShorthand(v) => v.eval(vm),
            Self::MathAlignPoint(v) => v.eval(vm).map(Value::Content),
            Self::MathDelimited(v) => v.eval(vm).map(Value::Content),
            Self::MathAttach(v) => v.eval(vm).map(Value::Content),
            Self::MathPrimes(v) => v.eval(vm).map(Value::Content),
            Self::MathFrac(v) => v.eval(vm).map(Value::Content),
            Self::MathRoot(v) => v.eval(vm).map(Value::Content),
            Self::Ident(v) => v.eval(vm),
            Self::None(v) => v.eval(vm),
            Self::Auto(v) => v.eval(vm),
            Self::Bool(v) => v.eval(vm),
            Self::Int(v) => v.eval(vm),
            Self::Float(v) => v.eval(vm),
            Self::Numeric(v) => v.eval(vm),
            Self::Str(v) => v.eval(vm),
            Self::CodeBlock(v) => v.eval(vm),
            Self::ContentBlock(v) => v.eval(vm).map(Value::Content),
            Self::Array(v) => v.eval(vm).map(Value::Array),
            Self::Dict(v) => v.eval(vm).map(Value::Dict),
            Self::Parenthesized(v) => v.eval(vm),
            Self::FieldAccess(v) => v.eval(vm),
            Self::FuncCall(v) => v.eval(vm),
            Self::Closure(v) => v.eval(vm),
            Self::Unary(v) => v.eval(vm),
            Self::Binary(v) => v.eval(vm),
            Self::LetBinding(v) => v.eval(vm),
            Self::DestructAssignment(v) => v.eval(vm),
            Self::SetRule(_) => bail!(forbidden("set")),
            Self::ShowRule(_) => bail!(forbidden("show")),
            Self::Contextual(v) => v.eval(vm).map(Value::Content),
            Self::Conditional(v) => v.eval(vm),
            Self::WhileLoop(v) => v.eval(vm),
            Self::ForLoop(v) => v.eval(vm),
            Self::ModuleImport(v) => v.eval(vm),
            Self::ModuleInclude(v) => v.eval(vm).map(Value::Content),
            Self::LoopBreak(v) => v.eval(vm),
            Self::LoopContinue(v) => v.eval(vm),
            Self::FuncReturn(v) => v.eval(vm),
        }?
        .spanned(span);

        // Record the value for the tracer when this span is under inspection.
        if vm.inspected == Some(span) {
            vm.trace(v.clone());
        }

        Ok(v)
    }
}
impl Eval for ast::Ident<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Look the identifier up in the scopes and clone the bound value,
        // running the read-time check against the engine.
        let span = self.span();
        Ok(vm
            .scopes
            .get(&self)
            .at(span)?
            .read_checked((&mut vm.engine, span))
            .clone())
    }
}

// The literal nodes below simply produce their corresponding value.

impl Eval for ast::None<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::None)
    }
}

impl Eval for ast::Auto<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::Auto)
    }
}

impl Eval for ast::Bool<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::Bool(self.get()))
    }
}

impl Eval for ast::Int<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::Int(self.get()))
    }
}

impl Eval for ast::Float<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::Float(self.get()))
    }
}

impl Eval for ast::Numeric<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::numeric(self.get()))
    }
}

impl Eval for ast::Str<'_> {
    type Output = Value;

    fn eval(self, _: &mut Vm) -> SourceResult<Self::Output> {
        Ok(Value::Str(self.get().into()))
    }
}
impl Eval for ast::Array<'_> {
    type Output = Array;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let items = self.items();

        // Preallocate based on the item iterator's lower size bound.
        let mut vec = EcoVec::with_capacity(items.size_hint().0);
        for item in items {
            match item {
                ast::ArrayItem::Pos(expr) => vec.push(expr.eval(vm)?),
                // Spreading `none` is a no-op; spreading an array splices
                // its elements; anything else is an error.
                ast::ArrayItem::Spread(spread) => match spread.expr().eval(vm)? {
                    Value::None => {}
                    Value::Array(array) => vec.extend(array.into_iter()),
                    v => bail!(spread.span(), "cannot spread {} into array", v.ty()),
                },
            }
        }

        Ok(vec.into())
    }
}

impl Eval for ast::Dict<'_> {
    type Output = Dict;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let mut map = indexmap::IndexMap::default();
        // Invalid keyed-entry keys are collected so that all of them can be
        // reported together instead of stopping at the first.
        let mut invalid_keys = eco_vec![];

        for item in self.items() {
            match item {
                ast::DictItem::Named(named) => {
                    map.insert(named.name().get().clone().into(), named.expr().eval(vm)?);
                }
                ast::DictItem::Keyed(keyed) => {
                    let raw_key = keyed.key();
                    let key = raw_key.eval(vm)?;
                    // On a failed cast, remember the errors and insert a
                    // placeholder key so evaluation can continue.
                    let key =
                        key.cast::<Str>().at(raw_key.span()).unwrap_or_else(|errors| {
                            invalid_keys.extend(errors);
                            Str::default()
                        });
                    map.insert(key, keyed.expr().eval(vm)?);
                }
                ast::DictItem::Spread(spread) => match spread.expr().eval(vm)? {
                    Value::None => {}
                    Value::Dict(dict) => map.extend(dict.into_iter()),
                    v => bail!(spread.span(), "cannot spread {} into dictionary", v.ty()),
                },
            }
        }

        if !invalid_keys.is_empty() {
            return Err(invalid_keys);
        }

        Ok(map.into())
    }
}
impl Eval for ast::CodeBlock<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // A code block introduces its own scope for bindings.
        vm.scopes.enter();
        let output = self.body().eval(vm)?;
        vm.scopes.exit();
        Ok(output)
    }
}

impl Eval for ast::ContentBlock<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Like a code block, a content block gets its own scope.
        vm.scopes.enter();
        let content = self.body().eval(vm)?;
        vm.scopes.exit();
        Ok(content)
    }
}

impl Eval for ast::Parenthesized<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // Parentheses are purely syntactic; evaluate the inner expression.
        self.expr().eval(vm)
    }
}
impl Eval for ast::FieldAccess<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let value = self.target().eval(vm)?;
        let field = self.field();
        let field_span = field.span();

        // Try a regular field lookup first; keep the error around in case
        // the get-rule fallback below also fails.
        let err = match value.field(&field, (&mut vm.engine, field_span)).at(field_span) {
            Ok(value) => return Ok(value),
            Err(err) => err,
        };

        // Check whether this is a get rule field access.
        if let Value::Func(func) = &value
            && let Some(element) = func.to_element()
            && let Some(id) = element.field_id(&field)
            && let styles = vm.context.styles().at(field.span())
            && let Ok(value) = element
                .field_from_styles(id, styles.as_ref().map(|&s| s).unwrap_or_default())
        {
            // Only validate the context once we know that this is indeed
            // a field from the style chain.
            let _ = styles?;
            return Ok(value);
        }

        Err(err)
    }
}

impl Eval for ast::Contextual<'_> {
    type Output = Content;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let body = self.body();

        // Collect captured variables.
        let captured = {
            let mut visitor = CapturesVisitor::new(Some(&vm.scopes), Capturer::Context);
            visitor.visit(body.to_untyped());
            visitor.finish()
        };

        // Define the closure.
        let closure = Closure {
            node: ClosureNode::Context(self.body().to_untyped().clone()),
            defaults: vec![],
            captured,
            num_pos_params: 0,
        };

        // Wrap the closure into a `context` element carrying the body's span.
        let func = Func::from(closure).spanned(body.span());
        Ok(ContextElem::new(func).pack().spanned(body.span()))
    }
}

/// Emits a warning when we discard content while returning unconditionally.
fn warn_for_discarded_content(engine: &mut Engine, event: &FlowEvent, joined: &Value) {
    // Only relevant for a `return` that carries a value while content has
    // already been joined.
    let FlowEvent::Return(span, Some(_), false) = event else { return };
    let Value::Content(tree) = &joined else { return };

    let selector = singleton!(
        Selector,
        Selector::Or(eco_vec![State::select_any(), Counter::select_any()])
    );

    let mut warning = warning!(
        *span,
        "this return unconditionally discards the content before it";
        hint: "try omitting the `return` to automatically join all values";
    );

    // Extra hint when the discarded content contains state/counter updates,
    // which silently lose their effect.
    if tree.query_first_naive(selector).is_some() {
        warning.hint("state/counter updates are content that must end up in the document to have an effect");
    }

    engine.sink.warn(warning);
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/access.rs | crates/typst-eval/src/access.rs | use ecow::eco_format;
use typst_library::diag::{At, Hint, SourceResult, Trace, Tracepoint, bail};
use typst_library::foundations::{Dict, Value};
use typst_syntax::ast::{self, AstNode};
use crate::{Eval, Vm, call_method_access, is_accessor_method};
/// Access an expression mutably.
pub(crate) trait Access {
    /// Access the expression's evaluated value mutably.
    fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value>;
}

impl Access for ast::Expr<'_> {
    fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
        // Only identifiers, parenthesized expressions, field accesses, and
        // (accessor-method) function calls can yield a mutable place.
        match self {
            Self::Ident(v) => v.access(vm),
            Self::Parenthesized(v) => v.access(vm),
            Self::FieldAccess(v) => v.access(vm),
            Self::FuncCall(v) => v.access(vm),
            _ => {
                // Evaluate anyway so that errors in the expression itself
                // surface before the mutation error.
                let _ = self.eval(vm)?;
                bail!(self.span(), "cannot mutate a temporary value");
            }
        }
    }
}
impl Access for ast::Ident<'_> {
    fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
        let span = self.span();
        // When this span is under inspection, record the value before it is
        // potentially mutated.
        if vm.inspected == Some(span)
            && let Ok(binding) = vm.scopes.get(&self)
        {
            vm.trace(binding.read().clone());
        }
        vm.scopes
            .get_mut(&self)
            .and_then(|b| b.write().map_err(Into::into))
            .at(span)
    }
}

impl Access for ast::Parenthesized<'_> {
    fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
        self.expr().access(vm)
    }
}

impl Access for ast::FieldAccess<'_> {
    fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
        // Only dictionary entries are mutable places (see `access_dict`).
        access_dict(vm, self)?.at_mut(self.field().get()).at(self.span())
    }
}

impl Access for ast::FuncCall<'_> {
    fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
        // A call is accessible only when it is an accessor method
        // (`first`, `last`, `at`) on an accessible target.
        if let ast::Expr::FieldAccess(access) = self.callee() {
            let method = access.field();
            if is_accessor_method(&method) {
                let span = self.span();
                let world = vm.world();
                let args = self.args().eval(vm)?.spanned(span);
                let value = access.target().access(vm)?;
                let result = call_method_access(value, &method, args, span);
                let point = || Tracepoint::Call(Some(method.get().clone()));
                return result.trace(world, point, span);
            }
        }

        // Evaluate anyway so that errors in the call surface first.
        let _ = self.eval(vm)?;
        bail!(self.span(), "cannot mutate a temporary value");
    }
}
/// Resolves the target of a field access to a mutable dictionary, producing
/// a tailored error message for non-dictionary targets.
pub(crate) fn access_dict<'a>(
    vm: &'a mut Vm,
    access: ast::FieldAccess,
) -> SourceResult<&'a mut Dict> {
    match access.target().access(vm)? {
        Value::Dict(dict) => Ok(dict),
        value => {
            let ty = value.ty();
            let span = access.target().span();
            if matches!(
                value, // those types have their own field getters
                Value::Symbol(_) | Value::Content(_) | Value::Module(_) | Value::Func(_)
            ) {
                bail!(span, "cannot mutate fields on {ty}");
            } else if typst_library::foundations::fields_on(ty).is_empty() {
                bail!(span, "{ty} does not have accessible fields");
            } else {
                // type supports static fields, which don't yet have
                // setters
                Err(eco_format!("fields on {ty} are not yet mutable"))
                    .hint(eco_format!(
                        "try creating a new {ty} with the updated field value instead"
                    ))
                    .at(span)
            }
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/binding.rs | crates/typst-eval/src/binding.rs | use ecow::eco_format;
use rustc_hash::FxHashSet;
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error};
use typst_library::foundations::{Array, Dict, Value};
use typst_syntax::ast::{self, AstNode};
use crate::{Access, Eval, Vm};
impl Eval for ast::LetBinding<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // A binding without an initializer defaults to `none`.
        let value = match self.init() {
            Some(expr) => expr.eval(vm)?,
            None => Value::None,
        };

        // If a flow event (e.g. a `return`) occurred while evaluating the
        // initializer, skip defining anything.
        if vm.flow.is_some() {
            return Ok(Value::None);
        }

        match self.kind() {
            ast::LetBindingKind::Normal(pattern) => destructure(vm, pattern, value)?,
            ast::LetBindingKind::Closure(ident) => vm.define(ident, value),
        }

        Ok(Value::None)
    }
}

impl Eval for ast::DestructAssignment<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let value = self.value().eval(vm)?;
        // Assignment writes through mutable access instead of defining
        // new bindings.
        destructure_impl(vm, self.pattern(), value, &mut |vm, expr, value| {
            let location = expr.access(vm)?;
            *location = value;
            Ok(())
        })?;
        Ok(Value::None)
    }
}

/// Destructures a value into a pattern.
pub(crate) fn destructure(
    vm: &mut Vm,
    pattern: ast::Pattern,
    value: Value,
) -> SourceResult<()> {
    // Each matched sub-value is defined as a fresh binding; only plain
    // identifiers are assignable in a `let` pattern.
    destructure_impl(vm, pattern, value, &mut |vm, expr, value| match expr {
        ast::Expr::Ident(ident) => {
            vm.define(ident, value);
            Ok(())
        }
        _ => bail!(expr.span(), "cannot assign to this expression"),
    })
}

/// Destruct the given value into the pattern and apply the function to each binding.
fn destructure_impl<F>(
    vm: &mut Vm,
    pattern: ast::Pattern,
    value: Value,
    f: &mut F,
) -> SourceResult<()>
where
    F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>,
{
    match pattern {
        ast::Pattern::Normal(expr) => f(vm, expr, value)?,
        // A `_` placeholder discards the value.
        ast::Pattern::Placeholder(_) => {}
        ast::Pattern::Parenthesized(parenthesized) => {
            destructure_impl(vm, parenthesized.pattern(), value, f)?
        }
        // Only arrays and dictionaries can be destructured element-wise.
        ast::Pattern::Destructuring(destruct) => match value {
            Value::Array(value) => destructure_array(vm, destruct, value, f)?,
            Value::Dict(value) => destructure_dict(vm, destruct, value, f)?,
            _ => bail!(pattern.span(), "cannot destructure {}", value.ty()),
        },
    }
    Ok(())
}
/// Destructures an array into the items of a destructuring pattern.
fn destructure_array<F>(
    vm: &mut Vm,
    destruct: ast::Destructuring,
    value: Array,
    f: &mut F,
) -> SourceResult<()>
where
    F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>,
{
    let len = value.as_slice().len();
    // Index into the array as patterns are consumed.
    let mut i = 0;

    for p in destruct.items() {
        match p {
            ast::DestructuringItem::Pattern(pattern) => {
                let Ok(v) = value.at(i as i64, None) else {
                    bail!(wrong_number_of_elements(destruct, len));
                };
                destructure_impl(vm, pattern, v, f)?;
                i += 1;
            }
            ast::DestructuringItem::Spread(spread) => {
                // The spread swallows all elements not claimed by the other
                // items; `items().count()` includes the spread itself, hence
                // the `1 +`.
                let sink_size = (1 + len).checked_sub(destruct.items().count());
                let sink = sink_size.and_then(|s| value.as_slice().get(i..i + s));
                let (Some(sink_size), Some(sink)) = (sink_size, sink) else {
                    bail!(wrong_number_of_elements(destruct, len));
                };
                if let Some(expr) = spread.sink_expr() {
                    f(vm, expr, Value::Array(sink.into()))?;
                }
                i += sink_size;
            }
            ast::DestructuringItem::Named(named) => {
                bail!(named.span(), "cannot destructure named pattern from an array")
            }
        }
    }

    // Leftover elements that no pattern consumed are an error.
    if i < len {
        bail!(wrong_number_of_elements(destruct, len));
    }

    Ok(())
}

/// Destructures a dictionary into the items of a destructuring pattern.
fn destructure_dict<F>(
    vm: &mut Vm,
    destruct: ast::Destructuring,
    dict: Dict,
    f: &mut F,
) -> SourceResult<()>
where
    F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>,
{
    let mut sink = None;
    // Keys consumed by explicit items; the spread sink receives the rest.
    let mut used = FxHashSet::default();

    for p in destruct.items() {
        match p {
            // Shorthand for a direct identifier.
            ast::DestructuringItem::Pattern(ast::Pattern::Normal(ast::Expr::Ident(
                ident,
            ))) => {
                let v = dict.get(&ident).at(ident.span())?;
                f(vm, ast::Expr::Ident(ident), v.clone())?;
                used.insert(ident.get().clone());
            }
            ast::DestructuringItem::Named(named) => {
                let name = named.name();
                let v = dict.get(&name).at(name.span())?;
                destructure_impl(vm, named.pattern(), v.clone(), f)?;
                used.insert(name.get().clone());
            }
            ast::DestructuringItem::Spread(spread) => sink = spread.sink_expr(),
            ast::DestructuringItem::Pattern(expr) => {
                bail!(expr.span(), "cannot destructure unnamed pattern from dictionary");
            }
        }
    }

    // Bind all remaining (unused) entries to the spread sink, if any.
    if let Some(expr) = sink {
        let mut sink = Dict::new();
        for (key, value) in dict {
            if !used.contains(key.as_str()) {
                sink.insert(key, value);
            }
        }
        f(vm, expr, Value::Dict(sink))?;
    }

    Ok(())
}
/// The error message when the number of elements of the destructuring and the
/// array is mismatched.
#[cold]
fn wrong_number_of_elements(
    destruct: ast::Destructuring,
    len: usize,
) -> SourceDiagnostic {
    // Count positional patterns only: named items never match array elements
    // and a spread can absorb any number of them.
    let mut count = 0;
    let mut spread = false;

    for p in destruct.items() {
        match p {
            ast::DestructuringItem::Pattern(_) => count += 1,
            ast::DestructuringItem::Spread(_) => spread = true,
            ast::DestructuringItem::Named(_) => {}
        }
    }

    let quantifier = if len > count { "too many" } else { "not enough" };
    // With a spread, the pattern only sets a lower bound on the length.
    let expected = match (spread, count) {
        (true, 1) => "at least 1 element".into(),
        (true, c) => eco_format!("at least {c} elements"),
        (false, 0) => "an empty array".into(),
        (false, 1) => "a single element".into(),
        (false, c) => eco_format!("{c} elements",),
    };

    error!(
        destruct.span(), "{quantifier} elements to destructure";
        hint: "the provided array has a length of {len}, \
               but the pattern expects {expected}";
    )
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/rules.rs | crates/typst-eval/src/rules.rs | use typst_library::diag::{At, SourceResult, warning};
use typst_library::foundations::{
Element, Func, Recipe, Selector, ShowableSelector, Styles, Transformation,
};
use typst_library::layout::{BlockElem, PageElem};
use typst_library::model::ParElem;
use typst_syntax::ast::{self, AstNode};
use crate::{Eval, Vm, hint_if_shadowed_std};
impl Eval for ast::SetRule<'_> {
    type Output = Styles;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // A conditional set rule (`set ... if cond`) whose condition is false
        // produces empty styles.
        if let Some(condition) = self.condition()
            && !condition.eval(vm)?.cast::<bool>().at(condition.span())?
        {
            return Ok(Styles::new());
        }

        // The target must be an element function; the error is enriched with
        // a hint when a std name was shadowed.
        let target_expr = self.target();
        let target = target_expr
            .eval(vm)?
            .cast::<Func>()
            .map_err(|err| hint_if_shadowed_std(vm, &target_expr, err))
            .and_then(|func| {
                func.to_element().ok_or_else(|| {
                    "only element functions can be used in set rules".into()
                })
            })
            .at(target_expr.span())?;

        let args = self.args().eval(vm)?.spanned(self.span());
        Ok(target.set(&mut vm.engine, args)?.spanned(self.span()).liftable())
    }
}

impl Eval for ast::ShowRule<'_> {
    type Output = Recipe;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        // The selector before the colon is optional (`show: rule` applies
        // everywhere).
        let selector = self
            .selector()
            .map(|sel| {
                sel.eval(vm)?
                    .cast::<ShowableSelector>()
                    .map_err(|err| hint_if_shadowed_std(vm, &sel, err))
                    .at(sel.span())
            })
            .transpose()?
            .map(|selector| selector.0);

        let transform = self.transform();
        let transform = match transform {
            // `show sel: set ...` turns the set rule into a style transform.
            ast::Expr::SetRule(set) => Transformation::Style(set.eval(vm)?),
            expr => expr.eval(vm)?.cast::<Transformation>().at(transform.span())?,
        };

        let recipe = Recipe::new(selector, transform, self.span());
        // Emit warnings for known show-rule pitfalls.
        check_show_page_rule(vm, &recipe);
        check_show_par_set_block(vm, &recipe);

        Ok(recipe)
    }
}
/// Warns that `show page` rules currently have no effect.
fn check_show_page_rule(vm: &mut Vm, recipe: &Recipe) {
    if let Some(Selector::Elem(elem, _)) = recipe.selector()
        && *elem == Element::of::<PageElem>()
    {
        vm.engine.sink.warn(warning!(
            recipe.span(),
            "`show page` is not supported and has no effect";
            hint: "customize pages with `set page(..)` instead";
        ));
    }
}

/// Migration hint for `show par: set block(spacing: ..)`.
fn check_show_par_set_block(vm: &mut Vm, recipe: &Recipe) {
    // Only fires when the recipe selects paragraphs and its transformation
    // sets block above/below spacing.
    if let Some(Selector::Elem(elem, _)) = recipe.selector()
        && *elem == Element::of::<ParElem>()
        && let Transformation::Style(styles) = recipe.transform()
        && (styles.has(BlockElem::above) || styles.has(BlockElem::below))
    {
        vm.engine.sink.warn(warning!(
            recipe.span(),
            "`show par: set block(spacing: ..)` has no effect anymore";
            hint: "write `set par(spacing: ..)` instead";
            hint: "this is specific to paragraphs as they are not considered blocks \
                   anymore";
        ))
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/import.rs | crates/typst-eval/src/import.rs | use comemo::TrackedMut;
use ecow::{EcoString, eco_format, eco_vec};
use typst_library::World;
use typst_library::diag::{
At, FileError, SourceResult, Trace, Tracepoint, bail, error, warning,
};
use typst_library::engine::Engine;
use typst_library::foundations::{Binding, Content, Module, Value};
use typst_syntax::ast::{self, AstNode, BareImportError};
use typst_syntax::package::{PackageManifest, PackageSpec};
use typst_syntax::{FileId, Span, VirtualPath};
use crate::{Eval, Vm, eval};
impl Eval for ast::ModuleImport<'_> {
    type Output = Value;

    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let source_expr = self.source();
        let source_span = source_expr.span();
        let mut source = source_expr.eval(vm)?;
        // Remember whether the source was a path string; bare imports of
        // dynamic strings are rejected further down.
        let mut is_str = false;

        // Validate the import source: scoped functions, types, modules, and
        // path strings (resolved to modules here) are allowed.
        match &source {
            Value::Func(func) => {
                if func.scope().is_none() {
                    bail!(source_span, "cannot import from user-defined functions");
                }
            }
            Value::Type(_) => {}
            Value::Module(_) => {}
            Value::Str(path) => {
                source = Value::Module(import(&mut vm.engine, path, source_span)?);
                is_str = true;
            }
            v => {
                bail!(
                    source_span,
                    "expected path, module, function, or type, found {}",
                    v.ty(),
                )
            }
        }

        // If there is a rename, import the source itself under that name.
        let new_name = self.new_name();
        if let Some(new_name) = new_name {
            if let ast::Expr::Ident(ident) = self.source()
                && ident.as_str() == new_name.as_str()
            {
                // Warn on `import x as x`
                vm.engine.sink.warn(warning!(
                    new_name.span(),
                    "unnecessary import rename to same name",
                ));
            }

            // Define renamed module on the scope.
            vm.define(new_name, source.clone());
        }

        // Safe: every accepted source variant above has a scope.
        let scope = source.scope().unwrap();
        match self.imports() {
            None => {
                // No item list: bind the source under its bare name, unless
                // it was already bound via `as` above.
                if new_name.is_none() {
                    match self.bare_name() {
                        // Bare dynamic string imports are not allowed.
                        Ok(name)
                            if !is_str || matches!(source_expr, ast::Expr::Str(_)) =>
                        {
                            if matches!(source_expr, ast::Expr::Ident(_)) {
                                vm.engine.sink.warn(warning!(
                                    source_expr.span(),
                                    "this import has no effect",
                                ));
                            }
                            vm.scopes.top.bind(name, Binding::new(source, source_span));
                        }
                        Ok(_) | Err(BareImportError::Dynamic) => bail!(
                            source_span, "dynamic import requires an explicit name";
                            hint: "you can name the import with `as`";
                        ),
                        Err(BareImportError::PathInvalid) => bail!(
                            source_span, "module name would not be a valid identifier";
                            hint: "you can rename the import with `as`";
                        ),
                        // Bad package spec would have failed the import already.
                        Err(BareImportError::PackageInvalid) => unreachable!(),
                    }
                }
            }
            Some(ast::Imports::Wildcard) => {
                // `import ...: *` copies every binding from the source scope.
                for (var, binding) in scope.iter() {
                    vm.scopes.top.bind(var.clone(), binding.clone());
                }
            }
            Some(ast::Imports::Items(items)) => {
                // Resolve each (possibly nested) item path, accumulating all
                // errors so they can be reported together.
                let mut errors = eco_vec![];
                for item in items.iter() {
                    let mut path = item.path().iter().peekable();
                    let mut scope = scope;

                    while let Some(component) = &path.next() {
                        let Some(binding) = scope.get(component) else {
                            errors.push(error!(component.span(), "unresolved import"));
                            break;
                        };

                        if path.peek().is_some() {
                            // Nested import, as this is not the last component.
                            // This must be a submodule.
                            let value = binding.read();
                            let Some(submodule) = value.scope() else {
                                let error = if matches!(value, Value::Func(function) if function.scope().is_none())
                                {
                                    error!(
                                        component.span(),
                                        "cannot import from user-defined functions",
                                    )
                                } else if !matches!(
                                    value,
                                    Value::Func(_) | Value::Module(_) | Value::Type(_)
                                ) {
                                    error!(
                                        component.span(),
                                        "expected module, function, or type, found {}",
                                        value.ty(),
                                    )
                                } else {
                                    panic!("unexpected nested import failure")
                                };
                                errors.push(error);
                                break;
                            };

                            // Walk into the submodule.
                            scope = submodule;
                        } else {
                            // Now that we have the scope of the innermost submodule
                            // in the import path, we may extract the desired item from
                            // it.

                            // Warn on `import ...: x as x`
                            if let ast::ImportItem::Renamed(renamed_item) = &item
                                && renamed_item.original_name().as_str()
                                    == renamed_item.new_name().as_str()
                            {
                                vm.engine.sink.warn(warning!(
                                    renamed_item.new_name().span(),
                                    "unnecessary import rename to same name",
                                ));
                            }

                            vm.bind(item.bound_name(), binding.clone());
                        }
                    }
                }

                if !errors.is_empty() {
                    return Err(errors);
                }
            }
        }

        Ok(Value::None)
    }
}
impl Eval for ast::ModuleInclude<'_> {
type Output = Content;
fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
let span = self.source().span();
let source = self.source().eval(vm)?;
let module = match source {
Value::Str(path) => import(&mut vm.engine, &path, span)?,
Value::Module(module) => module,
v => bail!(span, "expected path or module, found {}", v.ty()),
};
Ok(module.content())
}
}
/// Process an import of a package or file relative to the current location.
pub fn import(engine: &mut Engine, from: &str, span: Span) -> SourceResult<Module> {
    // A leading `@` marks a package spec; everything else is a file path.
    if !from.starts_with('@') {
        let id = span.resolve_path(from).at(span)?;
        return import_file(engine, id, span);
    }
    let spec = from.parse::<PackageSpec>().at(span)?;
    import_package(engine, spec, span)
}
/// Import a file from a path. The path is resolved relative to the given
/// `span`.
fn import_file(engine: &mut Engine, id: FileId, span: Span) -> SourceResult<Module> {
    // Load the source file, attaching any file error to the import's span.
    let source = engine.world.source(id).at(span)?;

    // Prevent cyclic importing: if the file is already on the evaluation
    // route, importing it again would recurse forever.
    if engine.route.contains(source.id()) {
        bail!(span, "cyclic import");
    }

    // Evaluate the file, extending the error trace with an import tracepoint
    // so that failures inside the imported file point back to this import.
    let point = || Tracepoint::Import;
    eval(
        engine.routines,
        engine.world,
        engine.traced,
        TrackedMut::reborrow_mut(&mut engine.sink),
        engine.route.track(),
        &source,
    )
    .trace(engine.world, point, span)
}
/// Import an external package.
fn import_package(
    engine: &mut Engine,
    spec: PackageSpec,
    span: Span,
) -> SourceResult<Module> {
    // Resolve the package's name and entrypoint file, then evaluate the
    // entrypoint and label the resulting module with the package name.
    let (name, id) = resolve_package(engine, spec, span)?;
    let module = import_file(engine, id, span)?;
    Ok(module.with_name(name))
}
/// Resolve the name and entrypoint of a package.
fn resolve_package(
    engine: &mut Engine,
    spec: PackageSpec,
    span: Span,
) -> SourceResult<(EcoString, FileId)> {
    // Evaluate the manifest. It is expected at the package root under the
    // fixed name `typst.toml`.
    let manifest_id = FileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
    let bytes = engine.world.file(manifest_id).at(span)?;
    let string = bytes.as_str().map_err(FileError::from).at(span)?;
    let manifest: PackageManifest = toml::from_str(string)
        .map_err(|err| eco_format!("package manifest is malformed ({})", err.message()))
        .at(span)?;
    // Ensure the manifest is consistent with the requested package spec.
    manifest.validate(&spec).at(span)?;
    // Evaluate the entry point. Its path is resolved relative to the
    // manifest's location at the package root.
    Ok((manifest.package.name, manifest_id.join(&manifest.package.entrypoint)))
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-eval/src/ops.rs | crates/typst-eval/src/ops.rs | use typst_library::diag::{At, HintedStrResult, SourceResult};
use typst_library::foundations::{IntoValue, Value, ops};
use typst_syntax::ast::{self, AstNode};
use crate::{Access, Eval, Vm, access_dict};
impl Eval for ast::Unary<'_> {
    type Output = Value;

    /// Evaluates the operand first, then applies the unary operator to it.
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        let operand = self.expr().eval(vm)?;
        let applied = match self.op() {
            ast::UnOp::Pos => ops::pos(operand),
            ast::UnOp::Neg => ops::neg(operand),
            ast::UnOp::Not => ops::not(operand),
        };
        // Attach the whole expression's span to any operator error.
        applied.at(self.span())
    }
}
impl Eval for ast::Binary<'_> {
    type Output = Value;

    /// Evaluates the binary expression by dispatching on the operator to the
    /// corresponding implementation in `ops` (or to assignment handling).
    fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
        match self.op() {
            // Arithmetic and logic. `apply_binary` also implements
            // short-circuiting for `and`/`or`.
            ast::BinOp::Add => apply_binary(self, vm, ops::add),
            ast::BinOp::Sub => apply_binary(self, vm, ops::sub),
            ast::BinOp::Mul => apply_binary(self, vm, ops::mul),
            ast::BinOp::Div => apply_binary(self, vm, ops::div),
            ast::BinOp::And => apply_binary(self, vm, ops::and),
            ast::BinOp::Or => apply_binary(self, vm, ops::or),
            // Comparisons.
            ast::BinOp::Eq => apply_binary(self, vm, ops::eq),
            ast::BinOp::Neq => apply_binary(self, vm, ops::neq),
            ast::BinOp::Lt => apply_binary(self, vm, ops::lt),
            ast::BinOp::Leq => apply_binary(self, vm, ops::leq),
            ast::BinOp::Gt => apply_binary(self, vm, ops::gt),
            ast::BinOp::Geq => apply_binary(self, vm, ops::geq),
            // Containment.
            ast::BinOp::In => apply_binary(self, vm, ops::in_),
            ast::BinOp::NotIn => apply_binary(self, vm, ops::not_in),
            // Assignments mutate a location. Plain `=` simply replaces the
            // old value with the right-hand side.
            ast::BinOp::Assign => apply_assignment(self, vm, |_, b| Ok(b)),
            ast::BinOp::AddAssign => apply_assignment(self, vm, ops::add),
            ast::BinOp::SubAssign => apply_assignment(self, vm, ops::sub),
            ast::BinOp::MulAssign => apply_assignment(self, vm, ops::mul),
            ast::BinOp::DivAssign => apply_assignment(self, vm, ops::div),
        }
    }
}
/// Apply a basic binary operation.
fn apply_binary(
    binary: ast::Binary,
    vm: &mut Vm,
    op: fn(Value, Value) -> HintedStrResult<Value>,
) -> SourceResult<Value> {
    let lhs = binary.lhs().eval(vm)?;

    // Short-circuit boolean operations: `false and …` and `true or …` return
    // the left operand without evaluating the right-hand side at all.
    if (binary.op() == ast::BinOp::And && lhs == false.into_value())
        || (binary.op() == ast::BinOp::Or && lhs == true.into_value())
    {
        return Ok(lhs);
    }

    let rhs = binary.rhs().eval(vm)?;
    // Attach the whole binary expression's span to any operator error.
    op(lhs, rhs).at(binary.span())
}
/// Apply an assignment operation.
fn apply_assignment(
    binary: ast::Binary,
    vm: &mut Vm,
    op: fn(Value, Value) -> HintedStrResult<Value>,
) -> SourceResult<Value> {
    // The right-hand side is evaluated before the target is resolved.
    let rhs = binary.rhs().eval(vm)?;
    let lhs = binary.lhs();

    // An assignment to a dictionary field is different from a normal access
    // since it can create the field instead of just modifying it.
    if binary.op() == ast::BinOp::Assign
        && let ast::Expr::FieldAccess(access) = lhs
    {
        let dict = access_dict(vm, access)?;
        dict.insert(access.field().get().clone().into(), rhs);
        return Ok(Value::None);
    }

    // For all other assignments: resolve the target location, temporarily
    // move its value out, combine it with the right-hand side via `op`, and
    // write the result back.
    let location = binary.lhs().access(vm)?;
    let lhs = std::mem::take(&mut *location);
    *location = op(lhs, rhs).at(binary.span())?;
    Ok(Value::None)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/build.rs | crates/typst-utils/build.rs | use std::process::Command;
fn main() {
    // Rebuild whenever the override variables change so that stale values
    // don't stick around in the build cache.
    println!("cargo:rerun-if-env-changed=TYPST_VERSION");
    println!("cargo:rerun-if-env-changed=TYPST_COMMIT_SHA");

    // Fall back to the Cargo package version if no override is provided.
    if option_env!("TYPST_VERSION").is_none() {
        println!("cargo:rustc-env=TYPST_VERSION={}", env!("CARGO_PKG_VERSION"));
    }

    // Fall back to the current git HEAD if no override is provided and a git
    // repository is available. A failing or missing `git` is silently
    // ignored, leaving the commit hash unset.
    if option_env!("TYPST_COMMIT_SHA").is_none()
        && let Some(sha) = Command::new("git")
            .args(["rev-parse", "HEAD"])
            .output()
            .ok()
            .filter(|output| output.status.success())
            .and_then(|output| String::from_utf8(output.stdout).ok())
    {
        // `git rev-parse` terminates its output with a newline. Trim it so
        // that the `rustc-env` directive carries only the bare hash instead
        // of relying on cargo's line-based parsing to drop the stray newline.
        println!("cargo:rustc-env=TYPST_COMMIT_SHA={}", sha.trim());
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/bitset.rs | crates/typst-utils/src/bitset.rs | use std::fmt::{self, Debug, Formatter};
use thin_vec::ThinVec;
/// The number of bits per chunk.
const BITS: usize = usize::BITS as usize;

/// Stores a set of numbers which are expected to be rather small.
///
/// Inserting a very small value is cheap while inserting a large one may be
/// very expensive.
///
/// Unless you're managing small numbers yourself, you should likely prefer
/// `SmallBitSet`, which has a bit larger memory size, but does not allocate
/// for small numbers.
#[derive(Clone, PartialEq, Hash)]
pub struct BitSet(ThinVec<usize>);

impl BitSet {
    /// Creates a new empty bit set.
    pub fn new() -> Self {
        Self(ThinVec::new())
    }

    /// Inserts a number into the set.
    ///
    /// Grows the backing storage as needed, so inserting a large value
    /// allocates proportionally to that value.
    pub fn insert(&mut self, value: usize) {
        let (chunk, bit) = (value / BITS, value % BITS);
        if self.0.len() <= chunk {
            self.0.resize(chunk + 1, 0);
        }
        self.0[chunk] |= 1 << bit;
    }

    /// Whether a number is present in the set.
    pub fn contains(&self, value: usize) -> bool {
        let (chunk, bit) = (value / BITS, value % BITS);
        self.0.get(chunk).is_some_and(|bits| bits & (1 << bit) != 0)
    }
}
impl Default for BitSet {
fn default() -> Self {
Self::new()
}
}
impl Debug for BitSet {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // Render as a list of the contained numbers, probing every bit that
        // the allocated chunks can represent.
        let limit = self.0.len() * BITS;
        f.debug_list()
            .entries((0..limit).filter(|&v| self.contains(v)))
            .finish()
    }
}
/// Efficiently stores a set of numbers which are expected to be very small.
/// Values `< 32/64` (depending on the architecture) are stored inline, while
/// values larger than that will lead to an allocation.
#[derive(Clone, PartialEq, Hash)]
pub struct SmallBitSet {
    /// Used to store values smaller than `BITS`.
    low: usize,
    /// Used to store values of at least `BITS`, shifted down by `BITS`.
    hi: BitSet,
}

impl SmallBitSet {
    /// Creates a new empty bit set.
    pub fn new() -> Self {
        Self { low: 0, hi: BitSet::new() }
    }

    /// Inserts a number into the set.
    pub fn insert(&mut self, value: usize) {
        // Values that fit into the inline word are stored there; everything
        // else is shifted down and delegated to the heap-backed set.
        match value.checked_sub(BITS) {
            None => self.low |= 1 << value,
            Some(rest) => self.hi.insert(rest),
        }
    }

    /// Whether a number is present in the set.
    pub fn contains(&self, value: usize) -> bool {
        match value.checked_sub(BITS) {
            None => self.low & (1 << value) != 0,
            Some(rest) => self.hi.contains(rest),
        }
    }
}
impl Default for SmallBitSet {
fn default() -> Self {
Self::new()
}
}
impl Debug for SmallBitSet {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // One inline chunk plus however many chunks the overflow set holds.
        let limit = (1 + self.hi.0.len()) * BITS;
        f.debug_list()
            .entries((0..limit).filter(|&v| self.contains(v)))
            .finish()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Checks membership for values stored inline, values in the overflow
    /// set, values around chunk boundaries, and the `Debug` rendering.
    #[test]
    fn test_bitset() {
        let mut set = SmallBitSet::new();
        assert!(!set.contains(0));
        assert!(!set.contains(5));
        set.insert(0);
        set.insert(1);
        set.insert(5);
        set.insert(64);
        set.insert(105);
        set.insert(208);
        assert!(set.contains(0));
        assert!(set.contains(1));
        assert!(!set.contains(2));
        assert!(set.contains(5));
        assert!(!set.contains(63));
        assert!(set.contains(64));
        assert!(!set.contains(65));
        assert!(!set.contains(104));
        assert!(set.contains(105));
        assert!(!set.contains(106));
        assert!(set.contains(208));
        assert!(!set.contains(209));
        assert_eq!(format!("{set:?}"), "[0, 1, 5, 64, 105, 208]");
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/protected.rs | crates/typst-utils/src/protected.rs | /// Wraps a type, requiring justification on access.
///
/// The wrapper is transparent on the type-system level: its only purpose is
/// to force call sites to spell out why touching the value is okay.
#[derive(Debug, Copy, Clone)]
pub struct Protected<T>(T);

impl<T> Protected<T> {
    /// Wrap a value of type `T`.
    pub fn new(inner: T) -> Self {
        Protected(inner)
    }

    /// Rewrap a value extracted via [`into_raw`](Self::into_raw).
    ///
    /// This is distinct from [`new`](Self::new) as it's only meant to be used
    /// for rewrapping and not for initial wrapping.
    pub fn from_raw(inner: T) -> Self {
        Protected(inner)
    }

    /// Extract the inner value without justification. The result may only be
    /// used with [`from_raw`](Self::from_raw).
    pub fn into_raw(self) -> T {
        let Protected(inner) = self;
        inner
    }

    /// Access the underlying value, providing justification why it's okay.
    pub fn access(&self, _justification: &'static str) -> &T {
        &self.0
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/lib.rs | crates/typst-utils/src/lib.rs | //! Utilities for Typst.
pub mod fat;
#[macro_use]
mod macros;
mod bitset;
mod deferred;
mod duration;
mod hash;
mod listset;
mod pico;
mod protected;
mod round;
mod scalar;
#[path = "version.rs"]
mod version_;
pub use self::bitset::{BitSet, SmallBitSet};
pub use self::deferred::Deferred;
pub use self::duration::format_duration;
pub use self::hash::{HashLock, LazyHash, ManuallyHash, hash128};
pub use self::listset::ListSet;
pub use self::pico::{PicoStr, ResolvedPicoStr};
pub use self::protected::Protected;
pub use self::round::{round_int_with_precision, round_with_precision};
pub use self::scalar::Scalar;
pub use self::version_::{TypstVersion, display_commit, version};
#[doc(hidden)]
pub use once_cell;
use std::fmt::{Debug, Display, Formatter};
use std::hash::Hash;
use std::iter::{Chain, Flatten, Rev};
use std::num::{NonZeroU32, NonZeroUsize};
use std::ops::{Add, Deref, DerefMut, Div, Mul, Neg, Sub};
use std::sync::Arc;
use unicode_math_class::MathClass;
/// Turn a closure into a struct implementing [`Debug`].
pub fn debug<F>(f: F) -> impl Debug
where
    F: Fn(&mut Formatter) -> std::fmt::Result,
{
    /// Forwards `Debug` formatting to the wrapped closure.
    struct FmtFn<F>(F);

    impl<F: Fn(&mut Formatter) -> std::fmt::Result> Debug for FmtFn<F> {
        fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
            (self.0)(formatter)
        }
    }

    FmtFn(f)
}
/// Turn a closure into a struct implementing [`Display`].
pub fn display<F>(f: F) -> impl Display
where
    F: Fn(&mut Formatter) -> std::fmt::Result,
{
    /// Forwards `Display` formatting to the wrapped closure.
    struct FmtFn<F>(F);

    impl<F: Fn(&mut Formatter) -> std::fmt::Result> Display for FmtFn<F> {
        fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
            (self.0)(formatter)
        }
    }

    FmtFn(f)
}
/// An extra constant for [`NonZeroUsize`].
pub trait NonZeroExt {
    /// The number `1`.
    const ONE: Self;
}

impl NonZeroExt for NonZeroUsize {
    // `MIN` of a non-zero unsigned integer is exactly one.
    const ONE: Self = Self::MIN;
}

impl NonZeroExt for NonZeroU32 {
    // `MIN` of a non-zero unsigned integer is exactly one.
    const ONE: Self = Self::MIN;
}
/// Extra methods for [`Arc`].
pub trait ArcExt<T> {
    /// Takes the inner value if there is exactly one strong reference and
    /// clones it otherwise.
    fn take(self) -> T;
}

impl<T: Clone> ArcExt<T> for Arc<T> {
    fn take(self) -> T {
        // `try_unwrap` only succeeds for a unique strong reference; otherwise
        // fall back to cloning out of the shared allocation.
        Arc::try_unwrap(self).unwrap_or_else(|shared| (*shared).clone())
    }
}
/// Extra methods for [`Option`].
pub trait OptionExt<T> {
    /// Maps an `Option<T>` to `U` by applying a function to a contained value
    /// (if `Some`) or returns a default (if `None`).
    fn map_or_default<U: Default, F>(self, f: F) -> U
    where
        F: FnOnce(T) -> U;
}

impl<T> OptionExt<T> for Option<T> {
    fn map_or_default<U: Default, F>(self, f: F) -> U
    where
        F: FnOnce(T) -> U,
    {
        // Defer to the standard combinator; the default is only constructed
        // in the `None` case.
        self.map_or_else(U::default, f)
    }
}
/// Extra methods for [`[T]`](slice).
pub trait SliceExt<T> {
    /// Returns a slice with all matching elements from the start of the slice
    /// removed.
    fn trim_start_matches<F>(&self, f: F) -> &[T]
    where
        F: FnMut(&T) -> bool;

    /// Returns a slice with all matching elements from the end of the slice
    /// removed.
    fn trim_end_matches<F>(&self, f: F) -> &[T]
    where
        F: FnMut(&T) -> bool;

    /// Split a slice into consecutive runs with the same key and yield for
    /// each such run the key and the slice of elements with that key.
    fn group_by_key<K, F>(&self, f: F) -> GroupByKey<'_, T, F>
    where
        F: FnMut(&T) -> K,
        K: PartialEq;

    /// Computes two indices which split a slice into three parts.
    ///
    /// - A prefix which matches `f`
    /// - An inner portion
    /// - A suffix which matches `f` and does not overlap with the prefix
    ///
    /// If all elements match `f`, the prefix becomes `self` and the suffix
    /// will be empty.
    ///
    /// Returns the indices at which the inner portion and the suffix start.
    fn split_prefix_suffix<F>(&self, f: F) -> (usize, usize)
    where
        F: FnMut(&T) -> bool;
}

impl<T> SliceExt<T> for [T] {
    fn trim_start_matches<F>(&self, mut f: F) -> &[T]
    where
        F: FnMut(&T) -> bool,
    {
        // The first element that does not match marks the start of the
        // trimmed slice; if all match, the result is empty.
        let first_kept = self.iter().position(|x| !f(x)).unwrap_or(self.len());
        &self[first_kept..]
    }

    fn trim_end_matches<F>(&self, mut f: F) -> &[T]
    where
        F: FnMut(&T) -> bool,
    {
        // Search from the back for the last element that does not match.
        let end = self.iter().rposition(|x| !f(x)).map_or(0, |i| i + 1);
        &self[..end]
    }

    fn group_by_key<K, F>(&self, f: F) -> GroupByKey<'_, T, F>
    where
        F: FnMut(&T) -> K,
        K: PartialEq,
    {
        GroupByKey { slice: self, f }
    }

    fn split_prefix_suffix<F>(&self, mut f: F) -> (usize, usize)
    where
        F: FnMut(&T) -> bool,
    {
        let start = self.iter().position(|v| !f(v)).unwrap_or(self.len());
        // Search only the remainder for the suffix; if nothing non-matching
        // remains, the suffix starts right where the prefix ended.
        let end = start
            + self[start..]
                .iter()
                .rposition(|v| !f(v))
                .map_or(0, |i| i + 1);
        (start, end)
    }
}

/// This struct is created by [`SliceExt::group_by_key`].
pub struct GroupByKey<'a, T, F> {
    slice: &'a [T],
    f: F,
}

impl<'a, T, K, F> Iterator for GroupByKey<'a, T, F>
where
    F: FnMut(&T) -> K,
    K: PartialEq,
{
    type Item = (K, &'a [T]);

    fn next(&mut self) -> Option<Self::Item> {
        // The run's key is determined by its first element; the run extends
        // as long as subsequent elements map to an equal key.
        let (first, rest) = self.slice.split_first()?;
        let key = (self.f)(first);
        let run_len = 1 + rest.iter().take_while(|t| (self.f)(t) == key).count();
        let (group, remaining) = self.slice.split_at(run_len);
        self.slice = remaining;
        Some((key, group))
    }
}
/// Adapter for reversing iterators conditionally.
pub trait MaybeReverseIter {
    type RevIfIter;

    /// Reverse this iterator (apply .rev()) based on some condition.
    fn rev_if(self, condition: bool) -> Self::RevIfIter
    where
        Self: Sized;
}

impl<I: Iterator + DoubleEndedIterator> MaybeReverseIter for I {
    type RevIfIter =
        Chain<Flatten<std::option::IntoIter<I>>, Flatten<std::option::IntoIter<Rev<I>>>>;

    fn rev_if(self, condition: bool) -> Self::RevIfIter
    where
        Self: Sized,
    {
        // Exactly one of the two options is populated. The empty one turns
        // into an empty iterator and contributes nothing to the chain.
        let (forward, backward) =
            if condition { (None, Some(self.rev())) } else { (Some(self), None) };
        forward.into_iter().flatten().chain(backward.into_iter().flatten())
    }
}
/// Check if the [`Option`]-wrapped L is same to R.
///
/// `None` never compares equal to anything.
pub fn option_eq<L, R>(left: Option<L>, other: R) -> bool
where
    L: PartialEq<R>,
{
    match left {
        Some(value) => value == other,
        None => false,
    }
}
/// A container around a static reference that is cheap to clone and hash.
#[derive(Debug)]
pub struct Static<T: 'static>(pub &'static T);

impl<T> Deref for Static<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.0
    }
}

impl<T> Copy for Static<T> {}

impl<T> Clone for Static<T> {
    fn clone(&self) -> Self {
        Self(self.0)
    }
}

impl<T> Eq for Static<T> {}

impl<T> PartialEq for Static<T> {
    /// Identity comparison: two containers are equal iff they point to the
    /// same static memory location.
    fn eq(&self, other: &Self) -> bool {
        std::ptr::eq(self.0, other.0)
    }
}

impl<T> Hash for Static<T> {
    /// Hashes the reference's address rather than the pointed-to value,
    /// consistent with the pointer-identity `PartialEq`.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        state.write_usize(self.0 as *const T as usize);
    }
}
/// Generic access to a structure's components.
pub trait Get<Index> {
    /// The structure's component type.
    type Component;

    /// Borrow the component for the specified index.
    fn get_ref(&self, index: Index) -> &Self::Component;

    /// Borrow the component for the specified index mutably.
    fn get_mut(&mut self, index: Index) -> &mut Self::Component;

    /// Convenience method for getting a copy of a component.
    fn get(self, index: Index) -> Self::Component
    where
        Self: Sized,
        Self::Component: Copy,
    {
        *self.get_ref(index)
    }

    /// Convenience method for setting a component.
    fn set(&mut self, index: Index, value: Self::Component) {
        *self.get_mut(index) = value;
    }

    /// Builder-style method for setting a component.
    fn with(mut self, index: Index, value: Self::Component) -> Self
    where
        Self: Sized,
    {
        self.set(index, value);
        self
    }
}
/// A numeric type.
///
/// Implementors can be added, subtracted, negated, and scaled by `f64`
/// factors, and they have an additive identity provided by [`zero`](Self::zero).
pub trait Numeric:
    Sized
    + Debug
    + Copy
    + PartialEq
    + Neg<Output = Self>
    + Add<Output = Self>
    + Sub<Output = Self>
    + Mul<f64, Output = Self>
    + Div<f64, Output = Self>
{
    /// The identity element for addition.
    fn zero() -> Self;

    /// Whether `self` is zero.
    fn is_zero(self) -> bool {
        self == Self::zero()
    }

    /// Whether `self` consists only of finite parts.
    fn is_finite(self) -> bool;
}
/// Returns the default math class of a character in Typst, if it has one.
///
/// This is determined by the Unicode math class, with some manual overrides.
/// Characters not covered by an override fall back to the class from the
/// `unicode_math_class` data, which may be `None`.
pub fn default_math_class(c: char) -> Option<MathClass> {
    match c {
        // Better spacing.
        // https://github.com/typst/typst/commit/2e039cb052fcb768027053cbf02ce396f6d7a6be
        ':' => Some(MathClass::Relation),
        // Better spacing when used alongside + PLUS SIGN.
        // https://github.com/typst/typst/pull/1726
        '⋯' | '⋱' | '⋰' | '⋮' => Some(MathClass::Normal),
        // Better spacing.
        // https://github.com/typst/typst/pull/1855
        '.' | '/' => Some(MathClass::Normal),
        // ⊥ UP TACK should not be a relation, contrary to ⟂ PERPENDICULAR.
        // https://github.com/typst/typst/pull/5714
        '\u{22A5}' => Some(MathClass::Normal),
        // Used as a binary connector in linear logic, where it is referred to
        // as "par".
        // https://github.com/typst/typst/issues/5764
        '⅋' => Some(MathClass::Binary),
        // Those overrides should become the default in the next revision of
        // MathClass.txt.
        // https://github.com/typst/typst/issues/5764#issuecomment-2632435247
        '⎰' | '⟅' => Some(MathClass::Opening),
        '⎱' | '⟆' => Some(MathClass::Closing),
        // Both ∨ and ⟑ are classified as Binary.
        // https://github.com/typst/typst/issues/5764
        '⟇' => Some(MathClass::Binary),
        // Arabic comma.
        // https://github.com/latex3/unicode-math/pull/633#issuecomment-2028936135
        '،' => Some(MathClass::Punctuation),
        // Fall back to the Unicode data for everything else.
        c => unicode_math_class::class(c),
    }
}
/// Automatically calls a deferred function when the returned handle is dropped.
pub fn defer<T, F: FnOnce(&mut T)>(
thing: &mut T,
deferred: F,
) -> impl DerefMut<Target = T> {
pub struct DeferHandle<'a, T, F: FnOnce(&mut T)> {
thing: &'a mut T,
deferred: Option<F>,
}
impl<'a, T, F: FnOnce(&mut T)> Drop for DeferHandle<'a, T, F> {
fn drop(&mut self) {
std::mem::take(&mut self.deferred).expect("deferred function")(self.thing);
}
}
impl<T, F: FnOnce(&mut T)> std::ops::Deref for DeferHandle<'_, T, F> {
type Target = T;
fn deref(&self) -> &Self::Target {
self.thing
}
}
impl<T, F: FnOnce(&mut T)> std::ops::DerefMut for DeferHandle<'_, T, F> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.thing
}
}
DeferHandle { thing, deferred: Some(deferred) }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/listset.rs | crates/typst-utils/src/listset.rs | use std::ops::DerefMut;
/// Picked by gut feeling. Could probably even be a bit larger.
const CUT_OFF: usize = 15;

/// A set backed by a mutable slice-like data structure.
///
/// This data structure uses two different strategies depending on size:
///
/// - When the list is small, it is just kept as is and searched linearly in
///   [`contains`](Self::contains).
///
/// - When the list is a bit bigger, it's sorted in [`new`](Self::new) and then
///   binary-searched for containment checks.
pub struct ListSet<S>(S);

impl<T, S> ListSet<S>
where
    S: DerefMut<Target = [T]>,
    T: Ord,
{
    /// Creates a new list set.
    ///
    /// If the list is longer than the cutoff point, it is sorted so that
    /// lookups can binary-search.
    pub fn new(mut list: S) -> Self {
        if list.len() > CUT_OFF {
            list.sort_unstable();
        }
        Self(list)
    }

    /// Whether the set contains no elements.
    pub fn is_empty(&self) -> bool {
        self.0.len() == 0
    }

    /// Checks whether the set contains the given value.
    ///
    /// If the list is shorter than the cutoff point, performs a linear search.
    /// If it is longer, performs a binary search.
    pub fn contains(&self, value: &T) -> bool {
        match self.0.len() > CUT_OFF {
            true => self.0.binary_search(value).is_ok(),
            false => self.0.iter().any(|candidate| candidate == value),
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/version.rs | crates/typst-utils/src/version.rs | //! Typst version information.
/// Returns the version of Typst.
///
/// The information is read from the following sources:
///
/// - For the version number: The `TYPST_VERSION` environment variable
/// - For the commit hash: The `TYPST_COMMIT_SHA` environment variable
///
/// Build tooling can set these environment variables to configure the exposed
/// information. If the environment variables are left unset, the values are
/// populated via `build.rs` from the Cargo package manifest version and the git
/// hash in the current repository (if any).
///
/// # Panics
/// If the `TYPST_VERSION` environment variable holds a version string that
/// doesn't conform to SemVer.
pub fn version() -> TypstVersion {
    // Parsed once and cached for the lifetime of the process.
    *crate::singleton!(TypstVersion, {
        // Both variables are baked in at compile time (see the `# Panics`
        // note above for where they come from).
        let raw = env!("TYPST_VERSION");
        let commit = option_env!("TYPST_COMMIT_SHA");
        match semver::Version::parse(raw) {
            Ok(version) => {
                return TypstVersion {
                    major: version.major.try_into().unwrap(),
                    minor: version.minor.try_into().unwrap(),
                    patch: version.patch.try_into().unwrap(),
                    raw,
                    commit,
                };
            }
            Err(err) => {
                panic!("failed to parse {raw:?} as semantic version number: {err:?}")
            }
        }
    })
}
/// Typst version definition.
///
/// This structure contains the current Typst version. To query the precise
/// version number, refer to the [`TypstVersion::major()`],
/// [`TypstVersion::minor()`] and [`TypstVersion::patch()`] functions. You can
/// read the underlying, raw version string (e.g., for CLI output) with
/// [`TypstVersion::raw`].
///
/// Optionally, this may also contain the hash value of the Git commit from
/// which Typst was built. However, this field may be unpopulated.
#[derive(Debug, Clone, Copy)]
pub struct TypstVersion {
    /// Typst major version number.
    major: u32,
    /// Typst minor version number.
    minor: u32,
    /// Typst patch version number.
    patch: u32,
    /// Raw, unmodified version string.
    raw: &'static str,
    /// The raw commit hash, if known at build time.
    commit: Option<&'static str>,
}

impl TypstVersion {
    /// Returns the Typst major version.
    pub fn major(&self) -> u32 {
        self.major
    }

    /// Returns the Typst minor version.
    pub fn minor(&self) -> u32 {
        self.minor
    }

    /// Returns the Typst patch version.
    pub fn patch(&self) -> u32 {
        self.patch
    }

    /// Returns the raw, unparsed version string.
    ///
    /// Guaranteed to conform to SemVer.
    pub fn raw(&self) -> &'static str {
        self.raw
    }

    /// Returns the commit Typst was built from, if known.
    pub fn commit(&self) -> Option<&'static str> {
        self.commit
    }
}
/// Displays the commit Typst was built from human-readably.
///
/// Abbreviates a known commit hash to at most eight characters.
pub fn display_commit(commit: Option<&'static str>) -> &'static str {
    const LENGTH: usize = 8;
    let Some(hash) = commit else { return "unknown commit" };
    let cut = hash.len().min(LENGTH);
    &hash[..cut]
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/round.rs | crates/typst-utils/src/round.rs | /// Returns value with `n` digits after floating point where `n` is `precision`.
/// Standard rounding rules apply (if the `n+1`th digit is >= 5, the value is
/// rounded away from zero).
///
/// If `precision` is negative, returns value with `n` less significant integer
/// digits before floating point where `n` is `-precision`. Standard rounding
/// rules apply to the first remaining significant digit (if `n`th digit from
/// the floating point >= 5, round away from zero).
///
/// If rounding the `value` will have no effect (e.g., it's infinite or NaN),
/// returns `value` unchanged.
///
/// Note that rounding with negative precision may return plus or minus
/// infinity if the result would overflow or underflow (respectively) the range
/// of floating-point numbers.
///
/// # Examples
///
/// ```
/// # use typst_utils::round_with_precision;
/// let rounded = round_with_precision(-0.56553, 2);
/// assert_eq!(-0.57, rounded);
///
/// let rounded_negative = round_with_precision(823543.0, -3);
/// assert_eq!(824000.0, rounded_negative);
/// ```
pub fn round_with_precision(value: f64, precision: i16) -> f64 {
    // Don't attempt to round the float if that wouldn't have any effect.
    // This includes infinite or NaN values, as well as integer values with a
    // filled mantissa (which can't have a fractional part). Rounding with a
    // precision larger than the amount of effectively representable digits
    // would also be a no-op. Given that, the check below ensures we won't
    // proceed if `|value| >= 2^53` or if `precision >= 15`, which also
    // ensures the multiplication by the scale factor won't return `inf`,
    // since `2^53 * 10^15` (larger than any possible `value * scale`
    // multiplication) does not.
    let max_exact = (1_i64 << f64::MANTISSA_DIGITS) as f64;
    if value.is_infinite()
        || value.is_nan()
        || (precision >= 0 && value.abs() >= max_exact)
        || precision >= f64::DIGITS as i16
    {
        return value;
    }

    // Floats cannot have more than this amount of base-10 integer digits.
    if precision < -(f64::MAX_10_EXP as i16) {
        // Multiply by zero to ensure the sign is kept.
        return value * 0.0;
    }

    if precision > 0 {
        let scale = 10_f64.powi(i32::from(precision));
        let scaled = value * scale;
        // Guaranteed by the early-out above; see the comment there.
        assert!(scaled.is_finite(), "{value} * {scale} is not finite!");
        scaled.round() / scale
    } else {
        // Divide instead of multiplying by a negative exponent given that
        // `f64::MAX_10_EXP` is larger than `f64::MIN_10_EXP` in absolute
        // value (|308| > |-307|), allowing for the precision of -308 to be
        // used.
        let scale = 10_f64.powi(i32::from(-precision));
        (value / scale).round() * scale
    }
}
/// This is used for rounding into integer digits, and is a no-op for positive
/// `precision`.
///
/// If `precision` is negative, returns value with `n` less significant integer
/// digits from the first digit where `n` is `-precision`. Standard rounding
/// rules apply to the first remaining significant digit (if `n`th digit from
/// the first digit >= 5, round away from zero).
///
/// Note that this may return `None` for negative precision when rounding
/// beyond [`i64::MAX`] or [`i64::MIN`].
///
/// # Examples
///
/// ```
/// # use typst_utils::round_int_with_precision;
/// let rounded = round_int_with_precision(-154, -2);
/// assert_eq!(Some(-200), rounded);
///
/// let rounded = round_int_with_precision(823543, -3);
/// assert_eq!(Some(824000), rounded);
/// ```
pub fn round_int_with_precision(value: i64, precision: i16) -> Option<i64> {
    // Integers have no fractional digits, so non-negative precision changes
    // nothing.
    if precision >= 0 {
        return Some(value);
    }

    let digits = -precision as u32;
    let scale = match 10i64.checked_pow(digits - 1) {
        Some(scale) => scale,
        // Larger than any possible amount of integer digits.
        None => return Some(0),
    };

    // Divide by 10^(digits - 1), keeping the last digit to be removed as the
    // first digit of the result so it can be inspected with mod 10 for
    // rounding purposes.
    let truncated = value / scale;
    if truncated == 0 {
        return Some(0);
    }

    let last = (truncated % 10).abs();
    let rounded = if last >= 5 {
        // Round away from zero (towards the next multiple of 10). This may
        // overflow in the particular case of rounding MAX/MIN with -1.
        truncated.checked_add(truncated.signum() * (10 - last))?
    } else {
        // Just replace the last digit with zero, since it's < 5.
        truncated - truncated % 10
    };

    // Multiply back by 10^(digits - 1). May overflow / underflow, in which
    // case we fail.
    rounded.checked_mul(scale)
}
/// Unit tests for `round_with_precision` (floats, `rp`) and
/// `round_int_with_precision` (integers, `rip`). A negative precision rounds
/// to the left of the decimal point (tens, hundreds, ...); the sign of zero
/// is preserved for floats.
#[cfg(test)]
mod tests {
    use super::{round_int_with_precision as rip, round_with_precision as rp};
    /// Precision 0 rounds to the nearest whole number.
    #[test]
    fn test_round_with_precision_0() {
        let round = |value| rp(value, 0);
        assert_eq!(round(0.0), 0.0);
        assert_eq!(round(-0.0), -0.0);
        assert_eq!(round(0.4), 0.0);
        assert_eq!(round(-0.4), -0.0);
        assert_eq!(round(0.56453), 1.0);
        assert_eq!(round(-0.56453), -1.0);
    }
    /// Precision 1 keeps one fractional digit.
    #[test]
    fn test_round_with_precision_1() {
        let round = |value| rp(value, 1);
        assert_eq!(round(0.0), 0.0);
        assert_eq!(round(-0.0), -0.0);
        assert_eq!(round(0.4), 0.4);
        assert_eq!(round(-0.4), -0.4);
        assert_eq!(round(0.44), 0.4);
        assert_eq!(round(-0.44), -0.4);
        assert_eq!(round(0.56453), 0.6);
        assert_eq!(round(-0.56453), -0.6);
        assert_eq!(round(0.96453), 1.0);
        assert_eq!(round(-0.96453), -1.0);
    }
    /// Precision 2 keeps two fractional digits.
    #[test]
    fn test_round_with_precision_2() {
        let round = |value| rp(value, 2);
        assert_eq!(round(0.0), 0.0);
        assert_eq!(round(-0.0), -0.0);
        assert_eq!(round(0.4), 0.4);
        assert_eq!(round(-0.4), -0.4);
        assert_eq!(round(0.44), 0.44);
        assert_eq!(round(-0.44), -0.44);
        assert_eq!(round(0.444), 0.44);
        assert_eq!(round(-0.444), -0.44);
        assert_eq!(round(0.56553), 0.57);
        assert_eq!(round(-0.56553), -0.57);
        assert_eq!(round(0.99553), 1.0);
        assert_eq!(round(-0.99553), -1.0);
    }
    /// Precision -1 rounds to the nearest multiple of ten.
    #[test]
    fn test_round_with_precision_negative_1() {
        let round = |value| rp(value, -1);
        assert_eq!(round(0.0), 0.0);
        assert_eq!(round(-0.0), -0.0);
        assert_eq!(round(0.4), 0.0);
        assert_eq!(round(-0.4), -0.0);
        assert_eq!(round(1234.5), 1230.0);
        assert_eq!(round(-1234.5), -1230.0);
        assert_eq!(round(1245.232), 1250.0);
        assert_eq!(round(-1245.232), -1250.0);
    }
    /// Precision -2 rounds to the nearest multiple of one hundred.
    #[test]
    fn test_round_with_precision_negative_2() {
        let round = |value| rp(value, -2);
        assert_eq!(round(0.0), 0.0);
        assert_eq!(round(-0.0), -0.0);
        assert_eq!(round(0.4), 0.0);
        assert_eq!(round(-0.4), -0.0);
        assert_eq!(round(1243.232), 1200.0);
        assert_eq!(round(-1243.232), -1200.0);
        assert_eq!(round(1253.232), 1300.0);
        assert_eq!(round(-1253.232), -1300.0);
    }
    /// Boundary behavior around the largest exactly-representable integer
    /// (`2^53`) and the maximum number of reliable decimal digits.
    #[test]
    fn test_round_with_precision_fuzzy() {
        let max_int = (1_i64 << f64::MANTISSA_DIGITS) as f64;
        let max_digits = f64::DIGITS as i16;
        // Special cases.
        assert_eq!(rp(f64::INFINITY, 0), f64::INFINITY);
        assert_eq!(rp(f64::NEG_INFINITY, 0), f64::NEG_INFINITY);
        assert!(rp(f64::NAN, 0).is_nan());
        // Max
        assert_eq!(rp(max_int, 0), max_int);
        assert_eq!(rp(0.123456, max_digits), 0.123456);
        assert_eq!(rp(max_int, max_digits), max_int);
        // Max - 1
        assert_eq!(rp(max_int - 1.0, 0), max_int - 1.0);
        assert_eq!(rp(0.123456, max_digits - 1), 0.123456);
        assert_eq!(rp(max_int - 1.0, max_digits), max_int - 1.0);
        assert_eq!(rp(max_int, max_digits - 1), max_int);
        assert_eq!(rp(max_int - 1.0, max_digits - 1), max_int - 1.0);
    }
    /// Boundary behavior for large negative precisions around
    /// `f64::MAX_10_EXP`: results saturate to infinity or collapse to
    /// (signed) zero instead of producing garbage.
    #[test]
    fn test_round_with_precision_fuzzy_negative() {
        let exp10 = |exponent: i16| 10_f64.powi(exponent.into());
        let max_digits = f64::MAX_10_EXP as i16;
        let max_up = max_digits + 1;
        let max_down = max_digits - 1;
        // Special cases.
        assert_eq!(rp(f64::INFINITY, -1), f64::INFINITY);
        assert_eq!(rp(f64::NEG_INFINITY, -1), f64::NEG_INFINITY);
        assert!(rp(f64::NAN, -1).is_nan());
        // Max
        assert_eq!(rp(f64::MAX, -max_digits), f64::INFINITY);
        assert_eq!(rp(f64::MIN, -max_digits), f64::NEG_INFINITY);
        assert_eq!(rp(1.66 * exp10(max_digits), -max_digits), f64::INFINITY);
        assert_eq!(rp(-1.66 * exp10(max_digits), -max_digits), f64::NEG_INFINITY);
        assert_eq!(rp(1.66 * exp10(max_down), -max_digits), 0.0);
        assert_eq!(rp(-1.66 * exp10(max_down), -max_digits), -0.0);
        assert_eq!(rp(1234.5678, -max_digits), 0.0);
        assert_eq!(rp(-1234.5678, -max_digits), -0.0);
        // Max + 1
        assert_eq!(rp(f64::MAX, -max_up), 0.0);
        assert_eq!(rp(f64::MIN, -max_up), -0.0);
        assert_eq!(rp(1.66 * exp10(max_digits), -max_up), 0.0);
        assert_eq!(rp(-1.66 * exp10(max_digits), -max_up), -0.0);
        assert_eq!(rp(1.66 * exp10(max_down), -max_up), 0.0);
        assert_eq!(rp(-1.66 * exp10(max_down), -max_up), -0.0);
        assert_eq!(rp(1234.5678, -max_up), 0.0);
        assert_eq!(rp(-1234.5678, -max_up), -0.0);
        // Max - 1
        assert_eq!(rp(f64::MAX, -max_down), f64::INFINITY);
        assert_eq!(rp(f64::MIN, -max_down), f64::NEG_INFINITY);
        assert_eq!(rp(1.66 * exp10(max_down), -max_down), 2.0 * exp10(max_down));
        assert_eq!(rp(-1.66 * exp10(max_down), -max_down), -2.0 * exp10(max_down));
        assert_eq!(rp(1234.5678, -max_down), 0.0);
        assert_eq!(rp(-1234.5678, -max_down), -0.0);
        // Must be approx equal to 1.7e308. Using some division and flooring
        // to avoid weird results due to imprecision.
        assert_eq!(
            (rp(1.66 * exp10(max_digits), -max_down) / exp10(max_down)).floor(),
            17.0,
        );
        assert_eq!(
            (rp(-1.66 * exp10(max_digits), -max_down) / exp10(max_down)).floor(),
            -17.0,
        );
    }
    /// Non-negative precision never changes an integer.
    #[test]
    fn test_round_int_with_precision_positive() {
        assert_eq!(rip(0, 0), Some(0));
        assert_eq!(rip(10, 0), Some(10));
        assert_eq!(rip(23, 235), Some(23));
        assert_eq!(rip(i64::MAX, 235), Some(i64::MAX));
    }
    /// Integer rounding to tens: halfway values round away from zero and
    /// overflow near `i64::MAX`/`i64::MIN` yields `None`.
    #[test]
    fn test_round_int_with_precision_negative_1() {
        let round = |value| rip(value, -1);
        assert_eq!(round(0), Some(0));
        assert_eq!(round(3), Some(0));
        assert_eq!(round(5), Some(10));
        assert_eq!(round(13), Some(10));
        assert_eq!(round(1234), Some(1230));
        assert_eq!(round(-1234), Some(-1230));
        assert_eq!(round(1245), Some(1250));
        assert_eq!(round(-1245), Some(-1250));
        assert_eq!(round(i64::MAX), None);
        assert_eq!(round(i64::MIN), None);
    }
    /// Integer rounding to hundreds; extremes round towards zero here
    /// (last two digits dropped) rather than overflowing.
    #[test]
    fn test_round_int_with_precision_negative_2() {
        let round = |value| rip(value, -2);
        assert_eq!(round(0), Some(0));
        assert_eq!(round(3), Some(0));
        assert_eq!(round(5), Some(0));
        assert_eq!(round(13), Some(0));
        assert_eq!(round(1245), Some(1200));
        assert_eq!(round(-1245), Some(-1200));
        assert_eq!(round(1253), Some(1300));
        assert_eq!(round(-1253), Some(-1300));
        assert_eq!(round(i64::MAX), Some(i64::MAX - 7));
        assert_eq!(round(i64::MIN), Some(i64::MIN + 8));
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/scalar.rs | crates/typst-utils/src/scalar.rs | use std::cmp::Ordering;
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::iter::Sum;
use std::ops::{
Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Sub, SubAssign,
};
use crate::Numeric;
/// A 64-bit float that implements `Eq`, `Ord` and `Hash`.
///
/// Panics if it's `NaN` during any of those operations.
#[derive(Default, Copy, Clone)]
pub struct Scalar(f64);
impl Scalar {
    /// The scalar with value `0.0`.
    pub const ZERO: Self = Self(0.0);
    /// The scalar with value `1.0`.
    pub const ONE: Self = Self(1.0);
    /// The scalar with value `f64::INFINITY`.
    pub const INFINITY: Self = Self(f64::INFINITY);
    /// Creates a [`Scalar`] from the given float.
    ///
    /// A `NaN` input is sanitized to `0.0`, so scalars built through this
    /// constructor never trigger the NaN panics of `Eq`/`Ord`/`Hash`.
    pub const fn new(x: f64) -> Self {
        if x.is_nan() { Self(0.0) } else { Self(x) }
    }
    /// Returns the wrapped float.
    pub const fn get(self) -> f64 {
        self.0
    }
}
impl Numeric for Scalar {
    fn zero() -> Self {
        Self(0.0)
    }
    fn is_finite(self) -> bool {
        self.0.is_finite()
    }
}
// Prints exactly like the wrapped `f64`.
impl Debug for Scalar {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}
// `Eq` is sound because `eq` below panics on NaN, and `==` is a total
// equivalence relation on non-NaN floats.
impl Eq for Scalar {}
impl PartialEq for Scalar {
    fn eq(&self, other: &Self) -> bool {
        // Comparing NaN is a bug per the type's contract, so fail loudly.
        assert!(!self.0.is_nan() && !other.0.is_nan(), "float is NaN");
        self.0 == other.0
    }
}
// Convenience comparison against a raw float. A NaN argument hits the
// assertion in `eq` above (the raw float is wrapped without sanitization).
impl PartialEq<f64> for Scalar {
    fn eq(&self, other: &f64) -> bool {
        self == &Self(*other)
    }
}
// Total order over non-NaN floats; panics if either operand is NaN.
impl Ord for Scalar {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.partial_cmp(&other.0).expect("float is NaN")
    }
}
// Delegates to `Ord` so the two orderings can never disagree.
impl PartialOrd for Scalar {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Hash for Scalar {
    /// Hashes the float's bit pattern.
    ///
    /// `0.0` and `-0.0` compare equal via `PartialEq` but have different bit
    /// patterns, so the sign of zero is normalized before hashing. Otherwise
    /// two scalars could be `==` yet hash differently, violating the
    /// `Eq`/`Hash` contract (e.g. breaking hash-map lookups keyed by
    /// `Scalar`).
    fn hash<H: Hasher>(&self, state: &mut H) {
        debug_assert!(!self.0.is_nan(), "float is NaN");
        // `-0.0 == 0.0` holds, so map both zeros to the `+0.0` bit pattern.
        let bits =
            if self.0 == 0.0 { 0.0f64.to_bits() } else { self.0.to_bits() };
        bits.hash(state);
    }
}
// Conversion routes through `new`, so a NaN float becomes `Scalar(0.0)`.
impl From<f64> for Scalar {
    fn from(float: f64) -> Self {
        Self::new(float)
    }
}
impl From<Scalar> for f64 {
    fn from(scalar: Scalar) -> Self {
        scalar.0
    }
}
// All arithmetic below routes its result through `Self::new`, which maps a
// NaN result (e.g. `0.0 / 0.0` or `INFINITY - INFINITY`) back to `0.0`.
// Thus arithmetic can never produce a scalar that later panics in
// `Eq`/`Ord`/`Hash`.
impl Neg for Scalar {
    type Output = Self;
    fn neg(self) -> Self::Output {
        Self::new(-self.0)
    }
}
impl<T: Into<Self>> Add<T> for Scalar {
    type Output = Self;
    fn add(self, rhs: T) -> Self::Output {
        Self::new(self.0 + rhs.into().0)
    }
}
impl<T: Into<Self>> AddAssign<T> for Scalar {
    fn add_assign(&mut self, rhs: T) {
        *self = *self + rhs.into();
    }
}
impl<T: Into<Self>> Sub<T> for Scalar {
    type Output = Self;
    fn sub(self, rhs: T) -> Self::Output {
        Self::new(self.0 - rhs.into().0)
    }
}
impl<T: Into<Self>> SubAssign<T> for Scalar {
    fn sub_assign(&mut self, rhs: T) {
        *self = *self - rhs.into();
    }
}
impl<T: Into<Self>> Mul<T> for Scalar {
    type Output = Self;
    fn mul(self, rhs: T) -> Self::Output {
        Self::new(self.0 * rhs.into().0)
    }
}
impl<T: Into<Self>> MulAssign<T> for Scalar {
    fn mul_assign(&mut self, rhs: T) {
        *self = *self * rhs.into();
    }
}
impl<T: Into<Self>> Div<T> for Scalar {
    type Output = Self;
    fn div(self, rhs: T) -> Self::Output {
        Self::new(self.0 / rhs.into().0)
    }
}
impl<T: Into<Self>> DivAssign<T> for Scalar {
    fn div_assign(&mut self, rhs: T) {
        *self = *self / rhs.into();
    }
}
impl<T: Into<Self>> Rem<T> for Scalar {
    type Output = Self;
    fn rem(self, rhs: T) -> Self::Output {
        Self::new(self.0 % rhs.into().0)
    }
}
impl<T: Into<Self>> RemAssign<T> for Scalar {
    fn rem_assign(&mut self, rhs: T) {
        *self = *self % rhs.into();
    }
}
// Summing is performed on the raw floats and sanitized once at the end.
impl Sum for Scalar {
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        Self::new(iter.map(|s| s.0).sum())
    }
}
impl<'a> Sum<&'a Self> for Scalar {
    fn sum<I: Iterator<Item = &'a Self>>(iter: I) -> Self {
        Self::new(iter.map(|s| s.0).sum())
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/macros.rs | crates/typst-utils/src/macros.rs | /// Create a lazy initialized, globally unique `'static` reference to a value.
#[macro_export]
macro_rules! singleton {
($ty:ty, $value:expr) => {{
static VALUE: ::std::sync::LazyLock<$ty> = ::std::sync::LazyLock::new(|| $value);
&*VALUE
}};
}
/// Implement the `Sub` trait based on existing `Neg` and `Add` impls.
///
/// Subtraction is defined as `self + (-other)`, so `$b` must implement `Neg`
/// and `$a` must implement `Add` for the negated value, producing `$c`.
#[macro_export]
macro_rules! sub_impl {
    ($a:ident - $b:ident -> $c:ident) => {
        impl ::core::ops::Sub<$b> for $a {
            type Output = $c;
            fn sub(self, other: $b) -> $c {
                self + -other
            }
        }
    };
}
/// Implement an assign trait based on an existing non-assign trait.
///
/// The generated impls read `*self` by value (`*self = *self op other`), so
/// `$a` must be `Copy` and already implement the corresponding binary
/// operator with `Output = $a`.
#[macro_export]
macro_rules! assign_impl {
    ($a:ident += $b:ident) => {
        impl ::core::ops::AddAssign<$b> for $a {
            fn add_assign(&mut self, other: $b) {
                *self = *self + other;
            }
        }
    };
    ($a:ident -= $b:ident) => {
        impl ::core::ops::SubAssign<$b> for $a {
            fn sub_assign(&mut self, other: $b) {
                *self = *self - other;
            }
        }
    };
    ($a:ident *= $b:ident) => {
        impl ::core::ops::MulAssign<$b> for $a {
            fn mul_assign(&mut self, other: $b) {
                *self = *self * other;
            }
        }
    };
    ($a:ident /= $b:ident) => {
        impl ::core::ops::DivAssign<$b> for $a {
            fn div_assign(&mut self, other: $b) {
                *self = *self / other;
            }
        }
    };
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/hash.rs | crates/typst-utils/src/hash.rs | use std::fmt::{self, Debug};
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::sync::atomic::Ordering;
use portable_atomic::AtomicU128;
use siphasher::sip128::{Hasher128, SipHasher13};
/// Calculate a 128-bit siphash of a value.
///
/// Covers exactly what the value's [`Hash`] impl feeds to the hasher.
pub fn hash128<T: Hash + ?Sized>(value: &T) -> u128 {
    let mut hasher = SipHasher13::new();
    value.hash(&mut hasher);
    hasher.finish128().as_u128()
}
/// A wrapper type with lazily-computed hash.
///
/// This is useful if you want to pass large values of `T` to memoized
/// functions. Especially recursive structures like trees benefit from
/// intermediate prehashed nodes.
///
/// Note that for a value `v` of type `T`, `hash(v)` is not necessarily equal to
/// `hash(LazyHash::new(v))`. Writing the precomputed hash into a hasher's
/// state produces different output than writing the value's parts directly.
/// However, that seldom matters as you are typically either dealing with values
/// of type `T` or with values of type `LazyHash<T>`, not a mix of both.
///
/// # Equality
/// Because Typst uses high-quality 128 bit hashes in all places, the risk of a
/// hash collision is reduced to an absolute minimum. Therefore, this type
/// additionally provides `PartialEq` and `Eq` implementations that compare by
/// hash instead of by value. For this to be correct, your hash implementation
/// **must feed all information relevant to the `PartialEq` impl to the
/// hasher.**
///
/// # Usage
/// If the value is expected to be cloned, it is best used inside of an `Arc`
/// or `Rc` to best re-use the hash once it has been computed.
///
/// # Unsized coercions
/// The `LazyHash` type supports unsized payload types and coercions to such.
/// For instance, a `LazyHash<&'static str>` can be coerced to a
/// `LazyHash<dyn YourTrait>` when `&'static str: YourTrait`. When it is hashed,
/// a `LazyHash` will always use the [`Hash`] impl of the underlying type. This
/// underlying type changes through an unsized coercion. When coercing a
/// [`LazyHash`] that has an already populated internal hash, you'll thus get a
/// cached hash that was hashed with another impl than a fresh hash would have
/// used. To avoid this, when performing unsized coercions, avoid hashing the
/// value before the coercion and overall try to minimize the timespan in which
/// the original type is active. Typical usages of unsized coercions have a very
/// minimal lifetime of the original type only upon construction.
#[derive(Clone)]
pub struct LazyHash<T: ?Sized> {
    /// The hash for the value.
    hash: HashLock,
    /// The underlying value.
    // Must be the last field so that `T` may be unsized.
    value: T,
}
impl<T: Default> Default for LazyHash<T> {
    #[inline]
    fn default() -> Self {
        Self::new(Default::default())
    }
}
impl<T> LazyHash<T> {
    /// Wraps an item without pre-computed hash.
    #[inline]
    pub fn new(value: T) -> Self {
        Self { hash: HashLock::new(), value }
    }
    /// Returns the wrapped value.
    #[inline]
    pub fn into_inner(self) -> T {
        self.value
    }
}
impl<T: Hash + ?Sized + 'static> LazyHash<T> {
    /// Get the hash or compute it if not set yet.
    #[inline]
    fn load_or_compute_hash(&self) -> u128 {
        self.hash.get_or_insert_with(|| hash128(&self.value))
    }
}
impl<T: Hash + ?Sized + 'static> Hash for LazyHash<T> {
    // Writes the cached 128-bit hash instead of re-feeding the value's
    // parts — see the type-level docs for why this differs from `hash(v)`.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write_u128(self.load_or_compute_hash());
    }
}
impl<T> From<T> for LazyHash<T> {
    #[inline]
    fn from(value: T) -> Self {
        Self::new(value)
    }
}
impl<T: Hash + ?Sized + 'static> Eq for LazyHash<T> {}
// Equality compares hashes, not values — correct under the documented
// assumption that the `Hash` impl covers everything `PartialEq` would.
impl<T: Hash + ?Sized + 'static> PartialEq for LazyHash<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.load_or_compute_hash() == other.load_or_compute_hash()
    }
}
impl<T: ?Sized> Deref for LazyHash<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}
impl<T: ?Sized + 'static> DerefMut for LazyHash<T> {
    // Mutable access may change the value, so the cached hash must be
    // invalidated and recomputed on next use.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.hash.reset();
        &mut self.value
    }
}
impl<T: Debug> Debug for LazyHash<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.value.fmt(f)
    }
}
/// A wrapper type with a manually computed hash.
///
/// This can be used to turn an unhashable type into a hashable one where the
/// hash is provided manually. Typically, the hash is derived from the data
/// which was used to construct to the unhashable type.
///
/// For instance, you could hash the bytes that were parsed into an unhashable
/// data structure.
///
/// # Equality
/// Because Typst uses high-quality 128 bit hashes in all places, the risk of a
/// hash collision is reduced to an absolute minimum. Therefore, this type
/// additionally provides `PartialEq` and `Eq` implementations that compare by
/// hash instead of by value. For this to be correct, your hash implementation
/// **must feed all information relevant to the `PartialEq` impl to the
/// hasher.**
#[derive(Clone)]
pub struct ManuallyHash<T: ?Sized> {
    /// A manually computed hash.
    hash: u128,
    /// The underlying value.
    value: T,
}
impl<T> ManuallyHash<T> {
    /// Wraps an item with a pre-computed hash.
    ///
    /// The hash should be computed with `typst_utils::hash128`.
    #[inline]
    pub fn new(value: T, hash: u128) -> Self {
        Self { hash, value }
    }
    /// Returns the wrapped value.
    #[inline]
    pub fn into_inner(self) -> T {
        self.value
    }
}
impl<T: ?Sized> Hash for ManuallyHash<T> {
    // Writes the stored 128-bit hash; the value itself is never hashed.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write_u128(self.hash);
    }
}
impl<T: ?Sized> Eq for ManuallyHash<T> {}
// Equality compares the stored hashes (see type-level docs for the
// correctness requirement on the provided hash).
impl<T: ?Sized> PartialEq for ManuallyHash<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.hash == other.hash
    }
}
// NOTE(review): only immutable access is exposed (`Deref`, no `DerefMut`),
// which keeps the fixed hash consistent with the value.
impl<T: ?Sized> Deref for ManuallyHash<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}
impl<T: Debug> Debug for ManuallyHash<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.value.fmt(f)
    }
}
/// Storage for lazy hash computation.
///
/// The value `0` doubles as the "unset" marker. A value whose real hash is
/// `0` is therefore recomputed on every access — harmless, just redundant
/// work, and astronomically unlikely for 128-bit hashes.
pub struct HashLock(AtomicU128);
impl HashLock {
    /// Create a new unset hash cell.
    pub const fn new() -> Self {
        Self(AtomicU128::new(0))
    }
    /// Get the hash or compute it if not set yet.
    ///
    /// If two threads race, both may run `f` and both store their result;
    /// this is fine as long as `f` is deterministic (as with `hash128` of an
    /// unchanged value), since the stores then write the same bits.
    #[inline]
    pub fn get_or_insert_with(&self, f: impl FnOnce() -> u128) -> u128 {
        let mut hash = self.get();
        if hash == 0 {
            hash = f();
            self.0.store(hash, Ordering::Relaxed);
        }
        hash
    }
    /// Reset the hash to unset.
    #[inline]
    pub fn reset(&mut self) {
        // Because we have a mutable reference, we can skip the atomic.
        *self.0.get_mut() = 0;
    }
    /// Get the hash, returns zero if not computed yet.
    #[inline]
    fn get(&self) -> u128 {
        // We only need atomicity and no synchronization of other operations, so
        // `Relaxed` is fine.
        self.0.load(Ordering::Relaxed)
    }
}
impl Default for HashLock {
    fn default() -> Self {
        Self::new()
    }
}
impl Clone for HashLock {
    // The clone starts out with whatever state the original currently has
    // (possibly unset); it does not share the cell with the original.
    fn clone(&self) -> Self {
        Self(AtomicU128::new(self.get()))
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/fat.rs | crates/typst-utils/src/fat.rs | //! Fat pointer handling.
//!
//! This assumes the memory representation of fat pointers. Although it is not
//! guaranteed by Rust, it's improbable that it will change. Still, when the
//! pointer metadata APIs are stable, we should definitely move to them:
//! <https://github.com/rust-lang/rust/issues/81513>
use std::alloc::Layout;
use std::mem;
use std::ptr::NonNull;
/// Create a fat pointer from a data address and a vtable address.
///
/// # Safety
/// Must only be called when `T` is a `dyn Trait`. The data address must point
/// to a value whose type implements the trait of `T` and the `vtable` must have
/// been extracted with [`vtable`].
#[track_caller]
pub unsafe fn from_raw_parts<T: ?Sized>(data: *const (), vtable: *const ()) -> *const T {
    unsafe {
        // SAFETY: per this function's contract, `T` is a trait object type,
        // so `*const T` is a (data, vtable) pair matching `FatPointer`'s
        // layout. The debug assertion double-checks the layouts agree.
        let fat = FatPointer { data, vtable };
        debug_assert_eq!(Layout::new::<*const T>(), Layout::new::<FatPointer>());
        mem::transmute_copy::<FatPointer, *const T>(&fat)
    }
}
/// Create a mutable fat pointer from a data address and a vtable address.
///
/// # Safety
/// Must only be called when `T` is a `dyn Trait`. The data address must point
/// to a value whose type implements the trait of `T` and the `vtable` must have
/// been extracted with [`vtable`].
#[track_caller]
pub unsafe fn from_raw_parts_mut<T: ?Sized>(data: *mut (), vtable: *const ()) -> *mut T {
    unsafe {
        // SAFETY: same as `from_raw_parts`, for the mutable pointer type.
        let fat = FatPointer { data, vtable };
        debug_assert_eq!(Layout::new::<*mut T>(), Layout::new::<FatPointer>());
        mem::transmute_copy::<FatPointer, *mut T>(&fat)
    }
}
/// Extract the address to a trait object's vtable.
///
/// # Safety
/// Must only be called when `T` is a `dyn Trait`.
#[track_caller]
pub unsafe fn vtable<T: ?Sized>(ptr: *const T) -> NonNull<()> {
    unsafe {
        debug_assert_eq!(Layout::new::<*const T>(), Layout::new::<FatPointer>());
        // SAFETY: the caller guarantees `T` is a trait object, so the fat
        // pointer's second word is the vtable address; vtable pointers are
        // never null, satisfying `NonNull::new_unchecked`.
        NonNull::new_unchecked(
            mem::transmute_copy::<*const T, FatPointer>(&ptr).vtable as *mut (),
        )
    }
}
/// The memory representation of a trait object pointer.
///
/// Although this is not guaranteed by Rust, it's improbable that it will
/// change.
#[repr(C)]
struct FatPointer {
    data: *const (),
    vtable: *const (),
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/pico.rs | crates/typst-utils/src/pico.rs | use std::borrow::Borrow;
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display, Formatter};
use std::hash::{Hash, Hasher};
use std::num::NonZeroU64;
use std::ops::Deref;
use std::sync::{LazyLock, RwLock};
use rustc_hash::FxHashMap;
/// Marks a number as a bitcode encoded `PicoStr` (the high bit, which the
/// 12-char × 5-bit bitcode encoding never uses).
const MARKER: u64 = 1 << 63;
/// The global runtime string interner.
static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
    RwLock::new(Interner { seen: FxHashMap::default(), strings: Vec::new() })
});
/// A string interner.
struct Interner {
    /// Maps each interned (leaked) string to its id for fast lookup.
    seen: FxHashMap<&'static str, PicoStr>,
    /// Interned strings in insertion order; a runtime id resolves to index
    /// `id - exceptions::LIST.len() - 1` in this vector.
    strings: Vec<&'static str>,
}
/// An interned string representation that is cheap to copy and hash, but more
/// expensive to access.
///
/// This type takes up 8 bytes and is copyable and null-optimized (i.e.
/// `Option<PicoStr>` also takes 8 bytes).
///
/// Supports compile-time string interning via [`PicoStr::constant`] in two
/// flavors:
/// - Strings of length at most 12 containing only chars from 'a'-'z', '1'-'4',
/// and '-' are stored inline in the number
/// - Other strings _can_ be compile-time interned the same way, but must first
/// be added to the list in `exceptions::LIST`.
///
/// No such restrictions apply at runtime (via [`PicoStr::intern`]).
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct PicoStr(NonZeroU64);
impl PicoStr {
    /// Intern a string at runtime.
    pub fn intern(string: &str) -> PicoStr {
        // Try to use bitcode or exception representations.
        if let Ok(value) = PicoStr::try_constant(string) {
            return value;
        }
        // Try to find an existing entry that we can reuse.
        //
        // We could check with just a read lock, but if the string is not yet
        // present, we would then need to recheck after acquiring a write lock,
        // which is probably not worth it.
        let mut interner = INTERNER.write().unwrap();
        if let Some(&id) = interner.seen.get(string) {
            return id;
        }
        // Create a new entry forever by leaking the string. PicoStr is only
        // used for strings that aren't created en masse, so it is okay.
        //
        // Ids `1..=LIST.len()` are reserved for compile-time exceptions, so
        // the first runtime string receives id `LIST.len() + 1`. This also
        // keeps the id non-zero, as `NonZeroU64` requires.
        let num = exceptions::LIST.len() + interner.strings.len() + 1;
        let id = Self(NonZeroU64::new(num as u64).unwrap());
        let string = Box::leak(string.to_string().into_boxed_str());
        interner.seen.insert(string, id);
        interner.strings.push(string);
        id
    }
    /// Try to create a `PicoStr`, but don't intern it if it does not exist yet.
    ///
    /// This is useful to try to compare against one or multiple `PicoStr`
    /// without interning needlessly.
    ///
    /// Will always return `Some(_)` if the string can be represented inline.
    pub fn get(string: &str) -> Option<PicoStr> {
        // Try to use bitcode or exception representations.
        if let Ok(value) = PicoStr::try_constant(string) {
            return Some(value);
        }
        // Try to find an existing entry that we can reuse.
        let interner = INTERNER.read().unwrap();
        interner.seen.get(string).copied()
    }
    /// Creates a compile-time constant `PicoStr`.
    ///
    /// Should only be used in const contexts because it can panic.
    #[track_caller]
    pub const fn constant(string: &'static str) -> PicoStr {
        match PicoStr::try_constant(string) {
            Ok(value) => value,
            Err(err) => failed_to_compile_time_intern(err, string),
        }
    }
    /// Try to intern a string statically at compile-time.
    pub const fn try_constant(string: &str) -> Result<PicoStr, bitcode::EncodingError> {
        // Try to encode with bitcode.
        let value = match bitcode::encode(string) {
            // Store representation marker in high bit. Bitcode doesn't use
            // 4 high bits.
            Ok(v) => v | MARKER,
            // If that fails, try to use the exception list.
            Err(e) => {
                if let Some(i) = exceptions::get(string) {
                    // Offset by one to make it non-zero.
                    i as u64 + 1
                } else {
                    return Err(e);
                }
            }
        };
        Ok(Self(NonZeroU64::new(value).unwrap()))
    }
    /// Resolve to a decoded string.
    pub fn resolve(self) -> ResolvedPicoStr {
        // If high bit is set, this is a bitcode-encoded string.
        let value = self.0.get();
        if value & MARKER != 0 {
            return bitcode::decode(value & !MARKER);
        }
        // Otherwise, the id is an index (offset by one): first into the
        // compile-time exception list, then into the runtime interner.
        let index = (value - 1) as usize;
        let string = if let Some(runtime) = index.checked_sub(exceptions::LIST.len()) {
            // Runtime strings are leaked on interning, hence `'static`.
            INTERNER.read().unwrap().strings[runtime]
        } else {
            exceptions::LIST[index]
        };
        ResolvedPicoStr(ResolvedPicoStrInner::Static(string))
    }
}
// Debug-formats as the resolved string.
impl Debug for PicoStr {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Debug::fmt(self.resolve().as_str(), f)
    }
}
/// A 5-bit encoding for strings with length up to 12 that are restricted to a
/// specific charset.
///
/// Each char takes 5 bits, so 12 chars occupy at most 60 bits, leaving the
/// high 4 bits of the `u64` free (one of which holds `MARKER`).
mod bitcode {
    use super::{ResolvedPicoStr, ResolvedPicoStrInner};
    /// Maps from encodings to their bytes. Index 0 is reserved (`\0`) so
    /// that a zero code means "no character" and terminates decoding.
    const DECODE: &[u8; 32] = b"\0abcdefghijklmnopqrstuvwxyz-1234";
    /// Maps from bytes to their encodings. Bytes outside the charset map to
    /// 0, which `encode` rejects.
    const ENCODE: &[u8; 256] = &{
        let mut map = [0; 256];
        let mut i = 0;
        while i < DECODE.len() {
            map[DECODE[i] as usize] = i as u8;
            i += 1;
        }
        map
    };
    /// Try to encode a string as a 64-bit integer.
    pub const fn encode(string: &str) -> Result<u64, EncodingError> {
        let bytes = string.as_bytes();
        if bytes.len() > 12 {
            return Err(EncodingError::TooLong);
        }
        // Pack back to front so that the first char ends up in the lowest
        // five bits.
        let mut num: u64 = 0;
        let mut i = bytes.len();
        while i > 0 {
            i -= 1;
            let b = bytes[i];
            let v = ENCODE[b as usize];
            if v == 0 {
                return Err(EncodingError::BadChar);
            }
            num <<= 5;
            num |= v as u64;
        }
        Ok(num)
    }
    /// Decode the string for a 64-bit integer.
    pub const fn decode(mut value: u64) -> ResolvedPicoStr {
        let mut buf = [0; 12];
        let mut len = 0;
        // Read five bits at a time until only zero codes remain.
        while value != 0 {
            let v = value & 0b11111;
            buf[len as usize] = DECODE[v as usize];
            len += 1;
            value >>= 5;
        }
        ResolvedPicoStr(ResolvedPicoStrInner::Inline(buf, len))
    }
    /// A failure during compile-time interning.
    pub enum EncodingError {
        TooLong,
        BadChar,
    }
    impl EncodingError {
        pub const fn message(&self) -> &'static str {
            match self {
                Self::TooLong => "the maximum auto-internible string length is 12",
                Self::BadChar => {
                    "can only auto-intern the chars 'a'-'z', '1'-'4', and '-'"
                }
            }
        }
    }
}
/// Compile-time interned strings that cannot be encoded with `bitcode`.
mod exceptions {
    use std::cmp::Ordering;
    /// A global list of non-bitcode-encodable compile-time internible strings.
    ///
    /// Must be sorted.
    pub const LIST: &[&str] = &[
        "accept-charset",
        "allowfullscreen",
        "aria-activedescendant",
        "aria-autocomplete",
        "aria-colcount",
        "aria-colindex",
        "aria-controls",
        "aria-describedby",
        "aria-disabled",
        "aria-dropeffect",
        "aria-errormessage",
        "aria-expanded",
        "aria-haspopup",
        "aria-keyshortcuts",
        "aria-labelledby",
        "aria-multiline",
        "aria-multiselectable",
        "aria-orientation",
        "aria-placeholder",
        "aria-posinset",
        "aria-readonly",
        "aria-relevant",
        "aria-required",
        "aria-roledescription",
        "aria-rowcount",
        "aria-rowindex",
        "aria-selected",
        "aria-valuemax",
        "aria-valuemin",
        "aria-valuenow",
        "aria-valuetext",
        "autocapitalize",
        "cjk-latin-spacing",
        "contenteditable",
        "discretionary-ligatures",
        "fetchpriority",
        "formnovalidate",
        "h5",
        "h6",
        "historical-ligatures",
        "number-clearance",
        "number-margin",
        "numbering-scope",
        "onbeforeprint",
        "onbeforeunload",
        "onlanguagechange",
        "onmessageerror",
        "onrejectionhandled",
        "onunhandledrejection",
        "page-numbering",
        "par-line-marker",
        "popovertarget",
        "popovertargetaction",
        "referrerpolicy",
        "shadowrootclonable",
        "shadowrootcustomelementregistry",
        "shadowrootdelegatesfocus",
        "shadowrootmode",
        "shadowrootserializable",
        "transparentize",
        "writingsuggestions",
    ];
    /// Binary-search the sorted `LIST` for a string, returning its index if
    /// it is present.
    pub const fn get(string: &str) -> Option<usize> {
        let mut left = 0;
        let mut right = LIST.len();
        while left < right {
            let middle = left + (right - left) / 2;
            match strcmp(string, LIST[middle]) {
                Ordering::Equal => return Some(middle),
                Ordering::Less => right = middle,
                Ordering::Greater => left = middle + 1,
            }
        }
        None
    }
    /// Lexicographically compare two strings byte by byte. Needed because
    /// `str`'s `Ord` impl is not callable in const contexts.
    const fn strcmp(a: &str, b: &str) -> Ordering {
        let a = a.as_bytes();
        let b = b.as_bytes();
        let shorter = if a.len() < b.len() { a.len() } else { b.len() };
        let mut i = 0;
        while i < shorter {
            if a[i] < b[i] {
                return Ordering::Less;
            }
            if a[i] > b[i] {
                return Ordering::Greater;
            }
            i += 1;
        }
        // All shared bytes match, so the shorter string sorts first.
        if a.len() < b.len() {
            Ordering::Less
        } else if a.len() > b.len() {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    }
}
}
/// This is returned by [`PicoStr::resolve`].
///
/// Dereferences to a `str`.
pub struct ResolvedPicoStr(ResolvedPicoStrInner);
/// The internal representation of a [`ResolvedPicoStr`].
enum ResolvedPicoStrInner {
    /// A bitcode-decoded string: up to 12 bytes, with the second field
    /// holding the valid length.
    Inline([u8; 12], u8),
    /// An exception-list entry or a leaked runtime-interned string.
    Static(&'static str),
}
impl ResolvedPicoStr {
    /// Retrieve the underlying string.
    pub fn as_str(&self) -> &str {
        match &self.0 {
            ResolvedPicoStrInner::Inline(buf, len) => unsafe {
                // SAFETY: inline buffers are only produced by
                // `bitcode::decode`, which fills them with bytes from the
                // ASCII-only `DECODE` table, so `buf[..len]` is valid UTF-8.
                std::str::from_utf8_unchecked(&buf[..*len as usize])
            },
            ResolvedPicoStrInner::Static(s) => s,
        }
    }
}
// The impls below all delegate to `as_str` so a resolved string behaves
// like a plain `&str`.
impl Debug for ResolvedPicoStr {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Debug::fmt(self.as_str(), f)
    }
}
impl Display for ResolvedPicoStr {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self.as_str(), f)
    }
}
impl Deref for ResolvedPicoStr {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}
impl AsRef<str> for ResolvedPicoStr {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
impl Borrow<str> for ResolvedPicoStr {
    fn borrow(&self) -> &str {
        self.as_str()
    }
}
impl Eq for ResolvedPicoStr {}
impl PartialEq for ResolvedPicoStr {
    fn eq(&self, other: &Self) -> bool {
        self.as_str().eq(other.as_str())
    }
}
impl Ord for ResolvedPicoStr {
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_str().cmp(other.as_str())
    }
}
impl PartialOrd for ResolvedPicoStr {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Hash for ResolvedPicoStr {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}
/// The error when a string could not be interned at compile time. Because the
/// normal formatting machinery is not available at compile time, just producing
/// the message is a bit involved ...
#[track_caller]
const fn failed_to_compile_time_intern(
    error: bitcode::EncodingError,
    string: &'static str,
) -> ! {
    // The message is assembled byte by byte into a fixed-size buffer since
    // `format!` cannot be used in const contexts.
    const CAPACITY: usize = 512;
    // Appends `s` to the buffer, silently truncating at `CAPACITY`.
    const fn push((buf, i): &mut ([u8; CAPACITY], usize), s: &str) {
        let mut k = 0;
        while k < s.len() && *i < buf.len() {
            buf[*i] = s.as_bytes()[k];
            k += 1;
            *i += 1;
        }
    }
    let mut dest = ([0; CAPACITY], 0);
    push(&mut dest, "failed to compile-time intern string \"");
    push(&mut dest, string);
    push(&mut dest, "\". ");
    push(&mut dest, error.message());
    push(&mut dest, ". you can add an exception to ");
    push(&mut dest, file!());
    push(&mut dest, " to intern longer strings.");
    // Only the written prefix of the buffer forms the message.
    let (slice, _) = dest.0.split_at(dest.1);
    let Ok(message) = std::str::from_utf8(slice) else { panic!() };
    panic!("{}", message);
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Asserts that interning and resolving `s` yields `s` again.
    #[track_caller]
    fn roundtrip(s: &str) {
        assert_eq!(PicoStr::intern(s).resolve().as_str(), s);
    }
    #[test]
    fn test_pico_str() {
        // Test comparing compile-time and runtime-interned bitcode string.
        const H1: PicoStr = PicoStr::constant("h1");
        assert_eq!(H1, PicoStr::intern("h1"));
        assert_eq!(H1.resolve().as_str(), "h1");
        // Test comparing compile-time and runtime-interned exception.
        const DISC: PicoStr = PicoStr::constant("discretionary-ligatures");
        assert_eq!(DISC, PicoStr::intern("discretionary-ligatures"));
        assert_eq!(DISC.resolve().as_str(), "discretionary-ligatures");
        // Test just roundtripping some strings.
        roundtrip("");
        roundtrip("hi");
        roundtrip("∆@<hi-10_");
        roundtrip("you");
        roundtrip("discretionary-ligatures");
    }
    /// Ensures that none of the exceptions is bitcode-encodable.
    #[test]
    fn test_exceptions_not_bitcode_encodable() {
        for s in exceptions::LIST {
            assert!(
                bitcode::encode(s).is_err(),
                "{s:?} can be encoded with bitcode and should not be an exception"
            );
        }
    }
    /// Ensures that the exceptions are sorted.
    #[test]
    fn test_exceptions_sorted() {
        for group in exceptions::LIST.windows(2) {
            assert!(group[0] < group[1], "{group:?} are out of order");
        }
    }
    /// Ensures that all exceptions can be found.
    #[test]
    fn test_exception_find() {
        for (i, s) in exceptions::LIST.iter().enumerate() {
            assert_eq!(exceptions::get(s), Some(i), "wrong index for {s:?}");
        }
        assert_eq!(exceptions::get("a"), None);
        assert_eq!(exceptions::get("another-"), None);
        assert_eq!(exceptions::get("z"), None);
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/deferred.rs | crates/typst-utils/src/deferred.rs | use std::sync::Arc;
use once_cell::sync::OnceCell;
/// A value that is lazily executed on another thread.
///
/// Execution will be started in the background and can be waited on.
pub struct Deferred<T>(Arc<OnceCell<T>>);
impl<T: Send + Sync + 'static> Deferred<T> {
    /// Creates a new deferred value.
    ///
    /// The closure will be called on a secondary thread such that the value
    /// can be initialized in parallel.
    pub fn new<F>(f: F) -> Self
    where
        F: FnOnce() -> T + Send + Sync + 'static,
    {
        let inner = Arc::new(OnceCell::new());
        let cloned = Arc::clone(&inner);
        rayon::spawn(move || {
            // Initialize the value if it hasn't been initialized yet.
            // We do this to avoid panicking in case it was set externally.
            cloned.get_or_init(f);
        });
        Self(inner)
    }
    /// Waits on the value to be initialized.
    ///
    /// If the value has already been initialized, this will return
    /// immediately. Otherwise, this will block until the value is
    /// initialized in another thread.
    pub fn wait(&self) -> &T {
        // Fast path if the value is already available. We don't want to yield
        // to rayon in that case.
        if let Some(value) = self.0.get() {
            return value;
        }
        // Ensure that we yield to give the deferred value a chance to be
        // computed on single-threaded platforms (for WASM compatibility).
        while let Some(rayon::Yield::Executed) = rayon::yield_now() {}
        self.0.wait()
    }
}
// Clones share the same cell: waiting on any clone observes the same
// (single) initialization.
impl<T> Clone for Deferred<T> {
    fn clone(&self) -> Self {
        Self(Arc::clone(&self.0))
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-utils/src/duration.rs | crates/typst-utils/src/duration.rs | use std::fmt::{self, Display, Formatter, Write};
use std::time::Duration;
use super::round_with_precision;
/// Formats a duration with a precision suitable for human display.
///
/// Returns an opaque [`Display`] implementation; use `format!` or
/// `.to_string()` to obtain the rendered text.
pub fn format_duration(duration: Duration) -> impl Display {
    DurationDisplay(duration)
}
/// Displays a `Duration`.
///
/// Newtype wrapper that carries the duration into the `Display` impl.
struct DurationDisplay(Duration);
impl Display for DurationDisplay {
    /// Formats the duration as a short human-readable string, e.g.
    /// `"1 h 4 min"` or `"2.85 s"`.
    ///
    /// Units larger than minutes suppress all smaller units; otherwise the
    /// remaining (sub-)second part is appended with up to two decimal places.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut space = false;

        // Writes one unit, separating it from a previously written unit
        // with a single space.
        macro_rules! piece {
            ($($tts:tt)*) => {
                if std::mem::replace(&mut space, true) {
                    f.write_char(' ')?;
                }
                write!(f, $($tts)*)?;
            };
        }

        // Decompose the whole seconds into days, hours, minutes, seconds.
        let secs = self.0.as_secs();
        let (mins, secs) = (secs / 60, (secs % 60));
        let (hours, mins) = (mins / 60, (mins % 60));
        let (days, hours) = ((hours / 24), (hours % 24));

        if days > 0 {
            piece!("{days} d");
        }

        if hours > 0 {
            piece!("{hours} h");
        }

        if mins > 0 {
            piece!("{mins} min");
        }

        // No need to display anything more than minutes at this point.
        if days > 0 || hours > 0 {
            return Ok(());
        }

        let order = |exp| 1000u64.pow(exp);

        // Remaining nanoseconds within the current minute. The `nanos == 0`
        // check below also ensures that a zero duration prints "0 s".
        let nanos = secs * order(3) + self.0.subsec_nanos() as u64;
        let fract = |exp| round_with_precision(nanos as f64 / order(exp) as f64, 2);

        // Use `>=` so that exact unit boundaries display in the larger unit:
        // exactly one second is "1 s" rather than "1000 ms", exactly one
        // millisecond is "1 ms" rather than "1000 µs", and so on.
        if nanos == 0 || self.0 >= Duration::from_secs(1) {
            // For durations > 5 min, we drop the fractional part.
            if self.0 > Duration::from_secs(300) {
                piece!("{secs} s");
            } else {
                piece!("{} s", fract(3));
            }
        } else if self.0 >= Duration::from_millis(1) {
            piece!("{} ms", fract(2));
        } else if self.0 >= Duration::from_micros(1) {
            piece!("{} µs", fract(1));
        } else {
            piece!("{} ns", fract(0));
        }

        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Asserts that `duration` renders exactly as `expected`.
    #[track_caller]
    fn test(duration: Duration, expected: &str) {
        assert_eq!(format_duration(duration).to_string(), expected);
    }

    #[test]
    fn test_format_duration() {
        // Table of (input, expected rendering) pairs covering all units.
        let cases: &[(Duration, &str)] = &[
            (Duration::from_secs(1000000), "11 d 13 h 46 min"),
            (Duration::from_secs(3600 * 24), "1 d"),
            (Duration::from_secs(3600), "1 h"),
            (Duration::from_secs(3600 + 240), "1 h 4 min"),
            (Duration::from_secs_f64(364.77), "6 min 4 s"),
            (Duration::from_secs_f64(264.776), "4 min 24.78 s"),
            (Duration::from_secs(3), "3 s"),
            (Duration::from_secs_f64(2.8492), "2.85 s"),
            (Duration::from_micros(734), "734 µs"),
            (Duration::from_micros(294816), "294.82 ms"),
            (Duration::from_nanos(1), "1 ns"),
        ];
        for &(duration, expected) in cases {
            test(duration, expected);
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-realize/src/lib.rs | crates/typst-realize/src/lib.rs | //! Typst's realization subsystem.
//!
//! *Realization* is the process of recursively applying styling and, in
//! particular, show rules to produce well-known elements that can be processed
//! further.
use std::borrow::Cow;
use std::cell::LazyCell;
use arrayvec::ArrayVec;
use bumpalo::Bump;
use bumpalo::collections::{CollectIn, String as BumpString, Vec as BumpVec};
use comemo::Track;
use ecow::EcoString;
use typst_library::diag::{At, SourceResult, bail, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{
Content, Context, ContextElem, Element, NativeElement, NativeShowRule, Packed,
Recipe, RecipeIndex, Selector, SequenceElem, ShowSet, Style, StyleChain, StyledElem,
Styles, SymbolElem, Synthesize, TargetElem, Transformation,
};
use typst_library::introspection::{
Locatable, LocationKey, SplitLocator, Tag, TagElem, TagFlags, Tagged,
};
use typst_library::layout::{
AlignElem, BoxElem, HElem, InlineElem, PageElem, PagebreakElem, VElem,
};
use typst_library::math::{EquationElem, Mathy};
use typst_library::model::{
CiteElem, CiteGroup, DocumentElem, EnumElem, ListElem, ListItemLike, ListLike,
ParElem, ParbreakElem, TermsElem,
};
use typst_library::routines::{Arenas, FragmentKind, Pair, RealizationKind};
use typst_library::text::{LinebreakElem, SmartQuoteElem, SpaceElem, TextElem};
use typst_syntax::Span;
use typst_utils::{ListSet, SliceExt, SmallBitSet};
/// Realize content into a flat list of well-known, styled items.
#[typst_macros::time(name = "realize")]
pub fn realize<'a>(
    kind: RealizationKind,
    engine: &mut Engine,
    locator: &mut SplitLocator,
    arenas: &'a Arenas,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<Vec<Pair<'a>>> {
    // Select the grouping rules matching the kind of realization.
    let rules = match kind {
        RealizationKind::LayoutDocument { .. }
        | RealizationKind::LayoutFragment { .. } => LAYOUT_RULES,
        RealizationKind::LayoutPar => LAYOUT_PAR_RULES,
        RealizationKind::HtmlDocument { .. } => HTML_DOCUMENT_RULES,
        RealizationKind::HtmlFragment { .. } => HTML_FRAGMENT_RULES,
        RealizationKind::Math => MATH_RULES,
    };

    // Only document-level realization starts "outside" of all containers.
    let outside = kind.is_document();

    let mut s = State {
        engine,
        locator,
        arenas,
        rules,
        sink: Vec::new(),
        groupings: ArrayVec::new(),
        outside,
        may_attach: false,
        saw_parbreak: false,
        kind,
    };

    visit(&mut s, content, styles)?;
    finish(&mut s)?;

    Ok(s.sink)
}
/// Mutable state for realization.
///
/// Sadly, we need that many lifetimes because &mut references are invariant and
/// it would force the lifetimes of e.g. engine and locator to be equal if they
/// shared a lifetime. We can get around it by enforcing the lifetimes on
/// `fn realize`, but that makes it less flexible on the call site, which isn't
/// worth it.
///
/// The only interesting lifetime is 'a, which is that of the content that comes
/// in and goes out. It's the same 'a as on `fn realize`.
struct State<'a, 'x, 'y, 'z> {
    /// Defines what kind of realization we are performing.
    kind: RealizationKind<'x>,
    /// The engine.
    engine: &'x mut Engine<'y>,
    /// Assigns unique locations to elements.
    locator: &'x mut SplitLocator<'z>,
    /// Temporary storage arenas for lifetime extension during realization.
    arenas: &'a Arenas,
    /// The output elements of well-known types.
    sink: Vec<Pair<'a>>,
    /// Grouping rules used for realization.
    rules: &'x [&'x GroupingRule],
    /// Currently active groupings. Bounded by the number of distinct
    /// grouping priorities (`MAX_GROUP_NESTING`).
    groupings: ArrayVec<Grouping<'x>, MAX_GROUP_NESTING>,
    /// Whether we are currently not within any container or show rule output.
    /// This is used to determine page styles during layout.
    outside: bool,
    /// Whether now following attach spacing can survive.
    may_attach: bool,
    /// Whether we visited any paragraph breaks.
    saw_parbreak: bool,
}
/// Defines a rule for how certain elements shall be grouped during realization.
struct GroupingRule {
    /// When an element is visited that matches a rule with higher priority
    /// than one that is currently grouped, we start a nested group.
    priority: u8,
    /// Whether the grouping handles tags itself. If this is set to `false`,
    /// realization will transparently take care of tags and they will not
    /// be visible to `finish`.
    tags: bool,
    /// Defines which kinds of elements start and make up this kind of grouping.
    trigger: fn(&Content, &State) -> bool,
    /// Defines elements that may appear in the interior of the grouping, but
    /// not at the edges.
    inner: fn(&Content) -> bool,
    /// Defines whether styles for this kind of element interrupt the grouping.
    interrupt: fn(Element) -> bool,
    /// Should convert the accumulated elements in `s.sink[start..]` into
    /// the grouped element.
    finish: fn(Grouped) -> SourceResult<()>,
}
/// A started grouping of some elements.
struct Grouping<'a> {
    /// The position in `s.sink` where the group starts.
    start: usize,
    /// Only applies to `PAR` grouping: Whether this paragraph group is
    /// interrupted, but not yet finished because it may be ignored due to being
    /// fully inline.
    interrupted: bool,
    /// The rule used for this grouping.
    rule: &'a GroupingRule,
}
/// The result of grouping: a view into the elements collected for a group,
/// handed to a `GroupingRule`'s `finish` function.
struct Grouped<'a, 'x, 'y, 'z, 's> {
    /// The realization state.
    s: &'s mut State<'a, 'x, 'y, 'z>,
    /// The position in `s.sink` where the group starts.
    start: usize,
}
/// What to do with an element when encountering it during realization.
struct Verdict<'a> {
    /// Whether the element is already prepared (i.e. things that should only
    /// happen once have happened).
    prepared: bool,
    /// A map of styles to apply to the element.
    map: Styles,
    /// An optional show rule transformation to apply to the element.
    step: Option<ShowStep<'a>>,
}
/// A show rule transformation to apply to the element.
enum ShowStep<'a> {
    /// A user-defined transformational show rule, together with its index
    /// in the style chain (used for guarding against re-application).
    Recipe(&'a Recipe, RecipeIndex),
    /// The built-in show rule.
    Builtin(NativeShowRule),
}
/// A match of a regex show rule.
struct RegexMatch<'a> {
    /// The offset in the string that matched.
    offset: usize,
    /// The text that matched.
    text: EcoString,
    /// The style chain of the matching grouping.
    styles: StyleChain<'a>,
    /// The index of the recipe that matched.
    id: RecipeIndex,
    /// The recipe that matched.
    recipe: &'a Recipe,
}
/// State kept for space collapsing.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum SpaceState {
    /// A following space will be collapsed.
    Destructive,
    /// A following space will be kept unless a destructive element follows.
    Supportive,
    /// A space exists at this index.
    Space(usize),
}
impl<'a> State<'a, '_, '_, '_> {
    /// Lifetime-extends some content by moving it into the content arena.
    fn store(&self, content: Content) -> &'a Content {
        self.arenas.content.alloc(content)
    }

    /// Lifetime-extends some pairs.
    ///
    /// A `BumpVec` is used instead of `alloc_slice_copy` so that the space
    /// can be reused if no other bump allocations have been made by the time
    /// the `BumpVec` is dropped.
    fn store_slice(&self, pairs: &[Pair<'a>]) -> BumpVec<'a, Pair<'a>> {
        let mut buffer = BumpVec::new_in(&self.arenas.bump);
        buffer.extend_from_slice_copy(pairs);
        buffer
    }
}
impl<'a, 'x, 'y, 'z, 's> Grouped<'a, 'x, 'y, 'z, 's> {
    /// Accesses the grouped elements.
    fn get(&self) -> &[Pair<'a>] {
        &self.s.sink[self.start..]
    }

    /// Accesses the grouped elements mutably, as the whole sink plus the
    /// index at which the group begins within it.
    fn get_mut(&mut self) -> (&mut Vec<Pair<'a>>, usize) {
        (&mut self.s.sink, self.start)
    }

    /// Removes the grouped elements from the sink and retrieves back the state
    /// with which resulting elements can be visited.
    fn end(self) -> &'s mut State<'a, 'x, 'y, 'z> {
        let Self { s, start } = self;
        s.sink.truncate(start);
        s
    }
}
/// Handles an arbitrary piece of content during realization.
///
/// Dispatches in a fixed order that later steps rely on: tags, kind-specific
/// rules, show rules, sequences, styled elements, grouping rules, and filter
/// rules. Whatever is not handled by any of these is pushed to the sink as-is.
fn visit<'a>(
    s: &mut State<'a, '_, '_, '_>,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<()> {
    // Tags can always simply be pushed.
    if content.is::<TagElem>() {
        s.sink.push((content, styles));
        return Ok(());
    }

    // Transformations for content based on the realization kind. Needs
    // to happen before show rules.
    if visit_kind_rules(s, content, styles)? {
        return Ok(());
    }

    // Apply show rules and preparation.
    if visit_show_rules(s, content, styles)? {
        return Ok(());
    }

    // Recurse into sequences. Styled elements and sequences can currently also
    // have labels, so this needs to happen before they are handled.
    if let Some(sequence) = content.to_packed::<SequenceElem>() {
        for elem in &sequence.children {
            visit(s, elem, styles)?;
        }
        return Ok(());
    }

    // Recurse into styled elements.
    if let Some(styled) = content.to_packed::<StyledElem>() {
        return visit_styled(s, &styled.child, Cow::Borrowed(&styled.styles), styles);
    }

    // Apply grouping --- where multiple elements are collected and then
    // processed together (typically being transformed into one).
    if visit_grouping_rules(s, content, styles)? {
        return Ok(());
    }

    // Some elements are skipped based on specific circumstances.
    if visit_filter_rules(s, content, styles)? {
        return Ok(());
    }

    // No further transformations to apply, so we can finally just push it to
    // the output!
    s.sink.push((content, styles));
    Ok(())
}
/// Handles transformations based on the realization kind. Returns `true` if
/// the content was fully handled here.
fn visit_kind_rules<'a>(
    s: &mut State<'a, '_, '_, '_>,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<bool> {
    if let RealizationKind::Math = s.kind {
        // Transparently recurse into equations nested in math, so that things
        // like this work:
        // ```
        // #let my = $pi$
        // $ my r^2 $
        // ```
        if let Some(elem) = content.to_packed::<EquationElem>() {
            visit(s, &elem.body, styles)?;
            return Ok(true);
        }

        // In normal realization, we apply regex show rules to consecutive
        // textual elements via `TEXTUAL` grouping. However, in math, this is
        // not desirable, so we just do it on a per-element basis.
        if let Some(elem) = content.to_packed::<SymbolElem>() {
            if let Some(m) = find_regex_match_in_str(elem.text.as_str(), styles) {
                visit_regex_match(s, &[(content, styles)], m)?;
                return Ok(true);
            }
        } else if let Some(elem) = content.to_packed::<TextElem>()
            && let Some(m) = find_regex_match_in_str(&elem.text, styles)
        {
            visit_regex_match(s, &[(content, styles)], m)?;
            return Ok(true);
        }
    } else {
        // Transparently wrap mathy content into equations.
        if content.can::<dyn Mathy>() && !content.is::<EquationElem>() {
            let eq = EquationElem::new(content.clone()).pack().spanned(content.span());
            visit(s, s.store(eq), styles)?;
            return Ok(true);
        }

        // Symbols in non-math content transparently convert to `TextElem` so we
        // don't have to handle them in non-math layout. The label, if any, is
        // carried over to the replacement text element.
        if let Some(elem) = content.to_packed::<SymbolElem>() {
            let mut text = TextElem::packed(elem.text.clone()).spanned(elem.span());
            if let Some(label) = elem.label() {
                text.set_label(label);
            }
            visit(s, s.store(text), styles)?;
            return Ok(true);
        }
    }

    Ok(false)
}
/// Tries to apply show rules to or prepare content. Returns `true` if the
/// element was handled.
fn visit_show_rules<'a>(
    s: &mut State<'a, '_, '_, '_>,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<bool> {
    // Determines whether and how to proceed with show rule application.
    let Some(Verdict { prepared, mut map, step }) = verdict(s.engine, content, styles)
    else {
        return Ok(false);
    };

    // Create a fresh copy that we can mutate.
    let mut output = Cow::Borrowed(content);

    // If the element isn't yet prepared (we're seeing it for the first time),
    // prepare it.
    let mut tags = None;
    if !prepared {
        tags = prepare(s.engine, s.locator, output.to_mut(), &mut map, styles)?;
    }

    // Apply a show rule step, if there is one.
    if let Some(step) = step {
        let chained = styles.chain(&map);
        let result = match step {
            // Apply a user-defined show rule.
            ShowStep::Recipe(recipe, guard) => {
                let context = Context::new(output.location(), Some(chained));
                recipe.apply(
                    s.engine,
                    context.track(),
                    output.into_owned().guarded(guard),
                )
            }

            // Apply a built-in show rule.
            ShowStep::Builtin(rule) => {
                let _scope = typst_timing::TimingScope::new(output.elem().name());
                rule.apply(&output, s.engine, chained)
                    .map(|content| content.spanned(output.span()))
            }
        };

        // Errors in show rules don't terminate compilation immediately. We just
        // continue with empty content for them and show all errors together, if
        // they remain by the end of the introspection loop.
        //
        // This way, we can ignore errors that only occur in earlier iterations
        // and also show more useful errors at once.
        output = Cow::Owned(s.engine.delay(result));
    }

    // Lifetime-extend the realized content if necessary.
    let realized = match output {
        Cow::Borrowed(realized) => realized,
        Cow::Owned(realized) => s.store(realized),
    };

    // Push start tag.
    let (start, end) = tags.unzip();
    if let Some(tag) = start {
        visit(s, s.store(TagElem::packed(tag)), styles)?;
    }

    // Visit the realized output with show-depth accounting; `outside` is
    // cleared for everything except context elements.
    let prev_outside = s.outside;
    s.outside &= content.is::<ContextElem>();
    s.engine.route.increase();
    s.engine.route.check_show_depth().at(content.span())?;

    visit_styled(s, realized, Cow::Owned(map), styles)?;

    s.outside = prev_outside;
    s.engine.route.decrease();

    // Push end tag.
    if let Some(tag) = end {
        visit(s, s.store(TagElem::packed(tag)), styles)?;
    }

    Ok(true)
}
/// Inspects an element and the current styles and determines how to proceed
/// with the styling.
///
/// Returns `None` when there is neither a show step, nor styles to apply,
/// nor any preparation left to do for this element.
fn verdict<'a>(
    engine: &mut Engine,
    elem: &'a Content,
    styles: StyleChain<'a>,
) -> Option<Verdict<'a>> {
    let prepared = elem.is_prepared();
    let mut map = Styles::new();
    let mut step = None;

    // Do pre-synthesis on a cloned element to be able to match on synthesized
    // fields before real synthesis runs (during preparation). It's really
    // unfortunate that we have to do this, but otherwise
    // `show figure.where(kind: table)` won't work :(
    let mut elem = elem;
    let mut slot;
    if !prepared && elem.can::<dyn Synthesize>() {
        slot = elem.clone();
        slot.with_mut::<dyn Synthesize>()
            .unwrap()
            .synthesize(engine, styles)
            .ok();
        elem = &slot;
    }

    // Lazily computes the total number of recipes in the style chain. We need
    // it to determine whether a particular show rule was already applied to the
    // `elem` previously. For this purpose, show rules are indexed from the
    // top of the chain as the chain might grow to the bottom.
    let depth = LazyCell::new(|| styles.recipes().count());

    for (r, recipe) in styles.recipes().enumerate() {
        // We're not interested in recipes that don't match.
        if !recipe
            .selector()
            .is_some_and(|selector| selector.matches(elem, Some(styles)))
        {
            continue;
        }

        // Special handling for show-set rules: they only accumulate styles
        // (and only on first sight), they never become the show step.
        if let Transformation::Style(transform) = recipe.transform() {
            if !prepared {
                map.apply(transform.clone());
            }
            continue;
        }

        // If we already have a show step, don't look for one.
        if step.is_some() {
            continue;
        }

        // Check whether this show rule was already applied to the element.
        let index = RecipeIndex(*depth - r);
        if elem.is_guarded(index) {
            continue;
        }

        // We'll apply this recipe.
        step = Some(ShowStep::Recipe(recipe, index));

        // If we found a show rule and are already prepared, there is nothing
        // else to do, so we can just break. If we are not yet prepared,
        // continue searching for potential show-set styles.
        if prepared {
            break;
        }
    }

    // If we found no user-defined rule, also consider the built-in show rule.
    if step.is_none() {
        let target = styles.get(TargetElem::target);
        if let Some(rule) = engine.routines.rules.get(target, elem) {
            step = Some(ShowStep::Builtin(rule));
        }
    }

    // If there's nothing to do, there is also no verdict.
    if step.is_none()
        && map.is_empty()
        && (prepared || {
            elem.label().is_none()
                && elem.location().is_none()
                && !elem.can::<dyn ShowSet>()
                && !elem.can::<dyn Locatable>()
                && !elem.can::<dyn Tagged>()
                && !elem.can::<dyn Synthesize>()
        })
    {
        return None;
    }

    Some(Verdict { prepared, map, step })
}
/// This is only executed the first time an element is visited: assigns a
/// location, applies built-in show-set rules, synthesizes fields, and
/// materializes styles. Returns the start/end tags for locatable elements.
fn prepare(
    engine: &mut Engine,
    locator: &mut SplitLocator,
    elem: &mut Content,
    map: &mut Styles,
    styles: StyleChain,
) -> SourceResult<Option<(Tag, Tag)>> {
    // Generate a location for the element, which uniquely identifies it in
    // the document. This has some overhead, so we only do it for elements
    // that are explicitly marked as locatable and labelled elements.
    //
    // The element could already have a location even if it is not prepared
    // when it stems from a query.
    let key = typst_utils::hash128(&elem);
    let flags = TagFlags {
        introspectable: elem.can::<dyn Locatable>()
            || elem.label().is_some()
            || elem.location().is_some(),
        tagged: elem.can::<dyn Tagged>(),
    };
    if elem.location().is_none() && flags.any() {
        let loc = locator.next_location(engine, key, elem.span());
        elem.set_location(loc);
    }

    // Apply built-in show-set rules. User-defined show-set rules are already
    // considered in the map built while determining the verdict.
    if let Some(show_settable) = elem.with::<dyn ShowSet>() {
        map.apply(show_settable.show_set(styles));
    }

    // If necessary, generate "synthesized" fields (which are derived from
    // other fields or queries). Do this after show-set so that show-set styles
    // are respected.
    if let Some(synthesizable) = elem.with_mut::<dyn Synthesize>() {
        synthesizable.synthesize(engine, styles.chain(map))?;
    }

    // Copy style chain fields into the element itself, so that they are
    // available in rules.
    elem.materialize(styles.chain(map));

    // If the element is locatable, create start and end tags to be able to find
    // the element in the frames after layout. Do this after synthesis and
    // materialization, so that it includes the synthesized fields. Do it before
    // marking as prepared so that show-set rules will apply to this element
    // when queried.
    let tags = elem
        .location()
        .map(|loc| (Tag::Start(elem.clone(), flags), Tag::End(loc, key, flags)));

    // Ensure that this preparation only runs once by marking the element as
    // prepared.
    elem.mark_prepared();

    Ok(tags)
}
/// Handles a styled element: processes document/page styles, lifts "outside"
/// styles, and visits the child under the extended style chain.
fn visit_styled<'a>(
    s: &mut State<'a, '_, '_, '_>,
    content: &'a Content,
    mut local: Cow<'a, Styles>,
    outer: StyleChain<'a>,
) -> SourceResult<()> {
    // Nothing to do if the styles are actually empty.
    if local.is_empty() {
        return visit(s, content, outer);
    }

    // Check for document and page styles.
    let mut pagebreak = false;
    for style in local.iter() {
        let Some(elem) = style.element() else { continue };
        if elem == DocumentElem::ELEM {
            if let Some(info) = s.kind.as_document_mut() {
                info.populate(&local)
            } else {
                bail!(
                    style.span(),
                    "document set rules are not allowed inside of containers",
                );
            }
        } else if elem == TextElem::ELEM {
            // Infer the document locale from the first toplevel set rule.
            if let Some(info) = s.kind.as_document_mut() {
                info.populate_locale(&local)
            }
        } else if elem == PageElem::ELEM {
            match s.kind {
                RealizationKind::LayoutDocument { .. } => {
                    // When there are page styles, we "break free" from our show
                    // rule cage.
                    pagebreak = true;
                    s.outside = true;
                }
                RealizationKind::HtmlDocument { .. } => s.engine.sink.warn(warning!(
                    style.span(),
                    "page set rule was ignored during HTML export"
                )),
                _ => bail!(
                    style.span(),
                    "page configuration is not allowed inside of containers",
                ),
            }
        }
    }

    // If we are not within a container or show rule, mark the styles as
    // "outside". This will allow them to be lifted to the page level.
    if s.outside {
        local = Cow::Owned(local.into_owned().outside());
    }

    // Lifetime-extend the styles if necessary.
    let outer = s.arenas.bump.alloc(outer);
    let local = match local {
        Cow::Borrowed(map) => map,
        Cow::Owned(owned) => &*s.arenas.styles.alloc(owned),
    };

    // Generate a weak pagebreak if there is a page interruption. For the
    // starting pagebreak we only want the styles before and including the
    // interruptions, not trailing styles that happen to be in the same `Styles`
    // list, so we trim the local styles.
    if pagebreak {
        let relevant = local
            .as_slice()
            .trim_end_matches(|style| style.element() != Some(PageElem::ELEM));
        visit(s, PagebreakElem::shared_weak(), outer.chain(relevant))?;
    }

    // Finish groupings interrupted by these styles, both before and after
    // the child is visited.
    finish_interrupted(s, local)?;
    visit(s, content, outer.chain(local))?;
    finish_interrupted(s, local)?;

    // Generate a weak "boundary" pagebreak at the end. In comparison to a
    // normal weak pagebreak, the styles of this are ignored during layout, so
    // it doesn't really matter what we use here.
    if pagebreak {
        visit(s, PagebreakElem::shared_boundary(), *outer)?;
    }

    Ok(())
}
/// Tries to group the content in an active group or start a new one if any
/// grouping rule matches. Returns `true` if the element was grouped.
fn visit_grouping_rules<'a>(
    s: &mut State<'a, '_, '_, '_>,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<bool> {
    let matching = s.rules.iter().find(|&rule| (rule.trigger)(content, s));

    // Try to continue or finish an existing grouping.
    let mut i = 0;
    while let Some(active) = s.groupings.last() {
        // Start a nested group if a rule with higher priority matches.
        if matching.is_some_and(|rule| rule.priority > active.rule.priority) {
            break;
        }

        // If the element can be added to the active grouping, do it.
        if !active.interrupted
            && ((active.rule.trigger)(content, s) || (active.rule.inner)(content))
        {
            s.sink.push((content, styles));
            return Ok(true);
        }

        // Otherwise, the innermost grouping ends here; finishing it may
        // produce new content and groupings, hence the iteration cap.
        finish_innermost_grouping(s)?;
        i += 1;
        if i > 512 {
            // It seems like this case is only hit when there is a cycle between
            // a show rule and a grouping rule. The show rule produces content
            // that is matched by a grouping rule, which is then again processed
            // by the show rule, and so on. The two must be at an equilibrium,
            // otherwise either the "maximum show rule depth" or "maximum
            // grouping depth" errors are triggered.
            bail!(content.span(), "maximum grouping depth exceeded");
        }
    }

    // Start a new grouping.
    if let Some(rule) = matching {
        let start = s.sink.len();
        s.groupings.push(Grouping { start, rule, interrupted: false });
        s.sink.push((content, styles));
        return Ok(true);
    }

    Ok(false)
}
/// Some elements don't make it to the sink depending on the realization kind
/// and current state. Returns `true` if the element was filtered out.
fn visit_filter_rules<'a>(
    s: &mut State<'a, '_, '_, '_>,
    content: &'a Content,
    styles: StyleChain<'a>,
) -> SourceResult<bool> {
    // In paragraph and math realization, nothing is filtered here.
    if matches!(s.kind, RealizationKind::LayoutPar | RealizationKind::Math) {
        return Ok(false);
    }

    if content.is::<SpaceElem>() {
        // Outside of maths and paragraph realization, spaces that were not
        // collected by the paragraph grouper don't interest us.
        return Ok(true);
    } else if content.is::<ParbreakElem>() {
        // Paragraph breaks are only a boundary for paragraph grouping, we don't
        // need to store them.
        s.may_attach = false;
        s.saw_parbreak = true;
        return Ok(true);
    } else if !s.may_attach
        && content
            .to_packed::<VElem>()
            .is_some_and(|elem| elem.attach.get(styles))
    {
        // Attach spacing collapses if not immediately following a paragraph.
        return Ok(true);
    }

    // Remember whether following attach spacing can survive.
    s.may_attach = content.is::<ParElem>();

    Ok(false)
}
/// Finishes all grouping at the end of realization.
fn finish(s: &mut State) -> SourceResult<()> {
    finish_grouping_while(s, |s| {
        // If this is a fragment realization and all we've got is inline
        // content, don't turn it into a paragraph.
        if is_fully_inline(s) {
            *s.kind.as_fragment_mut().unwrap() = FragmentKind::Inline;
            s.groupings.pop();
            collapse_spaces(&mut s.sink, 0);
            false
        } else {
            !s.groupings.is_empty()
        }
    })?;

    // In paragraph and math realization, spaces are top-level.
    if matches!(s.kind, RealizationKind::LayoutPar | RealizationKind::Math) {
        collapse_spaces(&mut s.sink, 0);
    }

    Ok(())
}
/// Finishes groupings while any active group is interrupted by the styles.
fn finish_interrupted(s: &mut State, local: &Styles) -> SourceResult<()> {
    // Deduplicate consecutive styles for the same element to avoid
    // re-checking the same interruption.
    let mut last = None;
    for elem in local.iter().filter_map(|style| style.element()) {
        if last == Some(elem) {
            continue;
        }
        finish_grouping_while(s, |s| {
            s.groupings.iter().any(|grouping| (grouping.rule.interrupt)(elem))
                && if is_fully_inline(s) {
                    // A fully inline paragraph group is only marked as
                    // interrupted, not finished yet.
                    s.groupings[0].interrupted = true;
                    false
                } else {
                    true
                }
        })?;
        last = Some(elem);
    }
    Ok(())
}
/// Finishes groupings while `f` returns `true`.
fn finish_grouping_while<F>(s: &mut State, mut f: F) -> SourceResult<()>
where
    F: FnMut(&mut State) -> bool,
{
    // Finishing of a group may result in new content and new grouping. This
    // can, in theory, go on for a bit. To prevent it from becoming an infinite
    // loop, we cap the number of iterations.
    for i in 0.. {
        if !f(s) {
            break;
        }
        finish_innermost_grouping(s)?;
        if i >= 512 {
            bail!(Span::detached(), "maximum grouping depth exceeded");
        }
    }
    Ok(())
}
/// Finishes the currently innermost grouping: trims it, adjusts its boundaries
/// for tags, runs the rule's finisher, and re-visits trailing elements.
fn finish_innermost_grouping(s: &mut State) -> SourceResult<()> {
    // The grouping we are interrupting.
    let Grouping { mut start, rule, .. } = s.groupings.pop().unwrap();

    // Trim trailing non-trigger elements. At the start, they are already not
    // included precisely because they are not triggers.
    let trimmed = s.sink[start..].trim_end_matches(|(c, _)| !(rule.trigger)(c, s));
    let mut end = start + trimmed.len();

    // Tags that are opened within or at the start boundary of the grouping
    // should have their closing tag included if it is at the end boundary.
    // Similarly, tags that are closed within or at the end boundary should have
    // their opening tag included if it is at the start boundary. Finally, tags
    // that are sandwiched between an opening tag with a matching closing tag
    // should also be included.
    if rule.tags {
        // The trailing part of the sink can contain a mix of inner elements and
        // tags. If there is a closing tag with a matching start tag, but there
        // is an inner element in between, that's in principle a situation with
        // overlapping tags. However, if the inner element would immediately be
        // destructed anyways, there isn't really a problem. So we try to
        // anticipate that and destruct it eagerly.
        if std::ptr::eq(rule, &PAR) {
            for _ in s.sink.extract_if(end.., |(c, _)| c.is::<SpaceElem>()) {}
        }

        // Find tags before, within, and after the grouping range.
        let bump = &s.arenas.bump;
        let before = tag_set(bump, s.sink[..start].iter().rev().map_while(to_tag));
        let within = tag_set(bump, s.sink[start..end].iter().filter_map(to_tag));
        let after = tag_set(bump, s.sink[end..].iter().map_while(to_tag));

        // Include all tags at the start that are closed within or after.
        for (k, (c, _)) in s.sink[..start].iter().enumerate().rev() {
            let Some(elem) = c.to_packed::<TagElem>() else { break };
            let key = elem.tag.location().into();
            if within.contains(&key) || after.contains(&key) {
                start = k;
            }
        }

        // Include all tags at the end that are opened within or before.
        for (k, (c, _)) in s.sink.iter().enumerate().skip(end) {
            let Some(elem) = c.to_packed::<TagElem>() else { break };
            let key = elem.tag.location().into();
            if within.contains(&key) || before.contains(&key) {
                end = k + 1;
            }
        }
    }

    // Stash everything past the group; it is re-visited after finishing.
    let tail = s.store_slice(&s.sink[end..]);
    s.sink.truncate(end);

    // If the grouping is not interested in tags, remove and collect them.
    let mut tags = BumpVec::<Pair>::new_in(&s.arenas.bump);
    if !rule.tags {
        // Compact the group in place, moving tags out into `tags`.
        let mut k = start;
        for i in start..end {
            if s.sink[i].0.is::<TagElem>() {
                tags.push(s.sink[i]);
                continue;
            }

            if k < i {
                s.sink[k] = s.sink[i];
            }

            k += 1;
        }
        s.sink.truncate(k);
    }

    // Execute the grouping's finisher rule.
    (rule.finish)(Grouped { s, start })?;

    // Visit the tags and staged elements again.
    for &(content, styles) in tags.iter().chain(&tail) {
        visit(s, content, styles)?;
    }

    Ok(())
}
/// Extracts the locations of all tags in the given `iter` into a
/// bump-allocated set.
fn tag_set<'a>(
    bump: &'a Bump,
    iter: impl IntoIterator<Item = &'a Packed<TagElem>>,
) -> ListSet<BumpVec<'a, LocationKey>> {
    let keys = iter.into_iter().map(|elem| LocationKey::new(elem.tag.location()));
    ListSet::new(keys.collect_in::<BumpVec<_>>(bump))
}
/// Tries to convert a pair's content to a tag element.
fn to_tag<'a>((content, _): &Pair<'a>) -> Option<&'a Packed<TagElem>> {
    content.to_packed::<TagElem>()
}
/// The maximum number of nested groups that are possible. Corresponds to the
/// number of unique priority levels.
const MAX_GROUP_NESTING: usize = 3;

/// Grouping rules used in layout realization.
static LAYOUT_RULES: &[&GroupingRule] = &[&TEXTUAL, &PAR, &CITES, &LIST, &ENUM, &TERMS];

/// Grouping rules used in paragraph layout realization. Same as the layout
/// rules, except that no `PAR` grouping is performed.
static LAYOUT_PAR_RULES: &[&GroupingRule] = &[&TEXTUAL, &CITES, &LIST, &ENUM, &TERMS];
/// Grouping rules used in HTML root realization.
static HTML_DOCUMENT_RULES: &[&GroupingRule] =
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | true |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/repeat.rs | crates/typst-layout/src/repeat.rs | use typst_library::diag::{SourceResult, bail};
use typst_library::engine::Engine;
use typst_library::foundations::{Packed, Resolve, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::{
Abs, AlignElem, Axes, Frame, Point, Region, RepeatElem, Size,
};
use typst_utils::Numeric;
/// Layout the repeated content: the body is laid out once and then placed as
/// many times as fits into the region's width, optionally justified.
///
/// Errors if the region gives the result an infinite size.
#[typst_macros::time(span = elem.span())]
pub fn layout_repeat(
    elem: &Packed<RepeatElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Measure a single instance of the body without expansion.
    let pod = Region::new(region.size, Axes::new(false, false));
    let piece = crate::layout_frame(engine, &elem.body, locator, styles, pod)?;
    let size = Size::new(region.size.x, piece.height());

    if !size.is_finite() {
        bail!(elem.span(), "repeat with no size restrictions");
    }

    let mut frame = Frame::soft(size);
    if piece.has_baseline() {
        frame.set_baseline(piece.baseline());
    }

    let mut gap = elem.gap.resolve(styles);
    let fill = region.size.x;
    let width = piece.width();

    // We need to fit the body N times, but the number of gaps is (N - 1):
    // N * w + (N - 1) * g ≤ F
    // where N - body count (count)
    //       w - body width (width)
    //       g - gap width (gap)
    //       F - available space to fill (fill)
    //
    // N * w + N * g - g ≤ F
    // N * (w + g) ≤ F + g
    // N ≤ (F + g) / (w + g)
    // N = ⌊(F + g) / (w + g)⌋
    let count = ((fill + gap) / (width + gap)).floor();
    let remaining = (fill + gap) % (width + gap);

    // Distribute the leftover space into the gaps when justifying.
    // NOTE(review): for `justify` with `count == 1.0` this divides by zero;
    // the resulting gap is never read since only one piece is placed, but
    // confirm this is intentional.
    let justify = elem.justify.get(styles);
    if justify {
        gap += remaining / (count - 1.0);
    }

    // Without justification (or with a single piece), align the row of
    // pieces within the leftover space.
    let align = styles.get(AlignElem::alignment).resolve(styles);
    let mut offset = Abs::zero();
    if count == 1.0 || !justify {
        offset += align.x.position(remaining);
    }

    if width > Abs::zero() {
        // Cap the number of repetitions at 1000.
        for _ in 0..(count as usize).min(1000) {
            frame.push_frame(Point::with_x(offset), piece.clone());
            offset += width + gap;
        }
    }

    Ok(frame)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/modifiers.rs | crates/typst-layout/src/modifiers.rs | use typst_library::foundations::StyleChain;
use typst_library::layout::{Abs, Fragment, Frame, FrameItem, HideElem, Point, Sides};
use typst_library::model::{Destination, LinkElem, ParElem};
/// Frame-level modifications resulting from styles that do not impose any
/// layout structure.
///
/// These are always applied at the highest level of style uniformity.
/// Consequently, they must be applied by all layouters that manually manage
/// styles of their children (because they can produce children with varying
/// styles). This currently includes flow, inline, and math layout.
///
/// Other layouters don't manually need to handle it because their parents that
/// result from realization will take care of it and the styles can only apply
/// to them as a whole, not part of it (since they don't manage styles).
///
/// Currently existing frame modifiers are:
/// - `HideElem::hidden`
/// - `LinkElem::dests`
#[derive(Debug, Clone)]
pub struct FrameModifiers {
    /// A destination to link to. `None` if the styles carry no active link.
    dest: Option<Destination>,
    /// Whether the contents of the frame should be hidden.
    hidden: bool,
}
impl FrameModifiers {
    /// Retrieve all modifications that should be applied per-frame.
    ///
    /// Reads the current link destination and hidden flag from `styles`.
    pub fn get_in(styles: StyleChain) -> Self {
        Self {
            dest: styles.get_cloned(LinkElem::current),
            hidden: styles.get(HideElem::hidden),
        }
    }
}
/// Applies [`FrameModifiers`].
pub trait FrameModify {
    /// Apply the modifiers in-place.
    fn modify(&mut self, modifiers: &FrameModifiers);
    /// Apply the modifiers, and return the modified result.
    ///
    /// Convenience wrapper around [`Self::modify`] for builder-style call
    /// chains.
    fn modified(mut self, modifiers: &FrameModifiers) -> Self
    where
        Self: Sized,
    {
        self.modify(modifiers);
        self
    }
}
impl FrameModify for Frame {
    fn modify(&mut self, modifiers: &FrameModifiers) {
        // Plain frames get no extra link outset (that is text-specific).
        modify_frame(self, modifiers, None);
    }
}
impl FrameModify for Fragment {
    fn modify(&mut self, modifiers: &FrameModifiers) {
        // A fragment is just a sequence of frames; modify each one.
        self.iter_mut().for_each(|frame| frame.modify(modifiers));
    }
}
impl<T, E> FrameModify for Result<T, E>
where
    T: FrameModify,
{
    fn modify(&mut self, props: &FrameModifiers) {
        // Only a success value carries frames; errors pass through untouched.
        for inner in self.iter_mut() {
            inner.modify(props);
        }
    }
}
/// Applies [`FrameModifiers`] to text frames, with a text-specific link
/// outset.
pub trait FrameModifyText {
    /// Resolve and apply [`FrameModifiers`] for this text frame.
    fn modify_text(&mut self, styles: StyleChain);
}
impl FrameModifyText for Frame {
    fn modify_text(&mut self, styles: StyleChain) {
        let modifiers = FrameModifiers::get_in(styles);
        // Extend the link area by half the paragraph leading above and below
        // — presumably so link boxes of adjacent lines meet; confirm against
        // the inline layouter.
        let expand_y = 0.5 * styles.resolve(ParElem::leading);
        let outset = Sides::new(Abs::zero(), expand_y, Abs::zero(), expand_y);
        modify_frame(self, &modifiers, Some(outset));
    }
}
/// Applies the given modifiers to `frame`, optionally outsetting the link box
/// by `link_box_outset`.
fn modify_frame(
    frame: &mut Frame,
    modifiers: &FrameModifiers,
    link_box_outset: Option<Sides<Abs>>,
) {
    // Attach a link annotation covering the frame (plus any outset).
    if let Some(dest) = &modifiers.dest {
        let mut origin = Point::zero();
        let mut area = frame.size();
        if let Some(outset) = link_box_outset {
            origin.x -= outset.left;
            origin.y -= outset.top;
            area += outset.sum_by_axis();
        }
        frame.push(origin, FrameItem::Link(dest.clone(), area));
    }
    // Hide the contents if requested.
    if modifiers.hidden {
        frame.hide();
    }
}
/// Performs layout and modification in one step.
///
/// This just runs `layout(styles).modified(&FrameModifiers::get_in(styles))`,
/// but with the additional step that redundant modifiers (which are already
/// applied here) are removed from the `styles` passed to `layout`. This is used
/// for the layout of containers like `block`.
pub fn layout_and_modify<F, R>(styles: StyleChain, layout: F) -> R
where
    F: FnOnce(StyleChain) -> R,
    R: FrameModify,
{
    let modifiers = FrameModifiers::get_in(styles);
    // Disable the current link internally since it's already applied at this
    // level of layout. This means we don't generate redundant nested links,
    // which may bloat the output considerably.
    //
    // `reset` is declared before `styles` is reassigned so that the chained
    // borrow in `outer.chain(&reset)` outlives its use below.
    let reset;
    let outer = styles;
    let mut styles = styles;
    if modifiers.dest.is_some() {
        reset = LinkElem::current.set(None).wrap();
        styles = outer.chain(&reset);
    }
    layout(styles).modified(&modifiers)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/lib.rs | crates/typst-layout/src/lib.rs | //! Typst's layout engine.
mod flow;
mod grid;
mod image;
mod inline;
mod lists;
mod math;
mod modifiers;
mod pad;
mod pages;
mod repeat;
mod rules;
mod shapes;
mod stack;
mod transforms;
pub use self::flow::{layout_fragment, layout_frame};
pub use self::pages::layout_document;
pub use self::rules::register;
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/image.rs | crates/typst-layout/src/image.rs | use typst_library::diag::SourceResult;
use typst_library::engine::Engine;
use typst_library::foundations::{Packed, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::{
Abs, Axes, FixedAlignment, Frame, FrameItem, Point, Region, Size,
};
use typst_library::visualize::{Curve, Image, ImageElem, ImageFit};
/// Layout the image.
///
/// Decodes the image, determines a target size from the region's expansion
/// flags and the image's natural size/DPI, then applies the configured `fit`
/// mode (contain, cover, or stretch).
#[typst_macros::time(span = elem.span())]
pub fn layout_image(
    elem: &Packed<ImageElem>,
    engine: &mut Engine,
    _: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    let image = elem.decode(engine, styles)?;
    // Determine the image's pixel aspect ratio.
    let pxw = image.width();
    let pxh = image.height();
    let px_ratio = pxw / pxh;
    // Determine the region's aspect ratio.
    let region_ratio = region.size.x / region.size.y;
    // Find out whether the image is wider or taller than the region.
    let wide = px_ratio > region_ratio;
    // The space into which the image will be placed according to its fit.
    let target = if region.expand.x && region.expand.y {
        // If both width and height are forced, take them.
        region.size
    } else if region.expand.x {
        // If just width is forced, take it.
        Size::new(region.size.x, region.size.y.min(region.size.x / px_ratio))
    } else if region.expand.y {
        // If just height is forced, take it.
        Size::new(region.size.x.min(region.size.y * px_ratio), region.size.y)
    } else {
        // If neither is forced, take the natural image size at the image's
        // DPI bounded by the available space.
        //
        // Division by DPI is fine since it's guaranteed to be positive.
        let dpi = image.dpi().unwrap_or(Image::DEFAULT_DPI);
        let natural = Axes::new(pxw, pxh).map(|v| Abs::inches(v / dpi));
        Size::new(
            natural.x.min(region.size.x).min(region.size.y * px_ratio),
            natural.y.min(region.size.y).min(region.size.x / px_ratio),
        )
    };
    // Compute the actual size of the fitted image, choosing which axis is
    // fitted exactly based on the aspect ratios and the fit mode.
    let fit = elem.fit.get(styles);
    let fitted = match fit {
        ImageFit::Cover | ImageFit::Contain => {
            if wide == (fit == ImageFit::Contain) {
                Size::new(target.x, target.x / px_ratio)
            } else {
                Size::new(target.y * px_ratio, target.y)
            }
        }
        ImageFit::Stretch => target,
    };
    // First, place the image in a frame of exactly its size and then resize
    // the frame to the target size, center aligning the image in the
    // process.
    let mut frame = Frame::soft(fitted);
    frame.push(Point::zero(), FrameItem::Image(image, fitted, elem.span()));
    frame.resize(target, Axes::splat(FixedAlignment::Center));
    // Create a clipping group if only part of the image should be visible.
    if fit == ImageFit::Cover && !target.fits(fitted) {
        frame.clip(Curve::rect(frame.size()));
    }
    Ok(frame)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/pad.rs | crates/typst-layout/src/pad.rs | use typst_library::diag::SourceResult;
use typst_library::engine::Engine;
use typst_library::foundations::{Packed, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::{
Abs, Fragment, Frame, PadElem, Point, Regions, Rel, Sides, Size,
};
/// Layout the padded content.
///
/// Shrinks the regions by the resolved padding, lays out the body into the
/// smaller regions, and then grows each resulting frame back so the padding
/// surrounds the content.
#[typst_macros::time(span = elem.span())]
pub fn layout_pad(
    elem: &Packed<PadElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    let padding = Sides::new(
        elem.left.resolve(styles),
        elem.top.resolve(styles),
        elem.right.resolve(styles),
        elem.bottom.resolve(styles),
    );
    // `backlog` provides storage for the shrunken backlog sizes.
    let mut backlog = vec![];
    let pod = regions.map(&mut backlog, |size| shrink(size, &padding));
    // Layout child into padded regions.
    let mut fragment = crate::layout_fragment(engine, &elem.body, locator, styles, pod)?;
    for frame in &mut fragment {
        grow(frame, &padding);
    }
    Ok(fragment)
}
/// Shrink a region size by an inset relative to the size itself.
pub fn shrink(size: Size, inset: &Sides<Rel<Abs>>) -> Size {
    // Resolve the per-axis inset against the size and subtract it.
    let total = inset.sum_by_axis().relative_to(size);
    size - total
}
/// Shrink the components of possibly multiple `Regions` by an inset relative to
/// the regions themselves.
pub fn shrink_multiple(
    size: &mut Size,
    full: &mut Abs,
    backlog: &mut [Abs],
    last: &mut Option<Abs>,
    inset: &Sides<Rel<Abs>>,
) {
    // Per-axis sum of the insets; heights only care about the y component.
    let sum = inset.sum_by_axis();
    *size -= sum.relative_to(*size);
    *full -= sum.y.relative_to(*full);
    for height in backlog.iter_mut() {
        *height -= sum.y.relative_to(*height);
    }
    *last = last.map(|v| v - sum.y.relative_to(v));
}
/// Grow a frame's size by an inset relative to the grown size.
/// This is the inverse operation to `shrink()`.
///
/// For the horizontal axis the derivation looks as follows.
/// (Vertical axis is analogous.)
///
/// Let w be the grown target width,
///     s be the given width,
///     l be the left inset,
///     r be the right inset,
///     p = l + r.
///
/// We want that: w - l.resolve(w) - r.resolve(w) = s
///
/// Thus: w - l.resolve(w) - r.resolve(w) = s
///   <=> w - p.resolve(w) = s
///   <=> w - p.rel * w - p.abs = s
///   <=> (1 - p.rel) * w = s + p.abs
///   <=> w = (s + p.abs) / (1 - p.rel)
pub fn grow(frame: &mut Frame, inset: &Sides<Rel<Abs>>) {
    // Apply the padding inversely such that the grown size padded
    // yields the frame's size (see the derivation above).
    let padded = frame
        .size()
        .zip_map(inset.sum_by_axis(), |s, p| (s + p.abs) / (1.0 - p.rel.get()));
    // Resolve the inset against the grown size to find the content offset.
    let inset = inset.relative_to(padded);
    let offset = Point::new(inset.left, inset.top);
    // Grow the frame and translate everything in the frame inwards.
    frame.set_size(padded);
    frame.translate(offset);
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/lists.rs | crates/typst-layout/src/lists.rs | use comemo::Track;
use smallvec::smallvec;
use typst_library::diag::SourceResult;
use typst_library::engine::Engine;
use typst_library::foundations::{Content, Context, Depth, Packed, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::grid::resolve::{Cell, CellGrid};
use typst_library::layout::{Axes, Fragment, HAlignment, Regions, Sizing, VAlignment};
use typst_library::model::{EnumElem, ListElem, Numbering, ParElem, ParbreakElem};
use typst_library::pdf::PdfMarkerTag;
use typst_library::text::TextElem;
use crate::grid::GridLayouter;
/// Layout the list.
///
/// Realizes the list as a four-column grid per item: indent, marker,
/// body-indent, and body. Row gutter depends on whether the list is tight.
#[typst_macros::time(span = elem.span())]
pub fn layout_list(
    elem: &Packed<ListElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    let indent = elem.indent.get(styles);
    let body_indent = elem.body_indent.get(styles);
    let tight = elem.tight.get(styles);
    // Tight lists use paragraph leading between items, wide ones paragraph
    // spacing; an explicit `spacing` overrides both.
    let gutter = elem.spacing.get(styles).unwrap_or_else(|| {
        if tight { styles.get(ParElem::leading) } else { styles.get(ParElem::spacing) }
    });
    let Depth(depth) = styles.get(ListElem::depth);
    let marker = elem
        .marker
        .get_ref(styles)
        .resolve(engine, styles, depth)?
        // avoid '#set align' interference with the list
        .aligned(HAlignment::Start + VAlignment::Top);
    let mut cells = vec![];
    for item in &elem.children {
        // Text in wide lists shall always turn into paragraphs.
        let mut body = item.body.clone();
        if !tight {
            body += ParbreakElem::shared();
        }
        // Increase the nesting depth for the item's body.
        let body = body.set(ListElem::depth, Depth(1));
        // Columns: indent spacer, marker, body-indent spacer, body.
        cells.push(Cell::new(Content::empty()));
        cells.push(Cell::new(PdfMarkerTag::ListItemLabel(marker.clone())));
        cells.push(Cell::new(Content::empty()));
        cells.push(Cell::new(PdfMarkerTag::ListItemBody(body)));
    }
    let grid = CellGrid::new(
        Axes::with_x(&[
            Sizing::Rel(indent.into()),
            Sizing::Auto,
            Sizing::Rel(body_indent.into()),
            Sizing::Auto,
        ]),
        Axes::with_y(&[gutter.into()]),
        cells,
    );
    let layouter = GridLayouter::new(&grid, regions, locator, styles, elem.span());
    layouter.layout(engine)
}
/// Layout the enumeration.
///
/// Like `layout_list`, but resolves a number per item (honoring `start`,
/// `reversed`, explicit item numbers, and `full` numbering with parents).
#[typst_macros::time(span = elem.span())]
pub fn layout_enum(
    elem: &Packed<EnumElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    let numbering = elem.numbering.get_ref(styles);
    let reversed = elem.reversed.get(styles);
    let indent = elem.indent.get(styles);
    let body_indent = elem.body_indent.get(styles);
    let tight = elem.tight.get(styles);
    let gutter = elem.spacing.get(styles).unwrap_or_else(|| {
        if tight { styles.get(ParElem::leading) } else { styles.get(ParElem::spacing) }
    });
    let mut cells = vec![];
    // Reversed enums start at the item count by default.
    let mut number = elem
        .start
        .get(styles)
        .unwrap_or_else(|| if reversed { elem.children.len() as u64 } else { 1 });
    let mut parents = styles.get_cloned(EnumElem::parents);
    let full = elem.full.get(styles);
    // Horizontally align based on the given respective parameter.
    // Vertically align to the top to avoid inheriting `horizon` or `bottom`
    // alignment from the context and having the number be displaced in
    // relation to the item it refers to.
    let number_align = elem.number_align.get(styles);
    for item in &elem.children {
        // An explicit item number overrides the running counter.
        number = item.number.get(styles).unwrap_or(number);
        let context = Context::new(None, Some(styles));
        let resolved = if full {
            // Full numbering includes all parent numbers.
            parents.push(number);
            let content = numbering.apply(engine, context.track(), &parents)?.display();
            parents.pop();
            content
        } else {
            match numbering {
                Numbering::Pattern(pattern) => {
                    TextElem::packed(pattern.apply_kth(parents.len(), number))
                }
                other => other.apply(engine, context.track(), &[number])?.display(),
            }
        };
        // Disable overhang as a workaround to end-aligned dots glitching
        // and decreasing spacing between numbers and items.
        let resolved = resolved.aligned(number_align).set(TextElem::overhang, false);
        // Text in wide enums shall always turn into paragraphs.
        let mut body = item.body.clone();
        if !tight {
            body += ParbreakElem::shared();
        }
        // Record this item's number as the parent for nested enums.
        let body = body.set(EnumElem::parents, smallvec![number]);
        cells.push(Cell::new(Content::empty()));
        cells.push(Cell::new(PdfMarkerTag::ListItemLabel(resolved)));
        cells.push(Cell::new(Content::empty()));
        cells.push(Cell::new(PdfMarkerTag::ListItemBody(body)));
        number =
            if reversed { number.saturating_sub(1) } else { number.saturating_add(1) };
    }
    let grid = CellGrid::new(
        Axes::with_x(&[
            Sizing::Rel(indent.into()),
            Sizing::Auto,
            Sizing::Rel(body_indent.into()),
            Sizing::Auto,
        ]),
        Axes::with_y(&[gutter.into()]),
        cells,
    );
    let layouter = GridLayouter::new(&grid, regions, locator, styles, elem.span());
    layouter.layout(engine)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/transforms.rs | crates/typst-layout/src/transforms.rs | use std::cell::LazyCell;
use typst_library::diag::{SourceResult, bail};
use typst_library::engine::Engine;
use typst_library::foundations::{Content, Packed, Resolve, Smart, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::{
Abs, Axes, FixedAlignment, Frame, MoveElem, Point, Ratio, Region, Rel, RotateElem,
ScaleAmount, ScaleElem, Size, SkewElem, Transform,
};
use typst_utils::Numeric;
/// Layout the moved content.
///
/// The move is purely visual: the body is laid out as usual and the resulting
/// frame is translated afterwards.
#[typst_macros::time(span = elem.span())]
pub fn layout_move(
    elem: &Packed<MoveElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    let mut frame = crate::layout_frame(engine, &elem.body, locator, styles, region)?;
    // Resolve the offsets relative to the region size and shift the frame.
    let dx = elem.dx.resolve(styles);
    let dy = elem.dy.resolve(styles);
    let shift = Axes::new(dx, dy).zip_map(region.size, Rel::relative_to);
    frame.translate(shift.to_point());
    Ok(frame)
}
/// Layout the rotated content.
#[typst_macros::time(span = elem.span())]
pub fn layout_rotate(
    elem: &Packed<RotateElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    let angle = elem.angle.get(styles);
    let align = elem.origin.resolve(styles);
    // Compute the new region's approximate size by bounding the region under
    // the inverse rotation.
    let is_finite = region.size.is_finite();
    let size = if is_finite {
        compute_bounding_box(region.size, Transform::rotate(-angle)).1
    } else {
        Size::splat(Abs::inf())
    };
    measure_and_layout(
        engine,
        locator,
        region,
        size,
        styles,
        &elem.body,
        Transform::rotate(angle),
        align,
        elem.reflow.get(styles),
    )
}
/// Layout the scaled content.
#[typst_macros::time(span = elem.span())]
pub fn layout_scale(
    elem: &Packed<ScaleElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Compute the new region's approximate size by dividing each finite axis
    // through its scale factor (infinite axes stay infinite).
    let scale = resolve_scale(elem, engine, locator.relayout(), region.size, styles)?;
    let size = region
        .size
        .zip_map(scale, |r, s| if r.is_finite() { Ratio::new(1.0 / s).of(r) } else { r })
        .map(Abs::abs);
    measure_and_layout(
        engine,
        locator,
        region,
        size,
        styles,
        &elem.body,
        Transform::scale(scale.x, scale.y),
        elem.origin.resolve(styles),
        elem.reflow.get(styles),
    )
}
/// Resolves scale parameters, preserving aspect ratio if one of the scales
/// is set to `auto`.
///
/// Length-based scale amounts require the body's natural size, which is
/// measured lazily (at most once) via `LazyCell`.
fn resolve_scale(
    elem: &Packed<ScaleElem>,
    engine: &mut Engine,
    locator: Locator,
    container: Size,
    styles: StyleChain,
) -> SourceResult<Axes<Ratio>> {
    // Resolves a single axis; `body` yields the body's extent on that axis.
    fn resolve_axis(
        axis: Smart<ScaleAmount>,
        body: impl Fn() -> SourceResult<Abs>,
        styles: StyleChain,
    ) -> SourceResult<Smart<Ratio>> {
        Ok(match axis {
            Smart::Auto => Smart::Auto,
            Smart::Custom(amt) => Smart::Custom(match amt {
                ScaleAmount::Ratio(ratio) => ratio,
                ScaleAmount::Length(length) => {
                    // A length scale is the ratio of the target length to the
                    // body's natural extent.
                    let length = length.resolve(styles);
                    Ratio::new(length / body()?)
                }
            }),
        })
    }
    // Lazily measure the body's natural size (only needed for length scales).
    let size = LazyCell::new(|| {
        let pod = Region::new(container, Axes::splat(false));
        let frame = crate::layout_frame(engine, &elem.body, locator, styles, pod)?;
        SourceResult::Ok(frame.size())
    });
    let x = resolve_axis(
        elem.x.get(styles),
        || size.as_ref().map(|size| size.x).map_err(Clone::clone),
        styles,
    )?;
    let y = resolve_axis(
        elem.y.get(styles),
        || size.as_ref().map(|size| size.y).map_err(Clone::clone),
        styles,
    )?;
    match (x, y) {
        (Smart::Auto, Smart::Auto) => {
            bail!(elem.span(), "x and y cannot both be auto")
        }
        (Smart::Custom(x), Smart::Custom(y)) => Ok(Axes::new(x, y)),
        // One `auto` axis mirrors the other to preserve the aspect ratio.
        (Smart::Auto, Smart::Custom(v)) | (Smart::Custom(v), Smart::Auto) => {
            Ok(Axes::splat(v))
        }
    }
}
/// Layout the skewed content.
#[typst_macros::time(span = elem.span())]
pub fn layout_skew(
    elem: &Packed<SkewElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    let ax = elem.ax.get(styles);
    let ay = elem.ay.get(styles);
    let align = elem.origin.resolve(styles);
    // Compute the new region's approximate size from the skewed bounding box.
    let size = if region.size.is_finite() {
        compute_bounding_box(region.size, Transform::skew(ax, ay)).1
    } else {
        Size::splat(Abs::inf())
    };
    measure_and_layout(
        engine,
        locator,
        region,
        size,
        styles,
        &elem.body,
        Transform::skew(ax, ay),
        align,
        elem.reflow.get(styles),
    )
}
/// Applies a transformation to a frame, reflowing the layout if necessary.
///
/// With `reflow`, the body is measured in the transformed space first and the
/// final frame is resized to the transformed bounding box; without it, the
/// transform is applied visually around the alignment origin only.
#[allow(clippy::too_many_arguments)]
fn measure_and_layout(
    engine: &mut Engine,
    locator: Locator,
    region: Region,
    size: Size,
    styles: StyleChain,
    body: &Content,
    transform: Transform,
    align: Axes<FixedAlignment>,
    reflow: bool,
) -> SourceResult<Frame> {
    if reflow {
        // Measure the size of the body.
        let pod = Region::new(size, Axes::splat(false));
        let frame = crate::layout_frame(engine, body, locator.relayout(), styles, pod)?;
        // Actually perform the layout.
        let pod = Region::new(frame.size(), Axes::splat(true));
        let mut frame = crate::layout_frame(engine, body, locator, styles, pod)?;
        let Axes { x, y } = align.zip_map(frame.size(), FixedAlignment::position);
        // Compute the transform around the alignment origin.
        let ts = Transform::translate(x, y)
            .pre_concat(transform)
            .pre_concat(Transform::translate(-x, -y));
        // Compute the bounding box and offset and wrap in a new frame.
        let (offset, size) = compute_bounding_box(frame.size(), ts);
        frame.transform(ts);
        frame.translate(offset);
        frame.set_size(size);
        Ok(frame)
    } else {
        // Layout the body.
        let mut frame = crate::layout_frame(engine, body, locator, styles, region)?;
        let Axes { x, y } = align.zip_map(frame.size(), FixedAlignment::position);
        // Compute the transform around the alignment origin.
        let ts = Transform::translate(x, y)
            .pre_concat(transform)
            .pre_concat(Transform::translate(-x, -y));
        // Apply the transform without changing the frame's size.
        frame.transform(ts);
        Ok(frame)
    }
}
/// Computes the bounding box and offset of a transformed area.
fn compute_bounding_box(size: Size, ts: Transform) -> (Point, Size) {
    // Transform all four corners of the area.
    let corners = [
        Point::zero().transform_inf(ts),
        Point::with_x(size.x).transform_inf(ts),
        Point::with_y(size.y).transform_inf(ts),
        size.to_point().transform_inf(ts),
    ];
    // Accumulate the axis-aligned bounds over the transformed corners.
    let mut min = corners[0];
    let mut max = corners[0];
    for corner in &corners[1..] {
        min.x = min.x.min(corner.x);
        min.y = min.y.min(corner.y);
        max.x = max.x.max(corner.x);
        max.y = max.y.max(corner.y);
    }
    // The offset moves the bounding box's top-left corner to the origin.
    let extent = Size::new((max.x - min.x).abs(), (max.y - min.y).abs());
    (Point::new(-min.x, -min.y), extent)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/stack.rs | crates/typst-layout/src/stack.rs | use typst_library::diag::{SourceResult, bail};
use typst_library::engine::Engine;
use typst_library::foundations::{Content, Packed, Resolve, StyleChain, StyledElem};
use typst_library::introspection::{Locator, SplitLocator};
use typst_library::layout::{
Abs, AlignElem, Axes, Axis, Dir, FixedAlignment, Fr, Fragment, Frame, HElem, Point,
Regions, Size, Spacing, StackChild, StackElem, VElem,
};
use typst_syntax::Span;
use typst_utils::{Get, Numeric};
/// Layout the stack.
///
/// Feeds spacing and block children into a `StackLayouter`, inserting the
/// configured `spacing` between consecutive blocks (but not around explicit
/// spacing children).
#[typst_macros::time(span = elem.span())]
pub fn layout_stack(
    elem: &Packed<StackElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    let mut layouter =
        StackLayouter::new(elem.span(), elem.dir.get(styles), locator, styles, regions);
    let axis = layouter.dir.axis();
    // Spacing to insert before the next block.
    let spacing = elem.spacing.get(styles);
    // Holds the pending inter-block spacing; cleared by explicit spacing so
    // the two don't accumulate.
    let mut deferred = None;
    for child in &elem.children {
        match child {
            StackChild::Spacing(kind) => {
                layouter.layout_spacing(*kind);
                deferred = None;
            }
            StackChild::Block(block) => {
                // Transparently handle `h`.
                if let (Axis::X, Some(h)) = (axis, block.to_packed::<HElem>()) {
                    layouter.layout_spacing(h.amount);
                    deferred = None;
                    continue;
                }
                // Transparently handle `v`.
                if let (Axis::Y, Some(v)) = (axis, block.to_packed::<VElem>()) {
                    layouter.layout_spacing(v.amount);
                    deferred = None;
                    continue;
                }
                if let Some(kind) = deferred {
                    layouter.layout_spacing(kind);
                }
                layouter.layout_block(engine, block, styles)?;
                deferred = spacing;
            }
        }
    }
    layouter.finish()
}
/// Performs stack layout.
struct StackLayouter<'a> {
    /// The span to raise errors at during layout.
    span: Span,
    /// The stacking direction.
    dir: Dir,
    /// The axis of the stacking direction.
    axis: Axis,
    /// Provides unique locations to the stack's children.
    locator: SplitLocator<'a>,
    /// The inherited styles.
    styles: StyleChain<'a>,
    /// The regions to layout children into.
    regions: Regions<'a>,
    /// Whether the stack itself should expand to fill the region.
    expand: Axes<bool>,
    /// The initial size of the current region before we started subtracting.
    initial: Size,
    /// The generic (main/cross) size used by the frames for the current
    /// region.
    used: GenericSize<Abs>,
    /// The sum of fractions in the current region.
    fr: Fr,
    /// Already layouted items whose exact positions are not yet known due to
    /// fractional spacing.
    items: Vec<StackItem>,
    /// Finished frames for previous regions.
    finished: Vec<Frame>,
}
/// A prepared item in a stack layout.
enum StackItem {
    /// Absolute spacing between other items.
    Absolute(Abs),
    /// Fractional spacing between other items, resolved once the region's
    /// leftover space is known.
    Fractional(Fr),
    /// A frame for a layouted block together with its resolved alignment.
    Frame(Frame, Axes<FixedAlignment>),
}
impl<'a> StackLayouter<'a> {
    /// Create a new stack layouter.
    fn new(
        span: Span,
        dir: Dir,
        locator: Locator<'a>,
        styles: StyleChain<'a>,
        mut regions: Regions<'a>,
    ) -> Self {
        let axis = dir.axis();
        let expand = regions.expand;
        // Disable expansion along the block axis for children.
        regions.expand.set(axis, false);
        Self {
            span,
            dir,
            axis,
            locator: locator.split(),
            styles,
            regions,
            expand,
            initial: regions.size,
            used: GenericSize::zero(),
            fr: Fr::zero(),
            items: vec![],
            finished: vec![],
        }
    }
    /// Add spacing along the spacing direction.
    fn layout_spacing(&mut self, spacing: Spacing) {
        match spacing {
            Spacing::Rel(v) => {
                // Resolve the spacing and limit it to the remaining space.
                let resolved = v
                    .resolve(self.styles)
                    .relative_to(self.regions.base().get(self.axis));
                let remaining = self.regions.size.get_mut(self.axis);
                let limited = resolved.min(*remaining);
                // NOTE(review): only vertical stacks shrink the remaining
                // region here; horizontal usage is tracked via `used` alone.
                if self.dir.axis() == Axis::Y {
                    *remaining -= limited;
                }
                self.used.main += limited;
                self.items.push(StackItem::Absolute(resolved));
            }
            Spacing::Fr(v) => {
                // Fractional spacing is resolved later in `finish_region`.
                self.fr += v;
                self.items.push(StackItem::Fractional(v));
            }
        }
    }
    /// Layout an arbitrary block.
    fn layout_block(
        &mut self,
        engine: &mut Engine,
        block: &Content,
        styles: StyleChain,
    ) -> SourceResult<()> {
        if self.regions.is_full() {
            self.finish_region()?;
        }
        // Block-axis alignment of the `AlignElem` is respected by stacks.
        let align = if let Some(align) = block.to_packed::<AlignElem>() {
            align.alignment.get(styles)
        } else if let Some(styled) = block.to_packed::<StyledElem>() {
            styles.chain(&styled.styles).get(AlignElem::alignment)
        } else {
            styles.get(AlignElem::alignment)
        }
        .resolve(styles);
        let fragment = crate::layout_fragment(
            engine,
            block,
            self.locator.next(&block.span()),
            styles,
            self.regions,
        )?;
        let len = fragment.len();
        for (i, frame) in fragment.into_iter().enumerate() {
            // Grow our size, shrink the region and save the frame for later.
            let specific_size = frame.size();
            if self.dir.axis() == Axis::Y {
                self.regions.size.y -= specific_size.y;
            }
            // Convert the specific (x/y) size into the generic (main/cross)
            // representation for the current stacking axis.
            let generic_size = match self.axis {
                Axis::X => GenericSize::new(specific_size.y, specific_size.x),
                Axis::Y => GenericSize::new(specific_size.x, specific_size.y),
            };
            self.used.main += generic_size.main;
            self.used.cross.set_max(generic_size.cross);
            self.items.push(StackItem::Frame(frame, align));
            // Every frame but the last finishes its region.
            if i + 1 < len {
                self.finish_region()?;
            }
        }
        Ok(())
    }
    /// Advance to the next region.
    fn finish_region(&mut self) -> SourceResult<()> {
        // Determine the size of the stack in this region depending on whether
        // the region expands.
        let mut size = self
            .expand
            .select(self.initial, self.used.into_axes(self.axis))
            .min(self.initial);
        // Expand fully if there are fr spacings.
        let full = self.initial.get(self.axis);
        let remaining = full - self.used.main;
        if self.fr.get() > 0.0 && full.is_finite() {
            self.used.main = full;
            size.set(self.axis, full);
        }
        if !size.is_finite() {
            bail!(self.span, "stack spacing is infinite");
        }
        let mut output = Frame::hard(size);
        let mut cursor = Abs::zero();
        let mut ruler: FixedAlignment = self.dir.start().into();
        // Place all frames.
        for item in self.items.drain(..) {
            match item {
                StackItem::Absolute(v) => cursor += v,
                StackItem::Fractional(v) => cursor += v.share(self.fr, remaining),
                StackItem::Frame(frame, align) => {
                    // The ruler only ever advances in the stacking direction,
                    // so items cannot move back past earlier ones.
                    if self.dir.is_positive() {
                        ruler = ruler.max(align.get(self.axis));
                    } else {
                        ruler = ruler.min(align.get(self.axis));
                    }
                    // Align along the main axis.
                    let parent = size.get(self.axis);
                    let child = frame.size().get(self.axis);
                    let main = ruler.position(parent - self.used.main)
                        + if self.dir.is_positive() {
                            cursor
                        } else {
                            self.used.main - child - cursor
                        };
                    // Align along the cross axis.
                    let other = self.axis.other();
                    let cross = align
                        .get(other)
                        .position(size.get(other) - frame.size().get(other));
                    let pos = GenericSize::new(cross, main).to_point(self.axis);
                    cursor += child;
                    output.push_frame(pos, frame);
                }
            }
        }
        // Advance to the next region and reset the per-region bookkeeping.
        self.regions.next();
        self.initial = self.regions.size;
        self.used = GenericSize::zero();
        self.fr = Fr::zero();
        self.finished.push(output);
        Ok(())
    }
    /// Finish layouting and return the resulting frames.
    fn finish(mut self) -> SourceResult<Fragment> {
        self.finish_region()?;
        Ok(Fragment::frames(self.finished))
    }
}
/// A generic size with main and cross axes. The axes are generic, meaning the
/// main axis could correspond to either the X or the Y axis.
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
struct GenericSize<T> {
    /// The cross component, along the axis perpendicular to the main.
    pub cross: T,
    /// The main component, along the stacking direction.
    pub main: T,
}
impl<T> GenericSize<T> {
    /// Create a new instance from the two components.
    const fn new(cross: T, main: T) -> Self {
        Self { cross, main }
    }
    /// Convert to the specific (x/y) representation, given which specific
    /// axis is currently the main one.
    fn into_axes(self, main: Axis) -> Axes<T> {
        match main {
            Axis::X => Axes::new(self.main, self.cross),
            Axis::Y => Axes::new(self.cross, self.main),
        }
    }
}
impl GenericSize<Abs> {
    /// The zero value.
    fn zero() -> Self {
        Self { cross: Abs::zero(), main: Abs::zero() }
    }
    /// Convert to a point, given which specific axis is the main one.
    fn to_point(self, main: Axis) -> Point {
        self.into_axes(main).to_point()
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/rules.rs | crates/typst-layout/src/rules.rs | use comemo::Track;
use ecow::{EcoVec, eco_format};
use smallvec::smallvec;
use typst_library::diag::{At, SourceResult, bail};
use typst_library::foundations::{
Content, Context, NativeElement, NativeRuleMap, Packed, Resolve, ShowFn, Smart,
StyleChain, Synthesize, Target, dict,
};
use typst_library::introspection::{Counter, Locator, LocatorLink};
use typst_library::layout::{
Abs, AlignElem, Alignment, Axes, BlockBody, BlockElem, ColumnsElem, Em,
FixedAlignment, GridCell, GridChild, GridElem, GridItem, HAlignment, HElem, HideElem,
InlineElem, LayoutElem, Length, MoveElem, OuterVAlignment, PadElem, PageElem,
PlaceElem, PlacementScope, Region, Rel, RepeatElem, RotateElem, ScaleElem, Sides,
Size, Sizing, SkewElem, Spacing, StackChild, StackElem, TrackSizings, VElem,
};
use typst_library::math::EquationElem;
use typst_library::model::{
Attribution, BibliographyElem, CiteElem, CiteGroup, CslIndentElem, CslLightElem,
Destination, DirectLinkElem, EmphElem, EnumElem, FigureCaption, FigureElem,
FootnoteElem, FootnoteEntry, HeadingElem, LinkElem, LinkMarker, ListElem,
OutlineElem, OutlineEntry, ParElem, ParbreakElem, QuoteElem, RefElem, StrongElem,
TableCell, TableElem, TermsElem, TitleElem, Works,
};
use typst_library::pdf::{ArtifactElem, ArtifactKind, AttachElem, PdfMarkerTag};
use typst_library::text::{
DecoLine, Decoration, HighlightElem, ItalicToggle, LinebreakElem, LocalName,
OverlineElem, RawElem, RawLine, ScriptKind, ShiftSettings, Smallcaps, SmallcapsElem,
SmartQuoteElem, SmartQuotes, SpaceElem, StrikeElem, SubElem, SuperElem, TextElem,
TextSize, UnderlineElem, WeightDelta,
};
use typst_library::visualize::{
CircleElem, CurveElem, EllipseElem, ImageElem, LineElem, PathElem, PolygonElem,
RectElem, SquareElem, Stroke,
};
use typst_utils::{Get, Numeric};
/// Register show rules for the [paged target](Target::Paged).
///
/// Rules are grouped by the module their element originates from (model,
/// text, layout, visualize, math, PDF).
pub fn register(rules: &mut NativeRuleMap) {
    use Target::Paged;
    // Model.
    rules.register(Paged, STRONG_RULE);
    rules.register(Paged, EMPH_RULE);
    rules.register(Paged, LIST_RULE);
    rules.register(Paged, ENUM_RULE);
    rules.register(Paged, TERMS_RULE);
    rules.register(Paged, LINK_MARKER_RULE);
    rules.register(Paged, LINK_RULE);
    rules.register(Paged, DIRECT_LINK_RULE);
    rules.register(Paged, TITLE_RULE);
    rules.register(Paged, HEADING_RULE);
    rules.register(Paged, FIGURE_RULE);
    rules.register(Paged, FIGURE_CAPTION_RULE);
    rules.register(Paged, QUOTE_RULE);
    rules.register(Paged, FOOTNOTE_RULE);
    rules.register(Paged, FOOTNOTE_ENTRY_RULE);
    rules.register(Paged, OUTLINE_RULE);
    rules.register(Paged, OUTLINE_ENTRY_RULE);
    rules.register(Paged, REF_RULE);
    rules.register(Paged, CITE_GROUP_RULE);
    rules.register(Paged, BIBLIOGRAPHY_RULE);
    rules.register(Paged, CSL_LIGHT_RULE);
    rules.register(Paged, CSL_INDENT_RULE);
    rules.register(Paged, TABLE_RULE);
    rules.register(Paged, TABLE_CELL_RULE);
    // Text.
    rules.register(Paged, SUB_RULE);
    rules.register(Paged, SUPER_RULE);
    rules.register(Paged, UNDERLINE_RULE);
    rules.register(Paged, OVERLINE_RULE);
    rules.register(Paged, STRIKE_RULE);
    rules.register(Paged, HIGHLIGHT_RULE);
    rules.register(Paged, SMALLCAPS_RULE);
    rules.register(Paged, RAW_RULE);
    rules.register(Paged, RAW_LINE_RULE);
    // Layout.
    rules.register(Paged, ALIGN_RULE);
    rules.register(Paged, PAD_RULE);
    rules.register(Paged, COLUMNS_RULE);
    rules.register(Paged, STACK_RULE);
    rules.register(Paged, GRID_RULE);
    rules.register(Paged, GRID_CELL_RULE);
    rules.register(Paged, MOVE_RULE);
    rules.register(Paged, SCALE_RULE);
    rules.register(Paged, ROTATE_RULE);
    rules.register(Paged, SKEW_RULE);
    rules.register(Paged, REPEAT_RULE);
    rules.register(Paged, HIDE_RULE);
    rules.register(Paged, LAYOUT_RULE);
    // Visualize.
    rules.register(Paged, IMAGE_RULE);
    rules.register(Paged, LINE_RULE);
    rules.register(Paged, RECT_RULE);
    rules.register(Paged, SQUARE_RULE);
    rules.register(Paged, ELLIPSE_RULE);
    rules.register(Paged, CIRCLE_RULE);
    rules.register(Paged, POLYGON_RULE);
    rules.register(Paged, CURVE_RULE);
    rules.register(Paged, PATH_RULE);
    // Math.
    rules.register(Paged, EQUATION_RULE);
    // PDF.
    rules.register(Paged, ATTACH_RULE);
    rules.register(Paged, ARTIFACT_RULE);
    rules.register(Paged, PDF_MARKER_TAG_RULE);
}
// Strong emphasis: bump the body's font weight by the configured delta.
const STRONG_RULE: ShowFn<StrongElem> = |elem, _, styles| {
    Ok(elem
        .body
        .clone()
        .set(TextElem::delta, WeightDelta(elem.delta.get(styles))))
};
// Emphasis: toggle the italic style for the body.
const EMPH_RULE: ShowFn<EmphElem> =
    |elem, _, _| Ok(elem.body.clone().set(TextElem::emph, ItalicToggle(true)));
const LIST_RULE: ShowFn<ListElem> = |elem, _, styles| {
let tight = elem.tight.get(styles);
let mut realized = BlockElem::multi_layouter(elem.clone(), crate::lists::layout_list)
.pack()
.spanned(elem.span());
if tight {
let spacing = elem
.spacing
.get(styles)
.unwrap_or_else(|| styles.get(ParElem::leading));
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
}
Ok(realized)
};
const ENUM_RULE: ShowFn<EnumElem> = |elem, _, styles| {
let tight = elem.tight.get(styles);
let mut realized = BlockElem::multi_layouter(elem.clone(), crate::lists::layout_enum)
.pack()
.spanned(elem.span());
if tight {
let spacing = elem
.spacing
.get(styles)
.unwrap_or_else(|| styles.get(ParElem::leading));
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
}
Ok(realized)
};
// Realizes a term list as a padded stack of term/description items.
const TERMS_RULE: ShowFn<TermsElem> = |elem, _, styles| {
    let span = elem.span();
    let tight = elem.tight.get(styles);
    let separator = elem.separator.get_ref(styles);
    let indent = elem.indent.get(styles);
    let hanging_indent = elem.hanging_indent.get(styles);
    // Spacing between items: leading for tight lists, paragraph spacing
    // otherwise (unless explicitly configured).
    let gutter = elem.spacing.get(styles).unwrap_or_else(|| {
        if tight { styles.get(ParElem::leading) } else { styles.get(ParElem::spacing) }
    });
    let pad = hanging_indent + indent;
    // Negative horizontal spacing that cancels the hanging indent at the
    // start of each item (the whole stack is padded by `pad` below).
    let unpad = (!hanging_indent.is_zero())
        .then(|| HElem::new((-hanging_indent).into()).pack().spanned(span));
    let mut children = vec![];
    for child in elem.children.iter() {
        let mut seq = vec![];
        seq.extend(unpad.clone());
        seq.push(PdfMarkerTag::TermsItemLabel(child.term.clone().strong()));
        seq.push(separator.clone().artifact(ArtifactKind::Other));
        seq.push(child.description.clone());
        // Text in wide term lists shall always turn into paragraphs.
        if !tight {
            seq.push(ParbreakElem::shared().clone());
        }
        let item = Content::sequence(seq).spanned(child.span());
        children.push(StackChild::Block(PdfMarkerTag::TermsItemBody(item)));
    }
    // Pad the whole stack on the text direction's start side.
    let padding =
        Sides::default().with(styles.resolve(TextElem::dir).start(), pad.into());
    let mut realized = StackElem::new(children)
        .with_spacing(Some(gutter.into()))
        .pack()
        .spanned(span)
        .padded(padding)
        .set(TermsElem::within, true);
    // Tight lists get weak, attached leading spacing prepended.
    if tight {
        let spacing = elem
            .spacing
            .get(styles)
            .unwrap_or_else(|| styles.get(ParElem::leading));
        let v = VElem::new(spacing.into())
            .with_weak(true)
            .with_attach(true)
            .pack()
            .spanned(span);
        realized = v + realized;
    }
    Ok(realized)
};
// A link marker simply shows its body.
const LINK_MARKER_RULE: ShowFn<LinkMarker> = |elem, _, _| Ok(elem.body.clone());
// Resolves the link destination and wraps the body in a link marker.
const LINK_RULE: ShowFn<LinkElem> = |elem, engine, styles| {
    let span = elem.span();
    let body = elem.body.clone();
    let dest = elem.dest.resolve(engine, span)?;
    let alt = dest.alt_text(engine, styles, span)?;
    // Manually construct link marker that spans the whole link elem, not just
    // the body.
    Ok(LinkMarker::new(body, Some(alt))
        .pack()
        .spanned(span)
        .set(LinkElem::current, Some(dest)))
};
// Links the body directly to a known location in the document.
const DIRECT_LINK_RULE: ShowFn<DirectLinkElem> = |elem, _, _| {
    let dest = Destination::Location(elem.loc);
    Ok(elem.body.clone().linked(dest, elem.alt.clone()))
};
// Wraps the resolved title body in a block.
const TITLE_RULE: ShowFn<TitleElem> = |elem, _, styles| {
    Ok(BlockElem::new()
        .with_body(Some(BlockBody::Content(elem.resolve_body(styles).at(elem.span())?)))
        .pack())
};
// Realizes a heading: optional counter numbering in front of the body,
// wrapped in a block, with an optional hanging indent.
const HEADING_RULE: ShowFn<HeadingElem> = |elem, engine, styles| {
    // Gap between the numbering and the heading body.
    const SPACING_TO_NUMBERING: Em = Em::new(0.3);
    let span = elem.span();
    let mut realized = elem.body.clone();
    let hanging_indent = elem.hanging_indent.get(styles);
    let mut indent = match hanging_indent {
        Smart::Custom(length) => length.resolve(styles),
        Smart::Auto => Abs::zero(),
    };
    if let Some(numbering) = elem.numbering.get_ref(styles).as_ref() {
        let location = elem.location().unwrap();
        let numbering = Counter::of(HeadingElem::ELEM)
            .display_at(engine, location, styles, numbering, span)?
            .spanned(span);
        let align = styles.resolve(AlignElem::alignment);
        // For automatic hanging indent with start alignment, derive the
        // indent from the numbering's measured width plus the gap.
        if hanging_indent.is_auto() && align.x == FixedAlignment::Start {
            let pod = Region::new(Axes::splat(Abs::inf()), Axes::splat(false));
            // We don't have a locator for the numbering here, so we just
            // use the measurement infrastructure for now.
            let link = LocatorLink::measure(location, span);
            let size = (engine.routines.layout_frame)(
                engine,
                &numbering,
                Locator::link(&link),
                styles,
                pod,
            )?
            .size();
            indent = size.x + SPACING_TO_NUMBERING.resolve(styles);
        }
        let spacing = HElem::new(SPACING_TO_NUMBERING.into()).with_weak(true).pack();
        realized = numbering + spacing + realized;
    }
    // A non-zero indent outdents the first line and insets the block by the
    // same amount on the text direction's start side.
    let block = if indent != Abs::zero() {
        let body = HElem::new((-indent).into()).pack() + realized;
        let inset = Sides::default()
            .with(styles.resolve(TextElem::dir).start(), Some(indent.into()));
        BlockElem::new()
            .with_body(Some(BlockBody::Content(body)))
            .with_inset(inset)
    } else {
        BlockElem::new().with_body(Some(BlockBody::Content(realized)))
    };
    Ok(block.pack())
};
// Realizes a figure: body plus optional caption, wrapped in a block and
// optionally floated.
const FIGURE_RULE: ShowFn<FigureElem> = |elem, _, styles| {
    let span = elem.span();
    let mut realized = elem.body.clone();
    // Build the caption, if any. Its position setting decides whether it goes
    // above or below the body, separated by the configured gap.
    if let Some(caption) = elem.caption.get_cloned(styles) {
        let (first, second) = match caption.position.get(styles) {
            OuterVAlignment::Top => (caption.pack(), realized),
            OuterVAlignment::Bottom => (realized, caption.pack()),
        };
        realized = Content::sequence(vec![
            first,
            VElem::new(elem.gap.get(styles).into())
                .with_weak(true)
                .pack()
                .spanned(span),
            second,
        ]);
    }
    // Ensure that the body is considered a paragraph.
    realized += ParbreakElem::shared().clone().spanned(span);
    // Wrap the contents in a block.
    realized = BlockElem::new()
        .with_body(Some(BlockBody::Content(realized)))
        .pack()
        .spanned(span);
    // Wrap in a float.
    if let Some(align) = elem.placement.get(styles) {
        realized = PlaceElem::new(realized)
            .with_alignment(align.map(|align| HAlignment::Center + align))
            .with_scope(elem.scope.get(styles))
            .with_float(true)
            .pack()
            .spanned(span);
    } else if elem.scope.get(styles) == PlacementScope::Parent {
        bail!(
            span,
            "parent-scoped placement is only available for floating figures";
            hint: "you can enable floating placement with `figure(placement: auto, ..)`";
        );
    }
    Ok(realized)
};
// Realizes a figure caption and wraps it in a block.
const FIGURE_CAPTION_RULE: ShowFn<FigureCaption> = |elem, engine, styles| {
    Ok(BlockElem::new()
        .with_body(Some(BlockBody::Content(elem.realize(engine, styles)?)))
        .pack())
};
// Realizes a quote: optionally surrounded by quotation marks, and — for block
// quotes — wrapped in a padded block with an optional attribution.
const QUOTE_RULE: ShowFn<QuoteElem> = |elem, _, styles| {
    let span = elem.span();
    let block = elem.block.get(styles);
    let mut realized = elem.body.clone();
    // Quotation marks default to on for inline quotes and off for block ones.
    if elem.quotes.get(styles).unwrap_or(!block) {
        // Add zero-width weak spacing to make the quotes "sticky".
        let hole = HElem::hole();
        let sticky = Content::sequence([hole.clone(), realized, hole.clone()]);
        realized = QuoteElem::quoted(sticky, styles);
    }
    let attribution = elem.attribution.get_ref(styles);
    if block {
        realized = BlockElem::new()
            .with_body(Some(BlockBody::Content(realized)))
            .pack()
            .spanned(span);
        if let Some(attribution) = attribution.as_ref() {
            // Bring the attribution a bit closer to the quote.
            let gap = Spacing::Rel(Em::new(0.9).into());
            let v = VElem::new(gap).with_weak(true).pack();
            realized += v;
            realized += BlockElem::new()
                .with_body(Some(BlockBody::Content(attribution.realize(span))))
                .pack()
                .aligned(Alignment::END);
        }
        realized = PadElem::new(realized).pack();
    } else if let Some(Attribution::Label(label)) = attribution {
        // An inline quote with a label attribution gets a citation appended.
        realized += SpaceElem::shared().clone();
        realized += CiteElem::new(*label).pack().spanned(span);
    }
    Ok(realized)
};
// Renders the footnote marker: a superscript number linked to the resolved
// destination.
const FOOTNOTE_RULE: ShowFn<FootnoteElem> = |elem, engine, styles| {
    let span = elem.span();
    let (dest, num) = elem.realize(engine, styles)?;
    let alt = FootnoteElem::alt_text(styles, &num.plain_text());
    let sup = SuperElem::new(num).pack().spanned(span).linked(dest, Some(alt));
    // Zero-width weak spacing makes the marker "sticky" (cf. QUOTE_RULE).
    Ok(HElem::hole().clone() + PdfMarkerTag::Label(sup))
};
// Renders a footnote entry: indent, the number prefix, a small gap, and the
// footnote's body.
const FOOTNOTE_ENTRY_RULE: ShowFn<FootnoteEntry> = |elem, engine, styles| {
    let number_gap = Em::new(0.05);
    let (prefix, body) = elem.realize(engine, styles)?;
    Ok(Content::sequence([
        HElem::new(elem.indent.get(styles).into()).pack(),
        PdfMarkerTag::Label(prefix),
        HElem::new(number_gap.into()).with_weak(true).pack(),
        body,
    ]))
};
// Realizes the outline: optional title followed by the flattened entries.
const OUTLINE_RULE: ShowFn<OutlineElem> = |elem, engine, styles| {
    let title = elem.realize_title(styles);
    let entries = elem.realize_flat(engine, styles)?;
    let entries = entries.into_iter().map(|entry| entry.pack());
    let body = PdfMarkerTag::OutlineBody(Content::sequence(entries));
    Ok(Content::sequence(title.into_iter().chain(Some(body))))
};
// Realizes a single outline entry and links it to the referenced element.
const OUTLINE_ENTRY_RULE: ShowFn<OutlineEntry> = |elem, engine, styles| {
    let span = elem.span();
    let context = Context::new(None, Some(styles));
    let context = context.track();
    let prefix = elem.prefix(engine, context, span)?;
    let body = elem.body().at(span)?;
    let page = elem.page(engine, context, span)?;
    // Alternative text: prefix, smart-quoted body, the localized "page"
    // label, and the page number.
    let alt = {
        let prefix = prefix.as_ref().map(|p| p.plain_text()).unwrap_or_default();
        let body = body.plain_text();
        let page_str = PageElem::local_name_in(styles);
        let page_nr = page.plain_text();
        let quotes = SmartQuotes::get(
            styles.get_ref(SmartQuoteElem::quotes),
            styles.get(TextElem::lang),
            styles.get(TextElem::region),
            styles.get(SmartQuoteElem::alternative),
        );
        let open = quotes.double_open;
        let close = quotes.double_close;
        eco_format!("{prefix} {open}{body}{close} {page_str} {page_nr}",)
    };
    let inner = elem.build_inner(context, span, body, page)?;
    let block = if elem.element.is::<EquationElem>() {
        // Equation has no body and no levels, so indenting makes no sense.
        let body = prefix.unwrap_or_default() + inner;
        BlockElem::new()
            .with_body(Some(BlockBody::Content(body)))
            .pack()
            .spanned(span)
    } else {
        elem.indented(engine, context, span, prefix, inner, Em::new(0.5).into())?
    };
    let loc = elem.element_location().at(span)?;
    Ok(block.linked(Destination::Location(loc), Some(alt)))
};
// References and citation groups delegate entirely to their realize methods.
const REF_RULE: ShowFn<RefElem> = |elem, engine, styles| elem.realize(engine, styles);
const CITE_GROUP_RULE: ShowFn<CiteGroup> = |elem, engine, _| elem.realize(engine);
// Realizes the bibliography: optional title followed by the references,
// either as a two-column grid (with prefixes) or as plain blocks.
const BIBLIOGRAPHY_RULE: ShowFn<BibliographyElem> = |elem, engine, styles| {
    const COLUMN_GUTTER: Em = Em::new(0.65);
    const INDENT: Em = Em::new(1.5);
    let span = elem.span();
    let mut seq = vec![];
    seq.extend(elem.realize_title(styles));
    let works = Works::with_bibliography(engine, elem.clone())?;
    let references = works.references(elem, styles)?;
    // If any reference has a prefix, lay prefixes and references out as a
    // two-column grid.
    if references.iter().any(|(prefix, ..)| prefix.is_some()) {
        let row_gutter = styles.get(ParElem::spacing);
        let mut cells = vec![];
        for (prefix, reference, loc) in references {
            let prefix = PdfMarkerTag::ListItemLabel(
                prefix.clone().unwrap_or_default().located(*loc),
            );
            cells.push(GridChild::Item(GridItem::Cell(
                Packed::new(GridCell::new(prefix)).spanned(span),
            )));
            let reference = PdfMarkerTag::BibEntry(reference.clone());
            cells.push(GridChild::Item(GridItem::Cell(
                Packed::new(GridCell::new(reference)).spanned(span),
            )));
        }
        let grid = GridElem::new(cells)
            .with_columns(TrackSizings(smallvec![Sizing::Auto; 2]))
            .with_column_gutter(TrackSizings(smallvec![COLUMN_GUTTER.into()]))
            .with_row_gutter(TrackSizings(smallvec![row_gutter.into()]));
        let mut packed = Packed::new(grid).spanned(span);
        packed.synthesize(engine, styles)?;
        // Directly build the block element to avoid the show step for the grid
        // element. This will not generate introspection tags for the element.
        let block = BlockElem::multi_layouter(packed, crate::grid::layout_grid).pack();
        // TODO(accessibility): infer list numbering from style?
        seq.push(PdfMarkerTag::Bibliography(true, block));
    } else {
        // Without prefixes, each reference becomes its own block, optionally
        // with a hanging indent.
        let mut body = vec![];
        for (_, reference, loc) in references {
            let realized = PdfMarkerTag::BibEntry(reference.clone().located(*loc));
            let block = if works.hanging_indent {
                let body = HElem::new((-INDENT).into()).pack() + realized;
                let inset = Sides::default()
                    .with(styles.resolve(TextElem::dir).start(), Some(INDENT.into()));
                BlockElem::new()
                    .with_body(Some(BlockBody::Content(body)))
                    .with_inset(inset)
            } else {
                BlockElem::new().with_body(Some(BlockBody::Content(realized)))
            };
            body.push(block.pack().spanned(span));
        }
        seq.push(PdfMarkerTag::Bibliography(false, Content::sequence(body)));
    }
    Ok(Content::sequence(seq))
};
// CSL "light" styling: reduce the font weight.
const CSL_LIGHT_RULE: ShowFn<CslLightElem> =
    |elem, _, _| Ok(elem.body.clone().set(TextElem::delta, WeightDelta(-100)));
// CSL indentation is realized via padding.
const CSL_INDENT_RULE: ShowFn<CslIndentElem> =
    |elem, _, _| Ok(PadElem::new(elem.body.clone()).pack());
// Tables defer to the grid layouter.
const TABLE_RULE: ShowFn<TableElem> = |elem, _, _| {
    Ok(BlockElem::multi_layouter(elem.clone(), crate::grid::layout_table).pack())
};
const TABLE_CELL_RULE: ShowFn<TableCell> = |elem, _, styles| {
    show_cell(elem.body.clone(), elem.inset.get(styles), elem.align.get(styles))
};
// Shifts the body into subscript position (see `show_script`).
const SUB_RULE: ShowFn<SubElem> = |elem, _, styles| {
    show_script(
        styles,
        elem.body.clone(),
        elem.typographic.get(styles),
        elem.baseline.get(styles),
        elem.size.get(styles),
        ScriptKind::Sub,
    )
};
// Shifts the body into superscript position (see `show_script`).
const SUPER_RULE: ShowFn<SuperElem> = |elem, _, styles| {
    show_script(
        styles,
        elem.body.clone(),
        elem.typographic.get(styles),
        elem.baseline.get(styles),
        elem.size.get(styles),
        ScriptKind::Super,
    )
};
/// Shared implementation for the sub- and superscript show rules: attaches
/// shift settings to the body's text.
///
/// The `baseline` offset and `size` are given as lengths and converted into
/// em units relative to the current font size; `kind` selects sub vs. super.
fn show_script(
    styles: StyleChain,
    body: Content,
    typographic: bool,
    baseline: Smart<Length>,
    size: Smart<TextSize>,
    kind: ScriptKind,
) -> SourceResult<Content> {
    let font_size = styles.resolve(TextElem::size);
    let settings = ShiftSettings {
        typographic,
        // A positive baseline shift moves the text down, hence the negation.
        shift: baseline.map(|length| -Em::from_length(length, font_size)),
        size: size.map(|size| Em::from_length(size.0, font_size)),
        kind,
    };
    Ok(body.set(TextElem::shift_settings, Some(settings)))
}
// Attaches an underline decoration to the body's text.
const UNDERLINE_RULE: ShowFn<UnderlineElem> = |elem, _, styles| {
    Ok(elem.body.clone().set(
        TextElem::deco,
        smallvec![Decoration {
            line: DecoLine::Underline {
                stroke: elem.stroke.resolve(styles).unwrap_or_default(),
                offset: elem.offset.resolve(styles),
                evade: elem.evade.get(styles),
                background: elem.background.get(styles),
            },
            extent: elem.extent.resolve(styles),
        }],
    ))
};
// Attaches an overline decoration to the body's text.
const OVERLINE_RULE: ShowFn<OverlineElem> = |elem, _, styles| {
    Ok(elem.body.clone().set(
        TextElem::deco,
        smallvec![Decoration {
            line: DecoLine::Overline {
                stroke: elem.stroke.resolve(styles).unwrap_or_default(),
                offset: elem.offset.resolve(styles),
                evade: elem.evade.get(styles),
                background: elem.background.get(styles),
            },
            extent: elem.extent.resolve(styles),
        }],
    ))
};
// Attaches a strikethrough decoration to the body's text.
const STRIKE_RULE: ShowFn<StrikeElem> = |elem, _, styles| {
    Ok(elem.body.clone().set(
        TextElem::deco,
        smallvec![Decoration {
            // Note that we do not support evade option for strikethrough.
            line: DecoLine::Strikethrough {
                stroke: elem.stroke.resolve(styles).unwrap_or_default(),
                offset: elem.offset.resolve(styles),
                background: elem.background.get(styles),
            },
            extent: elem.extent.resolve(styles),
        }],
    ))
};
// Attaches a highlight decoration (filled background) to the body's text.
const HIGHLIGHT_RULE: ShowFn<HighlightElem> = |elem, _, styles| {
    Ok(elem.body.clone().set(
        TextElem::deco,
        smallvec![Decoration {
            line: DecoLine::Highlight {
                fill: elem.fill.get_cloned(styles),
                stroke: elem
                    .stroke
                    .resolve(styles)
                    .unwrap_or_default()
                    .map(|stroke| stroke.map(Stroke::unwrap_or_default)),
                top_edge: elem.top_edge.get(styles),
                bottom_edge: elem.bottom_edge.get(styles),
                radius: elem.radius.resolve(styles).unwrap_or_default(),
            },
            extent: elem.extent.resolve(styles),
        }],
    ))
};
// Enables smallcaps for the body: all glyphs or just the minuscules,
// depending on the `all` setting.
const SMALLCAPS_RULE: ShowFn<SmallcapsElem> = |elem, _, styles| {
    let variant = if elem.all.get(styles) {
        Smallcaps::All
    } else {
        Smallcaps::Minuscules
    };
    Ok(elem.body.clone().set(TextElem::smallcaps, Some(variant)))
};
// Realizes raw text: its lines interleaved with linebreaks, and — for block
// raws — aligned and wrapped in a block.
const RAW_RULE: ShowFn<RawElem> = |elem, _, styles| {
    let lines = elem.lines.as_deref().unwrap_or_default();
    // One item per line plus a linebreak between each adjacent pair.
    let mut seq = EcoVec::with_capacity((2 * lines.len()).saturating_sub(1));
    for (i, line) in lines.iter().enumerate() {
        if i != 0 {
            seq.push(LinebreakElem::shared().clone());
        }
        seq.push(line.clone().pack());
    }
    let mut realized = Content::sequence(seq);
    if elem.block.get(styles) {
        // Align the text before inserting it into the block.
        realized = realized.aligned(elem.align.get(styles).into());
        realized = BlockElem::new()
            .with_body(Some(BlockBody::Content(realized)))
            .pack()
            .spanned(elem.span());
    }
    Ok(realized)
};
// A raw line simply shows its body.
const RAW_LINE_RULE: ShowFn<RawLine> = |elem, _, _| Ok(elem.body.clone());
// Applies the configured alignment to the body.
const ALIGN_RULE: ShowFn<AlignElem> =
    |elem, _, styles| Ok(elem.body.clone().aligned(elem.alignment.get(styles)));
// The following layout elements defer to dedicated layouters in this crate.
const PAD_RULE: ShowFn<PadElem> = |elem, _, _| {
    Ok(BlockElem::multi_layouter(elem.clone(), crate::pad::layout_pad).pack())
};
const COLUMNS_RULE: ShowFn<ColumnsElem> = |elem, _, _| {
    Ok(BlockElem::multi_layouter(elem.clone(), crate::flow::layout_columns).pack())
};
const STACK_RULE: ShowFn<StackElem> = |elem, _, _| {
    Ok(BlockElem::multi_layouter(elem.clone(), crate::stack::layout_stack).pack())
};
const GRID_RULE: ShowFn<GridElem> = |elem, _, _| {
    Ok(BlockElem::multi_layouter(elem.clone(), crate::grid::layout_grid).pack())
};
// Grid cells share their realization with table cells (see `show_cell`).
const GRID_CELL_RULE: ShowFn<GridCell> = |elem, _, styles| {
    show_cell(elem.body.clone(), elem.inset.get(styles), elem.align.get(styles))
};
/// Common realization for grid and table cells: applies the cell's inset as
/// padding and its alignment, if any.
fn show_cell(
    mut body: Content,
    inset: Smart<Sides<Option<Rel<Length>>>>,
    align: Smart<Alignment>,
) -> SourceResult<Content> {
    let inset = inset.unwrap_or_default().map(Option::unwrap_or_default);

    // Skip padding entirely when every side is zero. This works around a bug
    // where using `.padded()` inside a show rule breaks `align(...)`.
    if inset != Sides::default() {
        body = body.padded(inset);
    }

    match align {
        Smart::Custom(alignment) => Ok(body.aligned(alignment)),
        Smart::Auto => Ok(body),
    }
}
// Transform elements defer to the layouters in `crate::transforms`.
const MOVE_RULE: ShowFn<MoveElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::transforms::layout_move).pack())
};
const SCALE_RULE: ShowFn<ScaleElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::transforms::layout_scale).pack())
};
const ROTATE_RULE: ShowFn<RotateElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::transforms::layout_rotate).pack())
};
const SKEW_RULE: ShowFn<SkewElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::transforms::layout_skew).pack())
};
const REPEAT_RULE: ShowFn<RepeatElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::repeat::layout_repeat).pack())
};
// Hiding is realized as a style so the body still occupies space.
const HIDE_RULE: ShowFn<HideElem> =
    |elem, _, _| Ok(elem.body.clone().set(HideElem::hidden, true));
// Calls the user's layout function with the region's base size and lays out
// the resulting content.
const LAYOUT_RULE: ShowFn<LayoutElem> = |elem, _, _| {
    Ok(BlockElem::multi_layouter(
        elem.clone(),
        |elem, engine, locator, styles, regions| {
            // Gets the current region's base size, which will be the size of the
            // outer container, or of the page if there is no such container.
            let Size { x, y } = regions.base();
            let loc = elem.location().unwrap();
            let context = Context::new(Some(loc), Some(styles));
            let result = elem
                .func
                .call(engine, context.track(), [dict! { "width" => x, "height" => y }])?
                .display();
            crate::flow::layout_fragment(engine, &result, locator, styles, regions)
        },
    )
    .pack())
};
// Visualization elements defer to the layouters in `crate::image` and
// `crate::shapes`. Sized elements forward their width/height to the block.
const IMAGE_RULE: ShowFn<ImageElem> = |elem, _, styles| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::image::layout_image)
        .with_width(elem.width.get(styles))
        .with_height(elem.height.get(styles))
        .pack())
};
const LINE_RULE: ShowFn<LineElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_line).pack())
};
const RECT_RULE: ShowFn<RectElem> = |elem, _, styles| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_rect)
        .with_width(elem.width.get(styles))
        .with_height(elem.height.get(styles))
        .pack())
};
const SQUARE_RULE: ShowFn<SquareElem> = |elem, _, styles| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_square)
        .with_width(elem.width.get(styles))
        .with_height(elem.height.get(styles))
        .pack())
};
const ELLIPSE_RULE: ShowFn<EllipseElem> = |elem, _, styles| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_ellipse)
        .with_width(elem.width.get(styles))
        .with_height(elem.height.get(styles))
        .pack())
};
const CIRCLE_RULE: ShowFn<CircleElem> = |elem, _, styles| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_circle)
        .with_width(elem.width.get(styles))
        .with_height(elem.height.get(styles))
        .pack())
};
const POLYGON_RULE: ShowFn<PolygonElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_polygon).pack())
};
const CURVE_RULE: ShowFn<CurveElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_curve).pack())
};
const PATH_RULE: ShowFn<PathElem> = |elem, _, _| {
    Ok(BlockElem::single_layouter(elem.clone(), crate::shapes::layout_path).pack())
};
// Lays out an equation: display equations become blocks, inline equations
// become inline elements.
const EQUATION_RULE: ShowFn<EquationElem> = |elem, _, styles| {
    let result = if elem.block.get(styles) {
        BlockElem::multi_layouter(elem.clone(), crate::math::layout_equation_block)
            .pack()
    } else {
        InlineElem::layouter(elem.clone(), crate::math::layout_equation_inline).pack()
    };
    Ok(result)
};
// PDF attachments produce no visible content on the paged target.
const ATTACH_RULE: ShowFn<AttachElem> = |_, _, _| Ok(Content::empty());
// Artifact and PDF marker tags simply show their body.
const ARTIFACT_RULE: ShowFn<ArtifactElem> = |elem, _, _| Ok(elem.body.clone());
const PDF_MARKER_TAG_RULE: ShowFn<PdfMarkerTag> = |elem, _, _| Ok(elem.body.clone());
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/shapes.rs | crates/typst-layout/src/shapes.rs | use std::f64::consts::SQRT_2;
use kurbo::{CubicBez, ParamCurveExtrema};
use typst_library::diag::{SourceResult, bail};
use typst_library::engine::Engine;
use typst_library::foundations::{Content, Packed, Resolve, Smart, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::{
Abs, Axes, Corner, Corners, Frame, FrameItem, Length, Point, Ratio, Region, Rel,
Sides, Size,
};
use typst_library::visualize::{
CircleElem, CloseMode, Curve, CurveComponent, CurveElem, EllipseElem, FillRule,
FixedStroke, Geometry, LineCap, LineElem, Paint, PathElem, PathVertex, PolygonElem,
RectElem, Shape, SquareElem, Stroke,
};
use typst_syntax::Span;
use typst_utils::{Get, Numeric};
/// Layout the line.
#[typst_macros::time(span = elem.span())]
pub fn layout_line(
    elem: &Packed<LineElem>,
    _: &mut Engine,
    _: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Resolves a pair of relative values against the region's size.
    let relative = |axes: Axes<Rel<Abs>>| axes.zip_map(region.size, Rel::relative_to);

    let start = relative(elem.start.resolve(styles));
    let delta = elem
        .end
        .resolve(styles)
        .map(|end| relative(end) - start)
        .unwrap_or_else(|| {
            // No explicit end point: derive the offset from length and angle.
            let length = elem.length.resolve(styles);
            let angle = elem.angle.get(styles);
            relative(Axes::new(angle.cos() * length, angle.sin() * length))
        });

    // The frame must be large enough to contain both endpoints.
    let size = start.max(start + delta).max(Size::zero());
    if !size.is_finite() {
        bail!(elem.span(), "cannot create line with infinite length");
    }

    let stroke = elem.stroke.resolve(styles).unwrap_or_default();
    let mut frame = Frame::soft(size);
    let shape = Geometry::Line(delta.to_point()).stroked(stroke);
    frame.push(start.to_point(), FrameItem::Shape(shape, elem.span()));
    Ok(frame)
}
/// Layout the path.
#[typst_macros::time(span = elem.span())]
pub fn layout_path(
elem: &Packed<PathElem>,
_: &mut Engine,
_: Locator,
styles: StyleChain,
region: Region,
) -> SourceResult<Frame> {
let resolve = |axes: Axes<Rel<Length>>| {
axes.resolve(styles).zip_map(region.size, Rel::relative_to).to_point()
};
let vertices = &elem.vertices;
let points: Vec<Point> = vertices.iter().map(|c| resolve(c.vertex())).collect();
let mut size = Size::zero();
if points.is_empty() {
return Ok(Frame::soft(size));
}
// Only create a path if there are more than zero points.
// Construct a closed path given all points.
let mut curve = Curve::new();
curve.move_(points[0]);
let mut add_cubic = |from_point: Point,
to_point: Point,
from: PathVertex,
to: PathVertex| {
let from_control_point = resolve(from.control_point_from()) + from_point;
let to_control_point = resolve(to.control_point_to()) + to_point;
curve.cubic(from_control_point, to_control_point, to_point);
let p0 = kurbo::Point::new(from_point.x.to_raw(), from_point.y.to_raw());
let p1 = kurbo::Point::new(
from_control_point.x.to_raw(),
from_control_point.y.to_raw(),
);
let p2 =
kurbo::Point::new(to_control_point.x.to_raw(), to_control_point.y.to_raw());
let p3 = kurbo::Point::new(to_point.x.to_raw(), to_point.y.to_raw());
let extrema = kurbo::CubicBez::new(p0, p1, p2, p3).bounding_box();
size.x.set_max(Abs::raw(extrema.x1));
size.y.set_max(Abs::raw(extrema.y1));
};
for (vertex_window, point_window) in vertices.windows(2).zip(points.windows(2)) {
let from = vertex_window[0];
let to = vertex_window[1];
let from_point = point_window[0];
let to_point = point_window[1];
add_cubic(from_point, to_point, from, to);
}
if elem.closed.get(styles) {
let from = *vertices.last().unwrap(); // We checked that we have at least one element.
let to = vertices[0];
let from_point = *points.last().unwrap();
let to_point = points[0];
add_cubic(from_point, to_point, from, to);
curve.close();
}
if !size.is_finite() {
bail!(elem.span(), "cannot create path with infinite length");
}
// Prepare fill and stroke.
let fill = elem.fill.get_cloned(styles);
let fill_rule = elem.fill_rule.get(styles);
let stroke = match elem.stroke.resolve(styles) {
Smart::Auto if fill.is_none() => Some(FixedStroke::default()),
Smart::Auto => None,
Smart::Custom(stroke) => stroke.map(Stroke::unwrap_or_default),
};
let mut frame = Frame::soft(size);
let shape = Shape {
geometry: Geometry::Curve(curve),
stroke,
fill,
fill_rule,
};
frame.push(Point::zero(), FrameItem::Shape(shape, elem.span()));
Ok(frame)
}
/// Layout the curve.
///
/// Feeds each curve component into a [`CurveBuilder`] and wraps the finished
/// curve in a frame with fill and stroke.
#[typst_macros::time(span = elem.span())]
pub fn layout_curve(
    elem: &Packed<CurveElem>,
    _: &mut Engine,
    _: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    let mut builder = CurveBuilder::new(region, styles);
    for item in &elem.components {
        match item {
            CurveComponent::Move(element) => {
                let relative = element.relative.get(styles);
                let point = builder.resolve_point(element.start, relative);
                builder.move_(point);
            }
            CurveComponent::Line(element) => {
                let relative = element.relative.get(styles);
                let point = builder.resolve_point(element.end, relative);
                builder.line(point);
            }
            CurveComponent::Quad(element) => {
                let relative = element.relative.get(styles);
                let end = builder.resolve_point(element.end, relative);
                let control = match element.control {
                    // Auto: derive the control point from the mirror of the
                    // previous segment's control point.
                    Smart::Auto => {
                        control_c2q(builder.last_point, builder.last_control_from)
                    }
                    Smart::Custom(Some(p)) => builder.resolve_point(p, relative),
                    // No control point: degenerates to a straight segment.
                    Smart::Custom(None) => end,
                };
                builder.quad(control, end);
            }
            CurveComponent::Cubic(element) => {
                let relative = element.relative.get(styles);
                let end = builder.resolve_point(element.end, relative);
                let c1 = match element.control_start {
                    Some(Smart::Custom(p)) => builder.resolve_point(p, relative),
                    // Auto: mirror of the previous segment's control point.
                    Some(Smart::Auto) => builder.last_control_from,
                    None => builder.last_point,
                };
                let c2 = match element.control_end {
                    Some(p) => builder.resolve_point(p, relative),
                    None => end,
                };
                builder.cubic(c1, c2, end);
            }
            CurveComponent::Close(element) => {
                builder.close(element.mode.get(styles));
            }
        }
    }
    let (curve, size) = builder.finish();
    if curve.is_empty() {
        return Ok(Frame::soft(size));
    }
    if !size.is_finite() {
        bail!(elem.span(), "cannot create curve with infinite size");
    }
    // Prepare fill and stroke. An auto stroke defaults to a solid stroke only
    // when there is no fill.
    let fill = elem.fill.get_cloned(styles);
    let fill_rule = elem.fill_rule.get(styles);
    let stroke = match elem.stroke.resolve(styles) {
        Smart::Auto if fill.is_none() => Some(FixedStroke::default()),
        Smart::Auto => None,
        Smart::Custom(stroke) => stroke.map(Stroke::unwrap_or_default),
    };
    let mut frame = Frame::soft(size);
    let shape = Shape {
        geometry: Geometry::Curve(curve),
        stroke,
        fill,
        fill_rule,
    };
    frame.push(Point::zero(), FrameItem::Shape(shape, elem.span()));
    Ok(frame)
}
/// Builds a `Curve` from a [`CurveElem`]'s parts.
struct CurveBuilder<'a> {
    /// The output curve.
    curve: Curve,
    /// The curve's bounds.
    size: Size,
    /// The region relative to which points are resolved.
    region: Region,
    /// The styles for the curve.
    styles: StyleChain<'a>,
    /// The next start point.
    start_point: Point,
    /// Mirror of the first cubic start control point (for closing).
    start_control_into: Point,
    /// The point we previously ended on.
    last_point: Point,
    /// Mirror of the last cubic control point (for auto control points).
    last_control_from: Point,
    /// Whether a component has been started. This does not mean that something
    /// has been added to `self.curve` yet.
    is_started: bool,
    /// Whether anything was added to `self.curve` for the current component.
    is_empty: bool,
}
impl<'a> CurveBuilder<'a> {
    /// Create a new curve builder with empty curve and zero bounds.
    fn new(region: Region, styles: StyleChain<'a>) -> Self {
        Self {
            curve: Curve::new(),
            size: Size::zero(),
            region,
            styles,
            start_point: Point::zero(),
            start_control_into: Point::zero(),
            last_point: Point::zero(),
            last_control_from: Point::zero(),
            is_started: false,
            is_empty: true,
        }
    }
    /// Finish building, returning the curve and its bounding size.
    fn finish(self) -> (Curve, Size) {
        (self.curve, self.size)
    }
    /// Move to a point, starting a new segment. Resets all tracked points
    /// (start, last, and their control mirrors) to the new location.
    fn move_(&mut self, point: Point) {
        // Delay calling `curve.move` in case there is another move element
        // before any actual drawing.
        self.expand_bounds(point);
        self.start_point = point;
        self.start_control_into = point;
        self.last_point = point;
        self.last_control_from = point;
        self.is_started = true;
        self.is_empty = true;
    }
/// Add a line segment.
fn line(&mut self, point: Point) {
if self.is_empty {
self.start_component();
self.start_control_into = self.start_point;
}
self.curve.line(point);
self.expand_bounds(point);
self.last_point = point;
self.last_control_from = point;
}
/// Add a quadratic curve segment.
fn quad(&mut self, control: Point, end: Point) {
let c1 = control_q2c(self.last_point, control);
let c2 = control_q2c(end, control);
self.cubic(c1, c2, end);
}
/// Add a cubic curve segment.
fn cubic(&mut self, c1: Point, c2: Point, end: Point) {
if self.is_empty {
self.start_component();
self.start_control_into = mirror_c(self.start_point, c1);
}
self.curve.cubic(c1, c2, end);
let p0 = point_to_kurbo(self.last_point);
let p1 = point_to_kurbo(c1);
let p2 = point_to_kurbo(c2);
let p3 = point_to_kurbo(end);
let extrema = CubicBez::new(p0, p1, p2, p3).bounding_box();
self.size.x.set_max(Abs::raw(extrema.x1));
self.size.y.set_max(Abs::raw(extrema.y1));
self.last_point = end;
self.last_control_from = mirror_c(end, c2);
}
/// Close the curve if it was opened.
fn close(&mut self, mode: CloseMode) {
if self.is_started && !self.is_empty {
if mode == CloseMode::Smooth {
self.cubic(
self.last_control_from,
self.start_control_into,
self.start_point,
);
}
self.curve.close();
self.last_point = self.start_point;
self.last_control_from = self.start_point;
}
self.is_started = false;
self.is_empty = true;
}
/// Push the initial move component.
fn start_component(&mut self) {
self.curve.move_(self.start_point);
self.is_empty = false;
self.is_started = true;
}
/// Expand the curve's bounding box.
fn expand_bounds(&mut self, point: Point) {
self.size.x.set_max(point.x);
self.size.y.set_max(point.y);
}
/// Resolve the point relative to the region.
fn resolve_point(&self, point: Axes<Rel>, relative: bool) -> Point {
let mut p = point
.resolve(self.styles)
.zip_map(self.region.size, Rel::relative_to)
.to_point();
if relative {
p += self.last_point;
}
p
}
}
/// Convert a cubic control point into a quadratic one.
///
/// Inverse of [`control_q2c`]: recovers the quadratic control point from a
/// cubic control point and the adjacent on-curve point.
fn control_c2q(point: Point, control: Point) -> Point {
    1.5 * control - 0.5 * point
}
/// Convert a quadratic control point into a cubic one.
///
/// Standard degree elevation: the cubic control point lies a third of the
/// way from the on-curve point towards the quadratic control point.
fn control_q2c(point: Point, control: Point) -> Point {
    (point + 2.0 * control) / 3.0
}
/// Mirror a control point: reflects `control` across `point`.
fn mirror_c(point: Point, control: Point) -> Point {
    2.0 * point - control
}
/// Convert a point to a `kurbo::Point` (raw `f64` coordinates).
fn point_to_kurbo(p: Point) -> kurbo::Point {
    kurbo::Point::new(p.x.to_raw(), p.y.to_raw())
}
/// Layout the polygon.
#[typst_macros::time(span = elem.span())]
pub fn layout_polygon(
    elem: &Packed<PolygonElem>,
    _: &mut Engine,
    _: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Resolve every vertex relative to the region's size.
    let vertices: Vec<Point> = elem
        .vertices
        .iter()
        .map(|v| v.resolve(styles).zip_map(region.size, Rel::relative_to).to_point())
        .collect();

    // The frame is sized to the componentwise maximum over all vertices.
    let mut bounds = Point::zero();
    for &vertex in &vertices {
        bounds = vertex.max(bounds);
    }
    let size = bounds.to_size();
    if !size.is_finite() {
        bail!(elem.span(), "cannot create polygon with infinite size");
    }

    let mut frame = Frame::hard(size);

    // Only create a curve if there are more than zero points.
    let Some((&first, rest)) = vertices.split_first() else {
        return Ok(frame);
    };

    // Prepare fill and stroke.
    let fill = elem.fill.get_cloned(styles);
    let fill_rule = elem.fill_rule.get(styles);
    let stroke = match elem.stroke.resolve(styles) {
        Smart::Auto if fill.is_none() => Some(FixedStroke::default()),
        Smart::Auto => None,
        Smart::Custom(stroke) => stroke.map(Stroke::unwrap_or_default),
    };

    // Construct a closed curve through all vertices.
    let mut curve = Curve::new();
    curve.move_(first);
    for &vertex in rest {
        curve.line(vertex);
    }
    curve.close();

    let shape = Shape { geometry: Geometry::Curve(curve), stroke, fill, fill_rule };
    frame.push(Point::zero(), FrameItem::Shape(shape, elem.span()));
    Ok(frame)
}
/// Lay out the rectangle.
///
/// Thin wrapper that forwards the rectangle's resolved properties to the
/// generic [`layout_shape`].
#[typst_macros::time(span = elem.span())]
pub fn layout_rect(
    elem: &Packed<RectElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    layout_shape(
        engine,
        locator,
        styles,
        region,
        ShapeKind::Rect,
        elem.body.get_ref(styles),
        elem.fill.get_cloned(styles),
        elem.stroke.resolve(styles),
        elem.inset.resolve(styles),
        elem.outset.resolve(styles),
        elem.radius.resolve(styles),
        elem.span(),
    )
}
/// Lay out the square.
///
/// Like [`layout_rect`], but with `ShapeKind::Square` so that
/// [`layout_shape`] forces equal side lengths.
#[typst_macros::time(span = elem.span())]
pub fn layout_square(
    elem: &Packed<SquareElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    layout_shape(
        engine,
        locator,
        styles,
        region,
        ShapeKind::Square,
        elem.body.get_ref(styles),
        elem.fill.get_cloned(styles),
        elem.stroke.resolve(styles),
        elem.inset.resolve(styles),
        elem.outset.resolve(styles),
        elem.radius.resolve(styles),
        elem.span(),
    )
}
/// Lay out the ellipse.
///
/// Ellipses have a single stroke (splatted to all sides) and no corner
/// radius, unlike rectangles.
#[typst_macros::time(span = elem.span())]
pub fn layout_ellipse(
    elem: &Packed<EllipseElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    layout_shape(
        engine,
        locator,
        styles,
        region,
        ShapeKind::Ellipse,
        elem.body.get_ref(styles),
        elem.fill.get_cloned(styles),
        elem.stroke.resolve(styles).map(|s| Sides::splat(Some(s))),
        elem.inset.resolve(styles),
        elem.outset.resolve(styles),
        Corners::splat(None),
        elem.span(),
    )
}
/// Lay out the circle.
///
/// Like [`layout_ellipse`] (uniform stroke, no radius), but with
/// `ShapeKind::Circle` so that [`layout_shape`] forces a quadratic size.
#[typst_macros::time(span = elem.span())]
pub fn layout_circle(
    elem: &Packed<CircleElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    layout_shape(
        engine,
        locator,
        styles,
        region,
        ShapeKind::Circle,
        elem.body.get_ref(styles),
        elem.fill.get_cloned(styles),
        elem.stroke.resolve(styles).map(|s| Sides::splat(Some(s))),
        elem.inset.resolve(styles),
        elem.outset.resolve(styles),
        Corners::splat(None),
        elem.span(),
    )
}
/// A category of shape.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
enum ShapeKind {
    /// A rectangle with equal side lengths.
    Square,
    /// A quadrilateral with four right angles.
    Rect,
    /// An ellipse with coinciding foci.
    Circle,
    /// A curve around two focal points.
    Ellipse,
}

impl ShapeKind {
    /// Whether this shape kind is curvy.
    fn is_round(self) -> bool {
        match self {
            Self::Circle | Self::Ellipse => true,
            Self::Square | Self::Rect => false,
        }
    }

    /// Whether this shape kind has equal side length.
    fn is_quadratic(self) -> bool {
        match self {
            Self::Square | Self::Circle => true,
            Self::Rect | Self::Ellipse => false,
        }
    }
}
/// Layout a shape.
///
/// Shared implementation for rect, square, ellipse, and circle: lays out the
/// optional body (applying inset and, for quadratic kinds, forcing equal side
/// lengths), then draws the background/stroke behind the body's content.
#[allow(clippy::too_many_arguments)]
fn layout_shape(
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
    kind: ShapeKind,
    body: &Option<Content>,
    fill: Option<Paint>,
    stroke: Smart<Sides<Option<Option<Stroke<Abs>>>>>,
    inset: Sides<Option<Rel<Abs>>>,
    outset: Sides<Option<Rel<Abs>>>,
    radius: Corners<Option<Rel<Abs>>>,
    span: Span,
) -> SourceResult<Frame> {
    let mut frame;
    if let Some(child) = body {
        let mut inset = inset.unwrap_or_default();
        if kind.is_round() {
            // Apply extra inset to round shapes.
            inset = inset.map(|v| v + Ratio::new(0.5 - SQRT_2 / 4.0));
        }
        let has_inset = !inset.is_zero();

        // Take the inset, if any, into account.
        let mut pod = region;
        if has_inset {
            pod.size = crate::pad::shrink(region.size, &inset);
        }

        // If the shape is quadratic, we first measure it to determine its size
        // and then layout with full expansion to force the aspect ratio and
        // make sure it's really quadratic.
        if kind.is_quadratic() {
            let length = match quadratic_size(pod) {
                Some(length) => length,
                None => {
                    // Take as much as the child wants, but without overflowing.
                    crate::layout_frame(engine, child, locator.relayout(), styles, pod)?
                        .size()
                        .max_by_side()
                        .min(pod.size.min_by_side())
                }
            };
            pod = Region::new(Size::splat(length), Axes::splat(true));
        }

        // Layout the child.
        frame = crate::layout_frame(engine, child, locator, styles, pod)?;

        // Apply the inset.
        if has_inset {
            crate::pad::grow(&mut frame, &inset);
        }
    } else {
        // The default size that a shape takes on if it has no child and no
        // forced sizes.
        let default = Size::new(Abs::pt(45.0), Abs::pt(30.0)).min(region.size);
        let size = if kind.is_quadratic() {
            Size::splat(match quadratic_size(region) {
                Some(length) => length,
                None => default.min_by_side(),
            })
        } else {
            // For each dimension, pick the region size if forced, otherwise
            // use the default size (or the region size if the default
            // is too large for the region).
            region.expand.select(region.size, default)
        };
        frame = Frame::soft(size);
    }

    // Prepare stroke. With `auto` stroke, only stroke if there is no fill.
    let stroke = match stroke {
        Smart::Auto if fill.is_none() => Sides::splat(Some(FixedStroke::default())),
        Smart::Auto => Sides::splat(None),
        Smart::Custom(strokes) => {
            strokes.unwrap_or_default().map(|s| s.map(Stroke::unwrap_or_default))
        }
    };

    // Add fill and/or stroke.
    if fill.is_some() || stroke.iter().any(Option::is_some) {
        if kind.is_round() {
            // Round shapes are drawn as a single ellipse curve, grown by the
            // outset and prepended so they sit behind the body.
            let outset = outset.unwrap_or_default().relative_to(frame.size());
            let size = frame.size() + outset.sum_by_axis();
            let pos = Point::new(-outset.left, -outset.top);
            let shape = Shape {
                geometry: Geometry::Curve(Curve::ellipse(size)),
                fill,
                stroke: stroke.left,
                fill_rule: FillRule::default(),
            };
            frame.prepend(pos, FrameItem::Shape(shape, span));
        } else {
            fill_and_stroke(
                &mut frame,
                fill,
                &stroke,
                &outset.unwrap_or_default(),
                &radius.unwrap_or_default(),
                span,
            );
        }
    }

    Ok(frame)
}
/// Determines the forced size of a quadratic shape based on the region, if any.
///
/// The size is forced if at least one axis is expanded because `expand` is
/// `true` for axes whose size was manually specified by the user.
fn quadratic_size(region: Region) -> Option<Abs> {
    match (region.expand.x, region.expand.y) {
        // Both `width` and `height` are specified: choose the smaller one.
        (true, true) => Some(region.size.x.min(region.size.y)),
        // Only one axis is forced: that axis dictates the side length.
        (true, false) => Some(region.size.x),
        (false, true) => Some(region.size.y),
        // Nothing is forced.
        (false, false) => None,
    }
}
/// Creates a new rectangle as a curve.
///
/// The rectangle is grown by the `outset`, its corner radii are clamped
/// against the stroke widths and base radius, and the resulting path follows
/// the inner edge of the stroke (the `*_inner` control points).
pub fn clip_rect(
    size: Size,
    radius: &Corners<Rel<Abs>>,
    stroke: &Sides<Option<FixedStroke>>,
    outset: &Sides<Rel<Abs>>,
) -> Curve {
    let outset = outset.relative_to(size);
    let size = size + outset.sum_by_axis();

    // Half stroke thickness per side, if stroked.
    let stroke_widths = stroke.as_ref().map(|s| s.as_ref().map(|s| s.thickness / 2.0));

    // Clamp each corner radius to the maximum that still fits.
    let base_radius = size.x.min(size.y) / 2.0;
    let corner_max =
        stroke_widths.map_corners(|a, b| base_radius + a.min(b).unwrap_or(Abs::zero()));
    let radius = radius
        .zip(corner_max)
        .map(|(value, max)| value.relative_to(max * 2.0).min(max));

    let corners = corners_control_points(size, &radius, stroke, &stroke_widths);

    let mut curve = Curve::new();

    // Start at the top-left corner, with an arc if it is rounded.
    if corners.top_left.arc_inner() {
        curve.arc_move(
            corners.top_left.start_inner(),
            corners.top_left.center_inner(),
            corners.top_left.end_inner(),
        );
    } else {
        curve.move_(corners.top_left.center_inner());
    }

    // Continue clockwise through the remaining corners.
    for corner in [&corners.top_right, &corners.bottom_right, &corners.bottom_left] {
        if corner.arc_inner() {
            curve.arc_line(
                corner.start_inner(),
                corner.center_inner(),
                corner.end_inner(),
            )
        } else {
            curve.line(corner.center_inner());
        }
    }

    curve.close();
    // Shift back so the un-outset origin stays at zero.
    curve.translate(Point::new(-outset.left, -outset.top));
    curve
}
/// Add a fill and stroke with optional radius and outset to the frame.
pub fn fill_and_stroke(
    frame: &mut Frame,
    fill: Option<Paint>,
    stroke: &Sides<Option<FixedStroke>>,
    outset: &Sides<Rel<Abs>>,
    radius: &Corners<Rel<Abs>>,
    span: Span,
) {
    // Grow the drawn area by the outset and shift its origin so that the
    // frame's contents stay in place.
    let outset = outset.relative_to(frame.size());
    let size = frame.size() + outset.sum_by_axis();
    let origin = Point::new(-outset.left, -outset.top);

    // Background shapes go below the frame's existing content.
    let shapes = styled_rect(size, radius, fill, stroke);
    frame.prepend_multiple(
        shapes.into_iter().map(|shape| (origin, FrameItem::Shape(shape, span))),
    );
}
/// Create a styled rectangle with shapes.
/// - use rect primitive for simple rectangles
/// - stroke sides if possible
/// - use fill for sides for best looks
pub fn styled_rect(
    size: Size,
    radius: &Corners<Rel<Abs>>,
    fill: Option<Paint>,
    stroke: &Sides<Option<FixedStroke>>,
) -> Vec<Shape> {
    // A uniform stroke with no rounded corners can use the fast rect path.
    let simple = stroke.is_uniform() && radius.iter().cloned().all(|r| r.is_zero());
    if simple {
        simple_rect(size, fill, stroke.top.clone())
    } else {
        segmented_rect(size, radius, fill, stroke)
    }
}
/// Use rect primitive for the rectangle.
///
/// Produces a single shape backed by the `Rect` geometry.
fn simple_rect(
    size: Size,
    fill: Option<Paint>,
    stroke: Option<FixedStroke>,
) -> Vec<Shape> {
    let shape = Shape {
        geometry: Geometry::Rect(size),
        fill,
        stroke,
        fill_rule: FillRule::default(),
    };
    vec![shape]
}
/// Compute the control points for all four corners of a rectangle, including
/// whether the two sides meeting at each corner can be drawn as one segment
/// (`same`).
fn corners_control_points(
    size: Size,
    radius: &Corners<Abs>,
    strokes: &Sides<Option<FixedStroke>>,
    stroke_widths: &Sides<Option<Abs>>,
) -> Corners<ControlPoints> {
    Corners {
        top_left: Corner::TopLeft,
        top_right: Corner::TopRight,
        bottom_right: Corner::BottomRight,
        bottom_left: Corner::BottomLeft,
    }
    .map(|corner| ControlPoints {
        radius: radius.get(corner),
        stroke_before: stroke_widths.get(corner.side_ccw()),
        stroke_after: stroke_widths.get(corner.side_cw()),
        corner,
        size,
        // Whether the strokes on both sides of this corner can be drawn as
        // one continuous segment.
        same: match (
            strokes.get_ref(corner.side_ccw()),
            strokes.get_ref(corner.side_cw()),
        ) {
            (Some(a), Some(b)) => {
                // Solid strokes can be drawn as `fill_segment`s.
                let solid =
                    a.dash.as_ref().map(|dash| dash.array.is_empty()).unwrap_or(true);
                // For solid strokes the caps are only relevant for the end of
                // the strokes, and they can be filled. For dashed strokes the
                // cap determines how the entire line is drawn, thus there
                // should be two different segments if the cap differs.
                let filled_segment_same = a.paint == b.paint && a.dash == b.dash;
                let stroke_segment_same = a.cap == b.cap && a.thickness == b.thickness;
                filled_segment_same && (solid || stroke_segment_same)
            }
            (None, None) => true,
            _ => false,
        },
    })
}
/// Use stroke and fill for the rectangle.
///
/// Emits an optional fill shape followed by one stroke segment per maximal
/// run of sides that share the same stroke style. Stroked segments that are
/// rendered as fills go on top; others are inserted below the fills.
fn segmented_rect(
    size: Size,
    radius: &Corners<Rel<Abs>>,
    fill: Option<Paint>,
    strokes: &Sides<Option<FixedStroke>>,
) -> Vec<Shape> {
    let mut res = vec![];
    // Half stroke thickness per side, if stroked.
    let stroke_widths = strokes.as_ref().map(|s| s.as_ref().map(|s| s.thickness / 2.0));

    // Clamp each corner radius to the maximum that still fits.
    let base_radius = size.x.min(size.y) / 2.0;
    let corner_max =
        stroke_widths.map_corners(|a, b| base_radius + a.min(b).unwrap_or(Abs::zero()));
    let radius = radius
        .zip(corner_max)
        .map(|(value, max)| value.relative_to(max * 2.0).min(max));

    let corners = corners_control_points(size, &radius, strokes, &stroke_widths);

    // insert stroked sides below filled sides
    let mut stroke_insert = 0;

    // fill shape with inner curve
    if let Some(fill) = fill {
        let mut curve = Curve::new();
        let c = corners.get_ref(Corner::TopLeft);
        if c.arc() {
            curve.arc_move(c.start(), c.center(), c.end());
        } else {
            curve.move_(c.center());
        };
        for corner in [Corner::TopRight, Corner::BottomRight, Corner::BottomLeft] {
            let c = corners.get_ref(corner);
            if c.arc() {
                curve.arc_line(c.start(), c.center(), c.end());
            } else {
                curve.line(c.center());
            }
        }
        curve.close();
        res.push(Shape {
            geometry: Geometry::Curve(curve),
            fill: Some(fill),
            fill_rule: FillRule::default(),
            stroke: None,
        });
        stroke_insert += 1;
    }

    // Find a corner where the stroke changes between the adjacent sides.
    let current = corners.iter().find(|c| !c.same).map(|c| c.corner);

    if let Some(mut current) = current {
        // multiple segments
        // start at a corner with a change between sides and iterate clockwise all other corners
        let mut last = current;
        for _ in 0..4 {
            current = current.next_cw();
            if corners.get_ref(current).same {
                continue;
            }

            // create segment
            let start = last;
            let end = current;
            last = current;
            let Some(stroke) = strokes.get_ref(start.side_cw()) else { continue };
            let start_cap = stroke.cap;
            let end_cap = match strokes.get_ref(end.side_ccw()) {
                Some(stroke) => stroke.cap,
                None => start_cap,
            };
            let (shape, ontop) =
                segment(start, end, start_cap, end_cap, &corners, stroke);
            if ontop {
                res.push(shape);
            } else {
                res.insert(stroke_insert, shape);
                stroke_insert += 1;
            }
        }
    } else if let Some(stroke) = &strokes.top {
        // single segment
        let (shape, _) = segment(
            Corner::TopLeft,
            Corner::TopLeft,
            stroke.cap,
            stroke.cap,
            &corners,
            stroke,
        );
        res.push(shape);
    }
    res
}
/// Append the path that runs clockwise along the sides from `start` to `end`
/// to `curve`, beginning and ending in the middle of the respective corners.
fn curve_segment(
    start: Corner,
    end: Corner,
    corners: &Corners<ControlPoints>,
    curve: &mut Curve,
) {
    // create start corner
    // (only the second half of the corner arc, unless start == end)
    let c = corners.get_ref(start);
    if start == end || !c.arc() {
        curve.move_(c.end());
    } else {
        curve.arc_move(c.mid(), c.center(), c.end());
    }

    // create corners between start and end
    let mut current = start.next_cw();
    while current != end {
        let c = corners.get_ref(current);
        if c.arc() {
            curve.arc_line(c.start(), c.center(), c.end());
        } else {
            curve.line(c.end());
        }
        current = current.next_cw();
    }

    // create end corner
    // (full arc when wrapping around to the start, otherwise only the first
    // half)
    let c = corners.get_ref(end);
    if !c.arc() {
        curve.line(c.start());
    } else if start == end {
        curve.arc_line(c.start(), c.center(), c.end());
    } else {
        curve.arc_line(c.start(), c.center(), c.mid());
    }
}
/// Returns the shape for the segment and whether the shape should be drawn on top.
fn segment(
    start: Corner,
    end: Corner,
    start_cap: LineCap,
    end_cap: LineCap,
    corners: &Corners<ControlPoints>,
    stroke: &FixedStroke,
) -> (Shape, bool) {
    // Whether a single corner must be rendered by filling its outline
    // (differing stroke widths or a radius smaller than the stroke width).
    fn fill_corner(corner: &ControlPoints) -> bool {
        corner.stroke_before != corner.stroke_after
            || corner.radius() < corner.stroke_width_before()
    }

    // Whether any corner from `start` to `end` (clockwise) requires filling.
    fn fill_corners(
        start: Corner,
        end: Corner,
        corners: &Corners<ControlPoints>,
    ) -> bool {
        if fill_corner(corners.get_ref(start)) {
            return true;
        }
        if fill_corner(corners.get_ref(end)) {
            return true;
        }
        let mut current = start.next_cw();
        while current != end {
            if fill_corner(corners.get_ref(current)) {
                return true;
            }
            current = current.next_cw();
        }
        false
    }

    // Only solid (non-dashed) strokes can be rendered as fills.
    let solid = stroke.dash.as_ref().map(|dash| dash.array.is_empty()).unwrap_or(true);

    let use_fill = solid && fill_corners(start, end, corners);
    let shape = if use_fill {
        fill_segment(start, end, start_cap, end_cap, corners, stroke)
    } else {
        stroke_segment(start, end, corners, stroke.clone())
    };

    (shape, use_fill)
}
/// Stroke the sides from `start` to `end` clockwise.
fn stroke_segment(
    start: Corner,
    end: Corner,
    corners: &Corners<ControlPoints>,
    stroke: FixedStroke,
) -> Shape {
    // Build the open path along the requested sides and stroke it.
    let mut curve = Curve::new();
    curve_segment(start, end, corners, &mut curve);

    Shape {
        geometry: Geometry::Curve(curve),
        fill: None,
        fill_rule: FillRule::default(),
        stroke: Some(stroke),
    }
}
/// Fill the sides from `start` to `end` clockwise.
fn fill_segment(
start: Corner,
end: Corner,
start_cap: LineCap,
end_cap: LineCap,
corners: &Corners<ControlPoints>,
stroke: &FixedStroke,
) -> Shape {
let mut curve = Curve::new();
// create the start corner
// begin on the inside and finish on the outside
// no corner if start and end are equal
// half corner if different
if start == end {
let c = corners.get_ref(start);
curve.move_(c.end_inner());
curve.line(c.end_outer());
} else {
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | true |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/flow/collect.rs | crates/typst-layout/src/flow/collect.rs | use std::cell::{LazyCell, RefCell};
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use bumpalo::Bump;
use bumpalo::boxed::Box as BumpBox;
use comemo::{Track, Tracked, TrackedMut};
use typst_library::World;
use typst_library::diag::{SourceResult, bail, warning};
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Packed, Resolve, Smart, StyleChain};
use typst_library::introspection::{
Introspector, Location, Locator, LocatorLink, SplitLocator, Tag, TagElem,
};
use typst_library::layout::{
Abs, AlignElem, Alignment, Axes, BlockElem, ColbreakElem, FixedAlignment, FlushElem,
Fr, Fragment, Frame, FrameParent, Inherit, PagebreakElem, PlaceElem, PlacementScope,
Ratio, Region, Regions, Rel, Size, Sizing, Spacing, VElem,
};
use typst_library::model::ParElem;
use typst_library::routines::{Pair, Routines};
use typst_library::text::TextElem;
use typst_utils::{Protected, SliceExt};
use super::{FlowMode, layout_multi_block, layout_single_block};
use crate::inline::ParSituation;
use crate::modifiers::layout_and_modify;
/// Collects all elements of the flow into prepared children. These are much
/// simpler to handle than the raw elements.
#[typst_macros::time]
#[allow(clippy::too_many_arguments)]
pub fn collect<'a>(
    engine: &mut Engine,
    bump: &'a Bump,
    children: &[Pair<'a>],
    locator: Locator<'a>,
    base: Size,
    expand: bool,
    mode: FlowMode,
) -> SourceResult<Vec<Child<'a>>> {
    // Set up the collector state and run it for the requested flow mode.
    let collector = Collector {
        engine,
        bump,
        children,
        locator: locator.split(),
        base,
        expand,
        output: Vec::with_capacity(children.len()),
        par_situation: ParSituation::First,
    };
    collector.run(mode)
}
/// State for collection.
struct Collector<'a, 'x, 'y> {
    /// The engine used for nested layouting.
    engine: &'x mut Engine<'y>,
    /// Bump allocator backing the boxed [`Child`] variants.
    bump: &'a Bump,
    /// The raw flow children to collect.
    children: &'x [Pair<'a>],
    /// The base size forwarded to inline/paragraph layout.
    base: Size,
    /// The expansion flag forwarded to inline/paragraph layout.
    expand: bool,
    /// Generates locators for nested layouting.
    locator: SplitLocator<'a>,
    /// The prepared children produced so far.
    output: Vec<Child<'a>>,
    /// How the next paragraph is situated (first, consecutive, or other).
    par_situation: ParSituation,
}
impl<'a> Collector<'a, '_, '_> {
    /// Perform the collection.
    fn run(self, mode: FlowMode) -> SourceResult<Vec<Child<'a>>> {
        match mode {
            FlowMode::Root | FlowMode::Block => self.run_block(),
            FlowMode::Inline => self.run_inline(),
        }
    }

    /// Perform collection for block-level children.
    ///
    /// Dispatches each child to the matching handler; unknown elements are
    /// reported as warnings, pagebreaks as hard errors.
    fn run_block(mut self) -> SourceResult<Vec<Child<'a>>> {
        for &(child, styles) in self.children {
            if let Some(elem) = child.to_packed::<TagElem>() {
                self.output.push(Child::Tag(&elem.tag));
            } else if let Some(elem) = child.to_packed::<VElem>() {
                self.v(elem, styles);
            } else if let Some(elem) = child.to_packed::<ParElem>() {
                self.par(elem, styles)?;
            } else if let Some(elem) = child.to_packed::<BlockElem>() {
                self.block(elem, styles);
            } else if let Some(elem) = child.to_packed::<PlaceElem>() {
                self.place(elem, styles)?;
            } else if child.is::<FlushElem>() {
                self.output.push(Child::Flush);
            } else if let Some(elem) = child.to_packed::<ColbreakElem>() {
                self.output.push(Child::Break(elem.weak.get(styles)));
            } else if child.is::<PagebreakElem>() {
                bail!(
                    child.span(), "pagebreaks are not allowed inside of containers";
                    hint: "try using a `#colbreak()` instead";
                );
            } else {
                self.engine.sink.warn(warning!(
                    child.span(),
                    "{} was ignored during paged export",
                    child.func().name(),
                ));
            }
        }
        Ok(self.output)
    }

    /// Perform collection for inline-level children.
    fn run_inline(mut self) -> SourceResult<Vec<Child<'a>>> {
        // Extract leading and trailing tags.
        let (start, end) = self.children.split_prefix_suffix(|(c, _)| c.is::<TagElem>());
        let inner = &self.children[start..end];

        // Compute the shared styles.
        let styles = StyleChain::trunk_from_pairs(inner).unwrap_or_default();

        // Layout the lines.
        let lines = crate::inline::layout_inline(
            self.engine,
            inner,
            &mut self.locator,
            styles,
            self.base,
            self.expand,
        )?
        .into_frames();

        // Emit the leading tags before the lines.
        for (c, _) in &self.children[..start] {
            let elem = c.to_packed::<TagElem>().unwrap();
            self.output.push(Child::Tag(&elem.tag));
        }

        let leading = styles.resolve(ParElem::leading);
        self.lines(lines, leading, styles);

        // Emit the trailing tags after the lines.
        for (c, _) in &self.children[end..] {
            let elem = c.to_packed::<TagElem>().unwrap();
            self.output.push(Child::Tag(&elem.tag));
        }

        Ok(self.output)
    }

    /// Collect vertical spacing into a relative or fractional child.
    fn v(&mut self, elem: &'a Packed<VElem>, styles: StyleChain<'a>) {
        self.output.push(match elem.amount {
            // Weakness level is 0 (strong) or 1 depending on the `weak` flag.
            Spacing::Rel(rel) => {
                Child::Rel(rel.resolve(styles), elem.weak.get(styles) as u8)
            }
            Spacing::Fr(fr) => Child::Fr(fr),
        });
    }

    /// Collect a paragraph into [`LineChild`]ren. This already performs line
    /// layout since it is not dependent on the concrete regions.
    fn par(
        &mut self,
        elem: &'a Packed<ParElem>,
        styles: StyleChain<'a>,
    ) -> SourceResult<()> {
        let lines = crate::inline::layout_par(
            elem,
            self.engine,
            self.locator.next(&elem.span()),
            styles,
            self.base,
            self.expand,
            self.par_situation,
        )?
        .into_frames();

        let spacing = elem.spacing.resolve(styles);
        let leading = elem.leading.resolve(styles);

        // Paragraph spacing before and after uses weakness level 4.
        self.output.push(Child::Rel(spacing.into(), 4));

        self.lines(lines, leading, styles);

        self.output.push(Child::Rel(spacing.into(), 4));
        self.par_situation = ParSituation::Consecutive;

        Ok(())
    }

    /// Collect laid-out lines.
    fn lines(&mut self, lines: Vec<Frame>, leading: Abs, styles: StyleChain<'a>) {
        let align = styles.resolve(AlignElem::alignment);
        let costs = styles.get(TextElem::costs);

        // Determine whether to prevent widow and orphans.
        let len = lines.len();
        let prevent_orphans =
            costs.orphan() > Ratio::zero() && len >= 2 && !lines[1].is_empty();
        let prevent_widows =
            costs.widow() > Ratio::zero() && len >= 2 && !lines[len - 2].is_empty();
        let prevent_all = len == 3 && prevent_orphans && prevent_widows;

        // Store the heights of lines at the edges because we'll potentially
        // need these later when `lines` is already moved.
        let height_at = |i| lines.get(i).map(Frame::height).unwrap_or_default();
        let front_1 = height_at(0);
        let front_2 = height_at(1);
        let back_2 = height_at(len.saturating_sub(2));
        let back_1 = height_at(len.saturating_sub(1));

        for (i, frame) in lines.into_iter().enumerate() {
            if i > 0 {
                // Leading between lines uses weakness level 5.
                self.output.push(Child::Rel(leading.into(), 5));
            }

            // To prevent widows and orphans, we require enough space for
            // - all lines if it's just three
            // - the first two lines if we're at the first line
            // - the last two lines if we're at the second to last line
            let need = if prevent_all && i == 0 {
                front_1 + leading + front_2 + leading + back_1
            } else if prevent_orphans && i == 0 {
                front_1 + leading + front_2
            } else if prevent_widows && i >= 2 && i + 2 == len {
                back_2 + leading + back_1
            } else {
                frame.height()
            };

            self.output
                .push(Child::Line(self.boxed(LineChild { frame, align, need })));
        }
    }

    /// Collect a block into a [`SingleChild`] or [`MultiChild`] depending on
    /// whether it is breakable.
    fn block(&mut self, elem: &'a Packed<BlockElem>, styles: StyleChain<'a>) {
        let locator = self.locator.next(&elem.span());
        let align = styles.resolve(AlignElem::alignment);
        let alone = self.children.len() == 1;
        let sticky = elem.sticky.get(styles);
        let breakable = elem.breakable.get(styles);
        let fr = match elem.height.get(styles) {
            Sizing::Fr(fr) => Some(fr),
            _ => None,
        };

        // Auto spacing falls back to paragraph spacing (level 4); custom
        // relative spacing is slightly stronger (level 3).
        let fallback = LazyCell::new(|| styles.resolve(ParElem::spacing));
        let spacing = |amount| match amount {
            Smart::Auto => Child::Rel((*fallback).into(), 4),
            Smart::Custom(Spacing::Rel(rel)) => Child::Rel(rel.resolve(styles), 3),
            Smart::Custom(Spacing::Fr(fr)) => Child::Fr(fr),
        };

        self.output.push(spacing(elem.above.get(styles)));

        // Unbreakable blocks and blocks with fractional height become single
        // children; everything else may break across regions.
        if !breakable || fr.is_some() {
            self.output.push(Child::Single(self.boxed(SingleChild {
                align,
                sticky,
                alone,
                fr,
                elem,
                styles,
                locator,
                cell: CachedCell::new(),
            })));
        } else {
            self.output.push(Child::Multi(self.boxed(MultiChild {
                align,
                sticky,
                alone,
                elem,
                styles,
                locator,
                cell: CachedCell::new(),
            })));
        };

        self.output.push(spacing(elem.below.get(styles)));
        self.par_situation = ParSituation::Other;
    }

    /// Collects a placed element into a [`PlacedChild`].
    fn place(
        &mut self,
        elem: &'a Packed<PlaceElem>,
        styles: StyleChain<'a>,
    ) -> SourceResult<()> {
        let alignment = elem.alignment.get(styles);
        let align_x = alignment.map_or(FixedAlignment::Center, |align| {
            align.x().unwrap_or_default().resolve(styles)
        });
        let align_y = alignment.map(|align| align.y().map(|y| y.resolve(styles)));
        let scope = elem.scope.get(styles);
        let float = elem.float.get(styles);

        // Validate the combination of floating and vertical alignment.
        match (float, align_y) {
            (true, Smart::Custom(None | Some(FixedAlignment::Center))) => bail!(
                elem.span(),
                "vertical floating placement must be `auto`, `top`, or `bottom`"
            ),
            (false, Smart::Auto) => bail!(
                elem.span(),
                "automatic positioning is only available for floating placement";
                hint: "you can enable floating placement with `place(float: true, ..)`";
            ),
            _ => {}
        }

        if !float && scope == PlacementScope::Parent {
            bail!(
                elem.span(),
                "parent-scoped positioning is currently only available for floating placement";
                hint: "you can enable floating placement with `place(float: true, ..)`";
            );
        }

        let locator = self.locator.next(&elem.span());
        let clearance = elem.clearance.resolve(styles);
        let delta = Axes::new(elem.dx.get(styles), elem.dy.get(styles)).resolve(styles);
        self.output.push(Child::Placed(self.boxed(PlacedChild {
            align_x,
            align_y,
            scope,
            float,
            clearance,
            delta,
            elem,
            styles,
            locator,
            alignment,
            cell: CachedCell::new(),
        })));

        Ok(())
    }

    /// Wraps a value in a bump-allocated box to reduce its footprint in the
    /// [`Child`] enum.
    fn boxed<T>(&self, value: T) -> BumpBox<'a, T> {
        BumpBox::new_in(value, self.bump)
    }
}
/// A prepared child in flow layout.
///
/// The larger variants are bump-boxed to keep the enum size down.
#[derive(Debug)]
pub enum Child<'a> {
    /// An introspection tag.
    Tag(&'a Tag),
    /// Relative spacing with a specific weakness level. Levels produced by
    /// the collector: 0/1 for explicit `v` spacing (depending on `weak`),
    /// 3 for custom block spacing, 4 for paragraph/auto block spacing, and
    /// 5 for leading between lines.
    Rel(Rel<Abs>, u8),
    /// Fractional spacing.
    Fr(Fr),
    /// An already layouted line of a paragraph.
    Line(BumpBox<'a, LineChild>),
    /// An unbreakable block.
    Single(BumpBox<'a, SingleChild<'a>>),
    /// A breakable block.
    Multi(BumpBox<'a, MultiChild<'a>>),
    /// An absolutely or floatingly placed element.
    Placed(BumpBox<'a, PlacedChild<'a>>),
    /// A place flush.
    Flush,
    /// An explicit column break.
    Break(bool),
}
/// A child that encapsulates a layouted line of a paragraph.
#[derive(Debug)]
pub struct LineChild {
    /// The layouted line.
    pub frame: Frame,
    /// How to align the line.
    pub align: Axes<FixedAlignment>,
    /// The height that must be available for this line to be placed here.
    /// Includes subsequent/preceding lines when widow/orphan prevention
    /// applies, otherwise just the frame's height.
    pub need: Abs,
}
/// A child that encapsulates a prepared unbreakable block.
#[derive(Debug)]
pub struct SingleChild<'a> {
    /// How to align the block.
    pub align: Axes<FixedAlignment>,
    /// The block's `sticky` setting.
    pub sticky: bool,
    /// Whether this block is the flow's only child (keeps vertical
    /// expansion during layout).
    pub alone: bool,
    /// The block's fractional height, if any.
    pub fr: Option<Fr>,
    /// The underlying block element.
    elem: &'a Packed<BlockElem>,
    /// The styles to layout with.
    styles: StyleChain<'a>,
    /// The locator for nested layouting.
    locator: Locator<'a>,
    /// Caches the layout result, keyed by the region.
    cell: CachedCell<SourceResult<Frame>>,
}
impl SingleChild<'_> {
    /// Build the child's frame given the region's base size.
    ///
    /// The result is cached per region; the actual work happens in the
    /// memoized [`layout_single_impl`].
    pub fn layout(&self, engine: &mut Engine, region: Region) -> SourceResult<Frame> {
        self.cell.get_or_init(region, |mut region| {
            // Vertical expansion is only kept if this block is the only child.
            region.expand.y &= self.alone;
            layout_single_impl(
                engine.routines,
                engine.world,
                engine.introspector.into_raw(),
                engine.traced,
                TrackedMut::reborrow_mut(&mut engine.sink),
                engine.route.track(),
                self.elem,
                self.locator.track(),
                self.styles,
                region,
            )
        })
    }
}
/// The cached, internal implementation of [`SingleChild::layout`].
///
/// Takes the engine's parts as tracked arguments so that `comemo` can
/// memoize the layout across compilations.
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
fn layout_single_impl(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    elem: &Packed<BlockElem>,
    locator: Tracked<Locator>,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Reassemble an engine from the tracked parts.
    let introspector = Protected::from_raw(introspector);
    let link = LocatorLink::new(locator);
    let locator = Locator::link(&link);
    let mut engine = Engine {
        routines,
        world,
        introspector,
        traced,
        sink,
        route: Route::extend(route),
    };

    layout_and_modify(styles, |styles| {
        layout_single_block(elem, &mut engine, locator, styles, region)
    })
}
/// A child that encapsulates a prepared breakable block.
#[derive(Debug)]
pub struct MultiChild<'a> {
    /// How to align the block.
    pub align: Axes<FixedAlignment>,
    /// The block's `sticky` setting.
    pub sticky: bool,
    /// Whether this block is the flow's only child (keeps vertical
    /// expansion during layout).
    alone: bool,
    /// The underlying block element.
    elem: &'a Packed<BlockElem>,
    /// The styles to layout with.
    styles: StyleChain<'a>,
    /// The locator for nested layouting.
    locator: Locator<'a>,
    /// Caches the layout result, keyed by the regions.
    cell: CachedCell<SourceResult<Fragment>>,
}
impl<'a> MultiChild<'a> {
    /// Build the child's frames given regions.
    ///
    /// Returns the first frame plus a [`MultiSpill`] if the block broke into
    /// more than one frame.
    pub fn layout<'b>(
        &'b self,
        engine: &mut Engine,
        regions: Regions,
    ) -> SourceResult<(Frame, Option<MultiSpill<'a, 'b>>)> {
        let fragment = self.layout_full(engine, regions)?;
        let exist_non_empty_frame = fragment.iter().any(|f| !f.is_empty());

        // Extract the first frame.
        let mut frames = fragment.into_iter();
        let frame = frames.next().unwrap();

        // If there's more, return a `spill`.
        let mut spill = None;
        if frames.next().is_some() {
            spill = Some(MultiSpill {
                exist_non_empty_frame,
                multi: self,
                full: regions.full,
                first: regions.size.y,
                backlog: vec![],
                min_backlog_len: regions.backlog.len(),
            });
        }

        Ok((frame, spill))
    }

    /// The shared internal implementation of [`Self::layout`] and
    /// [`MultiSpill::layout`].
    fn layout_full(
        &self,
        engine: &mut Engine,
        regions: Regions,
    ) -> SourceResult<Fragment> {
        self.cell.get_or_init(regions, |mut regions| {
            // Vertical expansion is only kept if this block is the only child.
            regions.expand.y &= self.alone;
            layout_multi_impl(
                engine.routines,
                engine.world,
                engine.introspector.into_raw(),
                engine.traced,
                TrackedMut::reborrow_mut(&mut engine.sink),
                engine.route.track(),
                self.elem,
                self.locator.track(),
                self.styles,
                regions,
            )
        })
    }
}
/// The cached, internal implementation of [`MultiChild::layout_full`].
///
/// All engine parts are passed individually (in their tracked forms) so that
/// the memoization can hash and compare them independently.
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
fn layout_multi_impl(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    elem: &Packed<BlockElem>,
    locator: Tracked<Locator>,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    // Rebuild a usable locator from its tracked form.
    let locator_link = LocatorLink::new(locator);
    let locator = Locator::link(&locator_link);

    // Reassemble an engine from the individually passed parts.
    let mut engine = Engine {
        routines,
        world,
        introspector: Protected::from_raw(introspector),
        traced,
        sink,
        route: Route::extend(route),
    };

    layout_and_modify(styles, |styles| {
        layout_multi_block(elem, &mut engine, locator, styles, regions)
    })
}
/// The spilled remains of a `MultiChild` that broke across two regions.
#[derive(Debug, Clone)]
pub struct MultiSpill<'a, 'b> {
    /// Whether any frame produced by the multi child so far was non-empty.
    pub(super) exist_non_empty_frame: bool,
    // The multi child this spill originates from.
    multi: &'b MultiChild<'a>,
    // The height of the first region the child was laid out in.
    first: Abs,
    // The full height of the first region.
    full: Abs,
    // The heights of the regions that are already committed (unchangeable).
    backlog: Vec<Abs>,
    // The minimum length the merged backlog may be truncated to; only ever
    // grows (see `MultiSpill::layout` for why it must never shrink).
    min_backlog_len: usize,
}
impl MultiSpill<'_, '_> {
    /// Build the spill's frames given regions.
    ///
    /// Returns the next frame and, if content still remains after it, the
    /// spill for the following region.
    pub fn layout(
        mut self,
        engine: &mut Engine,
        regions: Regions,
    ) -> SourceResult<(Frame, Option<Self>)> {
        // The first region becomes unchangeable and committed to our backlog.
        self.backlog.push(regions.size.y);
        // The remaining regions are ephemeral and may be replaced.
        let mut backlog: Vec<_> =
            self.backlog.iter().chain(regions.backlog).copied().collect();
        // Remove unnecessary backlog items to prevent it from growing
        // unnecessarily, changing the region's hash.
        while backlog.len() > self.min_backlog_len
            && backlog.last().copied() == regions.last
        {
            backlog.pop();
        }
        // Build the pod with the merged regions.
        let pod = Regions {
            size: Size::new(regions.size.x, self.first),
            expand: regions.expand,
            full: self.full,
            backlog: &backlog,
            last: regions.last,
        };
        // Extract the not-yet-processed frames. The frames for the committed
        // regions were already returned by previous calls.
        let mut frames = self
            .multi
            .layout_full(engine, pod)?
            .into_iter()
            .skip(self.backlog.len());
        // Ensure that the backlog never shrinks, so that unwrapping below is at
        // least fairly safe. Note that the whole region juggling here is
        // fundamentally not ideal: It is a compatibility layer between the old
        // (all regions provided upfront) & new (each region provided on-demand,
        // like an iterator) layout model. This approach is not 100% correct, as
        // in the old model later regions could have an effect on earlier
        // frames, but it's the best we can do for now, until the multi
        // layouters are refactored to the new model.
        self.min_backlog_len = self.min_backlog_len.max(backlog.len());
        // Save the first frame.
        let frame = frames.next().unwrap();
        // If there's more, return a `spill`.
        let mut spill = None;
        if frames.next().is_some() {
            spill = Some(self);
        }
        Ok((frame, spill))
    }
    /// The alignment of the breakable block.
    pub fn align(&self) -> Axes<FixedAlignment> {
        self.multi.align
    }
}
/// A child that encapsulates a prepared placed element.
#[derive(Debug)]
pub struct PlacedChild<'a> {
    /// The fixed horizontal alignment of the placed element.
    pub align_x: FixedAlignment,
    /// The vertical alignment; the inner `Option` can be unwrapped because
    /// `Custom(None)` is ruled out earlier (see the comment in
    /// `Composer::float`).
    pub align_y: Smart<Option<FixedAlignment>>,
    /// Whether the element is placed relative to the column or the page.
    pub scope: PlacementScope,
    /// Whether the element floats (i.e. is laid out as an insertion).
    pub float: bool,
    /// The clearance between the float and in-flow content.
    pub clearance: Abs,
    /// An offset applied to the element's final position.
    pub delta: Axes<Rel<Abs>>,
    // The place element to lay out.
    elem: &'a Packed<PlaceElem>,
    // The styles active for the element.
    styles: StyleChain<'a>,
    // Assigns a stable location to the element during layout.
    locator: Locator<'a>,
    // The raw alignment; used as a fallback when `align_y` is `Auto`.
    alignment: Smart<Alignment>,
    // Caches the laid-out frame for the most recently used base size.
    cell: CachedCell<SourceResult<Frame>>,
}
impl PlacedChild<'_> {
    /// Build the child's frame given the region's base size.
    pub fn layout(&self, engine: &mut Engine, base: Size) -> SourceResult<Frame> {
        self.cell.get_or_init(base, |base| {
            // Fall back to centering when no explicit alignment was given.
            let resolved = self.alignment.unwrap_or_else(|| Alignment::CENTER);
            let aligned = AlignElem::alignment.set(resolved).wrap();
            let styles = self.styles.chain(&aligned);

            // A placed element is never given the chance to expand.
            let region = Region::new(base, Axes::splat(false));
            let mut frame = layout_and_modify(styles, |styles| {
                crate::layout_frame(
                    engine,
                    &self.elem.body,
                    self.locator.relayout(),
                    styles,
                    region,
                )
            })?;

            // Tag floating frames with the location of their originating
            // element.
            if self.float {
                let parent =
                    FrameParent::new(self.elem.location().unwrap(), Inherit::Yes);
                frame.set_parent(parent);
            }

            Ok(frame)
        })
    }

    /// The element's location.
    pub fn location(&self) -> Location {
        self.elem.location().unwrap()
    }
}
/// Wraps a parameterized computation and caches its latest output.
///
/// - When the computation is performed multiple times consecutively with the
///   same argument, reuses the cache.
/// - When the argument changes, the new output is cached.
#[derive(Clone)]
// Stores the 128-bit hash of the last input alongside the cached output.
struct CachedCell<T>(RefCell<Option<(u128, T)>>);
impl<T> CachedCell<T> {
    /// Create an empty cached cell.
    fn new() -> Self {
        Self(RefCell::new(None))
    }

    /// Perform the computation `f` with caching.
    ///
    /// Reuses the stored output when `input` hashes to the same value as the
    /// previously seen input; otherwise runs `f` and caches its result.
    fn get_or_init<F, I>(&self, input: I, f: F) -> T
    where
        I: Hash,
        T: Clone,
        F: FnOnce(I) -> T,
    {
        let fingerprint = typst_utils::hash128(&input);
        let mut slot = self.0.borrow_mut();

        match &*slot {
            // Fast path: the cell already holds the output for this input.
            Some((stored, cached)) if *stored == fingerprint => cached.clone(),
            // Slow path: compute, cache, and return the fresh output.
            _ => {
                let fresh = f(input);
                *slot = Some((fingerprint, fresh.clone()));
                fresh
            }
        }
    }
}
impl<T> Default for CachedCell<T> {
fn default() -> Self {
Self::new()
}
}
impl<T> Debug for CachedCell<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // `T` has no `Debug` bound, so print an opaque placeholder. `pad`
        // still respects width/alignment formatting options.
        f.pad("CachedCell(..)")
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/flow/compose.rs | crates/typst-layout/src/flow/compose.rs | use std::num::NonZeroUsize;
use typst_library::diag::SourceResult;
use typst_library::engine::Engine;
use typst_library::foundations::{Content, NativeElement, Packed, Resolve, Smart};
use typst_library::introspection::{
Counter, CounterDisplayElem, CounterState, CounterUpdate, Location, Locator,
SplitLocator, Tag,
};
use typst_library::layout::{
Abs, Axes, Dir, FixedAlignment, Fragment, Frame, FrameItem, FrameParent, Inherit,
OuterHAlignment, PlacementScope, Point, Region, Regions, Rel, Size,
};
use typst_library::model::{
FootnoteElem, FootnoteEntry, LineNumberingScope, Numbering, ParLineMarker,
};
use typst_syntax::Span;
use typst_utils::{NonZeroExt, Numeric};
use super::{
Config, FlowMode, FlowResult, LineNumberConfig, PlacedChild, Stop, Work, distribute,
};
/// Composes the contents of a single page/region, which may consist of
/// multiple columns/subregions.
///
/// Composition is chiefly concerned with laying out out-of-flow insertions
/// (floats and footnotes). It does this in per-page and per-column loops
/// that rerun whenever a new float is added, since an insertion changes the
/// regions available to the distributor.
///
/// The in-flow contents of the individual subregions are laid out via
/// [distribution](distribute).
pub fn compose(
    engine: &mut Engine,
    work: &mut Work,
    config: &Config,
    locator: Locator,
    regions: Regions,
) -> SourceResult<Frame> {
    // Start with a fresh composer that has no insertions yet.
    let composer = Composer {
        engine,
        config,
        page_base: regions.base(),
        column: 0,
        page_insertions: Insertions::default(),
        column_insertions: Insertions::default(),
        work,
        footnote_spill: None,
        footnote_queue: vec![],
    };
    composer.page(locator, regions)
}
/// State for composition.
///
/// Sadly, we need that many lifetimes because &mut references are invariant and
/// it would force the lifetimes of various things to be equal if they
/// shared a lifetime.
///
/// The only interesting lifetimes are 'a and 'b. See [Work] for more details
/// about them.
pub struct Composer<'a, 'b, 'x, 'y> {
    /// The engine used for layout.
    pub engine: &'x mut Engine<'y>,
    /// The work still to be processed; checkpointed and restored on relayout.
    pub work: &'x mut Work<'a, 'b>,
    /// The composition-wide configuration.
    pub config: &'x Config<'x>,
    // The index of the column currently being laid out.
    column: usize,
    // The base size of the page's regions.
    page_base: Size,
    // Insertions scoped to the whole page.
    page_insertions: Insertions<'a, 'b>,
    // Insertions scoped to the current column.
    column_insertions: Insertions<'a, 'b>,
    // These are here because they have to survive relayout (we could lose the
    // footnotes otherwise). For floats, we revisit them anyway, so it's okay to
    // use `work.floats` directly. This is not super clean; probably there's a
    // better way.
    footnote_spill: Option<std::vec::IntoIter<Frame>>,
    footnote_queue: Vec<Packed<FootnoteElem>>,
}
impl<'a, 'b> Composer<'a, 'b, '_, '_> {
    /// Lay out a container/page region, including container/page insertions.
    fn page(mut self, locator: Locator, regions: Regions) -> SourceResult<Frame> {
        // This loop can restart region layout when requested to do so by a
        // `Stop`. This happens when there is a parent-scoped float.
        let checkpoint = self.work.clone();
        let output = loop {
            // Shrink the available space by the space used by page
            // insertions.
            let mut pod = regions;
            pod.size.y -= self.page_insertions.height();
            match self.page_contents(locator.relayout(), pod) {
                Ok(frame) => break frame,
                Err(Stop::Finish(_)) => unreachable!(),
                Err(Stop::Relayout(PlacementScope::Column)) => unreachable!(),
                Err(Stop::Relayout(PlacementScope::Parent)) => {
                    // Roll the work back to its pre-attempt state; the newly
                    // registered page insertion is kept in `page_insertions`.
                    *self.work = checkpoint.clone();
                    continue;
                }
                Err(Stop::Error(err)) => return Err(err),
            };
        };
        drop(checkpoint);
        Ok(self.page_insertions.finalize(self.work, self.config, output))
    }
    /// Lay out the inner contents of a container/page.
    fn page_contents(&mut self, locator: Locator, regions: Regions) -> FlowResult<Frame> {
        // No point in creating column regions, if there's just one!
        if self.config.columns.count == 1 {
            return self.column(locator, regions);
        }
        // Create a backlog for multi-column layout: each page-level region
        // height is repeated once per column, minus the first (which becomes
        // the initial size).
        let column_height = regions.size.y;
        let backlog: Vec<_> = std::iter::once(&column_height)
            .chain(regions.backlog)
            .flat_map(|&h| std::iter::repeat_n(h, self.config.columns.count))
            .skip(1)
            .collect();
        // Subregions for column layout.
        let mut inner = Regions {
            size: Size::new(self.config.columns.width, column_height),
            backlog: &backlog,
            expand: Axes::new(true, regions.expand.y),
            ..regions
        };
        // The size of the merged frame hosting multiple columns.
        let size = Size::new(
            regions.size.x,
            if regions.expand.y { regions.size.y } else { Abs::zero() },
        );
        let mut output = Frame::hard(size);
        let mut offset = Abs::zero();
        let mut locator = locator.split();
        // Lay out the columns and stitch them together.
        for i in 0..self.config.columns.count {
            self.column = i;
            let frame = self.column(locator.next(&()), inner)?;
            if !regions.expand.y {
                // Without vertical expansion, grow the merged frame to fit
                // the tallest column.
                output.size_mut().y.set_max(frame.height());
            }
            let width = frame.width();
            let x = if self.config.columns.dir == Dir::LTR {
                offset
            } else {
                regions.size.x - offset - width
            };
            offset += width + self.config.columns.gutter;
            output.push_frame(Point::with_x(x), frame);
            inner.next();
        }
        Ok(output)
    }
    /// Lay out a column, including column insertions.
    fn column(&mut self, locator: Locator, regions: Regions) -> FlowResult<Frame> {
        // Reset column insertion when starting a new column.
        self.column_insertions = Insertions::default();
        // Process footnote spill.
        if let Some(spill) = self.work.footnote_spill.take() {
            self.footnote_spill(spill, regions.base())?;
        }
        // This loop can restart column layout when requested to do so by a
        // `Stop`. This happens when there is a column-scoped float.
        let checkpoint = self.work.clone();
        let inner = loop {
            // Shrink the available space by the space used by column
            // insertions.
            let mut pod = regions;
            pod.size.y -= self.column_insertions.height();
            match self.column_contents(pod) {
                Ok(frame) => break frame,
                Err(Stop::Finish(_)) => unreachable!(),
                Err(Stop::Relayout(PlacementScope::Column)) => {
                    // Roll the work back to its pre-attempt state; the newly
                    // registered column insertion is kept.
                    *self.work = checkpoint.clone();
                    continue;
                }
                err => return err,
            }
        };
        drop(checkpoint);
        self.work.footnotes.extend(self.footnote_queue.drain(..));
        if let Some(spill) = self.footnote_spill.take() {
            self.work.footnote_spill = Some(spill);
        }
        let insertions = std::mem::take(&mut self.column_insertions);
        let mut output = insertions.finalize(self.work, self.config, inner);
        // Lay out per-column line numbers.
        if let Some(line_config) = &self.config.line_numbers {
            layout_line_numbers(
                self.engine,
                self.config,
                line_config,
                locator,
                self.column,
                &mut output,
            )?;
        }
        Ok(output)
    }
    /// Lay out the inner contents of a column.
    ///
    /// Pending floats and footnotes are also laid out at this step. For those,
    /// however, we forbid footnote migration (moving the frame containing the
    /// footnote reference if the corresponding entry doesn't fit), allowing
    /// the footnote invariant to be broken, as it would require handling a
    /// [`Stop::Finish`] at this point, but that is exclusively handled by the
    /// distributor.
    fn column_contents(&mut self, regions: Regions) -> FlowResult<Frame> {
        // Process pending footnotes.
        for note in std::mem::take(&mut self.work.footnotes) {
            self.footnote(note, &mut regions.clone(), Abs::zero(), false)?;
        }
        // Process pending floats.
        for placed in std::mem::take(&mut self.work.floats) {
            self.float(placed, &regions, false, false)?;
        }
        distribute(self, regions)
    }
    /// Lays out an item with floating placement.
    ///
    /// This is called from within [`distribute`]. When the float fits, this
    /// returns an `Err(Stop::Relayout(..))`, which bubbles all the way through
    /// distribution and is handled in [`Self::page`] or [`Self::column`]
    /// (depending on `placed.scope`).
    ///
    /// When the float does not fit, it is queued into `work.floats`. The
    /// value of `clearance` indicates that clearance between the float and
    /// flow content is needed --- it is set if there are already distributed
    /// items.
    ///
    /// The value of `migratable` determines whether footnotes within the float
    /// should be allowed to prompt its migration if they don't fit in order to
    /// respect the footnote invariant (entries in the same page as the
    /// references), triggering [`Stop::Finish`]. This is usually `true` within
    /// the distributor, as it can handle that particular flow event, and
    /// `false` elsewhere.
    pub fn float(
        &mut self,
        placed: &'b PlacedChild<'a>,
        regions: &Regions,
        clearance: bool,
        migratable: bool,
    ) -> FlowResult<()> {
        // If the float is already processed, skip it.
        let loc = placed.location();
        if self.skipped(loc) {
            return Ok(());
        }
        // If there is already a queued float, queue this one as well. We
        // don't want to disrupt the order.
        if !self.work.floats.is_empty() {
            self.work.floats.push(placed);
            return Ok(());
        }
        // Determine the base size of the chosen scope.
        let base = match placed.scope {
            PlacementScope::Column => regions.base(),
            PlacementScope::Parent => self.page_base,
        };
        // Lay out the placed element.
        let frame = placed.layout(self.engine, base)?;
        // Determine the remaining space in the scope. This is exact for column
        // placement, but only an approximation for page placement.
        let remaining = match placed.scope {
            PlacementScope::Column => regions.size.y,
            PlacementScope::Parent => {
                let remaining: Abs = regions
                    .iter()
                    .map(|size| size.y)
                    .take(self.config.columns.count - self.column)
                    .sum();
                remaining / self.config.columns.count as f64
            }
        };
        // We only require clearance if there is other content.
        let clearance = if clearance { placed.clearance } else { Abs::zero() };
        let need = frame.height() + clearance;
        // If the float doesn't fit, queue it for the next region.
        if !remaining.fits(need) && regions.may_progress() {
            self.work.floats.push(placed);
            return Ok(());
        }
        // Handle footnotes in the float.
        self.footnotes(regions, &frame, need, false, migratable)?;
        // Determine the float's vertical alignment. We can unwrap the inner
        // `Option` because `Custom(None)` is checked for during collection.
        let align_y = placed.align_y.map(Option::unwrap).unwrap_or_else(|| {
            // When the float's vertical midpoint would be above the middle of
            // the page if it were layouted in-flow, we use top alignment.
            // Otherwise, we use bottom alignment.
            let used = base.y - remaining;
            let half = need / 2.0;
            let ratio = (used + half) / base.y;
            if ratio <= 0.5 { FixedAlignment::Start } else { FixedAlignment::End }
        });
        // Select the insertion area where we'll put this float.
        let area = match placed.scope {
            PlacementScope::Column => &mut self.column_insertions,
            PlacementScope::Parent => &mut self.page_insertions,
        };
        // Put the float there.
        area.push_float(placed, frame, align_y);
        area.skips.push(loc);
        // Trigger relayout.
        Err(Stop::Relayout(placed.scope))
    }
    /// Lays out footnotes in the `frame` if this is the root flow and there are
    /// any. The value of `breakable` indicates whether the element that
    /// produced the frame is breakable. If not, the frame is treated as atomic.
    ///
    /// The value of `migratable` indicates whether footnote migration should be
    /// possible (at least for the first footnote found in the frame, as it is
    /// forbidden for the second footnote onwards). It is usually `true` within
    /// the distributor and `false` elsewhere, as the distributor can handle
    /// [`Stop::Finish`] which is returned when migration is requested.
    pub fn footnotes(
        &mut self,
        regions: &Regions,
        frame: &Frame,
        flow_need: Abs,
        breakable: bool,
        migratable: bool,
    ) -> FlowResult<()> {
        // Footnotes are only supported at the root level.
        if self.config.mode != FlowMode::Root {
            return Ok(());
        }
        // Search for footnotes, both in pending tags and within the frame.
        let mut notes = vec![];
        for tag in &self.work.tags {
            let Tag::Start(elem, _) = tag else { continue };
            let Some(note) = elem.to_packed::<FootnoteElem>() else { continue };
            notes.push((Abs::zero(), note.clone()));
        }
        find_in_frame_impl::<FootnoteElem>(&mut notes, frame, Abs::zero());
        if notes.is_empty() {
            return Ok(());
        }
        let mut relayout = false;
        let mut regions = *regions;
        // The first footnote's origin frame should be migratable if the region
        // may progress (already checked by the footnote function) and if the
        // origin frame isn't breakable (checked here).
        let mut migratable = migratable && !breakable;
        for (y, elem) in notes {
            // The amount of space used by the in-flow content that contains the
            // footnote marker. For a breakable frame, it's the y position of
            // the marker. For an unbreakable frame, it's the full height.
            let flow_need = if breakable { y } else { flow_need };
            // Process the footnote.
            match self.footnote(elem, &mut regions, flow_need, migratable) {
                // The footnote was already processed or queued.
                Ok(()) => {}
                // First handle more footnotes before relayouting.
                Err(Stop::Relayout(_)) => relayout = true,
                // Either of
                // - A `Stop::Finish` indicating that the frame's origin element
                //   should migrate to uphold the footnote invariant.
                // - A fatal error.
                err => return err,
            }
            // We only migrate the origin frame if the first footnote's first
            // line didn't fit.
            migratable = false;
        }
        // If this is set, we laid out at least one footnote, so we need a
        // relayout.
        if relayout {
            return Err(Stop::Relayout(PlacementScope::Column));
        }
        Ok(())
    }
    /// Handles a single footnote.
    fn footnote(
        &mut self,
        elem: Packed<FootnoteElem>,
        regions: &mut Regions,
        flow_need: Abs,
        migratable: bool,
    ) -> FlowResult<()> {
        // Ignore reference footnotes and already processed ones.
        let loc = elem.location().unwrap();
        if elem.is_ref() || self.skipped(loc) {
            return Ok(());
        }
        // If there is already a queued spill or footnote, queue this one as
        // well. We don't want to disrupt the order.
        let area = &mut self.column_insertions;
        if self.footnote_spill.is_some() || !self.footnote_queue.is_empty() {
            self.footnote_queue.push(elem);
            return Ok(());
        }
        // If there weren't any footnotes so far, account for the footnote
        // separator.
        let mut separator = None;
        let mut separator_need = Abs::zero();
        if area.footnotes.is_empty() {
            let frame =
                layout_footnote_separator(self.engine, self.config, regions.base())?;
            separator_need += self.config.footnote.clearance + frame.height();
            separator = Some(frame);
        }
        // Prepare regions for the footnote.
        let mut pod = *regions;
        pod.expand.y = false;
        pod.size.y -= flow_need + separator_need + self.config.footnote.gap;
        // Layout the footnote entry.
        let frames = layout_footnote(self.engine, self.config, &elem, pod)?.into_frames();
        // Find nested footnotes in the entry.
        let nested = find_in_frames::<FootnoteElem>(&frames);
        // Check if there are any non-empty frames.
        let exist_non_empty_frame = frames.iter().any(|f| !f.is_empty());
        // Extract the first frame.
        let mut iter = frames.into_iter();
        let first = iter.next().unwrap();
        let note_need = self.config.footnote.gap + first.height();
        // If the first frame is empty, then none of its content fit. If
        // possible, we then migrate the origin frame to the next region to
        // uphold the footnote invariant (that marker and entry are on the same
        // page). If not, we just queue the footnote for the next page, but
        // only if that would actually make a difference (that is, if the
        // footnote isn't alone in the page after not fitting in any previous
        // pages, as it probably won't ever fit then).
        //
        // Note that a non-zero flow need also indicates that queueing would
        // make a difference, because the flow need is subtracted from the
        // available height in the entry's pod even if what caused that need
        // wasn't considered for the input `regions`. For example, floats just
        // pass the `regions` they received along to their footnotes, which
        // don't take into account the space occupied by the floats themselves,
        // but they do indicate their footnotes have a non-zero flow need, so
        // queueing them can matter as, in the following pages, the flow need
        // will be set to zero and the footnote will be alone in the page.
        // Then, `may_progress()` will also be false (this time, correctly) and
        // the footnote is laid out, as queueing wouldn't improve the lack of
        // space anymore and would result in an infinite loop.
        //
        // However, it is worth noting that migration does take into account
        // the original region, before inserting what prompted the flow need.
        // Logically, if moving the original frame can't improve the lack of
        // space, then migration should be inhibited. The space occupied by the
        // original frame is not relevant for that check. Therefore,
        // `regions.may_progress()` must still be checked separately for
        // migration, regardless of the presence of flow need.
        if first.is_empty() && exist_non_empty_frame {
            if migratable && regions.may_progress() {
                return Err(Stop::Finish(false));
            } else if regions.may_progress() || !flow_need.is_zero() {
                self.footnote_queue.push(elem);
                return Ok(());
            }
        }
        // Save the separator.
        if let Some(frame) = separator {
            area.push_footnote_separator(self.config, frame);
            regions.size.y -= separator_need;
        }
        // Save the footnote's frame.
        area.push_footnote(self.config, first);
        area.skips.push(loc);
        regions.size.y -= note_need;
        // Save the spill.
        if !iter.as_slice().is_empty() {
            self.footnote_spill = Some(iter);
        }
        // Lay out nested footnotes.
        for (_, note) in nested {
            match self.footnote(note, regions, flow_need, migratable) {
                // This footnote was already processed or queued.
                Ok(_) => {}
                // Footnotes always request a relayout when processed for the
                // first time, so we ignore a relayout request since we're
                // about to do so afterwards. Without this check, the first
                // inner footnote interrupts processing of the following ones.
                Err(Stop::Relayout(_)) => {}
                // Either of
                // - A `Stop::Finish` indicating that the frame's origin element
                //   should migrate to uphold the footnote invariant.
                // - A fatal error.
                err => return err,
            }
        }
        // Since we laid out a footnote, we need a relayout.
        Err(Stop::Relayout(PlacementScope::Column))
    }
    /// Handles spillover from a footnote.
    fn footnote_spill(
        &mut self,
        mut iter: std::vec::IntoIter<Frame>,
        base: Size,
    ) -> SourceResult<()> {
        let area = &mut self.column_insertions;
        // Create and save the separator.
        let separator = layout_footnote_separator(self.engine, self.config, base)?;
        area.push_footnote_separator(self.config, separator);
        // Save the footnote's frame.
        let frame = iter.next().unwrap();
        area.push_footnote(self.config, frame);
        // Save the spill.
        if !iter.as_slice().is_empty() {
            self.footnote_spill = Some(iter);
        }
        Ok(())
    }
    /// Checks whether an insertion was already processed and doesn't need to be
    /// handled again.
    fn skipped(&self, loc: Location) -> bool {
        self.work.skips.contains(&loc)
            || self.page_insertions.skips.contains(&loc)
            || self.column_insertions.skips.contains(&loc)
    }
    /// The amount of width needed by insertions.
    pub fn insertion_width(&self) -> Abs {
        self.column_insertions.width.max(self.page_insertions.width)
    }
}
/// Lay out the footnote separator, typically a line.
fn layout_footnote_separator(
    engine: &mut Engine,
    config: &Config,
    base: Size,
) -> SourceResult<Frame> {
    // The separator may expand horizontally (if configured), but never
    // vertically.
    let expand = Axes::new(config.footnote.expand, false);
    let pod = Region::new(base, expand);
    crate::layout_frame(
        engine,
        &config.footnote.separator,
        Locator::root(),
        config.shared,
        pod,
    )
}
/// Lay out a footnote.
fn layout_footnote(
    engine: &mut Engine,
    config: &Config,
    elem: &Packed<FootnoteElem>,
    pod: Regions,
) -> SourceResult<Fragment> {
    let loc = elem.location().unwrap();

    // We attach a well-known derived location to the entry so that the
    // note can link to this entry without first querying for it.
    let entry = FootnoteEntry::new(elem.clone())
        .pack()
        .spanned(elem.span())
        .located(loc.variant(1));

    let mut fragment = crate::layout_fragment(
        engine,
        &entry,
        Locator::synthesize(loc),
        config.shared,
        pod,
    )?;

    // Attribute all produced frames to the footnote element.
    for frame in &mut fragment {
        frame.set_parent(FrameParent::new(loc, Inherit::No));
    }

    Ok(fragment)
}
/// An additive list of insertions.
#[derive(Default)]
struct Insertions<'a, 'b> {
    // Floats assigned to the top area, with their laid-out frames.
    top_floats: Vec<(&'b PlacedChild<'a>, Frame)>,
    // Floats assigned to the bottom area, with their laid-out frames.
    bottom_floats: Vec<(&'b PlacedChild<'a>, Frame)>,
    // Laid-out footnote entry frames for the bottom area.
    footnotes: Vec<Frame>,
    // The separator between flow content and footnotes, if any footnotes
    // were pushed.
    footnote_separator: Option<Frame>,
    // The total height of the top area, including clearances.
    top_size: Abs,
    // The total height of the bottom area, including clearances and gaps.
    bottom_size: Abs,
    // The maximum width among all pushed insertion frames.
    width: Abs,
    // Locations of insertions processed here, so they aren't handled again
    // (see `Composer::skipped`).
    skips: Vec<Location>,
}
impl<'a, 'b> Insertions<'a, 'b> {
    /// Add a float to the top or bottom area.
    fn push_float(
        &mut self,
        placed: &'b PlacedChild<'a>,
        frame: Frame,
        align_y: FixedAlignment,
    ) {
        self.width.set_max(frame.width());
        // Reserve space for the float itself plus its clearance towards the
        // in-flow content.
        let amount = frame.height() + placed.clearance;
        let pair = (placed, frame);
        if align_y == FixedAlignment::Start {
            self.top_size += amount;
            self.top_floats.push(pair);
        } else {
            self.bottom_size += amount;
            self.bottom_floats.push(pair);
        }
    }
    /// Add a footnote to the bottom area.
    fn push_footnote(&mut self, config: &Config, frame: Frame) {
        self.width.set_max(frame.width());
        self.bottom_size += config.footnote.gap + frame.height();
        self.footnotes.push(frame);
    }
    /// Add a footnote separator to the bottom area.
    fn push_footnote_separator(&mut self, config: &Config, frame: Frame) {
        self.width.set_max(frame.width());
        self.bottom_size += config.footnote.clearance + frame.height();
        self.footnote_separator = Some(frame);
    }
    /// The combined height of the top and bottom area (including clearances).
    /// Subtracting this from the total region size yields the available space
    /// for distribution.
    fn height(&self) -> Abs {
        self.top_size + self.bottom_size
    }
    /// Produce a frame for the full region based on the `inner` frame produced
    /// by distribution or column layout.
    fn finalize(self, work: &mut Work, config: &Config, inner: Frame) -> Frame {
        work.extend_skips(&self.skips);
        // Without any insertions, the inner frame can be passed through as-is.
        if self.top_floats.is_empty()
            && self.bottom_floats.is_empty()
            && self.footnote_separator.is_none()
            && self.footnotes.is_empty()
        {
            return inner;
        }
        let size = inner.size() + Size::with_y(self.height());
        let mut output = Frame::soft(size);
        let mut offset_top = Abs::zero();
        // Bottom insertions are placed top-down, starting at the top edge of
        // the bottom area.
        let mut offset_bottom = size.y - self.bottom_size;
        for (placed, frame) in self.top_floats {
            let x = placed.align_x.position(size.x - frame.width());
            let y = offset_top;
            let delta = placed.delta.zip_map(size, Rel::relative_to).to_point();
            offset_top += frame.height() + placed.clearance;
            output.push_frame(Point::new(x, y) + delta, frame);
        }
        output.push_frame(Point::with_y(self.top_size), inner);
        // We put floats first and then footnotes. This differs from what LaTeX
        // does and is a little inconsistent w.r.t column vs page floats (page
        // floats are below footnotes because footnotes are per column), but
        // it's what most people (including myself) seem to intuitively expect.
        // We experimented with the LaTeX ordering in 0.12.0-rc1, but folks were
        // surprised and considered this strange. In LaTeX, it can be changed
        // with `\usepackage[bottom]{footmisc}`. We could also consider adding
        // configuration in the future.
        for (placed, frame) in self.bottom_floats {
            offset_bottom += placed.clearance;
            let x = placed.align_x.position(size.x - frame.width());
            let y = offset_bottom;
            let delta = placed.delta.zip_map(size, Rel::relative_to).to_point();
            offset_bottom += frame.height();
            output.push_frame(Point::new(x, y) + delta, frame);
        }
        if let Some(frame) = self.footnote_separator {
            offset_bottom += config.footnote.clearance;
            let y = offset_bottom;
            offset_bottom += frame.height();
            output.push_frame(Point::with_y(y), frame);
        }
        for frame in self.footnotes {
            offset_bottom += config.footnote.gap;
            let y = offset_bottom;
            offset_bottom += frame.height();
            output.push_frame(Point::with_y(y), frame);
        }
        output
    }
}
/// Lay out the given collected lines' line numbers to an output frame.
///
/// The numbers are placed either on the left margin (left border of the frame)
/// or on the right margin (right border). Before they are placed, a line number
/// counter reset is inserted if we're in the first column of the page being
/// currently laid out and the user requested for line numbers to be reset at
/// the start of every page.
fn layout_line_numbers(
    engine: &mut Engine,
    config: &Config,
    line_config: &LineNumberConfig,
    locator: Locator,
    column: usize,
    output: &mut Frame,
) -> SourceResult<()> {
    let mut locator = locator.split();
    // Reset page-scoped line numbers if currently at the first column.
    if column == 0 && line_config.scope == LineNumberingScope::Page {
        let reset = layout_line_number_reset(engine, config, &mut locator)?;
        output.push_frame(Point::zero(), reset);
    }
    // Find all line markers.
    let mut lines = find_in_frame::<ParLineMarker>(output);
    if lines.is_empty() {
        return Ok(());
    }
    // Assume the line numbers aren't sorted by height. They must be sorted so
    // we can deduplicate line numbers below based on vertical proximity.
    lines.sort_by_key(|&(y, _)| y);
    // Used for horizontal alignment.
    let mut max_number_width = Abs::zero();
    // This is used to skip lines that are too close together.
    let mut prev_bottom = None;
    // Buffer line number frames so we can align them horizontally later before
    // placing, based on the width of the largest line number.
    let mut line_numbers = vec![];
    // Layout the lines.
    for &(y, ref marker) in &lines {
        if prev_bottom.is_some_and(|bottom| y < bottom) {
            // Lines are too close together. Display as the same line number.
            continue;
        }
        // Layout the number and record its width in search of the maximum.
        let frame = layout_line_number(engine, config, &mut locator, &marker.numbering)?;
        // Note that this line.y is larger than the previous due to sorting.
        // Therefore, the check at the top of the loop ensures no line numbers
        // will reasonably intersect with each other. We enforce a minimum
        // spacing of 1pt between consecutive line numbers in case a zero-height
        // frame is used.
        prev_bottom = Some(y + frame.height().max(Abs::pt(1.0)));
        max_number_width.set_max(frame.width());
        line_numbers.push((y, marker, frame));
    }
    // Second pass: place the buffered numbers now that the maximum width is
    // known.
    for (y, marker, frame) in line_numbers {
        // The last column will always place line numbers at the end
        // margin. This should become configurable in the future.
        let margin = {
            let opposite =
                config.columns.count >= 2 && column + 1 == config.columns.count;
            if opposite { OuterHAlignment::End } else { marker.number_margin }
                .resolve(config.shared)
        };
        // Determine how much space to leave between the column and the number.
        let clearance = match marker.number_clearance {
            Smart::Auto => line_config.default_clearance,
            Smart::Custom(rel) => rel.resolve(config.shared),
        };
        // Compute the base X position.
        let x = match margin {
            // Move the number to the left of the left edge (at 0pt) by the maximum
            // width and the clearance.
            FixedAlignment::Start => -max_number_width - clearance,
            // Move the number to the right edge and add clearance.
            FixedAlignment::End => output.width() + clearance,
            // Can't happen due to `OuterHAlignment`.
            FixedAlignment::Center => unreachable!(),
        };
        // Determine how much to shift the number due to its alignment.
        let shift = {
            let align = marker
                .number_align
                .map(|align| align.resolve(config.shared))
                .unwrap_or_else(|| margin.inv());
            align.position(max_number_width - frame.width())
        };
        // Compute the final position of the number and add it to the output.
        let pos = Point::new(x + shift, y);
        output.push_frame(pos, frame);
    }
    Ok(())
}
/// Creates a frame that resets the line number counter.
fn layout_line_number_reset(
engine: &mut Engine,
config: &Config,
locator: &mut SplitLocator,
) -> SourceResult<Frame> {
let counter = Counter::of(ParLineMarker::ELEM);
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | true |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/flow/block.rs | crates/typst-layout/src/flow/block.rs | use std::cell::LazyCell;
use smallvec::SmallVec;
use typst_library::diag::SourceResult;
use typst_library::engine::Engine;
use typst_library::foundations::{Packed, Resolve, StyleChain};
use typst_library::introspection::Locator;
use typst_library::layout::{
Abs, Axes, BlockBody, BlockElem, Fragment, Frame, FrameKind, Region, Regions, Rel,
Sides, Size, Sizing,
};
use typst_library::visualize::Stroke;
use typst_utils::Numeric;
use crate::shapes::{clip_rect, fill_and_stroke};
/// Lay this out as an unbreakable block.
///
/// Produces a single [`Frame`] from the block's body and then applies, in
/// order: frame-kind marking, size enforcement on expanded axes, inset,
/// clipping, fill/stroke, and the element's label.
#[typst_macros::time(name = "block", span = elem.span())]
pub fn layout_single_block(
    elem: &Packed<BlockElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Fetch sizing properties.
    let width = elem.width.get(styles);
    let height = elem.height.get(styles);
    let inset = elem.inset.resolve(styles).unwrap_or_default();
    // Build the pod regions.
    let pod = unbreakable_pod(&width.into(), &height, &inset, styles, region.size);
    // Layout the body.
    let body = elem.body.get_ref(styles);
    let mut frame = match body {
        // If we have no body, just create one frame. Its size will be
        // adjusted below.
        None => Frame::hard(Size::zero()),
        // If we have content as our body, just layout it.
        Some(BlockBody::Content(body)) => {
            crate::layout_frame(engine, body, locator.relayout(), styles, pod)?
        }
        // If we have a child that wants to layout with just access to the
        // base region, give it that.
        Some(BlockBody::SingleLayouter(callback)) => {
            callback.call(engine, locator, styles, pod)?
        }
        // If we have a child that wants to layout with full region access,
        // we layout it. Only expand along finite axes: expanding into an
        // infinite size would be meaningless.
        Some(BlockBody::MultiLayouter(callback)) => {
            let expand = (pod.expand | region.expand) & pod.size.map(Abs::is_finite);
            let pod = Region { expand, ..pod };
            callback.call(engine, locator, styles, pod.into())?.into_frame()
        }
    };
    // Explicit blocks are boundaries for gradient relativeness.
    if matches!(body, None | Some(BlockBody::Content(_))) {
        frame.set_kind(FrameKind::Hard);
    }
    // Enforce a correct frame size on the expanded axes. Do this before
    // applying the inset, since the pod shrunk.
    frame.set_size(pod.expand.select(pod.size, frame.size()));
    // Apply the inset.
    if !inset.is_zero() {
        crate::pad::grow(&mut frame, &inset);
    }
    // Prepare fill and stroke.
    let fill = elem.fill.get_cloned(styles);
    let stroke = elem
        .stroke
        .resolve(styles)
        .unwrap_or_default()
        .map(|s| s.map(Stroke::unwrap_or_default));
    // Only fetch these if necessary (for clipping or filling/stroking).
    let outset = LazyCell::new(|| elem.outset.resolve(styles).unwrap_or_default());
    let radius = LazyCell::new(|| elem.radius.resolve(styles).unwrap_or_default());
    // Clip the contents, if requested.
    if elem.clip.get(styles) {
        frame.clip(clip_rect(frame.size(), &radius, &stroke, &outset));
    }
    // Add fill and/or stroke.
    if fill.is_some() || stroke.iter().any(Option::is_some) {
        fill_and_stroke(&mut frame, fill, &stroke, &outset, &radius, elem.span());
    }
    // Assign the element's label to the resulting frame, if it has one.
    if let Some(label) = elem.label() {
        frame.label(label);
    }
    Ok(frame)
}
/// Lay this out as a breakable block.
///
/// In contrast to [`layout_single_block`], this produces a [`Fragment`] with
/// one frame per region that the block breaks across. The same
/// post-processing (sizing, inset, clipping, fill/stroke, label) is applied
/// to each frame individually.
#[typst_macros::time(name = "block", span = elem.span())]
pub fn layout_multi_block(
    elem: &Packed<BlockElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    // Fetch sizing properties.
    let width = elem.width.get(styles);
    let height = elem.height.get(styles);
    let inset = elem.inset.resolve(styles).unwrap_or_default();
    // Allocate a small vector for backlogs.
    let mut buf = SmallVec::<[Abs; 2]>::new();
    // Build the pod regions.
    let pod = breakable_pod(&width.into(), &height, &inset, styles, regions, &mut buf);
    // Layout the body.
    let body = elem.body.get_ref(styles);
    let mut fragment = match body {
        // If we have no body, just create one frame plus one per backlog
        // region. We create them zero-sized; if necessary, their size will
        // be adjusted below.
        None => {
            let mut frames = vec![];
            frames.push(Frame::hard(Size::zero()));
            if pod.expand.y {
                let mut iter = pod;
                while !iter.backlog.is_empty() {
                    frames.push(Frame::hard(Size::zero()));
                    iter.next();
                }
            }
            Fragment::frames(frames)
        }
        // If we have content as our body, just layout it.
        Some(BlockBody::Content(body)) => {
            let mut fragment =
                crate::layout_fragment(engine, body, locator.relayout(), styles, pod)?;
            // If the body is automatically sized and produced more than one
            // fragment, ensure that the width was consistent across all
            // regions. If it wasn't, we need to relayout with expansion,
            // forcing every frame to the maximum width observed so far.
            if !pod.expand.x
                && fragment
                    .as_slice()
                    .windows(2)
                    .any(|w| !w[0].width().approx_eq(w[1].width()))
            {
                let max_width =
                    fragment.iter().map(|frame| frame.width()).max().unwrap_or_default();
                let pod = Regions {
                    size: Size::new(max_width, pod.size.y),
                    expand: Axes::new(true, pod.expand.y),
                    ..pod
                };
                fragment = crate::layout_fragment(engine, body, locator, styles, pod)?;
            }
            fragment
        }
        // If we have a child that wants to layout with just access to the
        // base region, give it that.
        Some(BlockBody::SingleLayouter(callback)) => {
            let pod = Region::new(pod.base(), pod.expand);
            callback.call(engine, locator, styles, pod).map(Fragment::frame)?
        }
        // If we have a child that wants to layout with full region access,
        // we layout it.
        //
        // For auto-sized multi-layouters, we propagate the outer expansion
        // so that they can decide for themselves. We also ensure again to
        // only expand if the size is finite.
        Some(BlockBody::MultiLayouter(callback)) => {
            let expand = (pod.expand | regions.expand) & pod.size.map(Abs::is_finite);
            let pod = Regions { expand, ..pod };
            callback.call(engine, locator, styles, pod)?
        }
    };
    // Prepare fill and stroke.
    let fill = elem.fill.get_ref(styles);
    let stroke = elem
        .stroke
        .resolve(styles)
        .unwrap_or_default()
        .map(|s| s.map(Stroke::unwrap_or_default));
    // Only fetch these if necessary (for clipping or filling/stroking).
    let outset = LazyCell::new(|| elem.outset.resolve(styles).unwrap_or_default());
    let radius = LazyCell::new(|| elem.radius.resolve(styles).unwrap_or_default());
    // Fetch/compute these outside of the loop.
    let clip = elem.clip.get(styles);
    let has_fill_or_stroke = fill.is_some() || stroke.iter().any(Option::is_some);
    let has_inset = !inset.is_zero();
    let is_explicit = matches!(body, None | Some(BlockBody::Content(_)));
    // Skip filling, stroking and labeling the first frame if it is empty and
    // a non-empty one follows.
    let mut skip_first = false;
    if let [first, rest @ ..] = fragment.as_slice() {
        skip_first = first.is_empty() && rest.iter().any(|frame| !frame.is_empty());
    }
    // Post-process to apply insets, clipping, fills, and strokes.
    for (i, (frame, region)) in fragment.iter_mut().zip(pod.iter()).enumerate() {
        // Explicit blocks are boundaries for gradient relativeness.
        if is_explicit {
            frame.set_kind(FrameKind::Hard);
        }
        // Enforce a correct frame size on the expanded axes. Do this before
        // applying the inset, since the pod shrunk.
        frame.set_size(pod.expand.select(region, frame.size()));
        // Apply the inset.
        if has_inset {
            crate::pad::grow(frame, &inset);
        }
        // Clip the contents, if requested.
        if clip {
            frame.clip(clip_rect(frame.size(), &radius, &stroke, &outset));
        }
        // Add fill and/or stroke.
        if has_fill_or_stroke && (i > 0 || !skip_first) {
            fill_and_stroke(frame, fill.clone(), &stroke, &outset, &radius, elem.span());
        }
    }
    // Assign label to each frame in the fragment.
    if let Some(label) = elem.label() {
        // Skip empty orphan frames, as a label would make them non-empty.
        for frame in fragment.iter_mut().skip(if skip_first { 1 } else { 0 }) {
            frame.label(label);
        }
    }
    Ok(fragment)
}
/// Builds the pod region for an unbreakable sized container.
pub(crate) fn unbreakable_pod(
    width: &Sizing,
    height: &Sizing,
    inset: &Sides<Rel<Abs>>,
    styles: StyleChain,
    base: Size,
) -> Region {
    // Resolve a single axis of the size:
    // - For auto, the whole base is available.
    // - Fr is handled outside and already factored into the base, so we can
    //   treat it equivalently to 100%.
    // - Relative sizing resolves against the base length.
    let resolve = |sizing: &Sizing, base: Abs| match sizing {
        Sizing::Auto | Sizing::Fr(_) => base,
        Sizing::Rel(rel) => rel.resolve(styles).relative_to(base),
    };

    let mut size = Size::new(resolve(width, base.x), resolve(height, base.y));

    // Shrink the available space by the inset, if there is any.
    if !inset.is_zero() {
        size = crate::pad::shrink(size, inset);
    }

    // A manually sized axis has a forced size, so expansion is enabled there
    // (as long as the size is finite).
    let expand = Axes::new(
        *width != Sizing::Auto && size.x.is_finite(),
        *height != Sizing::Auto && size.y.is_finite(),
    );

    Region::new(size, expand)
}
/// Builds the pod regions for a breakable sized container.
///
/// The backlog of the returned regions borrows from `buf`, which is used as
/// scratch space for the region heights.
fn breakable_pod<'a>(
    width: &Sizing,
    height: &Sizing,
    inset: &Sides<Rel<Abs>>,
    styles: StyleChain,
    regions: Regions,
    buf: &'a mut SmallVec<[Abs; 2]>,
) -> Regions<'a> {
    let base = regions.base();
    // The vertical region sizes we're about to build.
    let first;
    let full;
    let backlog: &mut [Abs];
    let last;
    // If the block has a fixed height, things are very different, so we
    // handle that case completely separately.
    match height {
        Sizing::Auto | Sizing::Fr(_) => {
            // If the block is automatically sized, we can just inherit the
            // regions.
            first = regions.size.y;
            full = regions.full;
            buf.extend_from_slice(regions.backlog);
            backlog = buf;
            last = regions.last;
        }
        Sizing::Rel(rel) => {
            // Resolve the sizing to a concrete size.
            let resolved = rel.resolve(styles).relative_to(base.y);
            // Since we're manually sized, the resolved size is the base height.
            full = resolved;
            // Distribute the fixed height across a start region and a backlog.
            (first, backlog) = distribute(resolved, regions, buf);
            // If the height is manually sized, we don't want a final repeatable
            // region.
            last = None;
        }
    };
    // Resolve the horizontal sizing to a concrete width and combine
    // `width` and `first` into `size`.
    let mut size = Size::new(
        match width {
            Sizing::Auto | Sizing::Fr(_) => regions.size.x,
            Sizing::Rel(rel) => rel.resolve(styles).relative_to(base.x),
        },
        first,
    );
    // Take the inset, if any, into account, applying it to the
    // individual region components.
    let (mut full, mut last) = (full, last);
    if !inset.is_zero() {
        crate::pad::shrink_multiple(&mut size, &mut full, backlog, &mut last, inset);
    }
    // If an axis is manually sized, its size is forced and we should enable
    // expansion there (as long as the size is finite).
    let expand = Axes::new(
        *width != Sizing::Auto && size.x.is_finite(),
        *height != Sizing::Auto && size.y.is_finite(),
    );
    Regions { size, full, backlog, last, expand }
}
/// Distribute a fixed height spread over existing regions into a new first
/// height and a new backlog.
///
/// Note that, if the given height fits within the first region, no backlog is
/// generated and the first region's height shrinks to fit exactly the given
/// height. In particular, negative and zero heights always fit in any region,
/// so such heights are always directly returned as the new first region
/// height.
///
/// Returns `buf[0]` as the first height and `buf[1..]` as the backlog. This
/// expects `buf` to be empty on entry (the sole caller, `breakable_pod`,
/// passes a freshly created buffer).
fn distribute<'a>(
    height: Abs,
    mut regions: Regions,
    buf: &'a mut SmallVec<[Abs; 2]>,
) -> (Abs, &'a mut [Abs]) {
    // Build new region heights from old regions.
    let mut remaining = height;
    // Negative and zero heights always fit, so just keep them.
    // No backlog is generated.
    if remaining <= Abs::zero() {
        buf.push(remaining);
        return (buf[0], &mut buf[1..]);
    }
    loop {
        // This clamp is safe (min <= max), as 'remaining' won't be negative
        // due to the initial check above (on the first iteration) and due to
        // stopping on 'remaining.approx_empty()' below (for the second
        // iteration onwards).
        let limited = regions.size.y.clamp(Abs::zero(), remaining);
        buf.push(limited);
        remaining -= limited;
        if remaining.approx_empty()
            || !regions.may_break()
            || (!regions.may_progress() && limited.approx_empty())
        {
            break;
        }
        regions.next();
    }
    // If there is still something remaining, apply it to the
    // last region (it will overflow, but there's nothing else
    // we can do).
    if !remaining.approx_empty()
        && let Some(last) = buf.last_mut()
    {
        *last += remaining;
    }
    // Distribute the heights to the first region and the
    // backlog. There is no last region, since the height is
    // fixed.
    (buf[0], &mut buf[1..])
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/flow/distribute.rs | crates/typst-layout/src/flow/distribute.rs | use typst_library::introspection::Tag;
use typst_library::layout::{
Abs, Axes, FixedAlignment, Fr, Frame, FrameItem, Point, Region, Regions, Rel, Size,
};
use typst_utils::Numeric;
use super::{
Child, Composer, FlowResult, LineChild, MultiChild, MultiSpill, PlacedChild,
SingleChild, Stop, Work,
};
/// Distributes as many children as fit from `composer.work` into the first
/// region and returns the resulting frame.
pub fn distribute(composer: &mut Composer, regions: Regions) -> FlowResult<Frame> {
    let mut dist = Distributor {
        composer,
        regions,
        items: vec![],
        sticky: None,
        stickable: None,
    };

    // Remember the starting state so `finalize` can roll back to it.
    let init = dist.snapshot();

    // Run the distribution and determine whether the break that ends this
    // region (if any) was forced.
    let forced = match dist.run() {
        // All children were processed: the break is forced exactly if the
        // whole flow is done.
        Ok(()) => dist.composer.work.done(),
        // The region was finished early; the stop tells us whether that was
        // a forced break.
        Err(Stop::Finish(forced)) => forced,
        // Propagate relayout requests and fatal errors.
        Err(err) => return Err(err),
    };

    let region = Region::new(regions.size, regions.expand);
    dist.finalize(region, init, forced)
}
/// State for distribution.
///
/// See [Composer] regarding lifetimes.
struct Distributor<'a, 'b, 'x, 'y, 'z> {
    /// The composer that is used to handle insertions.
    composer: &'z mut Composer<'a, 'b, 'x, 'y>,
    /// Regions which are continuously shrunk (along y) as new items are
    /// added.
    regions: Regions<'z>,
    /// Already laid out items, not yet aligned. Alignment and positioning
    /// happen in `finalize`.
    items: Vec<Item<'a, 'b>>,
    /// A snapshot which can be restored to migrate a suffix of sticky blocks to
    /// the next region.
    sticky: Option<DistributionSnapshot<'a, 'b>>,
    /// Whether the current group of consecutive sticky blocks are still sticky
    /// and may migrate with the attached frame. This is `None` while we aren't
    /// processing sticky blocks. On the first sticky block, this will become
    /// `Some(true)` if migrating sticky blocks as usual would make a
    /// difference - this is given by `regions.may_progress()`. Otherwise, it
    /// is set to `Some(false)`, which is usually the case when the first
    /// sticky block in the group is at the very top of the page (then,
    /// migrating it would just lead us back to the top of the page, leading
    /// to an infinite loop). In that case, all sticky blocks of the group are
    /// also disabled, until this is reset to `None` on the first non-sticky
    /// frame we find.
    ///
    /// While this behavior of disabling stickiness of sticky blocks at the
    /// very top of the page may seem non-ideal, it is only problematic (that
    /// is, may lead to orphaned sticky blocks / headings) if the combination
    /// of 'sticky blocks + attached frame' doesn't fit in one page, in which
    /// case there is nothing Typst can do to improve the situation, as sticky
    /// blocks are supposed to always be in the same page as the subsequent
    /// frame, but that is impossible in that case, which is thus pathological.
    stickable: Option<bool>,
}
/// A snapshot of the distribution state.
struct DistributionSnapshot<'a, 'b> {
    /// The remaining work at the time of the snapshot.
    work: Work<'a, 'b>,
    /// The number of laid-out items at the time of the snapshot; restoring
    /// truncates the item list back to this length.
    items: usize,
}
/// A laid out item in a distribution.
enum Item<'a, 'b> {
    /// An introspection tag.
    Tag(&'a Tag),
    /// Absolute spacing and its weakness level. A weakness of zero is strong
    /// spacing that is always kept; levels of one and above are weak and may
    /// be collapsed or trimmed (see `keep_spacing` and `trim_spacing`).
    Abs(Abs, u8),
    /// Fractional spacing or a fractional block (the latter carries the
    /// child so it can be laid out once the share of space is known).
    Fr(Fr, Option<&'b SingleChild<'a>>),
    /// A frame for a laid out line or block, with its alignment.
    Frame(Frame, Axes<FixedAlignment>),
    /// A frame for an absolutely (not floatingly) placed child.
    Placed(Frame, &'b PlacedChild<'a>),
}
impl Item<'_, '_> {
    /// Whether this item should be migrated to the next region if the region
    /// consists solely of such items.
    fn migratable(&self) -> bool {
        match self {
            // Tags carry no visible content and can always move along.
            Self::Tag(_) => true,
            // A frame may migrate only if it is invisible: zero-sized and
            // containing nothing but links and tags.
            Self::Frame(frame, _) => {
                if !frame.size().is_zero() {
                    return false;
                }
                frame.items().all(|(_, item)| {
                    matches!(item, FrameItem::Link(_, _) | FrameItem::Tag(_))
                })
            }
            // Absolutely placed children can migrate; floats are managed by
            // the composer instead.
            Self::Placed(_, placed) => !placed.float,
            // Spacing and fractional items stay put.
            _ => false,
        }
    }
}
impl<'a, 'b> Distributor<'a, 'b, '_, '_, '_> {
    /// Distributes content into the region.
    fn run(&mut self) -> FlowResult<()> {
        // First, handle spill of a breakable block.
        if let Some(spill) = self.composer.work.spill.take() {
            self.multi_spill(spill)?;
        }
        // Once the spill is taken care of, process children until no space
        // is left or no children are left.
        while let Some(child) = self.composer.work.head() {
            self.child(child)?;
            self.composer.work.advance();
        }
        Ok(())
    }
    /// Processes a single child.
    ///
    /// - Returns `Ok(())` if the child was successfully processed.
    /// - Returns `Err(Stop::Finish)` if a region break should be triggered.
    /// - Returns `Err(Stop::Relayout(_))` if the region needs to be relayouted
    ///   due to an insertion (float/footnote).
    /// - Returns `Err(Stop::Error(_))` if there was a fatal error.
    fn child(&mut self, child: &'b Child<'a>) -> FlowResult<()> {
        match child {
            Child::Tag(tag) => self.tag(tag),
            Child::Rel(amount, weakness) => self.rel(*amount, *weakness),
            Child::Fr(fr) => self.fr(*fr),
            Child::Line(line) => self.line(line)?,
            Child::Single(single) => self.single(single)?,
            Child::Multi(multi) => self.multi(multi)?,
            Child::Placed(placed) => self.placed(placed)?,
            Child::Flush => self.flush()?,
            Child::Break(weak) => self.break_(*weak)?,
        }
        Ok(())
    }
    /// Processes a tag by queueing it; items are generated for pending tags
    /// in `flush_tags`.
    fn tag(&mut self, tag: &'a Tag) {
        self.composer.work.tags.push(tag);
    }
    /// Generate items for pending tags.
    fn flush_tags(&mut self) {
        if !self.composer.work.tags.is_empty() {
            let tags = &mut self.composer.work.tags;
            self.items.extend(tags.iter().copied().map(Item::Tag));
            tags.clear();
        }
    }
    /// Processes relative spacing. Weak spacing (weakness > 0) may be
    /// discarded or may replace earlier weak spacing; see `keep_spacing`.
    fn rel(&mut self, amount: Rel<Abs>, weakness: u8) {
        let amount = amount.relative_to(self.regions.base().y);
        if weakness > 0 && !self.keep_spacing(amount, weakness) {
            return;
        }
        self.regions.size.y -= amount;
        self.items.push(Item::Abs(amount, weakness));
    }
    /// Processes fractional spacing.
    fn fr(&mut self, fr: Fr) {
        self.trim_spacing();
        self.items.push(Item::Fr(fr, None));
    }
    /// Decides whether to keep weak spacing based on previous items. If there
    /// is a preceding weak spacing, it might be patched in place.
    fn keep_spacing(&mut self, amount: Abs, weakness: u8) -> bool {
        for item in self.items.iter_mut().rev() {
            match *item {
                Item::Abs(prev_amount, prev_weakness @ 1..) => {
                    // Replace the previous weak spacing if the new one is
                    // stronger (lower weakness) or larger at equal weakness.
                    if weakness <= prev_weakness
                        && (weakness < prev_weakness || amount > prev_amount)
                    {
                        self.regions.size.y -= amount - prev_amount;
                        *item = Item::Abs(amount, weakness);
                    }
                    return false;
                }
                Item::Tag(_) | Item::Abs(..) | Item::Placed(..) => {}
                Item::Fr(.., None) => return false,
                Item::Frame(..) | Item::Fr(.., Some(_)) => return true,
            }
        }
        false
    }
    /// Trims trailing weak spacing from the items, giving the space back to
    /// the region.
    fn trim_spacing(&mut self) {
        for (i, item) in self.items.iter().enumerate().rev() {
            match *item {
                Item::Abs(amount, 1..) => {
                    self.regions.size.y += amount;
                    self.items.remove(i);
                    break;
                }
                Item::Tag(_) | Item::Abs(..) | Item::Placed(..) => {}
                Item::Frame(..) | Item::Fr(..) => break,
            }
        }
    }
    /// The amount of trailing weak spacing.
    fn weak_spacing(&mut self) -> Abs {
        for item in self.items.iter().rev() {
            match *item {
                Item::Abs(amount, 1..) => return amount,
                Item::Tag(_) | Item::Abs(..) | Item::Placed(..) => {}
                Item::Frame(..) | Item::Fr(..) => break,
            }
        }
        Abs::zero()
    }
    /// Processes a line of a paragraph.
    fn line(&mut self, line: &'b LineChild) -> FlowResult<()> {
        // If the line doesn't fit and a followup region may improve things,
        // finish the region.
        if !self.regions.size.y.fits(line.frame.height()) && self.regions.may_progress() {
            return Err(Stop::Finish(false));
        }
        // If the line's need, which includes its own height and that of
        // following lines grouped by widow/orphan prevention, does not fit into
        // the current region, but does fit into the next region, finish the
        // region.
        if !self.regions.size.y.fits(line.need)
            && self
                .regions
                .iter()
                .nth(1)
                .is_some_and(|region| region.y.fits(line.need))
        {
            return Err(Stop::Finish(false));
        }
        self.frame(line.frame.clone(), line.align, false, false)
    }
    /// Processes an unbreakable block.
    fn single(&mut self, single: &'b SingleChild<'a>) -> FlowResult<()> {
        // Lay out the block.
        let frame = single.layout(
            self.composer.engine,
            Region::new(self.regions.base(), self.regions.expand),
        )?;
        // Handle fractionally sized blocks: they are laid out again in
        // `finalize`, once the share of free space is known.
        if let Some(fr) = single.fr {
            self.composer
                .footnotes(&self.regions, &frame, Abs::zero(), false, true)?;
            self.flush_tags();
            self.items.push(Item::Fr(fr, Some(single)));
            return Ok(());
        }
        // If the block doesn't fit and a followup region may improve things,
        // finish the region.
        if !self.regions.size.y.fits(frame.height()) && self.regions.may_progress() {
            return Err(Stop::Finish(false));
        }
        self.frame(frame, single.align, single.sticky, false)
    }
    /// Processes a breakable block.
    fn multi(&mut self, multi: &'b MultiChild<'a>) -> FlowResult<()> {
        // Skip directly if the region is already (over)full. `line` and
        // `single` implicitly do this through their `fits` checks.
        if self.regions.is_full() {
            return Err(Stop::Finish(false));
        }
        // Lay out the block.
        let (frame, spill) = multi.layout(self.composer.engine, self.regions)?;
        if frame.is_empty()
            && spill.as_ref().is_some_and(|s| s.exist_non_empty_frame)
            && self.regions.may_progress()
        {
            // If the first frame is empty, but there are non-empty frames in
            // the spill, the whole child should be put in the next region to
            // avoid any invisible orphans at the end of this region.
            return Err(Stop::Finish(false));
        }
        self.frame(frame, multi.align, multi.sticky, true)?;
        // If the block didn't fully fit into the current region, save it into
        // the `spill` and finish the region.
        if let Some(spill) = spill {
            self.composer.work.spill = Some(spill);
            self.composer.work.advance();
            return Err(Stop::Finish(false));
        }
        Ok(())
    }
    /// Processes spillover from a breakable block.
    fn multi_spill(&mut self, spill: MultiSpill<'a, 'b>) -> FlowResult<()> {
        // Skip directly if the region is already (over)full.
        if self.regions.is_full() {
            self.composer.work.spill = Some(spill);
            return Err(Stop::Finish(false));
        }
        // Lay out the spilled remains.
        let align = spill.align();
        let (frame, spill) = spill.layout(self.composer.engine, self.regions)?;
        self.frame(frame, align, false, true)?;
        // If there's still more, save it into the `spill` and finish the
        // region.
        if let Some(spill) = spill {
            self.composer.work.spill = Some(spill);
            return Err(Stop::Finish(false));
        }
        Ok(())
    }
    /// Processes an in-flow frame, generated from a line or block.
    fn frame(
        &mut self,
        frame: Frame,
        align: Axes<FixedAlignment>,
        sticky: bool,
        breakable: bool,
    ) -> FlowResult<()> {
        if sticky {
            // If the frame is sticky and we haven't remembered a preceding
            // sticky element, make a checkpoint which we can restore should we
            // end on this sticky element.
            //
            // The first sticky block within consecutive sticky blocks
            // determines whether this group of sticky blocks has stickiness
            // disabled or not.
            //
            // The criteria used here is: if migrating this group of sticky
            // blocks together with the "attached" block can't improve the lack
            // of space, since we're at the start of the region, then we don't
            // do so, and stickiness is disabled (at least, for this region).
            // Otherwise, migration is allowed.
            //
            // Note that, since the whole region is checked, this ensures sticky
            // blocks at the top of a block - but not necessarily of the page -
            // can still be migrated.
            if self.sticky.is_none()
                && *self.stickable.get_or_insert_with(|| self.regions.may_progress())
            {
                self.sticky = Some(self.snapshot());
            }
        } else if !frame.is_empty() {
            // If the frame isn't sticky, we can forget a previous snapshot. We
            // interrupt a group of sticky blocks, if there was one, so we reset
            // the saved stickable check for the next group of sticky blocks.
            self.sticky = None;
            self.stickable = None;
        }
        // Handle footnotes.
        self.composer.footnotes(
            &self.regions,
            &frame,
            frame.height(),
            breakable,
            true,
        )?;
        // Push an item for the frame.
        self.regions.size.y -= frame.height();
        self.flush_tags();
        self.items.push(Item::Frame(frame, align));
        Ok(())
    }
    /// Processes an absolutely or floatingly placed child.
    fn placed(&mut self, placed: &'b PlacedChild<'a>) -> FlowResult<()> {
        if placed.float {
            // If the element is floatingly placed, let the composer handle it.
            // It might require relayout because the area available for
            // distribution shrinks. We make the spacing occupied by weak
            // spacing temporarily available again because it can collapse if it
            // ends up at a break due to the float.
            let weak_spacing = self.weak_spacing();
            self.regions.size.y += weak_spacing;
            self.composer.float(
                placed,
                &self.regions,
                self.items.iter().any(|item| matches!(item, Item::Frame(..))),
                true,
            )?;
            self.regions.size.y -= weak_spacing;
        } else {
            let frame = placed.layout(self.composer.engine, self.regions.base())?;
            self.composer
                .footnotes(&self.regions, &frame, Abs::zero(), true, true)?;
            self.flush_tags();
            self.items.push(Item::Placed(frame, placed));
        }
        Ok(())
    }
    /// Processes a float flush.
    fn flush(&mut self) -> FlowResult<()> {
        // If there are still pending floats, finish the region instead of
        // adding more content to it.
        if !self.composer.work.floats.is_empty() {
            return Err(Stop::Finish(false));
        }
        Ok(())
    }
    /// Processes a column break.
    fn break_(&mut self, weak: bool) -> FlowResult<()> {
        // If there is a region to break into, break into it. A weak break
        // only takes effect if the region already has content.
        if (!weak || !self.items.is_empty())
            && (!self.regions.backlog.is_empty() || self.regions.last.is_some())
        {
            self.composer.work.advance();
            return Err(Stop::Finish(true));
        }
        Ok(())
    }
    /// Arranges the produced items into an output frame.
    ///
    /// This performs alignment and resolves fractional spacing and blocks.
    fn finalize(
        mut self,
        region: Region,
        init: DistributionSnapshot<'a, 'b>,
        forced: bool,
    ) -> FlowResult<Frame> {
        if forced {
            // If this is the very end of the flow, flush pending tags.
            self.flush_tags();
        } else if !self.items.is_empty() && self.items.iter().all(Item::migratable) {
            // Restore the initial state if all items are migratable.
            self.restore(init);
        } else {
            // If we ended on a sticky block, but are not yet at the end of
            // the flow, restore the saved checkpoint to move the sticky
            // suffix to the next region.
            if let Some(snapshot) = self.sticky.take() {
                self.restore(snapshot)
            }
        }
        self.trim_spacing();
        let mut frs = Fr::zero();
        let mut used = Size::zero();
        let mut has_fr_child = false;
        // Determine the amount of used space and the sum of fractionals.
        for item in &self.items {
            match item {
                Item::Abs(v, _) => used.y += *v,
                Item::Fr(v, child) => {
                    frs += *v;
                    has_fr_child |= child.is_some();
                }
                Item::Frame(frame, _) => {
                    used.y += frame.height();
                    used.x.set_max(frame.width());
                }
                Item::Tag(_) | Item::Placed(..) => {}
            }
        }
        // When we have fractional spacing, occupy the remaining space with it.
        let mut fr_space = Abs::zero();
        if frs.get() > 0.0 && region.size.y.is_finite() {
            fr_space = region.size.y - used.y;
            used.y = region.size.y;
        }
        // Lay out fractionally sized blocks, now that their share of the
        // remaining space is known.
        let mut fr_frames = vec![];
        if has_fr_child {
            for item in &self.items {
                let Item::Fr(v, Some(single)) = item else { continue };
                let length = v.share(frs, fr_space);
                let pod = Region::new(Size::new(region.size.x, length), region.expand);
                let frame = single.layout(self.composer.engine, pod)?;
                used.x.set_max(frame.width());
                fr_frames.push(frame);
            }
        }
        // Also consider the width of insertions for alignment.
        if !region.expand.x {
            used.x.set_max(self.composer.insertion_width());
        }
        // Determine the region's size.
        let size = region.expand.select(region.size, used.min(region.size));
        let free = size.y - used.y;
        let mut output = Frame::soft(size);
        let mut ruler = FixedAlignment::Start;
        let mut offset = Abs::zero();
        let mut fr_frames = fr_frames.into_iter();
        // Position all items.
        for item in self.items {
            match item {
                Item::Tag(tag) => {
                    let y = offset + ruler.position(free);
                    let pos = Point::with_y(y);
                    output.push(pos, FrameItem::Tag(tag.clone()));
                }
                Item::Abs(v, _) => {
                    offset += v;
                }
                Item::Fr(v, single) => {
                    let length = v.share(frs, fr_space);
                    if let Some(single) = single {
                        let frame = fr_frames.next().unwrap();
                        let x = single.align.x.position(size.x - frame.width());
                        let pos = Point::new(x, offset);
                        output.push_frame(pos, frame);
                    }
                    offset += length;
                }
                Item::Frame(frame, align) => {
                    ruler = ruler.max(align.y);
                    let x = align.x.position(size.x - frame.width());
                    let y = offset + ruler.position(free);
                    let pos = Point::new(x, y);
                    offset += frame.height();
                    output.push_frame(pos, frame);
                }
                Item::Placed(frame, placed) => {
                    let x = placed.align_x.position(size.x - frame.width());
                    let y = match placed.align_y.unwrap_or_default() {
                        Some(align) => align.position(size.y - frame.height()),
                        _ => offset + ruler.position(free),
                    };
                    let pos = Point::new(x, y)
                        + placed.delta.zip_map(size, Rel::relative_to).to_point();
                    output.push_frame(pos, frame);
                }
            }
        }
        Ok(output)
    }
    /// Create a snapshot of the work and items.
    fn snapshot(&self) -> DistributionSnapshot<'a, 'b> {
        DistributionSnapshot {
            work: self.composer.work.clone(),
            items: self.items.len(),
        }
    }
    /// Restore a snapshot of the work and items.
    fn restore(&mut self, snapshot: DistributionSnapshot<'a, 'b>) {
        *self.composer.work = snapshot.work;
        self.items.truncate(snapshot.items);
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/flow/mod.rs | crates/typst-layout/src/flow/mod.rs | //! Layout of content into a [`Frame`] or [`Fragment`].
mod block;
mod collect;
mod compose;
mod distribute;
pub(crate) use self::block::unbreakable_pod;
use std::num::NonZeroUsize;
use std::rc::Rc;
use bumpalo::Bump;
use comemo::{Track, Tracked, TrackedMut};
use ecow::EcoVec;
use rustc_hash::FxHashSet;
use typst_library::World;
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Content, Packed, Resolve, StyleChain};
use typst_library::introspection::{
Introspector, Location, Locator, LocatorLink, SplitLocator, Tag,
};
use typst_library::layout::{
Abs, ColumnsElem, Dir, Em, Fragment, Frame, PageElem, PlacementScope, Region,
Regions, Rel, Size,
};
use typst_library::model::{FootnoteElem, FootnoteEntry, LineNumberingScope, ParLine};
use typst_library::pdf::ArtifactKind;
use typst_library::routines::{Arenas, FragmentKind, Pair, RealizationKind, Routines};
use typst_library::text::TextElem;
use typst_utils::{NonZeroExt, Numeric, Protected};
use self::block::{layout_multi_block, layout_single_block};
use self::collect::{
Child, LineChild, MultiChild, MultiSpill, PlacedChild, SingleChild, collect,
};
use self::compose::{Composer, compose};
use self::distribute::distribute;
/// Lays out content into a single region, producing a single frame.
pub fn layout_frame(
    engine: &mut Engine,
    content: &Content,
    locator: Locator,
    styles: StyleChain,
    region: Region,
) -> SourceResult<Frame> {
    // Delegate to the multi-region entry point with a single region and
    // unwrap the resulting one-frame fragment.
    let fragment = layout_fragment(engine, content, locator, styles, region.into())?;
    Ok(fragment.into_frame())
}
/// Lays out content into multiple regions.
///
/// When laying out into just one region, prefer [`layout_frame`].
pub fn layout_fragment(
    engine: &mut Engine,
    content: &Content,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    // Delegate to the memoized implementation with a single column and no
    // column gutter.
    layout_fragment_impl(
        engine.routines,
        engine.world,
        engine.introspector.into_raw(),
        engine.traced,
        TrackedMut::reborrow_mut(&mut engine.sink),
        engine.route.track(),
        content,
        locator.track(),
        styles,
        regions,
        NonZeroUsize::ONE,
        Rel::zero(),
    )
}
/// Layout the columns.
///
/// This is different from just laying out into column-sized regions as the
/// columns can interact due to parent-scoped placed elements.
#[typst_macros::time(span = elem.span())]
pub fn layout_columns(
    elem: &Packed<ColumnsElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    // Forward to the memoized implementation with the element's configured
    // column count and gutter, splitting the engine into its trackable parts.
    layout_fragment_impl(
        engine.routines,
        engine.world,
        engine.introspector.into_raw(),
        engine.traced,
        TrackedMut::reborrow_mut(&mut engine.sink),
        engine.route.track(),
        &elem.body,
        locator.track(),
        styles,
        regions,
        elem.count.get(styles),
        elem.gutter.resolve(styles),
    )
}
/// The cached, internal implementation of [`layout_fragment`].
///
/// All environment access goes through tracked handles so that comemo can
/// memoize the result based on what was actually accessed.
#[comemo::memoize]
#[allow(clippy::too_many_arguments)]
fn layout_fragment_impl(
    routines: &Routines,
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
    traced: Tracked<Traced>,
    sink: TrackedMut<Sink>,
    route: Tracked<Route>,
    content: &Content,
    locator: Tracked<Locator>,
    styles: StyleChain,
    regions: Regions,
    columns: NonZeroUsize,
    column_gutter: Rel<Abs>,
) -> SourceResult<Fragment> {
    // Expanding into an infinite axis cannot yield a finite frame.
    if !regions.size.x.is_finite() && regions.expand.x {
        bail!(content.span(), "cannot expand into infinite width");
    }
    if !regions.size.y.is_finite() && regions.expand.y {
        bail!(content.span(), "cannot expand into infinite height");
    }
    let introspector = Protected::from_raw(introspector);
    // Re-link the locator for use within this memoized call.
    let link = LocatorLink::new(locator);
    let mut locator = Locator::link(&link).split();
    // Reassemble a full engine from the tracked parts.
    let mut engine = Engine {
        routines,
        world,
        introspector,
        traced,
        sink,
        route: Route::extend(route),
    };
    // Guard against unbounded layout recursion.
    engine.route.check_layout_depth().at(content.span())?;
    // Realize the content into layoutable children, determining along the
    // way whether they are block- or inline-level.
    let mut kind = FragmentKind::Block;
    let arenas = Arenas::default();
    let children = (engine.routines.realize)(
        RealizationKind::LayoutFragment { kind: &mut kind },
        &mut engine,
        &mut locator,
        &arenas,
        content,
        styles,
    )?;
    layout_flow(
        &mut engine,
        &children,
        &mut locator,
        styles,
        regions,
        columns,
        column_gutter,
        kind.into(),
    )
}
/// The mode a flow can be laid out in.
///
/// Determines what kind of children the flow hosts and whether root-only
/// features (footnotes, line numbers) are available.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum FlowMode {
    /// A root flow with block-level elements. Like `FlowMode::Block`, but can
    /// additionally host footnotes and line numbers.
    Root,
    /// A flow whose children are block-level elements.
    Block,
    /// A flow whose children are inline-level elements.
    Inline,
}
impl From<FragmentKind> for FlowMode {
    /// Realized fragment kinds map directly onto the non-root flow modes.
    fn from(value: FragmentKind) -> Self {
        match value {
            FragmentKind::Block => Self::Block,
            FragmentKind::Inline => Self::Inline,
        }
    }
}
/// Lays out realized content into regions, potentially with columns.
#[allow(clippy::too_many_arguments)]
pub fn layout_flow<'a>(
    engine: &mut Engine,
    children: &[Pair<'a>],
    locator: &mut SplitLocator<'a>,
    shared: StyleChain<'a>,
    mut regions: Regions,
    columns: NonZeroUsize,
    column_gutter: Rel<Abs>,
    mode: FlowMode,
) -> SourceResult<Fragment> {
    // Prepare configuration that is shared across the whole flow.
    let config = configuration(shared, regions, columns, column_gutter, mode);
    // Collect the elements into pre-processed children. These are much easier
    // to handle than the raw elements.
    let bump = Bump::new();
    let children = collect(
        engine,
        &bump,
        children,
        locator.next(&()),
        Size::new(config.columns.width, regions.full),
        regions.expand.x,
        mode,
    )?;
    let mut work = Work::new(&children);
    let mut finished = vec![];
    // This loop runs once per region produced by the flow layout.
    loop {
        let frame = compose(engine, &mut work, &config, locator.next(&()), regions)?;
        finished.push(frame);
        // Terminate the loop when everything is processed, but keep going to
        // drain the backlog first if the flow expands vertically.
        if work.done() && (!regions.expand.y || regions.backlog.is_empty()) {
            break;
        }
        regions.next();
    }
    Ok(Fragment::frames(finished))
}
/// Determine the flow's configuration.
fn configuration<'x>(
    shared: StyleChain<'x>,
    regions: Regions,
    columns: NonZeroUsize,
    column_gutter: Rel<Abs>,
    mode: FlowMode,
) -> Config<'x> {
    Config {
        mode,
        shared,
        columns: {
            // In an infinitely wide region, multiple columns make no sense,
            // so fall back to a single one.
            let mut count = columns.get();
            if !regions.size.x.is_finite() {
                count = 1;
            }
            // Distribute the available width evenly after subtracting the
            // gutters between adjacent columns.
            let gutter = column_gutter.relative_to(regions.base().x);
            let width = (regions.size.x - gutter * (count - 1) as f64) / count as f64;
            let dir = shared.resolve(TextElem::dir);
            ColumnConfig { count, width, gutter, dir }
        },
        footnote: FootnoteConfig {
            // The separator is decoration, so it is tagged as an artifact
            // rather than document content.
            separator: shared
                .get_cloned(FootnoteEntry::separator)
                .artifact(ArtifactKind::Other),
            clearance: shared.resolve(FootnoteEntry::clearance),
            gap: shared.resolve(FootnoteEntry::gap),
            expand: regions.expand.x,
        },
        // Line numbers exist only in root flows.
        line_numbers: (mode == FlowMode::Root).then(|| LineNumberConfig {
            scope: shared.get(ParLine::numbering_scope),
            default_clearance: {
                // Use the page's horizontal extent as the reference, which is
                // the height when the page is flipped.
                let width = if shared.get(PageElem::flipped) {
                    shared.resolve(PageElem::height)
                } else {
                    shared.resolve(PageElem::width)
                };
                // Clamp below is safe (min <= max): if the font size is
                // negative, we set min = max = 0; otherwise,
                // `0.75 * size <= 2.5 * size` for zero and positive sizes.
                (0.026 * width.unwrap_or_default()).clamp(
                    Em::new(0.75).resolve(shared).max(Abs::zero()),
                    Em::new(2.5).resolve(shared).max(Abs::zero()),
                )
            },
        }),
    }
}
/// The work that is left to do by flow layout.
///
/// The lifetimes 'a and 'b are used across flow layout:
/// - 'a is that of the content coming out of realization
/// - 'b is that of the collected/prepared children
///
/// `Clone` is derived so that intermediate states can be snapshotted and
/// restored during layout.
#[derive(Clone)]
struct Work<'a, 'b> {
    /// Children that we haven't processed yet. This slice shrinks over time.
    children: &'b [Child<'a>],
    /// Leftovers from a breakable block.
    spill: Option<MultiSpill<'a, 'b>>,
    /// Queued floats that didn't fit in previous regions.
    floats: EcoVec<&'b PlacedChild<'a>>,
    /// Queued footnotes that didn't fit in previous regions.
    footnotes: EcoVec<Packed<FootnoteElem>>,
    /// Spilled frames of a footnote that didn't fully fit. Similar to `spill`.
    footnote_spill: Option<std::vec::IntoIter<Frame>>,
    /// Queued tags that will be attached to the next frame.
    tags: EcoVec<&'a Tag>,
    /// Identifies floats and footnotes that can be skipped if visited because
    /// they were already handled and incorporated as column or page level
    /// insertions.
    skips: Rc<FxHashSet<Location>>,
}
impl<'a, 'b> Work<'a, 'b> {
/// Create the initial work state from a list of children.
fn new(children: &'b [Child<'a>]) -> Self {
Self {
children,
spill: None,
floats: EcoVec::new(),
footnotes: EcoVec::new(),
footnote_spill: None,
tags: EcoVec::new(),
skips: Rc::new(FxHashSet::default()),
}
}
/// Get the first unprocessed child, from the start of the slice.
fn head(&self) -> Option<&'b Child<'a>> {
self.children.first()
}
/// Mark the `head()` child as processed, advancing the slice by one.
fn advance(&mut self) {
self.children = &self.children[1..];
}
/// Whether all work is done. This means we can terminate flow layout.
fn done(&self) -> bool {
self.children.is_empty()
&& self.spill.is_none()
&& self.floats.is_empty()
&& self.footnote_spill.is_none()
&& self.footnotes.is_empty()
}
/// Add skipped floats and footnotes from the insertion areas to the skip
/// set.
fn extend_skips(&mut self, skips: &[Location]) {
if !skips.is_empty() {
Rc::make_mut(&mut self.skips).extend(skips.iter().copied());
}
}
}
/// Shared configuration for the whole flow.
struct Config<'x> {
    /// The flow's mode. In root mode, the flow can additionally host
    /// footnotes and line numbers.
    mode: FlowMode,
    /// The styles shared by the whole flow. This is used for footnotes and line
    /// numbers.
    shared: StyleChain<'x>,
    /// Settings for columns.
    columns: ColumnConfig,
    /// Settings for footnotes.
    footnote: FootnoteConfig,
    /// Settings for line numbers. Only present for root flows.
    line_numbers: Option<LineNumberConfig>,
}
/// Configuration of footnotes.
struct FootnoteConfig {
    /// The separator between flow content and footnotes. Typically a line.
    /// Already tagged as an artifact by [`configuration`].
    separator: Content,
    /// The amount of space left above the separator.
    clearance: Abs,
    /// The gap between footnote entries.
    gap: Abs,
    /// Whether horizontal expansion is enabled for footnotes.
    expand: bool,
}
/// Configuration of columns.
struct ColumnConfig {
    /// The number of columns.
    count: usize,
    /// The width of each column (all columns are equally wide).
    width: Abs,
    /// The amount of space between columns.
    gutter: Abs,
    /// The horizontal direction in which columns progress. Defined by
    /// `text.dir`.
    dir: Dir,
}
/// Configuration of line numbers. Only present for root flows.
struct LineNumberConfig {
    /// Where line numbers are reset.
    scope: LineNumberingScope,
    /// The default clearance for `auto`.
    ///
    /// This value should be relative to the page's width, such that the
    /// clearance between line numbers and text is small when the page is,
    /// itself, small. However, that could cause the clearance to be too small
    /// or too large when considering the current text size; in particular, a
    /// larger text size would require more clearance to be able to tell line
    /// numbers apart from text, whereas a smaller text size requires less
    /// clearance so they aren't way too far apart. Therefore, the default
    /// value is a percentage of the page width clamped between `0.75em` and
    /// `2.5em`.
    default_clearance: Abs,
}
/// The result type for flow layout.
///
/// The `Err(_)` variant incorporates control flow events for finishing and
/// relayouting regions.
type FlowResult<T> = Result<T, Stop>;
/// A control flow event during flow layout.
///
/// Transported through the `Err` variant of [`FlowResult`].
enum Stop {
    /// Indicates that the current subregion should be finished. Can be caused
    /// by a lack of space (`false`) or an explicit column break (`true`).
    Finish(bool),
    /// Indicates that the given scope should be relayouted.
    Relayout(PlacementScope),
    /// A fatal error.
    Error(EcoVec<SourceDiagnostic>),
}
impl From<EcoVec<SourceDiagnostic>> for Stop {
fn from(error: EcoVec<SourceDiagnostic>) -> Self {
Stop::Error(error)
}
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/underover.rs | crates/typst-layout/src/math/underover.rs | use typst_library::diag::SourceResult;
use typst_library::foundations::{Content, Packed, StyleChain};
use typst_library::layout::{Abs, Em, Frame, FrameItem, Point, Size};
use typst_library::math::{
Accent, OverbraceElem, OverbracketElem, OverlineElem, OverparenElem, OvershellElem,
UnderbraceElem, UnderbracketElem, UnderlineElem, UnderparenElem, UndershellElem,
};
use typst_library::text::TextElem;
use typst_library::visualize::{FixedStroke, Geometry};
use typst_syntax::Span;
use super::accent::place_accent;
use super::attach::layout_attachments;
use super::{
FrameFragment, MathContext, style_cramped, style_for_subscript, style_for_superscript,
};
/// A marker to distinguish under- and overlines.
enum Position {
    /// The decoration sits below the base content.
    Under,
    /// The decoration sits above the base content.
    Over,
}
/// Lays out an [`UnderlineElem`].
#[typst_macros::time(name = "math.underline", span = elem.span())]
pub fn layout_underline(
    elem: &Packed<UnderlineElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();
    layout_underoverline(ctx, styles, &elem.body, span, Position::Under)
}
/// Lays out an [`OverlineElem`].
#[typst_macros::time(name = "math.overline", span = elem.span())]
pub fn layout_overline(
    elem: &Packed<OverlineElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();
    layout_underoverline(ctx, styles, &elem.body, span, Position::Over)
}
/// Lays out an [`UnderbraceElem`].
#[typst_macros::time(name = "math.underbrace", span = elem.span())]
pub fn layout_underbrace(
    elem: &Packed<UnderbraceElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23DF BOTTOM CURLY BRACKET is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⏟',
        Position::Under,
        elem.span(),
    )
}
/// Lays out an [`OverbraceElem`].
#[typst_macros::time(name = "math.overbrace", span = elem.span())]
pub fn layout_overbrace(
    elem: &Packed<OverbraceElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23DE TOP CURLY BRACKET is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⏞',
        Position::Over,
        elem.span(),
    )
}
/// Lays out an [`UnderbracketElem`].
#[typst_macros::time(name = "math.underbracket", span = elem.span())]
pub fn layout_underbracket(
    elem: &Packed<UnderbracketElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23B5 BOTTOM SQUARE BRACKET is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⎵',
        Position::Under,
        elem.span(),
    )
}
/// Lays out an [`OverbracketElem`].
#[typst_macros::time(name = "math.overbracket", span = elem.span())]
pub fn layout_overbracket(
    elem: &Packed<OverbracketElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23B4 TOP SQUARE BRACKET is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⎴',
        Position::Over,
        elem.span(),
    )
}
/// Lays out an [`UnderparenElem`].
#[typst_macros::time(name = "math.underparen", span = elem.span())]
pub fn layout_underparen(
    elem: &Packed<UnderparenElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23DD BOTTOM PARENTHESIS is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⏝',
        Position::Under,
        elem.span(),
    )
}
/// Lays out an [`OverparenElem`].
#[typst_macros::time(name = "math.overparen", span = elem.span())]
pub fn layout_overparen(
    elem: &Packed<OverparenElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23DC TOP PARENTHESIS is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⏜',
        Position::Over,
        elem.span(),
    )
}
/// Lays out an [`UndershellElem`].
#[typst_macros::time(name = "math.undershell", span = elem.span())]
pub fn layout_undershell(
    elem: &Packed<UndershellElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23E1 BOTTOM TORTOISE SHELL BRACKET is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⏡',
        Position::Under,
        elem.span(),
    )
}
/// Lays out an [`OvershellElem`].
#[typst_macros::time(name = "math.overshell", span = elem.span())]
pub fn layout_overshell(
    elem: &Packed<OvershellElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // U+23E0 TOP TORTOISE SHELL BRACKET is the stretchy spreader glyph.
    let annotation = elem.annotation.get_ref(styles);
    layout_underoverspreader(
        ctx,
        styles,
        &elem.body,
        annotation,
        '⏠',
        Position::Over,
        elem.span(),
    )
}
/// Lays out under- or overlined content.
fn layout_underoverline(
    ctx: &mut MathContext,
    styles: StyleChain,
    body: &Content,
    span: Span,
    position: Position,
) -> SourceResult<()> {
    let (extra_height, content, line_pos, content_pos, baseline, bar_height, line_adjust);
    match position {
        Position::Under => {
            content = ctx.layout_into_fragment(body, styles)?;
            // All bar metrics come from the font's math constants.
            let (font, size) = content.font(ctx, styles);
            let sep = font.math().underbar_extra_descender.at(size);
            bar_height = font.math().underbar_rule_thickness.at(size);
            let gap = font.math().underbar_vertical_gap.at(size);
            extra_height = sep + bar_height + gap;
            // The line is positioned by its vertical center.
            line_pos = Point::with_y(content.height() + gap + bar_height / 2.0);
            content_pos = Point::zero();
            baseline = content.ascent();
            // Shorten the underline by the content's italics correction.
            line_adjust = -content.italics_correction();
        }
        Position::Over => {
            // Overlined content is laid out in cramped style.
            let cramped = style_cramped();
            let styles = styles.chain(&cramped);
            content = ctx.layout_into_fragment(body, styles)?;
            let (font, size) = content.font(ctx, styles);
            let sep = font.math().overbar_extra_ascender.at(size);
            bar_height = font.math().overbar_rule_thickness.at(size);
            let gap = font.math().overbar_vertical_gap.at(size);
            extra_height = sep + bar_height + gap;
            line_pos = Point::with_y(sep + bar_height / 2.0);
            // The content moves down to make room for the line above it.
            content_pos = Point::with_y(extra_height);
            baseline = content.ascent() + extra_height;
            line_adjust = Abs::zero();
        }
    }
    let width = content.width();
    let height = content.height() + extra_height;
    let size = Size::new(width, height);
    let line_width = width + line_adjust;
    // Remember the content's properties before it is consumed below.
    let content_class = content.class();
    let content_is_text_like = content.is_text_like();
    let content_italics_correction = content.italics_correction();
    let mut frame = Frame::soft(size);
    frame.set_baseline(baseline);
    frame.push_frame(content_pos, content.into_frame());
    // Draw the line with the current text fill.
    frame.push(
        line_pos,
        FrameItem::Shape(
            Geometry::Line(Point::with_x(line_width)).stroked(FixedStroke {
                paint: styles.get_ref(TextElem::fill).as_decoration(),
                thickness: bar_height,
                ..FixedStroke::default()
            }),
            span,
        ),
    );
    // The result inherits the content's spacing class, text-likeness, and
    // italics correction.
    ctx.push(
        FrameFragment::new(styles, frame)
            .with_class(content_class)
            .with_text_like(content_is_text_like)
            .with_italics_correction(content_italics_correction),
    );
    Ok(())
}
/// Layout an over- or underbrace-like object.
///
/// The spreader character `c` is stretched over the body like an accent; an
/// optional annotation is laid out in script style and attached above (over)
/// or below (under).
fn layout_underoverspreader(
    ctx: &mut MathContext,
    styles: StyleChain,
    body: &Content,
    annotation: &Option<Content>,
    c: char,
    position: Position,
    span: Span,
) -> SourceResult<()> {
    let body = ctx.layout_into_fragment(body, styles)?;
    let body_class = body.class();
    // Reuse accent placement to stretch `c` to the body's width.
    let accent = Accent(c);
    let width = body.width().into();
    let frame =
        place_accent(ctx, body, styles, accent, styles, width, Em::zero(), false, span)?;
    let base = FrameFragment::new(styles, frame).with_class(body_class);
    // Without an annotation, the spreader alone is the result.
    let Some(annotation) = annotation else {
        ctx.push(base);
        return Ok(());
    };
    // Slots for `layout_attachments`: slot 1 appears to hold the top
    // attachment and slot 4 the bottom, matching the over/under cases below —
    // confirm against `layout_attachments` when changing this.
    let fragments = match position {
        Position::Under => {
            let under_style = style_for_subscript(styles);
            let annotation_styles = styles.chain(&under_style);
            let b = ctx.layout_into_fragment(annotation, annotation_styles)?;
            [None, None, None, None, Some(b), None]
        }
        Position::Over => {
            let over_style = style_for_superscript(styles);
            let annotation_styles = styles.chain(&over_style);
            let t = ctx.layout_into_fragment(annotation, annotation_styles)?;
            [None, Some(t), None, None, None, None]
        }
    };
    layout_attachments(ctx, styles, base.into(), fragments)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/frac.rs | crates/typst-layout/src/math/frac.rs | use typst_library::diag::SourceResult;
use typst_library::foundations::{
Content, NativeElement, Packed, Resolve, StyleChain, SymbolElem,
};
use typst_library::layout::{Abs, Em, Frame, FrameItem, Point, Size};
use typst_library::math::{
BinomElem, EquationElem, FracElem, FracStyle, LrElem, MathSize,
};
use typst_library::text::TextElem;
use typst_library::visualize::{FixedStroke, Geometry};
use typst_syntax::Span;
use super::{
DELIM_SHORT_FALL, FrameFragment, MathContext, style_for_denominator,
style_for_numerator,
};
/// Horizontal padding inserted on each side of a vertical fraction's rule.
const FRAC_AROUND: Em = Em::new(0.1);
/// Lays out a [`FracElem`].
///
/// Dispatches on the configured fraction style: skewed (raised numerator and
/// lowered denominator around a slash), horizontal (inline `num / denom`), or
/// vertical (numerator stacked over the denominator with a rule).
#[typst_macros::time(name = "math.frac", span = elem.span())]
pub fn layout_frac(
    elem: &Packed<FracElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    match elem.style.get(styles) {
        FracStyle::Skewed => {
            layout_skewed_frac(ctx, styles, &elem.num, &elem.denom, elem.span())
        }
        FracStyle::Horizontal => layout_horizontal_frac(
            ctx,
            styles,
            &elem.num,
            &elem.denom,
            elem.span(),
            // Whether the parts were deparenthesized; the horizontal layout
            // restores the parentheses in that case.
            elem.num_deparenthesized.get(styles),
            elem.denom_deparenthesized.get(styles),
        ),
        FracStyle::Vertical => layout_vertical_frac_like(
            ctx,
            styles,
            &elem.num,
            // The shared helper accepts multiple denominator parts (for
            // binomials); a fraction has exactly one.
            std::slice::from_ref(&elem.denom),
            false,
            elem.span(),
        ),
    }
}
/// Lays out a [`BinomElem`].
#[typst_macros::time(name = "math.binom", span = elem.span())]
pub fn layout_binom(
    elem: &Packed<BinomElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // A binomial is a vertical fraction without a rule, wrapped in
    // stretched parentheses.
    let span = elem.span();
    layout_vertical_frac_like(ctx, styles, &elem.upper, &elem.lower, true, span)
}
/// Layout a vertical fraction or binomial.
///
/// For a binomial (`binom == true`), no rule is drawn and the whole stack is
/// wrapped in stretched parentheses; multiple `denom` parts are joined with
/// commas.
fn layout_vertical_frac_like(
    ctx: &mut MathContext,
    styles: StyleChain,
    num: &Content,
    denom: &[Content],
    binom: bool,
    span: Span,
) -> SourceResult<()> {
    // Fetch the relevant constants from the font's math table, picking the
    // display-style variants in display size.
    let constants = ctx.font().math();
    let axis = constants.axis_height.resolve(styles);
    let thickness = constants.fraction_rule_thickness.resolve(styles);
    let size = styles.get(EquationElem::size);
    let shift_up = match size {
        MathSize::Display => constants.fraction_numerator_display_style_shift_up,
        _ => constants.fraction_numerator_shift_up,
    }
    .resolve(styles);
    let shift_down = match size {
        MathSize::Display => constants.fraction_denominator_display_style_shift_down,
        _ => constants.fraction_denominator_shift_down,
    }
    .resolve(styles);
    let num_min = match size {
        MathSize::Display => constants.fraction_num_display_style_gap_min,
        _ => constants.fraction_numerator_gap_min,
    }
    .resolve(styles);
    let denom_min = match size {
        MathSize::Display => constants.fraction_denom_display_style_gap_min,
        _ => constants.fraction_denominator_gap_min,
    }
    .resolve(styles);
    // Lay out numerator and denominator in their respective script styles.
    let num_style = style_for_numerator(styles);
    let num = ctx.layout_into_frame(num, styles.chain(&num_style))?;
    let denom_style = style_for_denominator(styles);
    let denom = ctx.layout_into_frame(
        &Content::sequence(
            // Add a comma between each element.
            denom
                .iter()
                .flat_map(|a| [SymbolElem::packed(',').spanned(span), a.clone()])
                .skip(1),
        ),
        styles.chain(&denom_style),
    )?;
    let around = FRAC_AROUND.resolve(styles);
    // Use the preferred shifts, but keep at least the minimum gaps around
    // the rule.
    let num_gap = (shift_up - (axis + thickness / 2.0) - num.descent()).max(num_min);
    let denom_gap =
        (shift_down + (axis - thickness / 2.0) - denom.ascent()).max(denom_min);
    let line_width = num.width().max(denom.width());
    let width = line_width + 2.0 * around;
    let height = num.height() + num_gap + thickness + denom_gap + denom.height();
    let size = Size::new(width, height);
    // Center numerator, rule, and denominator horizontally.
    let num_pos = Point::with_x((width - num.width()) / 2.0);
    let line_pos =
        Point::new((width - line_width) / 2.0, num.height() + num_gap + thickness / 2.0);
    let denom_pos = Point::new((width - denom.width()) / 2.0, height - denom.height());
    // Place the baseline so that the rule's center sits on the math axis.
    let baseline = line_pos.y + axis;
    let mut frame = Frame::soft(size);
    frame.set_baseline(baseline);
    frame.push_frame(num_pos, num);
    frame.push_frame(denom_pos, denom);
    if binom {
        // Binomial: wrap the stack in parentheses stretched to its height
        // instead of drawing a rule.
        let short_fall = DELIM_SHORT_FALL.resolve(styles);
        let mut left =
            ctx.layout_into_fragment(&SymbolElem::packed('(').spanned(span), styles)?;
        left.stretch_vertical(ctx, height, short_fall);
        left.center_on_axis();
        ctx.push(left);
        ctx.push(FrameFragment::new(styles, frame));
        let mut right =
            ctx.layout_into_fragment(&SymbolElem::packed(')').spanned(span), styles)?;
        right.stretch_vertical(ctx, height, short_fall);
        right.center_on_axis();
        ctx.push(right);
    } else {
        // Fraction: draw the rule with the current text fill.
        frame.push(
            line_pos,
            FrameItem::Shape(
                Geometry::Line(Point::with_x(line_width)).stroked(
                    FixedStroke::from_pair(
                        styles.get_ref(TextElem::fill).as_decoration(),
                        thickness,
                    ),
                ),
                span,
            ),
        );
        ctx.push(FrameFragment::new(styles, frame));
    }
    Ok(())
}
/// Lays out a horizontal (inline, slash-separated) fraction.
///
/// Pushes numerator, a slash centered on the math axis, and denominator as
/// consecutive fragments. Parts that were deparenthesized get their
/// parentheses restored via an `lr` group.
fn layout_horizontal_frac(
    ctx: &mut MathContext,
    styles: StyleChain,
    num: &Content,
    denom: &Content,
    span: Span,
    num_deparen: bool,
    denom_deparen: bool,
) -> SourceResult<()> {
    // Numerator.
    layout_horizontal_frac_part(ctx, styles, num, num_deparen)?;
    // Dividing slash, vertically centered on the math axis.
    let mut slash =
        ctx.layout_into_fragment(&SymbolElem::packed('/').spanned(span), styles)?;
    slash.center_on_axis();
    ctx.push(slash);
    // Denominator.
    layout_horizontal_frac_part(ctx, styles, denom, denom_deparen)
}

/// Lays out one side of a horizontal fraction, re-wrapping it in parentheses
/// if it was deparenthesized.
fn layout_horizontal_frac_part(
    ctx: &mut MathContext,
    styles: StyleChain,
    body: &Content,
    deparen: bool,
) -> SourceResult<()> {
    let wrapped;
    let body = if deparen {
        wrapped = LrElem::new(Content::sequence(vec![
            SymbolElem::packed('('),
            body.clone(),
            SymbolElem::packed(')'),
        ]))
        .pack();
        &wrapped
    } else {
        body
    };
    let fragment = ctx.layout_into_fragment(body, styles)?;
    ctx.push(fragment);
    Ok(())
}
/// Lay out a skewed fraction.
///
/// Raises the numerator and lowers the denominator around a stretched
/// fraction slash (U+2044).
fn layout_skewed_frac(
    ctx: &mut MathContext,
    styles: StyleChain,
    num: &Content,
    denom: &Content,
    span: Span,
) -> SourceResult<()> {
    // Font-derived constants
    let constants = ctx.font().math();
    let vgap = constants.skewed_fraction_vertical_gap.resolve(styles);
    let hgap = constants.skewed_fraction_horizontal_gap.resolve(styles);
    let axis = constants.axis_height.resolve(styles);
    // Lay out the parts in their respective script styles.
    let num_style = style_for_numerator(styles);
    let num_frame = ctx.layout_into_frame(num, styles.chain(&num_style))?;
    let num_size = num_frame.size();
    let denom_style = style_for_denominator(styles);
    let denom_frame = ctx.layout_into_frame(denom, styles.chain(&denom_style))?;
    let denom_size = denom_frame.size();
    let short_fall = DELIM_SHORT_FALL.resolve(styles);
    // Height of the fraction frame
    // We recalculate this value below if the slash glyph overflows
    let mut fraction_height = num_size.y + denom_size.y + vgap;
    // Build the slash glyph to calculate its size
    let mut slash_frag =
        ctx.layout_into_fragment(&SymbolElem::packed('\u{2044}').spanned(span), styles)?;
    slash_frag.stretch_vertical(ctx, fraction_height, short_fall);
    slash_frag.center_on_axis();
    let slash_frame = slash_frag.into_frame();
    // Adjust the fraction height if the slash overflows
    let slash_size = slash_frame.size();
    // Half the overflow pushes the numerator down; the frame grows to the
    // slash's height so the rest extends below the denominator.
    let vertical_offset = Abs::zero().max(slash_size.y - fraction_height) / 2.0;
    fraction_height.set_max(slash_size.y);
    // Reference points for all three objects, used to place them in the frame.
    let mut slash_up_left = Point::new(num_size.x + hgap / 2.0, fraction_height / 2.0)
        - slash_size.to_point() / 2.0;
    let mut num_up_left = Point::with_y(vertical_offset);
    let mut denom_up_left = num_up_left + num_size.to_point() + Point::new(hgap, vgap);
    // Fraction width
    let fraction_width = (denom_up_left.x + denom_size.x)
        .max(slash_up_left.x + slash_size.x)
        + Abs::zero().max(-slash_up_left.x);
    // We have to shift everything right to avoid going in the negatives for
    // the x coordinate
    let horizontal_offset = Point::with_x(Abs::zero().max(-slash_up_left.x));
    slash_up_left += horizontal_offset;
    num_up_left += horizontal_offset;
    denom_up_left += horizontal_offset;
    // Build the final frame
    let mut fraction_frame = Frame::soft(Size::new(fraction_width, fraction_height));
    // Baseline (use axis height to center slash on the axis)
    fraction_frame.set_baseline(fraction_height / 2.0 + axis);
    // Numerator, Denominator, Slash
    fraction_frame.push_frame(num_up_left, num_frame);
    fraction_frame.push_frame(denom_up_left, denom_frame);
    fraction_frame.push_frame(slash_up_left, slash_frame);
    ctx.push(FrameFragment::new(styles, fraction_frame));
    Ok(())
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/shaping.rs | crates/typst-layout/src/math/shaping.rs | use std::ops::{Deref, DerefMut};
use az::SaturatingAs;
use comemo::Tracked;
use ecow::EcoString;
use rustybuzz::{BufferFlags, UnicodeBuffer};
use typst_library::World;
use typst_library::layout::{Abs, Em};
use typst_library::text::{Font, FontFamily, FontVariant, Glyph, Lang, Region, TextItem};
use typst_library::visualize::{FixedStroke, Paint};
use typst_syntax::Span;
use crate::inline::{SharedShapingContext, create_shape_plan, get_font_and_covers};
/// A text item in Math.
///
/// This type is almost identical to
/// [`TextItem`](typst_library::text::TextItem), the difference being the
/// representation of the glyphs. See [`Glyphs`] for more info.
#[derive(Clone)]
pub struct ShapedText {
    /// The text that was shaped.
    pub text: EcoString,
    /// The text's font.
    pub font: Font,
    /// The text's size.
    pub size: Abs,
    /// Glyph color.
    pub fill: Paint,
    /// Glyph stroke.
    pub stroke: Option<FixedStroke>,
    /// The natural language of the text.
    pub lang: Lang,
    /// The region of the text.
    pub region: Option<Region>,
    /// The text's span.
    pub span: Span,
    /// The shaped glyphs.
    pub glyphs: Glyphs,
}
impl ShapedText {
    /// The width of the text run.
    pub fn width(&self) -> Abs {
        let advance: Em = self.glyphs.iter().map(|glyph| glyph.x_advance).sum();
        advance.at(self.size)
    }

    /// The id of the first glyph in the original text.
    ///
    /// Panics if the run contains no glyphs.
    pub fn original_id(&self) -> u16 {
        self.glyphs.original()[0].id
    }
}
impl From<ShapedText> for TextItem {
    /// Converts shaped math text into a generic [`TextItem`].
    fn from(item: ShapedText) -> Self {
        TextItem {
            font: item.font,
            size: item.size,
            fill: item.fill,
            stroke: item.stroke,
            lang: item.lang,
            region: item.region,
            text: item.text.clone(),
            glyphs: item
                .glyphs
                .iter()
                .map(|g| Glyph {
                    id: g.id,
                    x_advance: g.x_advance,
                    x_offset: g.x_offset,
                    y_advance: g.y_advance,
                    y_offset: g.y_offset,
                    // Every glyph is attributed to the whole text: math runs
                    // are shaped as single clusters (see `shape_impl`), so no
                    // finer mapping is available.
                    range: 0..item.text.len().saturating_as(),
                    span: (item.span, 0),
                })
                .collect(),
        }
    }
}
/// A collection of glyphs that stores the original set of glyphs when created.
///
/// Dereferencing yields the updated glyphs if an update was set and the
/// originals otherwise.
#[derive(Clone)]
pub struct Glyphs {
    // The glyphs as produced when the collection was created.
    original: Vec<ShapedGlyph>,
    // A replacement set, if any; takes precedence when dereferencing.
    updated: Option<Vec<ShapedGlyph>>,
}
impl Deref for Glyphs {
    type Target = Vec<ShapedGlyph>;

    /// Borrows the updated glyphs when present, the originals otherwise.
    fn deref(&self) -> &Self::Target {
        match &self.updated {
            Some(updated) => updated,
            None => &self.original,
        }
    }
}
impl DerefMut for Glyphs {
    /// Mutably borrows the updated glyphs when present, the originals
    /// otherwise.
    ///
    /// NOTE(review): without a prior `update`, this hands out the originals
    /// for mutation, after which `reset` cannot restore the initial shaping.
    /// Presumably callers always `update` first — confirm.
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self.updated.as_mut() {
            Some(updated) => updated,
            None => &mut self.original,
        }
    }
}
impl Glyphs {
    /// Create a new set of glyphs.
    pub fn new(initial: Vec<ShapedGlyph>) -> Self {
        Self { original: initial, updated: None }
    }

    /// Replace the active glyphs with `new_value`, keeping the originals
    /// around for a later [`reset`](Self::reset).
    pub fn update(&mut self, new_value: Vec<ShapedGlyph>) {
        self.updated = Some(new_value);
    }

    /// Discard any update, reverting the glyphs back to their originals.
    pub fn reset(&mut self) {
        self.updated = None;
    }

    /// The glyphs as they were when this collection was created.
    fn original(&self) -> &[ShapedGlyph] {
        self.original.as_slice()
    }
}
/// A single glyph resulting from shaping.
///
/// All metrics are stored as [`Em`] values, i.e. relative to the font size.
#[derive(Clone)]
pub struct ShapedGlyph {
    /// The glyph's index in the font.
    pub id: u16,
    /// The advance width of the glyph.
    pub x_advance: Em,
    /// The horizontal offset of the glyph.
    pub x_offset: Em,
    /// The advance height of the glyph.
    pub y_advance: Em,
    /// The vertical offset of the glyph.
    pub y_offset: Em,
}
/// Shape some text in math.
///
/// Returns the font that ended up being used together with the shaped
/// glyphs, or `None` if no font could shape the text.
#[comemo::memoize]
pub fn shape(
    world: Tracked<dyn World + '_>,
    variant: FontVariant,
    features: Vec<rustybuzz::Feature>,
    language: rustybuzz::Language,
    fallback: bool,
    text: &str,
    families: Vec<&FontFamily>,
) -> Option<(Font, Vec<ShapedGlyph>)> {
    let mut ctx = ShapingContext {
        world,
        used: vec![],
        variant,
        features,
        language,
        fallback,
        glyphs: vec![],
        font: None,
    };
    shape_impl(&mut ctx, text, families.into_iter());
    // `ctx.font` stays `None` when shaping failed for all families.
    Some((ctx.font?, ctx.glyphs))
}
/// Holds shaping results and metadata for shaping some text.
struct ShapingContext<'a> {
    /// Access to the world, for font selection.
    world: Tracked<'a, dyn World + 'a>,
    /// Fonts already tried during fallback (managed via
    /// [`SharedShapingContext`]).
    used: Vec<Font>,
    /// The requested font variant.
    variant: FontVariant,
    /// OpenType features to enable while shaping.
    features: Vec<rustybuzz::Feature>,
    /// The language of the text, passed on to the shaper.
    language: rustybuzz::Language,
    /// Whether font fallback is allowed.
    fallback: bool,
    /// The glyphs shaped so far.
    glyphs: Vec<ShapedGlyph>,
    /// The font the glyphs were shaped with, if shaping succeeded.
    font: Option<Font>,
}
/// Exposes the shaping state that the font-selection helpers shared with
/// inline shaping (see `crate::inline`) operate on.
impl<'a> SharedShapingContext<'a> for ShapingContext<'a> {
    fn world(&self) -> Tracked<'a, dyn World + 'a> {
        self.world
    }

    fn used(&mut self) -> &mut Vec<Font> {
        &mut self.used
    }

    fn first(&self) -> Option<&Font> {
        self.used.first()
    }

    fn variant(&self) -> FontVariant {
        self.variant
    }

    fn fallback(&self) -> bool {
        self.fallback
    }
}
/// Shape text with font fallback using the `families` iterator.
fn shape_impl<'a>(
    ctx: &mut ShapingContext<'a>,
    text: &str,
    mut families: impl Iterator<Item = &'a FontFamily> + Clone,
) {
    // Select the next usable font. The closure is the last-resort path: it
    // emits one glyph with id 0 (the font's `.notdef`) per character when no
    // family can shape the text at all.
    let Some((font, covers)) =
        get_font_and_covers(ctx, text, families.by_ref(), |ctx, text, font| {
            let add_glyph = |_| {
                ctx.glyphs.push(ShapedGlyph {
                    id: 0,
                    x_advance: font.x_advance(0).unwrap_or_default(),
                    x_offset: Em::zero(),
                    y_advance: Em::zero(),
                    y_offset: Em::zero(),
                })
            };
            text.chars().for_each(add_glyph);
            ctx.font = Some(font);
        })
    else {
        return;
    };
    // Fill the shaping buffer with the text and its metadata.
    let mut buffer = UnicodeBuffer::new();
    buffer.push_str(text);
    buffer.set_language(ctx.language.clone());
    // TODO: Use `rustybuzz::script::MATH` once
    // https://github.com/harfbuzz/rustybuzz/pull/165 is released.
    buffer.set_script(
        rustybuzz::Script::from_iso15924_tag(ttf_parser::Tag::from_bytes(b"math"))
            .unwrap(),
    );
    buffer.set_direction(rustybuzz::Direction::LeftToRight);
    buffer.set_flags(BufferFlags::REMOVE_DEFAULT_IGNORABLES);
    let plan = create_shape_plan(
        &font,
        buffer.direction(),
        buffer.script(),
        buffer.language().as_ref(),
        &ctx.features,
    );
    let buffer = rustybuzz::shape_with_plan(font.rusty(), &plan, buffer);
    // Because we will only ever shape single grapheme clusters, we will
    // (incorrectly) assume that the output from the shaper is a single cluster
    // that spans the entire range of the given text. The only problem this
    // could cause is the ranges for glyphs being incorrect in the final
    // `TextItem`, which could then affect text extraction in PDF export.
    //
    // Fall back to the remaining families when the font lacks a glyph
    // (id 0 appears) or its `covers` pattern excludes this text.
    if buffer.glyph_infos().iter().any(|i| i.glyph_id == 0)
        || !covers.is_none_or(|cov| cov.is_match(text))
    {
        shape_impl(ctx, text, families);
    } else {
        // Collect the shaped glyphs, converting from font units to ems.
        for i in 0..buffer.len() {
            let info = buffer.glyph_infos()[i];
            let pos = buffer.glyph_positions()[i];
            ctx.glyphs.push(ShapedGlyph {
                id: info.glyph_id as u16,
                x_advance: font.to_em(pos.x_advance),
                x_offset: font.to_em(pos.x_offset),
                y_advance: font.to_em(pos.y_advance),
                y_offset: font.to_em(pos.y_offset),
            });
        }
        if !buffer.is_empty() {
            ctx.font = Some(font);
        }
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/stretch.rs | crates/typst-layout/src/math/stretch.rs | use typst_library::diag::{SourceResult, warning};
use typst_library::foundations::{Packed, StyleChain};
use typst_library::layout::{Abs, Axis, Rel};
use typst_library::math::StretchElem;
use typst_utils::Get;
use super::{MathContext, MathFragment, stretch_axes};
/// Lays out a [`StretchElem`].
///
/// Lays out the element's body into a fragment and then stretches it to the
/// size requested by the element (resolved relative to the fragment's own
/// size, since no explicit `relative_to` is passed).
#[typst_macros::time(name = "math.stretch", span = elem.span())]
pub fn layout_stretch(
    elem: &Packed<StretchElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?;
    stretch_fragment(
        ctx,
        &mut fragment,
        None,
        None,
        elem.size.resolve(styles),
        Abs::zero(),
    );
    ctx.push(fragment);
    Ok(())
}
/// Attempts to stretch the given fragment by/to the amount given in stretch.
///
/// Only glyph fragments can be stretched; anything else is left untouched.
/// If `axis` is `None`, the stretch axis is inferred from which constructions
/// (horizontal/vertical) the font provides for the glyph. `relative_to`
/// provides the base size for relative stretch amounts and defaults to the
/// fragment's own size along the stretch axis.
pub fn stretch_fragment(
    ctx: &mut MathContext,
    fragment: &mut MathFragment,
    axis: Option<Axis>,
    relative_to: Option<Abs>,
    stretch: Rel<Abs>,
    short_fall: Abs,
) {
    let size = fragment.size();
    let MathFragment::Glyph(glyph) = fragment else { return };
    // Return if we attempt to stretch along an axis which isn't stretchable,
    // so that the original fragment isn't modified.
    let axes = stretch_axes(&glyph.item.font, glyph.item.original_id());
    let stretch_axis = if let Some(axis) = axis {
        if !axes.get(axis) {
            return;
        }
        axis
    } else {
        match (axes.x, axes.y) {
            (true, false) => Axis::X,
            (false, true) => Axis::Y,
            (false, false) => return,
            (true, true) => {
                // As far as we know, there aren't any glyphs that have both
                // vertical and horizontal constructions. So for the time being, we
                // will assume that a glyph cannot have both.
                ctx.engine.sink.warn(warning!(
                    glyph.item.span,
                    "glyph has both vertical and horizontal constructions";
                    hint: "this is probably a font bug";
                    hint: "please file an issue at https://github.com/typst/typst/issues";
                ));
                return;
            }
        }
    };
    let relative_to_size = relative_to.unwrap_or_else(|| size.get(stretch_axis));
    glyph.stretch(ctx, stretch.relative_to(relative_to_size), short_fall, stretch_axis);
    // Vertically stretched glyphs (e.g. delimiters) are re-centered on the
    // math axis.
    if stretch_axis == Axis::Y {
        glyph.center_on_axis();
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/fragment.rs | crates/typst-layout/src/math/fragment.rs | use std::fmt::{self, Debug, Formatter};
use comemo::Tracked;
use ttf_parser::GlyphId;
use ttf_parser::math::{GlyphAssembly, GlyphConstruction, GlyphPart};
use typst_library::World;
use typst_library::diag::warning;
use typst_library::foundations::StyleChain;
use typst_library::introspection::Tag;
use typst_library::layout::{
Abs, Axes, Axis, Corner, Em, Frame, FrameItem, Point, Size, VAlignment,
};
use typst_library::math::{EquationElem, MathSize};
use typst_library::text::{Font, TextElem, features, language, variant};
use typst_library::visualize::Paint;
use typst_syntax::Span;
use typst_utils::{Get, default_math_class};
use unicode_math_class::MathClass;
use unicode_segmentation::UnicodeSegmentation;
use super::{MathContext, families};
use crate::math::shaping::{Glyphs, ShapedGlyph, ShapedText, shape};
use crate::modifiers::{FrameModifiers, FrameModify};
/// Maximum number of times extenders can be repeated.
///
/// Safety cap for the extender-search loop in `assemble`, preventing
/// unbounded growth when a target size can never be reached.
const MAX_REPEATS: usize = 1024;
/// A piece of a laid-out math run.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
pub enum MathFragment {
    /// A (possibly stretched) glyph backed by shaped text.
    Glyph(GlyphFragment),
    /// An already laid-out frame with math metadata.
    Frame(FrameFragment),
    /// Horizontal spacing. NOTE(review): the flag's meaning is decided by
    /// callers outside this file — confirm at the construction sites.
    Spacing(Abs, bool),
    /// A space of the given width.
    Space(Abs),
    /// A forced line break.
    Linebreak,
    /// An alignment point.
    Align,
    /// An introspection tag.
    Tag(Tag),
}
impl MathFragment {
    /// The width and height of the fragment.
    pub fn size(&self) -> Size {
        match self {
            Self::Glyph(glyph) => glyph.size,
            Self::Frame(fragment) => fragment.frame.size(),
            Self::Spacing(amount, _) => Size::with_x(*amount),
            Self::Space(amount) => Size::with_x(*amount),
            _ => Size::zero(),
        }
    }
    /// The width of the fragment.
    pub fn width(&self) -> Abs {
        match self {
            Self::Glyph(glyph) => glyph.size.x,
            Self::Frame(fragment) => fragment.frame.width(),
            Self::Spacing(amount, _) => *amount,
            Self::Space(amount) => *amount,
            _ => Abs::zero(),
        }
    }
    /// The height of the fragment (zero for spacing and markers).
    pub fn height(&self) -> Abs {
        match self {
            Self::Glyph(glyph) => glyph.size.y,
            Self::Frame(fragment) => fragment.frame.height(),
            _ => Abs::zero(),
        }
    }
    /// The distance from the baseline to the top of the fragment.
    pub fn ascent(&self) -> Abs {
        match self {
            Self::Glyph(glyph) => glyph.ascent(),
            Self::Frame(fragment) => fragment.frame.ascent(),
            _ => Abs::zero(),
        }
    }
    /// The distance from the baseline to the bottom of the fragment.
    pub fn descent(&self) -> Abs {
        match self {
            Self::Glyph(glyph) => glyph.descent(),
            Self::Frame(fragment) => fragment.frame.descent(),
            _ => Abs::zero(),
        }
    }
    /// Whether the fragment is invisible to layout (tags and frames that
    /// were explicitly marked as ignorant).
    pub fn is_ignorant(&self) -> bool {
        match self {
            Self::Frame(fragment) => fragment.ignorant,
            Self::Tag(_) => true,
            _ => false,
        }
    }
    /// The math class of the fragment.
    pub fn class(&self) -> MathClass {
        match self {
            Self::Glyph(glyph) => glyph.class,
            Self::Frame(fragment) => fragment.class,
            Self::Spacing(_, _) => MathClass::Space,
            Self::Space(_) => MathClass::Space,
            Self::Linebreak => MathClass::Space,
            Self::Align => MathClass::Special,
            Self::Tag(_) => MathClass::Special,
        }
    }
    /// The math size of the fragment, if it has one.
    pub fn math_size(&self) -> Option<MathSize> {
        match self {
            Self::Glyph(glyph) => Some(glyph.math_size),
            Self::Frame(fragment) => Some(fragment.math_size),
            _ => None,
        }
    }
    /// The font of the fragment (the glyph's own font, or the context's
    /// current font as a fallback), paired with the font size.
    #[inline]
    pub fn font(&self, ctx: &MathContext, styles: StyleChain) -> (Font, Abs) {
        (
            match self {
                Self::Glyph(glyph) => glyph.item.font.clone(),
                _ => ctx.font().clone(),
            },
            self.font_size().unwrap_or_else(|| styles.resolve(TextElem::size)),
        )
    }
    /// The font size of the fragment, if it has one.
    pub fn font_size(&self) -> Option<Abs> {
        match self {
            Self::Glyph(glyph) => Some(glyph.item.size),
            Self::Frame(fragment) => Some(fragment.font_size),
            _ => None,
        }
    }
    /// Overrides the fragment's math class (no-op for classless variants).
    pub fn set_class(&mut self, class: MathClass) {
        match self {
            Self::Glyph(glyph) => glyph.class = class,
            Self::Frame(fragment) => fragment.class = class,
            _ => {}
        }
    }
    /// Overrides the fragment's limit configuration (no-op for variants
    /// without one).
    pub fn set_limits(&mut self, limits: Limits) {
        match self {
            Self::Glyph(glyph) => glyph.limits = limits,
            Self::Frame(fragment) => fragment.limits = limits,
            _ => {}
        }
    }
    /// Whether the fragment is surrounded by spacing: fences always are;
    /// frames only when explicitly marked spaced and of normal or
    /// alphabetic class.
    pub fn is_spaced(&self) -> bool {
        if self.class() == MathClass::Fence {
            return true;
        }
        matches!(
            self,
            MathFragment::Frame(FrameFragment {
                spaced: true,
                class: MathClass::Normal | MathClass::Alphabetic,
                ..
            })
        )
    }
    /// Whether the fragment behaves like regular text (non-extended glyphs
    /// and frames flagged as text-like).
    pub fn is_text_like(&self) -> bool {
        match self {
            Self::Glyph(glyph) => !glyph.extended_shape,
            MathFragment::Frame(frame) => frame.text_like,
            _ => false,
        }
    }
    /// The italics correction of the fragment.
    pub fn italics_correction(&self) -> Abs {
        match self {
            Self::Glyph(glyph) => glyph.italics_correction,
            Self::Frame(fragment) => fragment.italics_correction,
            _ => Abs::zero(),
        }
    }
    /// The (top, bottom) accent attachment points; defaults to the
    /// horizontal center for variants without explicit attachment data.
    pub fn accent_attach(&self) -> (Abs, Abs) {
        match self {
            Self::Glyph(glyph) => glyph.accent_attach,
            Self::Frame(fragment) => fragment.accent_attach,
            _ => (self.width() / 2.0, self.width() / 2.0),
        }
    }
    /// Converts the fragment into a frame.
    pub fn into_frame(self) -> Frame {
        match self {
            Self::Glyph(glyph) => glyph.into_frame(),
            Self::Frame(fragment) => fragment.frame,
            Self::Tag(tag) => {
                // Tags become an empty frame carrying just the tag item.
                let mut frame = Frame::soft(Size::zero());
                frame.push(Point::zero(), FrameItem::Tag(tag));
                frame
            }
            _ => Frame::soft(self.size()),
        }
    }
    /// The fragment's limit configuration for attachments.
    pub fn limits(&self) -> Limits {
        match self {
            MathFragment::Glyph(glyph) => glyph.limits,
            MathFragment::Frame(fragment) => fragment.limits,
            _ => Limits::Never,
        }
    }
    /// The fill paint of a glyph fragment, if any.
    pub fn fill(&self) -> Option<Paint> {
        match self {
            Self::Glyph(glyph) => Some(glyph.item.fill.clone()),
            _ => None,
        }
    }
    /// Stretches a glyph fragment vertically; no-op for other variants.
    pub fn stretch_vertical(
        &mut self,
        ctx: &mut MathContext,
        height: Abs,
        short_fall: Abs,
    ) {
        if let Self::Glyph(glyph) = self {
            glyph.stretch_vertical(ctx, height, short_fall)
        }
    }
    /// Stretches a glyph fragment horizontally; no-op for other variants.
    pub fn stretch_horizontal(
        &mut self,
        ctx: &mut MathContext,
        width: Abs,
        short_fall: Abs,
    ) {
        if let Self::Glyph(glyph) = self {
            glyph.stretch_horizontal(ctx, width, short_fall)
        }
    }
    /// Centers a glyph fragment on the math axis; no-op for other variants.
    pub fn center_on_axis(&mut self) {
        if let Self::Glyph(glyph) = self {
            glyph.center_on_axis()
        }
    }
    /// If no kern table is provided for a corner, a kerning amount of zero is
    /// assumed.
    pub fn kern_at_height(&self, corner: Corner, height: Abs) -> Abs {
        match self {
            Self::Glyph(glyph) => {
                // For glyph assemblies we pick either the start or end glyph
                // depending on the corner.
                let is_vertical =
                    glyph.item.glyphs.iter().all(|glyph| glyph.y_advance != Em::zero());
                let glyph_index = match (is_vertical, corner) {
                    (true, Corner::TopLeft | Corner::TopRight) => {
                        glyph.item.glyphs.len() - 1
                    }
                    (false, Corner::TopRight | Corner::BottomRight) => {
                        glyph.item.glyphs.len() - 1
                    }
                    _ => 0,
                };
                kern_at_height(
                    &glyph.item.font,
                    GlyphId(glyph.item.glyphs[glyph_index].id),
                    corner,
                    Em::from_abs(height, glyph.item.size),
                )
                .unwrap_or_default()
                .at(glyph.item.size)
            }
            _ => Abs::zero(),
        }
    }
}
/// Wraps a glyph fragment in the fragment enum.
impl From<GlyphFragment> for MathFragment {
    fn from(glyph: GlyphFragment) -> Self {
        Self::Glyph(glyph)
    }
}
/// Wraps a frame fragment in the fragment enum.
impl From<FrameFragment> for MathFragment {
    fn from(fragment: FrameFragment) -> Self {
        Self::Frame(fragment)
    }
}
/// A fragment backed by shaped text — usually a single glyph, possibly
/// replaced by a stretch variant or a multi-part assembly.
#[derive(Clone)]
pub struct GlyphFragment {
    // Text stuff.
    /// The shaped text (font, size, fill, glyphs, span).
    pub item: ShapedText,
    // Math stuff.
    /// The fragment's layout size.
    pub size: Size,
    /// The distance from the top to the baseline, if known. `None` means
    /// the full height acts as the ascent (see `ascent`).
    pub baseline: Option<Abs>,
    /// The glyph's italics correction.
    pub italics_correction: Abs,
    /// The (top, bottom) accent attachment points.
    pub accent_attach: (Abs, Abs),
    /// The math size this fragment was laid out at.
    pub math_size: MathSize,
    /// The fragment's math class.
    pub class: MathClass,
    /// When attachments should be laid out as limits.
    pub limits: Limits,
    /// Whether the glyph is an extended shape per the font's MATH table
    /// (always true after assembly from parts).
    pub extended_shape: bool,
    /// NOTE(review): semantics not visible in this file — appears related
    /// to stretched middle fragments; confirm at use sites.
    pub mid_stretched: Option<bool>,
    // External frame stuff.
    /// Frame modifiers captured from the styles.
    pub modifiers: FrameModifiers,
    /// Baseline shift from the text styles.
    pub shift: Abs,
    /// Extra vertical offset accumulated by axis alignment.
    pub align: Abs,
}
impl GlyphFragment {
    /// Calls `new` with the given character.
    pub fn new_char(
        ctx: &MathContext,
        styles: StyleChain,
        c: char,
        span: Span,
    ) -> Option<Self> {
        Self::new(ctx.engine.world, styles, c.encode_utf8(&mut [0; 4]), span)
    }
    /// Selects a font to use and then shapes text.
    ///
    /// Returns `None` if no font could shape the text. The input must be a
    /// single grapheme cluster.
    #[comemo::memoize]
    pub fn new(
        world: Tracked<dyn World + '_>,
        styles: StyleChain,
        text: &str,
        span: Span,
    ) -> Option<GlyphFragment> {
        assert!(text.graphemes(true).count() == 1);
        let (font, glyphs) = shape(
            world,
            variant(styles),
            features(styles),
            language(styles),
            styles.get(TextElem::fallback),
            text,
            families(styles).collect(),
        )?;
        let item = ShapedText {
            text: text.into(),
            font,
            size: styles.resolve(TextElem::size),
            fill: styles.get_ref(TextElem::fill).as_decoration(),
            stroke: styles.resolve(TextElem::stroke).map(|s| s.unwrap_or_default()),
            lang: styles.get(TextElem::lang),
            region: styles.get(TextElem::region),
            glyphs: Glyphs::new(glyphs),
            span,
        };
        // Class and limits are derived from the first (only) character,
        // unless the styles override the class.
        let c = text.chars().next().unwrap();
        let limits = Limits::for_char(c);
        let class = styles
            .get(EquationElem::class)
            .or_else(|| default_math_class(c))
            .unwrap_or(MathClass::Normal);
        let mut fragment = Self {
            item,
            // Math
            math_size: styles.get(EquationElem::size),
            class,
            limits,
            mid_stretched: None,
            // Math in need of updating.
            extended_shape: false,
            italics_correction: Abs::zero(),
            accent_attach: (Abs::zero(), Abs::zero()),
            size: Size::zero(),
            baseline: None,
            // Misc
            align: Abs::zero(),
            shift: styles.resolve(TextElem::baseline),
            modifiers: FrameModifiers::get_in(styles),
        };
        fragment.update_glyph(true);
        Some(fragment)
    }
    /// Sets element id and boxes in appropriate way without changing other
    /// styles. This is used to replace the glyph with a stretch variant.
    pub fn update_glyph(&mut self, initial: bool) {
        let id = GlyphId(self.item.glyphs[0].id);
        let extended_shape = is_extended_shape(&self.item.font, id);
        let italics = italics_correction(&self.item.font, id).unwrap_or_default();
        let width = self.item.width();
        // The second condition below is needed so we don't accidentally add the
        // italic correction again to the original glyphs. This can happen when
        // `reset_glyph` is called after the glyph fragment is created.
        if !extended_shape && (initial || id.0 != self.item.original_id()) {
            self.item.glyphs[0].x_advance += italics;
        }
        let italics = italics.at(self.item.size);
        let (ascent, descent) =
            ascent_descent(&self.item.font, id).unwrap_or((Em::zero(), Em::zero()));
        // The fallback for accents is half the width plus or minus the italics
        // correction. This is similar to how top and bottom attachments are
        // shifted. For bottom accents we do not use the accent attach of the
        // base as it is meant for top accents.
        let top_accent_attach = accent_attach(&self.item.font, id)
            .map(|x| x.at(self.item.size))
            .unwrap_or((width + italics) / 2.0);
        let bottom_accent_attach = (width - italics) / 2.0;
        self.baseline = Some(ascent.at(self.item.size));
        self.size = Size::new(
            self.item.width(),
            ascent.at(self.item.size) + descent.at(self.item.size),
        );
        self.italics_correction = italics;
        self.accent_attach = (top_accent_attach, bottom_accent_attach);
        self.extended_shape = extended_shape;
    }
    // Reset a GlyphFragment's text field and math properties back to its
    // base_id's. This is used to return a glyph to its unstretched state.
    pub fn reset_glyph(&mut self) {
        self.align = Abs::zero();
        self.item.glyphs.reset();
        self.update_glyph(false);
    }
    /// The distance from the top of the frame to the baseline.
    pub fn baseline(&self) -> Abs {
        self.ascent()
    }
    /// The distance from the baseline to the top of the frame.
    pub fn ascent(&self) -> Abs {
        self.baseline.unwrap_or(self.size.y)
    }
    /// The distance from the baseline to the bottom of the frame.
    pub fn descent(&self) -> Abs {
        self.size.y - self.ascent()
    }
    /// Converts the fragment into a frame containing the text item.
    pub fn into_frame(self) -> Frame {
        let mut frame = Frame::soft(self.size);
        frame.set_baseline(self.baseline());
        frame.push(
            Point::with_y(self.ascent() + self.shift + self.align),
            FrameItem::Text(self.item.into()),
        );
        frame.modify(&self.modifiers);
        frame
    }
    /// Try to stretch a glyph to a desired height.
    pub fn stretch_vertical(
        &mut self,
        ctx: &mut MathContext,
        height: Abs,
        short_fall: Abs,
    ) {
        self.stretch(ctx, height, short_fall, Axis::Y)
    }
    /// Try to stretch a glyph to a desired width.
    pub fn stretch_horizontal(
        &mut self,
        ctx: &mut MathContext,
        width: Abs,
        short_fall: Abs,
    ) {
        self.stretch(ctx, width, short_fall, Axis::X)
    }
    /// Try to stretch a glyph to a desired width or height.
    ///
    /// The resulting frame may not have the exact desired width or height.
    pub fn stretch(
        &mut self,
        ctx: &mut MathContext,
        target: Abs,
        short_fall: Abs,
        axis: Axis,
    ) {
        self.reset_glyph();
        // If the base glyph is good enough, use it.
        let mut advance = self.size.get(axis);
        if axis == Axis::X && !self.extended_shape {
            // For consistency, we subtract the italics correction from the
            // glyph's width if it was added in `update_glyph`.
            advance -= self.italics_correction;
        }
        let short_target = target - short_fall;
        if short_target <= advance {
            return;
        }
        let id = GlyphId(self.item.glyphs[0].id);
        let font = self.item.font.clone();
        let Some(construction) = glyph_construction(&font, id, axis) else { return };
        // Search for a pre-made variant with a good advance.
        let mut best_id = id;
        let mut best_advance = advance;
        for variant in construction.variants {
            best_id = variant.variant_glyph;
            best_advance =
                self.item.font.to_em(variant.advance_measurement).at(self.item.size);
            if short_target <= best_advance {
                break;
            }
        }
        // This is either good or the best we've got.
        if short_target <= best_advance || construction.assembly.is_none() {
            self.item.glyphs.update(vec![ShapedGlyph {
                id: best_id.0,
                x_advance: self.item.font.x_advance(best_id.0).unwrap_or_default(),
                x_offset: Em::zero(),
                y_advance: self.item.font.y_advance(best_id.0).unwrap_or_default(),
                y_offset: Em::zero(),
            }]);
            self.update_glyph(false);
            return;
        }
        // Assemble from parts.
        let assembly = construction.assembly.unwrap();
        let min_overlap = min_connector_overlap(&self.item.font)
            .unwrap_or_default()
            .at(self.item.size);
        assemble(ctx, self, assembly, min_overlap, target, axis);
    }
    /// Vertically adjust the fragment's frame so that it is centered
    /// on the axis.
    pub fn center_on_axis(&mut self) {
        self.align_on_axis(VAlignment::Horizon);
    }
    /// Vertically adjust the fragment's frame so that it is aligned
    /// to the given alignment on the axis.
    pub fn align_on_axis(&mut self, align: VAlignment) {
        let h = self.size.y;
        let axis = self.item.font.math().axis_height.at(self.item.size);
        // Record the baseline shift in `align` so `into_frame` places the
        // text item correctly.
        self.align += self.baseline();
        self.baseline = Some(align.inv().position(h + axis * 2.0));
        self.align -= self.baseline();
    }
}
/// Concise debug output showing only the underlying text.
impl Debug for GlyphFragment {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "GlyphFragment({:?})", self.item.text)
    }
}
/// A fragment that wraps an already laid-out frame together with math
/// layout metadata.
#[derive(Debug, Clone)]
pub struct FrameFragment {
    /// The wrapped frame.
    pub frame: Frame,
    /// The font size at which the frame was laid out.
    pub font_size: Abs,
    /// The fragment's math class.
    pub class: MathClass,
    /// The math size this fragment was laid out at.
    pub math_size: MathSize,
    /// When attachments should be laid out as limits.
    pub limits: Limits,
    /// Whether the fragment is surrounded by spacing (see
    /// `MathFragment::is_spaced`).
    pub spaced: bool,
    /// The ascent of the frame before any adjustments.
    pub base_ascent: Abs,
    /// The descent of the frame before any adjustments.
    pub base_descent: Abs,
    /// The fragment's italics correction.
    pub italics_correction: Abs,
    /// The (top, bottom) accent attachment points.
    pub accent_attach: (Abs, Abs),
    /// Whether the fragment behaves like regular text.
    pub text_like: bool,
    /// Whether the fragment is invisible to layout.
    pub ignorant: bool,
}
impl FrameFragment {
    /// Creates a frame fragment with defaults derived from the styles and
    /// the frame's own metrics. The accent attachment defaults to the
    /// horizontal center.
    pub fn new(styles: StyleChain, frame: Frame) -> Self {
        let base_ascent = frame.ascent();
        let base_descent = frame.descent();
        let accent_attach = frame.width() / 2.0;
        Self {
            frame: frame.modified(&FrameModifiers::get_in(styles)),
            font_size: styles.resolve(TextElem::size),
            class: styles.get(EquationElem::class).unwrap_or(MathClass::Normal),
            math_size: styles.get(EquationElem::size),
            limits: Limits::Never,
            spaced: false,
            base_ascent,
            base_descent,
            italics_correction: Abs::zero(),
            accent_attach: (accent_attach, accent_attach),
            text_like: false,
            ignorant: false,
        }
    }
    /// Builder: overrides the math class.
    pub fn with_class(self, class: MathClass) -> Self {
        Self { class, ..self }
    }
    /// Builder: overrides the spaced flag.
    pub fn with_spaced(self, spaced: bool) -> Self {
        Self { spaced, ..self }
    }
    /// Builder: overrides the base ascent.
    pub fn with_base_ascent(self, base_ascent: Abs) -> Self {
        Self { base_ascent, ..self }
    }
    /// Builder: overrides the base descent.
    pub fn with_base_descent(self, base_descent: Abs) -> Self {
        Self { base_descent, ..self }
    }
    /// Builder: overrides the italics correction.
    pub fn with_italics_correction(self, italics_correction: Abs) -> Self {
        Self { italics_correction, ..self }
    }
    /// Builder: overrides the accent attachment points.
    pub fn with_accent_attach(self, accent_attach: (Abs, Abs)) -> Self {
        Self { accent_attach, ..self }
    }
    /// Builder: overrides the text-like flag.
    pub fn with_text_like(self, text_like: bool) -> Self {
        Self { text_like, ..self }
    }
    /// Builder: overrides the ignorant flag.
    pub fn with_ignorant(self, ignorant: bool) -> Self {
        Self { ignorant, ..self }
    }
}
/// Looks up a glyph's ascent and descent from its bounding box.
///
/// Returns `None` if the font has no bounding box for the glyph. The descent
/// is positive when the glyph extends below the baseline.
fn ascent_descent(font: &Font, id: GlyphId) -> Option<(Em, Em)> {
    let bbox = font.ttf().glyph_bounding_box(id)?;
    Some((font.to_em(bbox.y_max), -font.to_em(bbox.y_min)))
}
/// Look up the italics correction for a glyph.
///
/// Returns `None` if the font's MATH table has no italics-correction record
/// for the glyph.
fn italics_correction(font: &Font, id: GlyphId) -> Option<Em> {
    font.ttf()
        .tables()
        .math?
        .glyph_info?
        .italic_corrections?
        .get(id)
        .map(|value| font.to_em(value.value))
}
/// Look up the top accent attachment position for a glyph.
///
/// Returns `None` if the font's MATH table has no attachment record for the
/// glyph.
fn accent_attach(font: &Font, id: GlyphId) -> Option<Em> {
    font.ttf()
        .tables()
        .math?
        .glyph_info?
        .top_accent_attachments?
        .get(id)
        .map(|value| font.to_em(value.value))
}
/// Look up whether a glyph is an extended shape.
///
/// A glyph is an extended shape if it appears in the MATH table's
/// extended-shape coverage.
fn is_extended_shape(font: &Font, id: GlyphId) -> bool {
    font.ttf()
        .tables()
        .math
        .and_then(|math| math.glyph_info)
        .and_then(|glyph_info| glyph_info.extended_shapes)
        .and_then(|coverage| coverage.get(id))
        .is_some()
}
/// Look up a kerning value at a specific corner and height.
///
/// Returns `None` if the MATH table provides no kern record for the glyph or
/// corner. Otherwise walks the kern table's height steps to find the kern
/// value applicable at the given height.
fn kern_at_height(font: &Font, id: GlyphId, corner: Corner, height: Em) -> Option<Em> {
    let kerns = font.ttf().tables().math?.glyph_info?.kern_infos?.get(id)?;
    let kern = match corner {
        Corner::TopLeft => kerns.top_left,
        Corner::TopRight => kerns.top_right,
        Corner::BottomRight => kerns.bottom_right,
        Corner::BottomLeft => kerns.bottom_left,
    }?;
    // Advance past all correction-height steps below the requested height.
    let mut i = 0;
    while i < kern.count() && height > font.to_em(kern.height(i)?.value) {
        i += 1;
    }
    Some(font.to_em(kern.kern(i)?.value))
}
/// Determines along which axes the given glyph can be stretched, i.e. for
/// which axes the font's MATH table provides a glyph construction.
pub fn stretch_axes(font: &Font, id: u16) -> Axes<bool> {
    let id = GlyphId(id);
    // Shared lookup: checks whether a construction exists for one axis.
    let has_construction = |horizontal: bool| {
        font.ttf()
            .tables()
            .math
            .and_then(|math| math.variants)
            .and_then(|variants| {
                if horizontal {
                    variants.horizontal_constructions.get(id)
                } else {
                    variants.vertical_constructions.get(id)
                }
            })
            .is_some()
    };
    Axes::new(has_construction(true), has_construction(false))
}
/// The minimum overlap between adjacent assembly parts, from the MATH
/// table's variants subtable. `None` if the font has no such data.
fn min_connector_overlap(font: &Font) -> Option<Em> {
    font.ttf()
        .tables()
        .math?
        .variants
        .map(|variants| font.to_em(variants.min_connector_overlap))
}
/// Look up the glyph construction (variants + optional assembly) for a glyph
/// along the given stretch axis. `None` if the font provides none.
fn glyph_construction(
    font: &Font,
    id: GlyphId,
    axis: Axis,
) -> Option<GlyphConstruction<'_>> {
    font.ttf()
        .tables()
        .math?
        .variants
        .map(|variants| match axis {
            Axis::X => variants.horizontal_constructions,
            Axis::Y => variants.vertical_constructions,
        })?
        .get(id)
}
/// Assemble a glyph from parts.
///
/// First searches for the number of extender repetitions and the overlap
/// ratio that reach `target`, then builds the actual glyph sequence and
/// updates `base`'s metrics accordingly.
fn assemble(
    ctx: &mut MathContext,
    base: &mut GlyphFragment,
    assembly: GlyphAssembly,
    min_overlap: Abs,
    target: Abs,
    axis: Axis,
) {
    // Determine the number of times the extenders need to be repeated as well
    // as a ratio specifying how much to spread the parts apart
    // (0 = maximal overlap, 1 = minimal overlap).
    let mut full;
    let mut ratio;
    let mut repeat = 0;
    loop {
        full = Abs::zero();
        ratio = 0.0;
        let mut parts = parts(assembly, repeat).peekable();
        let mut growable = Abs::zero();
        while let Some(part) = parts.next() {
            let mut advance = base.item.font.to_em(part.full_advance).at(base.item.size);
            if let Some(next) = parts.peek() {
                let max_overlap = base
                    .item
                    .font
                    .to_em(part.end_connector_length.min(next.start_connector_length))
                    .at(base.item.size);
                if max_overlap < min_overlap {
                    // This condition happening is indicative of a bug in the
                    // font.
                    ctx.engine.sink.warn(warning!(
                        base.item.span,
                        "glyph has assembly parts with overlap less than minConnectorOverlap";
                        hint: "its rendering may appear broken - this is probably a font bug";
                        hint: "please file an issue at https://github.com/typst/typst/issues";
                    ));
                }
                advance -= max_overlap;
                // In case we have that max_overlap < min_overlap, ensure we
                // don't decrease the value of growable.
                growable += (max_overlap - min_overlap).max(Abs::zero());
            }
            full += advance;
        }
        // Spread the parts apart as far as allowed to approach the target.
        if full < target {
            let delta = target - full;
            ratio = (delta / growable).min(1.0);
            full += ratio * growable;
        }
        if target <= full || repeat >= MAX_REPEATS {
            break;
        }
        repeat += 1;
    }
    // Second pass: build the glyph list using the found repeat count and
    // spread ratio.
    let mut glyphs = vec![];
    let mut parts = parts(assembly, repeat).peekable();
    while let Some(part) = parts.next() {
        let mut advance = base.item.font.to_em(part.full_advance).at(base.item.size);
        if let Some(next) = parts.peek() {
            let max_overlap = base
                .item
                .font
                .to_em(part.end_connector_length.min(next.start_connector_length))
                .at(base.item.size);
            advance -= max_overlap;
            advance += ratio * (max_overlap - min_overlap);
        }
        let (x_advance, y_advance, y_offset) = match axis {
            Axis::X => (Em::from_abs(advance, base.item.size), Em::zero(), Em::zero()),
            Axis::Y => (
                Em::zero(),
                Em::from_abs(advance, base.item.size),
                // Glyph parts used in vertical assemblies are typically aligned
                // at the vertical origin. This way, they combine properly when
                // drawn consecutively, as required by the MATH table spec.
                //
                // However, in some fonts, they aren't. To still have them align
                // properly, we are vertically offsetting such glyphs by their
                // bounding-box computed descent. (Positive descent means that
                // a glyph extends below the baseline and then we must move it
                // up for it to align properly. `y_advance` is Y-up, so that
                // matches up.)
                ascent_descent(&base.item.font, part.glyph_id)
                    .map(|x| x.1)
                    .unwrap_or_default(),
            ),
        };
        glyphs.push(ShapedGlyph {
            id: part.glyph_id.0,
            x_advance,
            x_offset: Em::zero(),
            y_advance,
            y_offset,
        });
    }
    // Update the fragment's metrics for the assembled glyph sequence.
    match axis {
        Axis::X => {
            base.size.x = full;
            // Height is the maximal ascent/descent over all parts.
            let (ascent, descent) = glyphs
                .iter()
                .filter_map(|glyph| ascent_descent(&base.item.font, GlyphId(glyph.id)))
                .reduce(|(ma, md), (a, d)| (ma.max(a), md.max(d)))
                .unwrap_or((Em::zero(), Em::zero()));
            base.baseline = Some(ascent.at(base.item.size));
            base.size.y = (ascent + descent).at(base.item.size);
        }
        Axis::Y => {
            base.baseline = None;
            base.size.y = full;
            // Width is the widest part.
            base.size.x = glyphs
                .iter()
                .map(|glyph| base.item.font.x_advance(glyph.id).unwrap_or_default())
                .max()
                .unwrap_or_default()
                .at(base.item.size);
        }
    }
    base.item.glyphs.update(glyphs);
    base.italics_correction = base
        .item
        .font
        .to_em(assembly.italics_correction.value)
        .at(base.item.size);
    if axis == Axis::X {
        base.accent_attach = (full / 2.0, full / 2.0);
    }
    base.mid_stretched = None;
    base.extended_shape = true;
}
/// Return an iterator over the assembly's parts with extenders repeated the
/// specified number of times.
///
/// With `repeat == 0`, extender parts are omitted entirely; non-extender
/// parts always appear exactly once.
fn parts(
    assembly: GlyphAssembly<'_>,
    repeat: usize,
) -> impl Iterator<Item = GlyphPart> + '_ {
    assembly.parts.into_iter().flat_map(move |part| {
        let count = if part.part_flags.extender() { repeat } else { 1 };
        std::iter::repeat_n(part, count)
    })
}
/// Whether the font's GSUB table advertises the `dtls` (dotless forms)
/// OpenType feature.
pub fn has_dtls_feat(font: &Font) -> bool {
    font.ttf()
        .tables()
        .gsub
        .and_then(|gsub| gsub.features.index(ttf_parser::Tag::from_bytes(b"dtls")))
        .is_some()
}
/// Describes in which situation a frame should use limits for attachments
/// (placing scripts above/below instead of to the side).
#[derive(Debug, Copy, Clone)]
pub enum Limits {
    /// Always scripts.
    Never,
    /// Display limits only in `display` math.
    Display,
    /// Always limits.
    Always,
}
impl Limits {
    /// The default limit configuration if the given character is the base.
    ///
    /// Large operators use display-style limits, except integrals which
    /// always use scripts; relations always use limits.
    pub fn for_char(c: char) -> Self {
        match default_math_class(c) {
            Some(MathClass::Large) => {
                if is_integral_char(c) {
                    Limits::Never
                } else {
                    Limits::Display
                }
            }
            Some(MathClass::Relation) => Limits::Always,
            _ => Limits::Never,
        }
    }
    /// The default limit configuration for a math class.
    pub fn for_class(class: MathClass) -> Self {
        match class {
            MathClass::Large => Self::Display,
            MathClass::Relation => Self::Always,
            _ => Self::Never,
        }
    }
    /// Whether limits should be displayed in this context.
    pub fn active(&self, styles: StyleChain) -> bool {
        match self {
            Self::Always => true,
            Self::Display => styles.get(EquationElem::size) == MathSize::Display,
            Self::Never => false,
        }
    }
}
/// Determines if the character is one of a variety of integral signs.
///
/// Covers the Unicode ranges U+222B–U+2233 (`∫` through `∳`) and
/// U+2A0B–U+2A1C (`⨋` through `⨜`).
fn is_integral_char(c: char) -> bool {
    matches!(c, '∫'..='∳' | '⨋'..='⨜')
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/root.rs | crates/typst-layout/src/math/root.rs | use typst_library::diag::SourceResult;
use typst_library::foundations::{Packed, StyleChain, SymbolElem};
use typst_library::layout::{Abs, Frame, FrameItem, Point, Size};
use typst_library::math::{EquationElem, MathSize, RootElem};
use typst_library::text::TextElem;
use typst_library::visualize::{FixedStroke, Geometry};
use super::{FrameFragment, MathContext, style_cramped};
/// Lays out a [`RootElem`].
///
/// TeXbook page 443, page 360
/// See also: <https://www.w3.org/TR/mathml-core/#radicals-msqrt-mroot>
#[typst_macros::time(name = "math.root", span = elem.span())]
pub fn layout_root(
    elem: &Packed<RootElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();
    // Layout radicand.
    let radicand = {
        // The radicand is laid out in a cramped style (no raised exponents).
        let cramped = style_cramped();
        let styles = styles.chain(&cramped);
        let run = ctx.layout_into_run(&elem.radicand, styles)?;
        let multiline = run.is_multiline();
        let radicand = run.into_fragment(styles);
        if multiline {
            // Align the frame center line with the math axis.
            let (font, size) = radicand.font(ctx, styles);
            let axis = font.math().axis_height.at(size);
            let mut radicand = radicand.into_frame();
            radicand.set_baseline(radicand.height() / 2.0 + axis);
            radicand
        } else {
            radicand.into_frame()
        }
    };
    // Layout root symbol.
    let mut sqrt =
        ctx.layout_into_fragment(&SymbolElem::packed('√').spanned(span), styles)?;
    // MATH-table constants governing the radical's layout.
    let (font, size) = sqrt.font(ctx, styles);
    let thickness = font.math().radical_rule_thickness.at(size);
    let extra_ascender = font.math().radical_extra_ascender.at(size);
    let kern_before = font.math().radical_kern_before_degree.at(size);
    let kern_after = font.math().radical_kern_after_degree.at(size);
    let raise_factor = font.math().radical_degree_bottom_raise_percent;
    let gap = match styles.get(EquationElem::size) {
        MathSize::Display => font.math().radical_display_style_vertical_gap,
        _ => font.math().radical_vertical_gap,
    }
    .at(size);
    // The overline above the radicand, inheriting the surd's fill.
    let line = FrameItem::Shape(
        Geometry::Line(Point::with_x(radicand.width())).stroked(FixedStroke::from_pair(
            sqrt.fill()
                .unwrap_or_else(|| styles.get_ref(TextElem::fill).as_decoration()),
            thickness,
        )),
        span,
    );
    let target = radicand.height() + thickness + gap;
    sqrt.stretch_vertical(ctx, target, Abs::zero());
    let sqrt = sqrt.into_frame();
    // Layout the index.
    let sscript = EquationElem::size.set(MathSize::ScriptScript).wrap();
    let index = elem
        .index
        .get_ref(styles)
        .as_ref()
        .map(|elem| ctx.layout_into_frame(elem, styles.chain(&sscript)))
        .transpose()?;
    // TeXbook, page 443, item 11
    // Keep original gap, and then distribute any remaining free space
    // equally above and below.
    let gap = gap.max((sqrt.height() - thickness - radicand.height() + gap) / 2.0);
    let sqrt_ascent = radicand.ascent() + gap + thickness;
    let descent = sqrt.height() - sqrt_ascent;
    let inner_ascent = sqrt_ascent + extra_ascender;
    let mut sqrt_offset = Abs::zero();
    let mut shift_up = Abs::zero();
    let mut ascent = inner_ascent;
    if let Some(index) = &index {
        sqrt_offset = kern_before + index.width() + kern_after;
        // The formula below for how much raise the index by comes from
        // the TeXbook, page 360, in the definition of `\root`.
        // However, the `+ index.descent()` part is different from TeX.
        // Without it, descenders can collide with the surd, a rarity
        // in practice, but possible. MS Word also adjusts index positions
        // for descenders.
        shift_up = raise_factor * (inner_ascent - descent) + index.descent();
        ascent.set_max(shift_up + index.ascent());
    }
    // If `sqrt_offset` came out negative (negative degree kerning), the surd
    // stays at x = 0 and the index is shifted right instead (see `index_x`).
    let sqrt_x = sqrt_offset.max(Abs::zero());
    let radicand_x = sqrt_x + sqrt.width();
    let radicand_y = ascent - radicand.ascent();
    let width = radicand_x + radicand.width();
    let size = Size::new(width, ascent + descent);
    // The extra "- thickness" comes from the fact that the sqrt is placed
    // in `push_frame` with respect to its top, not its baseline.
    let sqrt_pos = Point::new(sqrt_x, radicand_y - gap - thickness);
    let line_pos = Point::new(radicand_x, radicand_y - gap - (thickness / 2.0));
    let radicand_pos = Point::new(radicand_x, radicand_y);
    let mut frame = Frame::soft(size);
    frame.set_baseline(ascent);
    if let Some(index) = index {
        let index_x = -sqrt_offset.min(Abs::zero()) + kern_before;
        let index_pos = Point::new(index_x, ascent - index.ascent() - shift_up);
        frame.push_frame(index_pos, index);
    }
    frame.push_frame(sqrt_pos, sqrt);
    frame.push(line_pos, line);
    frame.push_frame(radicand_pos, radicand);
    ctx.push(FrameFragment::new(styles, frame));
    Ok(())
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/cancel.rs | crates/typst-layout/src/math/cancel.rs | use comemo::Track;
use typst_library::diag::{At, SourceResult};
use typst_library::foundations::{Context, Packed, Smart, StyleChain};
use typst_library::layout::{Abs, Angle, Frame, FrameItem, Point, Rel, Size, Transform};
use typst_library::math::{CancelAngle, CancelElem};
use typst_library::text::TextElem;
use typst_library::visualize::{FixedStroke, Geometry};
use typst_syntax::Span;
use super::{FrameFragment, MathContext};
/// Lays out a [`CancelElem`].
///
/// Lays out the body, then overlays one cancel line (or two crossing lines
/// when `cross` is set) centered on the body. The body's math properties
/// (class, italics correction, accent attachment, text-likeness) are
/// forwarded to the resulting frame fragment.
#[typst_macros::time(name = "math.cancel", span = elem.span())]
pub fn layout_cancel(
    elem: &Packed<CancelElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let body = ctx.layout_into_fragment(&elem.body, styles)?;
    // Preserve properties of body.
    let body_class = body.class();
    let body_italics = body.italics_correction();
    let body_attach = body.accent_attach();
    let body_text_like = body.is_text_like();
    let mut body = body.into_frame();
    let body_size = body.size();
    let span = elem.span();
    let length = elem.length.resolve(styles);
    // Default stroke: text fill with default thickness.
    let stroke = elem.stroke.resolve(styles).unwrap_or(FixedStroke {
        paint: styles.get_ref(TextElem::fill).as_decoration(),
        ..Default::default()
    });
    let invert = elem.inverted.get(styles);
    let cross = elem.cross.get(styles);
    let angle = elem.angle.get_ref(styles);
    // With `cross`, the second line is always the inverted one, so the
    // first stays uninverted.
    let invert_first_line = !cross && invert;
    let first_line = draw_cancel_line(
        ctx,
        length,
        stroke.clone(),
        invert_first_line,
        angle,
        body_size,
        styles,
        span,
    )?;
    // The origin of our line is the very middle of the element.
    let center = body_size.to_point() / 2.0;
    body.push_frame(center, first_line);
    if cross {
        // Draw the second line.
        let second_line =
            draw_cancel_line(ctx, length, stroke, true, angle, body_size, styles, span)?;
        body.push_frame(center, second_line);
    }
    ctx.push(
        FrameFragment::new(styles, body)
            .with_class(body_class)
            .with_italics_correction(body_italics)
            .with_accent_attach(body_attach)
            .with_text_like(body_text_like),
    );
    Ok(())
}
/// Draws a cancel line.
///
/// Returns a soft frame of `body_size` containing a single stroked line whose
/// midpoint lies at the frame's origin, rotated by the resolved angle.
#[allow(clippy::too_many_arguments)]
fn draw_cancel_line(
    ctx: &mut MathContext,
    length_scale: Rel<Abs>,
    stroke: FixedStroke,
    invert: bool,
    angle: &Smart<CancelAngle>,
    body_size: Size,
    styles: StyleChain,
    span: Span,
) -> SourceResult<Frame> {
    let default = default_angle(body_size);
    let mut angle = match angle {
        // Non specified angle defaults to the diagonal
        Smart::Auto => default,
        Smart::Custom(angle) => match angle {
            // This specifies the absolute angle w.r.t y-axis clockwise.
            CancelAngle::Angle(v) => *v,
            // This specifies a function that takes the default angle as input.
            CancelAngle::Func(func) => func
                .call(ctx.engine, Context::new(None, Some(styles)).track(), [default])?
                .cast()
                .at(span)?,
        },
    };
    // invert means flipping along the y-axis
    if invert {
        angle *= -1.0;
    }
    // same as above, the default length is the diagonal of the body box.
    let default_length = body_size.to_point().hypot();
    let length = length_scale.relative_to(default_length);
    // Draw a vertical line of length and rotate it by angle
    let start = Point::new(Abs::zero(), length / 2.0);
    let delta = Point::new(Abs::zero(), -length);
    let mut frame = Frame::soft(body_size);
    frame.push(start, FrameItem::Shape(Geometry::Line(delta).stroked(stroke), span));
    // Having the middle of the line at the origin is convenient here.
    frame.transform(Transform::rotate(angle));
    Ok(frame)
}
/// The default line angle for a body of the given size.
///
/// The default cancel line runs along the diagonal of the body box, so the
/// angle is inferred from the box's aspect ratio. It is measured clockwise
/// from the y-axis and lies in the range [0, Pi/2]:
///
/// ```text
///              B
///             /|
///  diagonal  / | height
///           /  |
///          /   |
///         O ----
///          width
/// ```
fn default_angle(body: Size) -> Angle {
    // arctan(width / height) gives the diagonal's inclination w.r.t. the
    // y-axis, in radians within [0, Pi/2].
    Angle::rad((body.x / body.y).atan())
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/text.rs | crates/typst-layout/src/math/text.rs | use codex::styling::{MathStyle, to_style};
use ecow::EcoString;
use typst_library::diag::SourceResult;
use typst_library::foundations::{Packed, Resolve, StyleChain, SymbolElem};
use typst_library::layout::{Abs, Size};
use typst_library::math::{EquationElem, MathSize};
use typst_library::text::{
BottomEdge, BottomEdgeMetric, TextElem, TopEdge, TopEdgeMetric,
};
use typst_syntax::{Span, is_newline};
use unicode_math_class::MathClass;
use unicode_segmentation::UnicodeSegmentation;
use super::{
FrameFragment, GlyphFragment, MathContext, MathFragment, MathRun, has_dtls_feat,
style_dtls,
};
/// Lays out a [`TextElem`].
pub fn layout_text(
    elem: &Packed<TextElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();
    let text = &elem.text;
    // Single-line text takes the fast inline path; text containing newlines
    // is laid out line by line.
    let fragment = if !text.contains(is_newline) {
        layout_inline_text(text, span, ctx, styles)?
    } else {
        layout_text_lines(text.split(is_newline), span, ctx, styles)?
    };
    ctx.push(fragment);
    Ok(())
}
/// Layout multiple lines of text.
///
/// Lines are separated by [`MathFragment::Linebreak`]s; empty lines produce
/// no inline fragment of their own. The resulting frame is vertically
/// centered on the font's math axis.
fn layout_text_lines<'a>(
    lines: impl Iterator<Item = &'a str>,
    span: Span,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<FrameFragment> {
    let mut fragments = vec![];
    for (i, line) in lines.enumerate() {
        if i != 0 {
            fragments.push(MathFragment::Linebreak);
        }
        if !line.is_empty() {
            fragments.push(layout_inline_text(line, span, ctx, styles)?.into());
        }
    }
    let mut frame = MathRun::new(fragments).into_frame(styles);
    // Place the baseline so the whole block is centered on the math axis.
    let axis = ctx.font().math().axis_height.resolve(styles);
    frame.set_baseline(frame.height() / 2.0 + axis);
    Ok(FrameFragment::new(styles, frame))
}
/// Layout the given text string into a [`FrameFragment`] after styling all
/// characters for the math font (without auto-italics).
///
/// Purely numeric text (ASCII digits and '.') takes a glyph-by-glyph fast
/// path; everything else is delegated to the normal inline layouter.
fn layout_inline_text(
    text: &str,
    span: Span,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<FrameFragment> {
    let variant = styles.get(EquationElem::variant);
    let bold = styles.get(EquationElem::bold);
    // Disable auto-italic.
    let italic = styles.get(EquationElem::italic).or(Some(false));
    if text.chars().all(|c| c.is_ascii_digit() || c == '.') {
        // Small optimization for numbers. Note that this lays out slightly
        // differently to normal text and is worth re-evaluating in the future.
        let mut fragments = vec![];
        for unstyled_c in text.chars() {
            // This is fine as ascii digits and '.' can never end up as more
            // than a single char after styling.
            let style = MathStyle::select(unstyled_c, variant, bold, italic);
            let c = to_style(unstyled_c, style).next().unwrap();
            // This won't panic as ASCII digits and '.' will never end up as
            // nothing after shaping.
            let glyph = GlyphFragment::new_char(ctx, styles, c, span).unwrap();
            fragments.push(glyph.into());
        }
        let frame = MathRun::new(fragments).into_frame(styles);
        Ok(FrameFragment::new(styles, frame).with_text_like(true))
    } else {
        // Use tight bounds for the edges so the fragment's extent reflects
        // the actual ink of the text.
        let local = [
            TextElem::top_edge.set(TopEdge::Metric(TopEdgeMetric::Bounds)),
            TextElem::bottom_edge.set(BottomEdge::Metric(BottomEdgeMetric::Bounds)),
        ]
        .map(|p| p.wrap());
        let styles = styles.chain(&local);
        // Apply the math styling (variant/bold/italic) per character before
        // handing off to the inline layouter.
        let styled_text: EcoString = text
            .chars()
            .flat_map(|c| to_style(c, MathStyle::select(c, variant, bold, italic)))
            .collect();
        let elem = TextElem::packed(styled_text).spanned(span);
        // There isn't a natural width for a paragraph in a math environment;
        // because it will be placed somewhere probably not at the left margin
        // it will overflow. So emulate an `hbox` instead and allow the
        // paragraph to extend as far as needed.
        let frame = crate::inline::layout_inline(
            ctx.engine,
            &[(&elem, styles)],
            &mut ctx.locator.next(&span).split(),
            styles,
            Size::splat(Abs::inf()),
            false,
        )?
        .into_frame();
        Ok(FrameFragment::new(styles, frame)
            .with_class(MathClass::Alphabetic)
            .with_text_like(true)
            .with_spaced(true))
    }
}
/// Layout a single character in the math font with the correct styling applied
/// (includes auto-italics).
///
/// The symbol's text is processed per grapheme cluster; each cluster becomes
/// at most one glyph fragment.
pub fn layout_symbol(
    elem: &Packed<SymbolElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let variant = styles.get(EquationElem::variant);
    let bold = styles.get(EquationElem::bold);
    let italic = styles.get(EquationElem::italic);
    let dtls = style_dtls();
    let has_dtls_feat = has_dtls_feat(ctx.font());
    for cluster in elem.text.graphemes(true) {
        // Switch dotless char to normal when we have the dtls OpenType feature.
        // This should happen before the main styling pass.
        let mut enable_dtls = false;
        let text: EcoString = cluster
            .chars()
            .flat_map(|mut c| {
                if has_dtls_feat && let Some(d) = try_dotless(c) {
                    enable_dtls = true;
                    c = d;
                }
                to_style(c, MathStyle::select(c, variant, bold, italic))
            })
            .collect();
        // Only chain the dtls style when a dotless substitution happened.
        let styles = if enable_dtls { styles.chain(&dtls) } else { styles };
        if let Some(mut glyph) =
            GlyphFragment::new(ctx.engine.world, styles, &text, elem.span())
        {
            if glyph.class == MathClass::Large {
                // In display style, stretch large operators to the font's
                // minimum display-operator height.
                if styles.get(EquationElem::size) == MathSize::Display {
                    let height = glyph
                        .item
                        .font
                        .math()
                        .display_operator_min_height
                        .at(glyph.item.size);
                    glyph.stretch_vertical(ctx, height, Abs::zero());
                };
                // TeXbook p 155. Large operators are always vertically centered on
                // the axis.
                glyph.center_on_axis();
            }
            ctx.push(glyph);
        }
    }
    Ok(())
}
/// The non-dotless version of a dotless character that can be used with the
/// `dtls` OpenType feature.
///
/// Returns `None` for characters that have no dotless counterpart.
pub fn try_dotless(c: char) -> Option<char> {
    if c == 'ı' {
        Some('i')
    } else if c == 'ȷ' {
        Some('j')
    } else {
        None
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/lr.rs | crates/typst-layout/src/math/lr.rs | use typst_library::diag::SourceResult;
use typst_library::foundations::{Packed, StyleChain};
use typst_library::layout::{Abs, Axis, Rel};
use typst_library::math::{EquationElem, LrElem, MidElem};
use typst_utils::SliceExt;
use unicode_math_class::MathClass;
use super::{DELIM_SHORT_FALL, MathContext, MathFragment, stretch_fragment};
/// Lays out an [`LrElem`].
#[typst_macros::time(name = "math.lr", span = elem.span())]
pub fn layout_lr(
    elem: &Packed<LrElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // Extract from an EquationElem.
    let mut body = &elem.body;
    if let Some(equation) = body.to_packed::<EquationElem>() {
        body = &equation.body;
    }
    // Extract implicit LrElem.
    if let Some(lr) = body.to_packed::<LrElem>()
        && lr.size.get(styles).is_one()
    {
        body = &lr.body;
    }
    let mut fragments = ctx.layout_into_fragments(body, styles)?;
    // Ignore leading and trailing ignorant fragments.
    let (start_idx, end_idx) = fragments.split_prefix_suffix(|f| f.is_ignorant());
    let inner_fragments = &mut fragments[start_idx..end_idx];
    // The maximum distance of any inner fragment from the math axis; the
    // delimiters are sized relative to twice this extent (i.e. symmetric
    // about the axis).
    let mut max_extent = Abs::zero();
    for fragment in inner_fragments.iter() {
        let (font, size) = fragment.font(ctx, styles);
        let axis = font.math().axis_height.at(size);
        let extent = (fragment.ascent() - axis).max(fragment.descent() + axis);
        max_extent = max_extent.max(extent);
    }
    let relative_to = 2.0 * max_extent;
    let height = elem.size.resolve(styles);
    // Scale up fragments at both ends.
    match inner_fragments {
        [one] => scale_if_delimiter(ctx, one, relative_to, height, None),
        [first, .., last] => {
            scale_if_delimiter(ctx, first, relative_to, height, Some(MathClass::Opening));
            scale_if_delimiter(ctx, last, relative_to, height, Some(MathClass::Closing));
        }
        [] => {}
    }
    // Handle MathFragment::Glyph fragments that should be scaled up.
    // `mid_stretched == Some(false)` marks glyphs produced by `math.mid` that
    // have not been stretched yet.
    for fragment in inner_fragments.iter_mut() {
        if let MathFragment::Glyph(glyph) = fragment
            && glyph.mid_stretched == Some(false)
        {
            glyph.mid_stretched = Some(true);
            scale(ctx, fragment, relative_to, height);
        }
    }
    // Remove weak SpacingFragment immediately after the opening or immediately
    // before the closing.
    let mut index = 0;
    let opening_exists = inner_fragments
        .first()
        .is_some_and(|f| f.class() == MathClass::Opening);
    let closing_exists = inner_fragments
        .last()
        .is_some_and(|f| f.class() == MathClass::Closing);
    // `retain` visits fragments in order, so `index` tracks each fragment's
    // original position in `fragments` (including the ignorant prefix).
    fragments.retain(|fragment| {
        let discard = (index == start_idx + 1 && opening_exists
            || index + 2 == end_idx && closing_exists)
            && matches!(fragment, MathFragment::Spacing(_, true));
        index += 1;
        !discard
    });
    ctx.extend(fragments);
    Ok(())
}
/// Lays out a [`MidElem`].
///
/// Marks every glyph in the body as a not-yet-stretched middle delimiter
/// (picked up later by `layout_lr`) and reclassifies it as a relation.
#[typst_macros::time(name = "math.mid", span = elem.span())]
pub fn layout_mid(
    elem: &Packed<MidElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let mut fragments = ctx.layout_into_fragments(&elem.body, styles)?;
    fragments.iter_mut().for_each(|fragment| {
        if let MathFragment::Glyph(glyph) = fragment {
            glyph.mid_stretched = Some(false);
            glyph.class = MathClass::Relation;
        }
    });
    ctx.extend(fragments);
    Ok(())
}
/// Scales a math fragment to a height if it has the class Opening, Closing, or
/// Fence.
///
/// In case `apply` is `Some(class)`, `class` will be applied to the fragment if
/// it is a delimiter, in a way that cannot be overridden by the user.
fn scale_if_delimiter(
    ctx: &mut MathContext,
    fragment: &mut MathFragment,
    relative_to: Abs,
    height: Rel<Abs>,
    apply: Option<MathClass>,
) {
    // Non-delimiters are left untouched.
    let class = fragment.class();
    let is_delimiter = class == MathClass::Opening
        || class == MathClass::Closing
        || class == MathClass::Fence;
    if !is_delimiter {
        return;
    }
    scale(ctx, fragment, relative_to, height);
    if let Some(class) = apply {
        fragment.set_class(class);
    }
}
/// Scales a math fragment to a height.
///
/// `height` is resolved relative to `relative_to`; the stretched glyph is
/// allowed to fall short of the target by `DELIM_SHORT_FALL`.
fn scale(
    ctx: &mut MathContext,
    fragment: &mut MathFragment,
    relative_to: Abs,
    height: Rel<Abs>,
) {
    // This unwrap doesn't really matter. If it is None, then the fragment
    // won't be stretchable anyways.
    let short_fall = DELIM_SHORT_FALL.at(fragment.font_size().unwrap_or_default());
    stretch_fragment(ctx, fragment, Some(Axis::Y), Some(relative_to), height, short_fall);
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/mod.rs | crates/typst-layout/src/math/mod.rs | #[macro_use]
mod shared;
mod accent;
mod attach;
mod cancel;
mod frac;
mod fragment;
mod lr;
mod mat;
mod root;
mod run;
mod shaping;
mod stretch;
mod text;
mod underover;
use comemo::Tracked;
use typst_library::World;
use typst_library::diag::{At, SourceResult, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{
Content, NativeElement, Packed, Resolve, Style, StyleChain, SymbolElem,
};
use typst_library::introspection::{Counter, Locator, SplitLocator, TagElem};
use typst_library::layout::{
Abs, AlignElem, Axes, BlockElem, BoxElem, Em, FixedAlignment, Fragment, Frame, HElem,
InlineItem, OuterHAlignment, PlaceElem, Point, Region, Regions, Size, Spacing,
SpecificAlignment, VAlignment,
};
use typst_library::math::*;
use typst_library::model::ParElem;
use typst_library::routines::{Arenas, RealizationKind};
use typst_library::text::{
Font, FontFlags, LinebreakElem, SpaceElem, TextEdgeBounds, TextElem, variant,
};
use typst_syntax::Span;
use typst_utils::{LazyHash, Numeric};
use unicode_math_class::MathClass;
use self::fragment::{
FrameFragment, GlyphFragment, Limits, MathFragment, has_dtls_feat, stretch_axes,
};
use self::run::{LeftRightAlternator, MathRun, MathRunFrameBuilder};
use self::shared::*;
use self::stretch::stretch_fragment;
/// Layout an inline equation (in a paragraph).
#[typst_macros::time(span = elem.span())]
pub fn layout_equation_inline(
    elem: &Packed<EquationElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    region: Size,
) -> SourceResult<Vec<InlineItem>> {
    // This function only handles inline equations; block equations go
    // through `layout_equation_block`.
    assert!(!elem.block.get(styles));
    let span = elem.span();
    let font = get_font(engine.world, styles, span)?;
    warn_non_math_font(&font, engine, span);
    let mut locator = locator.split();
    let mut ctx = MathContext::new(engine, &mut locator, region, font.clone());
    let scale_style = style_for_script_scale(&font);
    let styles = styles.chain(&scale_style);
    let run = ctx.layout_into_run(&elem.body, styles)?;
    // A single-row run can be broken into inline items; multi-row content is
    // wrapped into one opaque frame.
    let mut items = if run.row_count() == 1 {
        run.into_par_items()
    } else {
        vec![InlineItem::Frame(run.into_fragment(styles).into_frame())]
    };
    // An empty equation should have a height, so we still create a frame
    // (which is then resized in the loop).
    if items.is_empty() {
        items.push(InlineItem::Frame(Frame::soft(Size::zero())));
    }
    // Adjust each frame's vertical metrics to the text edges, allowing the
    // equation to eat into the paragraph leading by a fixed slack factor.
    for item in &mut items {
        let InlineItem::Frame(frame) = item else { continue };
        let slack = styles.resolve(ParElem::leading) * 0.7;
        let (t, b) = font.edges(
            styles.get(TextElem::top_edge),
            styles.get(TextElem::bottom_edge),
            styles.resolve(TextElem::size),
            TextEdgeBounds::Frame(frame),
        );
        let ascent = t.max(frame.ascent() - slack);
        let descent = b.max(frame.descent() - slack);
        frame.translate(Point::with_y(ascent - frame.baseline()));
        frame.size_mut().y = ascent + descent;
    }
    Ok(items)
}
/// Layout a block-level equation (in a flow).
///
/// Handles breaking a multi-row equation across regions (when breakable) and
/// attaching an equation number (when a numbering is configured).
#[typst_macros::time(span = elem.span())]
pub fn layout_equation_block(
    elem: &Packed<EquationElem>,
    engine: &mut Engine,
    locator: Locator,
    styles: StyleChain,
    regions: Regions,
) -> SourceResult<Fragment> {
    // This function only handles block equations; inline equations go
    // through `layout_equation_inline`.
    assert!(elem.block.get(styles));
    let span = elem.span();
    let font = get_font(engine.world, styles, span)?;
    warn_non_math_font(&font, engine, span);
    let mut locator = locator.split();
    let mut ctx = MathContext::new(engine, &mut locator, regions.base(), font.clone());
    let scale_style = style_for_script_scale(&font);
    let styles = styles.chain(&scale_style);
    let full_equation_builder = ctx
        .layout_into_run(&elem.body, styles)?
        .multiline_frame_builder(styles);
    let width = full_equation_builder.size.x;
    // Distribute the equation's rows over regions: one builder per region.
    let equation_builders = if styles.get(BlockElem::breakable) {
        let mut rows = full_equation_builder.frames.into_iter().peekable();
        let mut equation_builders = vec![];
        let mut last_first_pos = Point::zero();
        let mut regions = regions;
        loop {
            // Keep track of the position of the first row in this region,
            // so that the offset can be reverted later.
            let Some(&(_, first_pos)) = rows.peek() else { break };
            last_first_pos = first_pos;
            let mut frames = vec![];
            let mut height = Abs::zero();
            // Fill the current region with as many rows as fit.
            while let Some((sub, pos)) = rows.peek() {
                let mut pos = *pos;
                pos.y -= first_pos.y;
                // Finish this region if the line doesn't fit. Only do it if
                // we placed at least one line _or_ we still have non-last
                // regions. Crucially, we don't want to infinitely create
                // new regions which are too small.
                if !regions.size.y.fits(sub.height() + pos.y)
                    && (regions.may_progress()
                        || (regions.may_break() && !frames.is_empty()))
                {
                    break;
                }
                let (sub, _) = rows.next().unwrap();
                height = height.max(pos.y + sub.height());
                frames.push((sub, pos));
            }
            equation_builders
                .push(MathRunFrameBuilder { frames, size: Size::new(width, height) });
            regions.next();
        }
        // Append remaining rows to the equation builder of the last region.
        if let Some(equation_builder) = equation_builders.last_mut() {
            equation_builder.frames.extend(rows.map(|(frame, mut pos)| {
                pos.y -= last_first_pos.y;
                (frame, pos)
            }));
            let height = equation_builder
                .frames
                .iter()
                .map(|(frame, pos)| frame.height() + pos.y)
                .max()
                .unwrap_or(equation_builder.size.y);
            equation_builder.size.y = height;
        }
        // Ensure that there is at least one frame, even for empty equations.
        if equation_builders.is_empty() {
            equation_builders
                .push(MathRunFrameBuilder { frames: vec![], size: Size::zero() });
        }
        equation_builders
    } else {
        vec![full_equation_builder]
    };
    // Without a numbering, just build the frames directly.
    let Some(numbering) = elem.numbering.get_ref(styles) else {
        let frames = equation_builders
            .into_iter()
            .map(MathRunFrameBuilder::build)
            .collect();
        return Ok(Fragment::frames(frames));
    };
    // Lay out the equation number once; it is cloned into each region below.
    let pod = Region::new(regions.base(), Axes::splat(false));
    let counter = Counter::of(EquationElem::ELEM)
        .display_at(engine, elem.location().unwrap(), styles, numbering, span)?
        .spanned(span);
    let number = crate::layout_frame(engine, &counter, locator.next(&()), styles, pod)?;
    static NUMBER_GUTTER: Em = Em::new(0.5);
    let full_number_width = number.width() + NUMBER_GUTTER.resolve(styles);
    // Normalize the configured alignment to a fully-specified one.
    let number_align = match elem.number_align.get(styles) {
        SpecificAlignment::H(h) => SpecificAlignment::Both(h, VAlignment::Horizon),
        SpecificAlignment::V(v) => SpecificAlignment::Both(OuterHAlignment::End, v),
        SpecificAlignment::Both(h, v) => SpecificAlignment::Both(h, v),
    };
    // Add equation numbers to each equation region.
    let region_count = equation_builders.len();
    let frames = equation_builders
        .into_iter()
        .map(|builder| {
            if builder.frames.is_empty() && region_count > 1 {
                // Don't number empty regions, but do number empty equations.
                return builder.build();
            }
            add_equation_number(
                builder,
                number.clone(),
                number_align.resolve(styles),
                styles.get(AlignElem::alignment).resolve(styles).x,
                regions.size.x,
                full_number_width,
            )
        })
        .collect();
    Ok(Fragment::frames(frames))
}
/// Builds the equation frame and places the equation number next to it,
/// resizing the equation so that the number fits within the result.
fn add_equation_number(
    equation_builder: MathRunFrameBuilder,
    number: Frame,
    number_align: Axes<FixedAlignment>,
    equation_align: FixedAlignment,
    region_size_x: Abs,
    full_number_width: Abs,
) -> Frame {
    // Size, position, and baseline of the first and last row, falling back
    // to the whole equation for an empty builder.
    let first =
        equation_builder.frames.first().map_or(
            (equation_builder.size, Point::zero(), Abs::zero()),
            |(frame, pos)| (frame.size(), *pos, frame.baseline()),
        );
    let last =
        equation_builder.frames.last().map_or(
            (equation_builder.size, Point::zero(), Abs::zero()),
            |(frame, pos)| (frame.size(), *pos, frame.baseline()),
        );
    let line_count = equation_builder.frames.len();
    let mut equation = equation_builder.build();
    // In an infinite region, reserve number space symmetrically on both
    // sides so the equation stays centered.
    let width = if region_size_x.is_finite() {
        region_size_x
    } else {
        equation.width() + 2.0 * full_number_width
    };
    let is_multiline = line_count >= 2;
    let resizing_offset = resize_equation(
        &mut equation,
        &number,
        number_align,
        equation_align,
        width,
        is_multiline,
        [first, last],
    );
    // Shift the equation sideways when it would otherwise collide with the
    // number on its own side.
    equation.translate(Point::with_x(match (equation_align, number_align.x) {
        (FixedAlignment::Start, FixedAlignment::Start) => full_number_width,
        (FixedAlignment::End, FixedAlignment::End) => -full_number_width,
        _ => Abs::zero(),
    }));
    let x = match number_align.x {
        FixedAlignment::Start => Abs::zero(),
        FixedAlignment::End => equation.width() - number.width(),
        // `number_align.x` is normalized to Start/End by the caller.
        _ => unreachable!(),
    };
    let y = {
        // Align the number's baseline with a specific row's baseline.
        let align_baselines = |(_, pos, baseline): (_, Point, Abs), number: &Frame| {
            resizing_offset.y + pos.y + baseline - number.baseline()
        };
        match number_align.y {
            FixedAlignment::Start => align_baselines(first, &number),
            FixedAlignment::Center if !is_multiline => align_baselines(first, &number),
            // In this case, the center lines (not baselines) of the number frame
            // and the equation frame shall be aligned.
            FixedAlignment::Center => (equation.height() - number.height()) / 2.0,
            FixedAlignment::End => align_baselines(last, &number),
        }
    };
    equation.push_frame(Point::new(x, y), number);
    equation
}
/// Resize the equation's frame accordingly so that it encompasses the number.
///
/// Returns the offset by which the equation's content moved as a result of
/// the resize (used by the caller to keep baselines aligned).
fn resize_equation(
    equation: &mut Frame,
    number: &Frame,
    number_align: Axes<FixedAlignment>,
    equation_align: FixedAlignment,
    width: Abs,
    is_multiline: bool,
    [first, last]: [(Axes<Abs>, Point, Abs); 2],
) -> Point {
    if matches!(number_align.y, FixedAlignment::Center if is_multiline) {
        // In this case, the center lines (not baselines) of the number frame
        // and the equation frame shall be aligned.
        return equation.resize(
            Size::new(width, equation.height().max(number.height())),
            Axes::<FixedAlignment>::new(equation_align, FixedAlignment::Center),
        );
    }
    // How far the number would protrude above the equation when its baseline
    // is aligned with the relevant row's baseline.
    let excess_above = Abs::zero().max({
        if !is_multiline || matches!(number_align.y, FixedAlignment::Start) {
            let (.., baseline) = first;
            number.baseline() - baseline
        } else {
            Abs::zero()
        }
    });
    // Likewise for protrusion below the equation.
    let excess_below = Abs::zero().max({
        if !is_multiline || matches!(number_align.y, FixedAlignment::End) {
            let (size, .., baseline) = last;
            (number.height() - number.baseline()) - (size.y - baseline)
        } else {
            Abs::zero()
        }
    });
    // The vertical expansion is asymmetric on the top and bottom edges, so we
    // first align at the top then translate the content downward later.
    let resizing_offset = equation.resize(
        Size::new(width, equation.height() + excess_above + excess_below),
        Axes::<FixedAlignment>::new(equation_align, FixedAlignment::Start),
    );
    equation.translate(Point::with_y(excess_above));
    resizing_offset + Point::with_y(excess_above)
}
/// The context for math layout.
struct MathContext<'a, 'v, 'e> {
    // External.
    engine: &'v mut Engine<'e>,
    locator: &'v mut SplitLocator<'a>,
    // The region the equation is laid out into (never expanding).
    region: Region,
    // Mutable.
    // Stack of active math fonts; the last entry is the current base font
    // and the stack is never empty (see `MathContext::new` and `font`).
    fonts_stack: Vec<Font>,
    // Fragments accumulated for the current run (see `layout_into_fragments`
    // for the save/restore protocol).
    fragments: Vec<MathFragment>,
}
impl<'a, 'v, 'e> MathContext<'a, 'v, 'e> {
    /// Create a new math context.
    ///
    /// The given `font` becomes the initial (and always-present) bottom of
    /// the font stack.
    fn new(
        engine: &'v mut Engine<'e>,
        locator: &'v mut SplitLocator<'a>,
        base: Size,
        font: Font,
    ) -> Self {
        Self {
            engine,
            locator,
            region: Region::new(base, Axes::splat(false)),
            fonts_stack: vec![font],
            fragments: vec![],
        }
    }
    /// Get the current base font.
    #[inline]
    fn font(&self) -> &Font {
        // Will always be at least one font in the stack.
        self.fonts_stack.last().unwrap()
    }
    /// Push a fragment.
    fn push(&mut self, fragment: impl Into<MathFragment>) {
        self.fragments.push(fragment.into());
    }
    /// Push multiple fragments.
    fn extend(&mut self, fragments: impl IntoIterator<Item = MathFragment>) {
        self.fragments.extend(fragments);
    }
    /// Layout the given element and return the result as a [`MathRun`].
    fn layout_into_run(
        &mut self,
        elem: &Content,
        styles: StyleChain,
    ) -> SourceResult<MathRun> {
        Ok(MathRun::new(self.layout_into_fragments(elem, styles)?))
    }
    /// Layout the given element and return the resulting [`MathFragment`]s.
    fn layout_into_fragments(
        &mut self,
        elem: &Content,
        styles: StyleChain,
    ) -> SourceResult<Vec<MathFragment>> {
        // The element's layout_math() changes the fragments held in this
        // MathContext object, but for convenience this function shouldn't change
        // them, so we restore the MathContext's fragments after obtaining the
        // layout result.
        let prev = std::mem::take(&mut self.fragments);
        self.layout_into_self(elem, styles)?;
        Ok(std::mem::replace(&mut self.fragments, prev))
    }
    /// Layout the given element and return the result as a
    /// unified [`MathFragment`].
    fn layout_into_fragment(
        &mut self,
        elem: &Content,
        styles: StyleChain,
    ) -> SourceResult<MathFragment> {
        Ok(self.layout_into_run(elem, styles)?.into_fragment(styles))
    }
    /// Layout the given element and return the result as a [`Frame`].
    fn layout_into_frame(
        &mut self,
        elem: &Content,
        styles: StyleChain,
    ) -> SourceResult<Frame> {
        Ok(self.layout_into_fragment(elem, styles)?.into_frame())
    }
    /// Layout arbitrary content, realizing it first and pushing the
    /// resulting fragments into `self.fragments`.
    fn layout_into_self(
        &mut self,
        content: &Content,
        styles: StyleChain,
    ) -> SourceResult<()> {
        let arenas = Arenas::default();
        let pairs = (self.engine.routines.realize)(
            RealizationKind::Math,
            self.engine,
            self.locator,
            &arenas,
            content,
            styles,
        )?;
        let outer_styles = styles;
        let outer_font = styles.get_ref(TextElem::font);
        for (elem, styles) in pairs {
            // Whilst this check isn't exact, it more or less suffices as a
            // change in font variant probably won't have an effect on metrics.
            if styles != outer_styles && styles.get_ref(TextElem::font) != outer_font {
                // A font change: push the new font for the duration of this
                // element's layout and re-derive the script scale from it.
                self.fonts_stack
                    .push(get_font(self.engine.world, styles, elem.span())?);
                let scale_style = style_for_script_scale(self.font());
                layout_realized(elem, self, styles.chain(&scale_style))?;
                self.fonts_stack.pop();
            } else {
                layout_realized(elem, self, styles)?;
            }
        }
        Ok(())
    }
}
/// Lays out a leaf element resulting from realization.
///
/// Dispatches on the element's concrete type to the matching math layouter;
/// anything unrecognized falls through to normal (non-math) layout at the
/// bottom.
fn layout_realized(
    elem: &Content,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    if let Some(elem) = elem.to_packed::<TagElem>() {
        ctx.push(MathFragment::Tag(elem.tag.clone()));
    } else if elem.is::<SpaceElem>() {
        ctx.push(MathFragment::Space(ctx.font().math().space_width.resolve(styles)));
    } else if elem.is::<LinebreakElem>() {
        ctx.push(MathFragment::Linebreak);
    } else if let Some(elem) = elem.to_packed::<HElem>() {
        layout_h(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<TextElem>() {
        self::text::layout_text(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<SymbolElem>() {
        self::text::layout_symbol(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<BoxElem>() {
        layout_box(elem, ctx, styles)?;
    } else if elem.is::<AlignPointElem>() {
        ctx.push(MathFragment::Align);
    } else if let Some(elem) = elem.to_packed::<ClassElem>() {
        layout_class(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<AccentElem>() {
        self::accent::layout_accent(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<AttachElem>() {
        self::attach::layout_attach(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<PrimesElem>() {
        self::attach::layout_primes(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<ScriptsElem>() {
        self::attach::layout_scripts(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<LimitsElem>() {
        self::attach::layout_limits(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<CancelElem>() {
        self::cancel::layout_cancel(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<FracElem>() {
        self::frac::layout_frac(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<BinomElem>() {
        self::frac::layout_binom(elem, ctx, styles)?;
    } else if let Some(elem) = elem.to_packed::<LrElem>() {
        self::lr::layout_lr(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<MidElem>() {
        self::lr::layout_mid(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<VecElem>() {
        self::mat::layout_vec(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<MatElem>() {
        self::mat::layout_mat(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<CasesElem>() {
        self::mat::layout_cases(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<OpElem>() {
        layout_op(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<RootElem>() {
        self::root::layout_root(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<StretchElem>() {
        self::stretch::layout_stretch(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<UnderlineElem>() {
        self::underover::layout_underline(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<OverlineElem>() {
        self::underover::layout_overline(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<UnderbraceElem>() {
        self::underover::layout_underbrace(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<OverbraceElem>() {
        self::underover::layout_overbrace(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<UnderbracketElem>() {
        self::underover::layout_underbracket(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<OverbracketElem>() {
        self::underover::layout_overbracket(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<UnderparenElem>() {
        self::underover::layout_underparen(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<OverparenElem>() {
        self::underover::layout_overparen(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<UndershellElem>() {
        self::underover::layout_undershell(elem, ctx, styles)?
    } else if let Some(elem) = elem.to_packed::<OvershellElem>() {
        self::underover::layout_overshell(elem, ctx, styles)?
    } else {
        // Fallback: lay the element out with the normal layouter and center
        // the resulting frame on the math axis if it has no baseline of its
        // own.
        let mut frame = layout_external(elem, ctx, styles)?;
        if !frame.has_baseline() {
            let axis = ctx.font().math().axis_height.resolve(styles);
            frame.set_baseline(frame.height() / 2.0 + axis);
        }
        ctx.push(
            FrameFragment::new(styles, frame)
                .with_spaced(true)
                .with_ignorant(elem.is::<PlaceElem>()),
        );
    }
    Ok(())
}
/// Lays out a [`BoxElem`] via the inline layouter and pushes it as a spaced
/// frame fragment.
fn layout_box(
    elem: &Packed<BoxElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let locator = ctx.locator.next(&elem.span());
    let frame =
        crate::inline::layout_box(elem, ctx.engine, locator, styles, ctx.region.size)?;
    ctx.push(FrameFragment::new(styles, frame).with_spaced(true));
    Ok(())
}
/// Lays out an [`HElem`].
///
/// Only purely absolute relative spacing is honored in math; fractional
/// spacing is ignored.
fn layout_h(
    elem: &Packed<HElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    match elem.amount {
        Spacing::Rel(rel) if rel.rel.is_zero() => {
            let width = rel.abs.resolve(styles);
            let weak = elem.weak.get(styles);
            ctx.push(MathFragment::Spacing(width, weak));
        }
        _ => {}
    }
    Ok(())
}
/// Lays out a [`ClassElem`].
///
/// Lays out the body under the forced class style, then pins the resulting
/// fragment's class and limit behavior to that class.
#[typst_macros::time(name = "math.class", span = elem.span())]
fn layout_class(
    elem: &Packed<ClassElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let class = elem.class;
    let style = EquationElem::class.set(Some(class)).wrap();
    let inner = styles.chain(&style);
    let mut fragment = ctx.layout_into_fragment(&elem.body, inner)?;
    fragment.set_class(class);
    fragment.set_limits(Limits::for_class(class));
    ctx.push(fragment);
    Ok(())
}
/// Lays out an [`OpElem`].
///
/// The operator text is classified as `Large`, with limits shown in display
/// style when `limits` is enabled and never otherwise.
#[typst_macros::time(name = "math.op", span = elem.span())]
fn layout_op(
    elem: &Packed<OpElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let limits = match elem.limits.get(styles) {
        true => Limits::Display,
        false => Limits::Never,
    };
    let mut fragment = ctx.layout_into_fragment(&elem.text, styles)?;
    fragment.set_class(MathClass::Large);
    fragment.set_limits(limits);
    ctx.push(fragment);
    Ok(())
}
/// Layout into a frame with normal (non-math) layout.
fn layout_external(
    content: &Content,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<Frame> {
    let locator = ctx.locator.next(&content.span());
    crate::layout_frame(ctx.engine, content, locator, styles, ctx.region)
}
/// Styles to add font constants to the style chain.
///
/// Records the font's script and script-script percent scale-down values so
/// nested scripts are sized per the font's MATH constants.
fn style_for_script_scale(font: &Font) -> LazyHash<Style> {
    let math = font.math();
    let scales =
        (math.script_percent_scale_down, math.script_script_percent_scale_down);
    EquationElem::script_scale.set(scales).wrap()
}
/// Get the current base font.
///
/// Walks the configured font families in order and returns the first one the
/// world can load. Fails with an error at `span` if none is available.
fn get_font(
    world: Tracked<dyn World + '_>,
    styles: StyleChain,
    span: Span,
) -> SourceResult<Font> {
    let variant = variant(styles);
    families(styles)
        .find_map(|family| {
            world
                .book()
                .select(family.as_str(), variant)
                .and_then(|id| world.font(id))
                // Families with a coverage restriction cannot serve as the
                // base math font, so skip them.
                .filter(|_| family.covers().is_none())
        })
        .ok_or("no font could be found")
        .at(span)
}
/// Check if the top-level base font has a MATH table.
///
/// Emits a warning rather than an error, since rendering without math
/// metrics still works, just with degraded quality.
fn warn_non_math_font(font: &Font, engine: &mut Engine, span: Span) {
    let is_math_font = font.info().flags.contains(FontFlags::MATH);
    if is_math_font {
        return;
    }
    engine.sink.warn(warning!(
        span,
        "current font is not designed for math";
        hint: "rendering may be poor";
    ))
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/run.rs | crates/typst-layout/src/math/run.rs | use std::iter::once;
use typst_library::foundations::{Resolve, StyleChain};
use typst_library::layout::{Abs, AlignElem, Em, Frame, InlineItem, Point, Size};
use typst_library::math::{EquationElem, MEDIUM, MathSize, THICK, THIN};
use typst_library::model::ParElem;
use unicode_math_class::MathClass;
use super::{FrameFragment, MathFragment, alignments};
/// The leading between rows of a multiline run at sub-text math sizes.
const TIGHT_LEADING: Em = Em::new(0.25);
/// A linear collection of [`MathFragment`]s.
///
/// Built via [`MathRun::new`], which resolves spaces, collapses weak
/// spacing, and inserts automatic inter-class spacing.
#[derive(Debug, Default, Clone)]
pub struct MathRun(Vec<MathFragment>);
impl MathRun {
    /// Takes the given [`MathFragment`]s and does some basic processing:
    /// resolves spaces, collapses weak spacing, inserts automatic
    /// inter-class spacing, and reclassifies vary-class operators.
    pub fn new(fragments: Vec<MathFragment>) -> Self {
        let iter = fragments.into_iter().peekable();
        // Index into `resolved` of the last non-ignorant fragment.
        let mut last: Option<usize> = None;
        // A pending space, kept only if spacing rules decide to use it.
        let mut space: Option<MathFragment> = None;
        let mut resolved: Vec<MathFragment> = vec![];
        for mut fragment in iter {
            match fragment {
                // Keep space only if supported by spaced fragments.
                MathFragment::Space(_) => {
                    if last.is_some() {
                        space = Some(fragment);
                    }
                    continue;
                }
                // Explicit spacing disables automatic spacing.
                MathFragment::Spacing(width, weak) => {
                    last = None;
                    space = None;
                    if weak {
                        match resolved.last_mut() {
                            // Weak spacing at the start of a run vanishes.
                            None => continue,
                            // Adjacent weak spacings collapse to the max.
                            Some(MathFragment::Spacing(prev, true)) => {
                                *prev = (*prev).max(width);
                                continue;
                            }
                            Some(_) => {}
                        }
                    }
                    resolved.push(fragment);
                    continue;
                }
                // Alignment points are resolved later.
                MathFragment::Align => {
                    resolved.push(fragment);
                    continue;
                }
                // New line, new things.
                MathFragment::Linebreak => {
                    resolved.push(fragment);
                    space = None;
                    last = None;
                    continue;
                }
                _ => {}
            }
            // Convert variable operators into binary operators if something
            // precedes them and they are not preceded by a operator or comparator.
            if fragment.class() == MathClass::Vary
                && matches!(
                    last.map(|i| resolved[i].class()),
                    Some(
                        MathClass::Normal
                            | MathClass::Alphabetic
                            | MathClass::Closing
                            | MathClass::Fence
                    )
                )
            {
                fragment.set_class(MathClass::Binary);
            }
            // Insert spacing between the last and this non-ignorant item.
            if !fragment.is_ignorant() {
                if let Some(i) = last
                    && let Some(s) = spacing(&resolved[i], space.take(), &fragment)
                {
                    resolved.insert(i + 1, s);
                }
                last = Some(resolved.len());
            }
            resolved.push(fragment);
        }
        // A trailing weak spacing vanishes.
        if let Some(MathFragment::Spacing(_, true)) = resolved.last() {
            resolved.pop();
        }
        Self(resolved)
    }

    /// Iterates over the fragments in this run.
    pub fn iter(&self) -> std::slice::Iter<'_, MathFragment> {
        self.0.iter()
    }

    /// Split by linebreaks, and copy [`MathFragment`]s into rows.
    pub fn rows(&self) -> Vec<Self> {
        self.0
            .split(|frag| matches!(frag, MathFragment::Linebreak))
            .map(|slice| Self(slice.to_vec()))
            .collect()
    }

    /// The number of rows: one more than the number of linebreaks, except
    /// that a trailing linebreak does not count.
    pub fn row_count(&self) -> usize {
        let mut count =
            1 + self.0.iter().filter(|f| matches!(f, MathFragment::Linebreak)).count();
        // A linebreak at the very end does not introduce an extra row.
        if let Some(f) = self.0.last()
            && matches!(f, MathFragment::Linebreak)
        {
            count -= 1
        }
        count
    }

    /// The maximum ascent among fragments that affect row height.
    pub fn ascent(&self) -> Abs {
        self.iter()
            .filter(|e| affects_row_height(e))
            .map(|e| e.ascent())
            .max()
            .unwrap_or_default()
    }

    /// The maximum descent among fragments that affect row height.
    pub fn descent(&self) -> Abs {
        self.iter()
            .filter(|e| affects_row_height(e))
            .map(|e| e.descent())
            .max()
            .unwrap_or_default()
    }

    /// Lay this run out into a frame: a single line if there are no
    /// linebreaks, otherwise a multi-row frame.
    pub fn into_frame(self, styles: StyleChain) -> Frame {
        if !self.is_multiline() {
            self.into_line_frame(&[], LeftRightAlternator::Right)
        } else {
            self.multiline_frame_builder(styles).build()
        }
    }

    /// Convert this run into a single fragment. A run of exactly one
    /// fragment is returned as-is; otherwise, the run is laid out into a
    /// [`FrameFragment`].
    pub fn into_fragment(self, styles: StyleChain) -> MathFragment {
        if self.0.len() == 1 {
            return self.0.into_iter().next().unwrap();
        }
        // Fragments without a math_size are ignored: the notion of size does
        // not apply to them, so their text-likeness is meaningless.
        let text_like = self
            .iter()
            .filter(|e| e.math_size().is_some())
            .all(|e| e.is_text_like());
        FrameFragment::new(styles, self.into_frame(styles))
            .with_text_like(text_like)
            .into()
    }

    /// Returns a builder that lays out the [`MathFragment`]s into a possibly
    /// multi-row [`Frame`]. The rows are aligned using the same set of alignment
    /// points computed from them as a whole.
    pub fn multiline_frame_builder(self, styles: StyleChain) -> MathRunFrameBuilder {
        let rows: Vec<_> = self.rows();
        let row_count = rows.len();
        let alignments = alignments(&rows);
        // Use tighter leading below text size.
        let leading = if styles.get(EquationElem::size) >= MathSize::Text {
            styles.resolve(ParElem::leading)
        } else {
            TIGHT_LEADING.resolve(styles)
        };
        let align = styles.resolve(AlignElem::alignment).x;
        let mut frames: Vec<(Frame, Point)> = vec![];
        let mut size = Size::zero();
        for (i, row) in rows.into_iter().enumerate() {
            // Skip an empty trailing row (e.g. from a trailing linebreak).
            if i == row_count - 1 && row.0.is_empty() {
                continue;
            }
            let sub = row.into_line_frame(&alignments.points, LeftRightAlternator::Right);
            if i > 0 {
                size.y += leading;
            }
            let mut pos = Point::with_y(size.y);
            // Without alignment points, position rows by the outer alignment.
            if alignments.points.is_empty() {
                pos.x = align.position(alignments.width - sub.width());
            }
            size.x.set_max(sub.width());
            size.y += sub.height();
            frames.push((sub, pos));
        }
        MathRunFrameBuilder { size, frames }
    }

    /// Lay out [`MathFragment`]s into a one-row [`Frame`], using the
    /// caller-provided alignment points.
    pub fn into_line_frame(
        self,
        points: &[Abs],
        mut alternator: LeftRightAlternator,
    ) -> Frame {
        let ascent = self.ascent();
        let mut frame = Frame::soft(Size::new(Abs::zero(), ascent + self.descent()));
        frame.set_baseline(ascent);
        // Closure yielding the x position at which the next alignment
        // segment starts, consuming one alignment point per call.
        let mut next_x = {
            // The width of each segment between alignment points; only
            // computed when alignment points exist.
            let widths: Vec<Abs> = if points.is_empty() {
                vec![]
            } else {
                self.iter()
                    .as_slice()
                    .split(|e| matches!(e, MathFragment::Align))
                    .map(|chunk| chunk.iter().map(|e| e.width()).sum())
                    .collect()
            };
            let mut prev_points = once(Abs::zero()).chain(points.iter().copied());
            let mut point_widths = points.iter().copied().zip(widths);
            move || {
                point_widths
                    .next()
                    .zip(prev_points.next())
                    .zip(alternator.next())
                    .map(|(((point, width), prev_point), alternator)| match alternator {
                        // Right alignment: the segment ends at the point.
                        LeftRightAlternator::Right => point - width,
                        // Otherwise, it starts at the previous point.
                        _ => prev_point,
                    })
            }
        };
        let mut x = next_x().unwrap_or_default();
        for fragment in self.0.into_iter() {
            if matches!(fragment, MathFragment::Align) {
                x = next_x().unwrap_or(x);
                continue;
            }
            let y = ascent - fragment.ascent();
            let pos = Point::new(x, y);
            x += fragment.width();
            frame.push_frame(pos, fragment.into_frame());
        }
        frame.size_mut().x = x;
        frame
    }

    /// Convert this run of math fragments into a vector of inline items for
    /// paragraph layout. Creates multiple fragments when relation or binary
    /// operators are present to allow for line-breaking opportunities later.
    pub fn into_par_items(self) -> Vec<InlineItem> {
        let mut items = vec![];
        // Running metrics of the frame currently being assembled.
        let mut x = Abs::zero();
        let mut ascent = Abs::zero();
        let mut descent = Abs::zero();
        let mut frame = Frame::soft(Size::zero());
        let mut empty = true;
        let finalize_frame = |frame: &mut Frame, x, ascent, descent| {
            frame.set_size(Size::new(x, ascent + descent));
            frame.set_baseline(Abs::zero());
            frame.translate(Point::with_y(ascent));
        };
        let mut space_is_visible = false;
        let is_space = |f: &MathFragment| {
            matches!(f, MathFragment::Space(_) | MathFragment::Spacing(_, _))
        };
        let is_line_break_opportunity = |class, next_fragment| match class {
            // Don't split when two relations are in a row or when preceding a
            // closing parenthesis.
            MathClass::Binary => next_fragment != Some(MathClass::Closing),
            MathClass::Relation => {
                !matches!(next_fragment, Some(MathClass::Relation | MathClass::Closing))
            }
            _ => false,
        };
        let mut iter = self.0.into_iter().peekable();
        while let Some(fragment) = iter.next() {
            // After a split, spaces become real inline spacing items.
            if space_is_visible && is_space(&fragment) {
                items.push(InlineItem::Space(fragment.width(), true));
                continue;
            }
            let class = fragment.class();
            let y = fragment.ascent();
            ascent.set_max(y);
            descent.set_max(fragment.descent());
            let pos = Point::new(x, -y);
            x += fragment.width();
            frame.push_frame(pos, fragment.into_frame());
            empty = false;
            // Split our current frame when we encounter a binary operator or
            // relation so that there is a line-breaking opportunity.
            if is_line_break_opportunity(class, iter.peek().map(|f| f.class())) {
                let mut frame_prev =
                    std::mem::replace(&mut frame, Frame::soft(Size::zero()));
                finalize_frame(&mut frame_prev, x, ascent, descent);
                items.push(InlineItem::Frame(frame_prev));
                empty = true;
                x = Abs::zero();
                ascent = Abs::zero();
                descent = Abs::zero();
                space_is_visible = true;
                // Insert a zero-width breakpoint if no space follows.
                if let Some(f_next) = iter.peek()
                    && !is_space(f_next)
                {
                    items.push(InlineItem::Space(Abs::zero(), true));
                }
            } else {
                space_is_visible = false;
            }
        }
        // Don't use `frame.is_empty()` because even an empty frame can
        // contribute width (if it had hidden content).
        if !empty {
            finalize_frame(&mut frame, x, ascent, descent);
            items.push(InlineItem::Frame(frame));
        }
        items
    }

    /// Whether this run contains a linebreak.
    pub fn is_multiline(&self) -> bool {
        self.iter().any(|frag| matches!(frag, MathFragment::Linebreak))
    }
}
impl<T: Into<MathFragment>> From<T> for MathRun {
fn from(fragment: T) -> Self {
Self(vec![fragment.into()])
}
}
/// An iterator that alternates between the `Left` and `Right` values, if the
/// initial value is not `None`.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum LeftRightAlternator {
    /// Never yields an alternating value; stays `None`.
    None,
    Left,
    Right,
}
impl Iterator for LeftRightAlternator {
    type Item = LeftRightAlternator;

    /// Yields the current value, then flips `Left` <-> `Right`.
    /// `None` stays `None` forever (but is still yielded).
    fn next(&mut self) -> Option<Self::Item> {
        let current = *self;
        *self = match current {
            Self::None => Self::None,
            Self::Left => Self::Right,
            Self::Right => Self::Left,
        };
        Some(current)
    }
}
/// How the rows from the [`MathRun`] should be aligned and merged into a [`Frame`].
pub struct MathRunFrameBuilder {
    /// The size of the resulting frame.
    pub size: Size,
    /// Each row's frame, and the position where the frame should
    /// be pushed into the resulting frame.
    pub frames: Vec<(Frame, Point)>,
}
impl MathRunFrameBuilder {
/// Consumes the builder and returns a [`Frame`].
pub fn build(self) -> Frame {
let mut frame = Frame::soft(self.size);
for (sub, pos) in self.frames.into_iter() {
frame.push_frame(pos, sub);
}
frame
}
}
/// Whether a fragment contributes to the ascent/descent of its row.
/// Alignment markers, linebreaks, and tags are purely structural.
fn affects_row_height(fragment: &MathFragment) -> bool {
    match fragment {
        MathFragment::Align | MathFragment::Linebreak | MathFragment::Tag(_) => false,
        _ => true,
    }
}
/// Create the spacing between two fragments in a given style.
///
/// Returns `None` when no automatic spacing applies; the match-arm
/// order encodes rule precedence.
fn spacing(
    l: &MathFragment,
    space: Option<MathFragment>,
    r: &MathFragment,
) -> Option<MathFragment> {
    use MathClass::*;
    // Resolve an em amount at the font size of the reference fragment,
    // falling back to zero width when it has no font size.
    let resolve = |v: Em, size_ref: &MathFragment| -> Option<MathFragment> {
        let width = size_ref.font_size().map_or(Abs::zero(), |size| v.at(size));
        Some(MathFragment::Spacing(width, false))
    };
    // Whether a fragment is in script or script-script size.
    let script = |f: &MathFragment| f.math_size().is_some_and(|s| s <= MathSize::Script);
    match (l.class(), r.class()) {
        // No spacing before punctuation; thin spacing after punctuation, unless
        // in script size.
        (_, Punctuation) => None,
        (Punctuation, _) if !script(l) => resolve(THIN, l),
        // No spacing after opening delimiters and before closing delimiters.
        (Opening, _) | (_, Closing) => None,
        // Thick spacing around relations, unless followed by another relation
        // or in script size.
        (Relation, Relation) => None,
        (Relation, _) if !script(l) => resolve(THICK, l),
        (_, Relation) if !script(r) => resolve(THICK, r),
        // Medium spacing around binary operators, unless in script size.
        (Binary, _) if !script(l) => resolve(MEDIUM, l),
        (_, Binary) if !script(r) => resolve(MEDIUM, r),
        // Thin spacing around large operators, unless to the left of
        // an opening delimiter. TeXBook, p170
        (Large, Opening | Fence) => None,
        (Large, _) => resolve(THIN, l),
        (_, Large) => resolve(THIN, r),
        // Spacing around spaced frames.
        _ if (l.is_spaced() || r.is_spaced()) => space,
        _ => None,
    }
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/shared.rs | crates/typst-layout/src/math/shared.rs | use ttf_parser::Tag;
use typst_library::foundations::{Style, StyleChain};
use typst_library::layout::{Abs, Em};
use typst_library::math::{EquationElem, MathSize};
use typst_library::text::{FontFamily, FontFeatures, TextElem};
use typst_utils::{LazyHash, singleton};
use super::{MathFragment, MathRun};
/// How much less high scaled delimiters can be than what they wrap.
pub const DELIM_SHORT_FALL: Em = Em::new(0.1);
/// Styles something as cramped.
pub fn style_cramped() -> LazyHash<Style> {
    let cramped = EquationElem::cramped.set(true);
    cramped.wrap()
}
/// Sets the `flac` (flattened accents) OpenType feature.
pub fn style_flac() -> LazyHash<Style> {
    let features = FontFeatures(vec![(Tag::from_bytes(b"flac"), 1)]);
    TextElem::features.set(features).wrap()
}
/// Sets the `dtls` (dotless forms) OpenType feature.
pub fn style_dtls() -> LazyHash<Style> {
    let features = FontFeatures(vec![(Tag::from_bytes(b"dtls"), 1)]);
    TextElem::features.set(features).wrap()
}
/// The style for subscripts in the current style: one size smaller
/// (same rule as superscripts) and cramped.
pub fn style_for_subscript(styles: StyleChain) -> [LazyHash<Style>; 2] {
    let size = style_for_superscript(styles);
    let cramped = EquationElem::cramped.set(true).wrap();
    [size, cramped]
}
/// The style for superscripts in the current style.
///
/// Display and text sizes step down to script; script and script-script
/// both become script-script.
pub fn style_for_superscript(styles: StyleChain) -> LazyHash<Style> {
    let current = styles.get(EquationElem::size);
    let smaller = match current {
        MathSize::Display | MathSize::Text => MathSize::Script,
        MathSize::Script | MathSize::ScriptScript => MathSize::ScriptScript,
    };
    EquationElem::size.set(smaller).wrap()
}
/// The style for numerators in the current style.
///
/// Each size steps down one level: display -> text -> script ->
/// script-script (which is terminal).
pub fn style_for_numerator(styles: StyleChain) -> LazyHash<Style> {
    let current = styles.get(EquationElem::size);
    let smaller = match current {
        MathSize::Display => MathSize::Text,
        MathSize::Text => MathSize::Script,
        MathSize::Script | MathSize::ScriptScript => MathSize::ScriptScript,
    };
    EquationElem::size.set(smaller).wrap()
}
/// The style for denominators in the current style: one size smaller
/// (same rule as numerators) and cramped.
pub fn style_for_denominator(styles: StyleChain) -> [LazyHash<Style>; 2] {
    let size = style_for_numerator(styles);
    let cramped = EquationElem::cramped.set(true).wrap();
    [size, cramped]
}
/// Resolve a prioritized iterator over the font families for math.
///
/// Yields the user-configured families first, followed by the built-in
/// math and emoji fallbacks — but only when font fallback is enabled.
pub fn families(styles: StyleChain<'_>) -> impl Iterator<Item = &'_ FontFamily> + Clone {
    // The fallback list is constructed once and cached for the process.
    let fallbacks = singleton!(Vec<FontFamily>, {
        [
            "new computer modern math",
            "libertinus serif",
            "twitter color emoji",
            "noto color emoji",
            "apple color emoji",
            "segoe ui emoji",
        ]
        .into_iter()
        .map(FontFamily::new)
        .collect()
    });
    let tail = if styles.get(TextElem::fallback) { fallbacks.as_slice() } else { &[] };
    styles.get_ref(TextElem::font).into_iter().chain(tail.iter())
}
/// Determine the positions of the alignment points, according to the input rows combined.
pub fn alignments(rows: &[MathRun]) -> AlignmentResult {
    // Maximum width of each segment between alignment points, over all rows.
    let mut widths = Vec::<Abs>::new();
    // Widest row seen before any alignment point existed at all.
    let mut pending_width = Abs::zero();
    for row in rows {
        let mut width = Abs::zero();
        let mut alignment_index = 0;
        for fragment in row.iter() {
            if matches!(fragment, MathFragment::Align) {
                // Close the current segment at this alignment point.
                if alignment_index < widths.len() {
                    widths[alignment_index].set_max(width);
                } else {
                    widths.push(width.max(pending_width));
                }
                width = Abs::zero();
                alignment_index += 1;
            } else {
                width += fragment.width();
            }
        }
        // Account for the trailing segment after the last alignment point.
        if widths.is_empty() {
            pending_width.set_max(width);
        } else if alignment_index < widths.len() {
            widths[alignment_index].set_max(width);
        } else {
            widths.push(width.max(pending_width));
        }
    }
    // Convert segment widths into absolute alignment point positions by
    // taking the prefix sum.
    let mut points = widths;
    for i in 1..points.len() {
        let prev = points[i - 1];
        points[i] += prev;
    }
    AlignmentResult {
        width: points.last().copied().unwrap_or(pending_width),
        points,
    }
}
/// The result of resolving alignment points over a set of rows.
pub struct AlignmentResult {
    /// The cumulative x position of each alignment point.
    pub points: Vec<Abs>,
    /// The total width spanned by the rows.
    pub width: Abs,
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/attach.rs | crates/typst-layout/src/math/attach.rs | // Can be re-enabled once `Option::map_or_default` is stable in our MSRV.
#![allow(unstable_name_collisions)]
// Is unused in compiler versions where `Option::map_or_default` is stable.
#[allow(unused_imports)]
use typst_utils::OptionExt;
use typst_library::diag::SourceResult;
use typst_library::foundations::{Packed, StyleChain, SymbolElem};
use typst_library::layout::{Abs, Axis, Corner, Frame, Point, Rel, Size};
use typst_library::math::{
AttachElem, EquationElem, LimitsElem, PrimesElem, ScriptsElem, StretchElem,
};
use typst_library::text::Font;
use super::{
FrameFragment, Limits, MathContext, MathFragment, stretch_fragment,
style_for_subscript, style_for_superscript,
};
/// Extracts the given metric (e.g. `width`, `ascent`) from an optional
/// fragment, defaulting to zero (via `Default`) when it is absent.
macro_rules! measure {
    ($e: ident, $attr: ident) => {
        $e.as_ref().map(|e| e.$attr()).unwrap_or_default()
    };
}
/// Lays out an [`AttachElem`].
#[typst_macros::time(name = "math.attach", span = elem.span())]
pub fn layout_attach(
    elem: &Packed<AttachElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    // Use the merged element if the base's own attachments can be merged
    // into this one (see `AttachElem::merge_base`).
    let merged = elem.merge_base();
    let elem = merged.as_ref().unwrap_or(elem);
    let stretch = stretch_size(styles, elem);
    let mut base = ctx.layout_into_fragment(&elem.base, styles)?;
    // Resolve attachment content under script-sized style chains.
    let sup_style = style_for_superscript(styles);
    let sup_style_chain = styles.chain(&sup_style);
    let tl = elem.tl.get_cloned(sup_style_chain);
    let tr = elem.tr.get_cloned(sup_style_chain);
    let primed = tr.as_ref().is_some_and(|content| content.is::<PrimesElem>());
    let t = elem.t.get_cloned(sup_style_chain);
    let sub_style = style_for_subscript(styles);
    let sub_style_chain = styles.chain(&sub_style);
    let bl = elem.bl.get_cloned(sub_style_chain);
    let br = elem.br.get_cloned(sub_style_chain);
    let b = elem.b.get_cloned(sub_style_chain);
    let limits = base.limits().active(styles);
    // When limits are inactive, move top/bottom attachments into the
    // post-script positions (combining with primes when present).
    let (t, tr) = match (t, tr) {
        (Some(t), Some(tr)) if primed && !limits => (None, Some(tr + t)),
        (Some(t), None) if !limits => (None, Some(t)),
        (t, tr) => (t, tr),
    };
    let (b, br) = if limits || br.is_some() { (b, br) } else { (None, b) };
    // Lays out optional content under the given style chain.
    macro_rules! layout {
        ($content:ident, $style_chain:ident) => {
            $content
                .map(|elem| ctx.layout_into_fragment(&elem, $style_chain))
                .transpose()
        };
    }
    // Layout the top and bottom attachments early so we can measure their
    // widths, in order to calculate what the stretch size is relative to.
    let t = layout!(t, sup_style_chain)?;
    let b = layout!(b, sub_style_chain)?;
    if let Some(stretch) = stretch {
        let relative_to_width = measure!(t, width).max(measure!(b, width));
        stretch_fragment(
            ctx,
            &mut base,
            Some(Axis::X),
            Some(relative_to_width),
            stretch,
            Abs::zero(),
        );
    }
    // Order: [tl, t, tr, bl, b, br], as expected by `layout_attachments`.
    let fragments = [
        layout!(tl, sup_style_chain)?,
        t,
        layout!(tr, sup_style_chain)?,
        layout!(bl, sub_style_chain)?,
        b,
        layout!(br, sub_style_chain)?,
    ];
    layout_attachments(ctx, styles, base, fragments)
}
/// Lays out a [`PrimesElem`].
#[typst_macros::time(name = "math.primes", span = elem.span())]
pub fn layout_primes(
    elem: &Packed<PrimesElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    match elem.count {
        // One to four primes have dedicated Unicode characters.
        count @ 1..=4 => {
            let c = match count {
                1 => '′',
                2 => '″',
                3 => '‴',
                4 => '⁗',
                _ => unreachable!(),
            };
            let f = ctx.layout_into_fragment(
                &SymbolElem::packed(c).spanned(elem.span()),
                styles,
            )?;
            ctx.push(f);
        }
        count => {
            // Custom amount of primes: lay out a single prime and place
            // `count` copies, each offset by half a prime's width.
            let prime = ctx
                .layout_into_fragment(
                    &SymbolElem::packed('′').spanned(elem.span()),
                    styles,
                )?
                .into_frame();
            let width = prime.width() * (count + 1) as f64 / 2.0;
            let mut frame = Frame::soft(Size::new(width, prime.height()));
            frame.set_baseline(prime.ascent());
            for i in 0..count {
                frame.push_frame(
                    Point::new(prime.width() * (i as f64 / 2.0), Abs::zero()),
                    prime.clone(),
                )
            }
            ctx.push(FrameFragment::new(styles, frame).with_text_like(true));
        }
    }
    Ok(())
}
/// Lays out a [`ScriptsElem`].
///
/// Forces the body's attachments to be rendered as scripts, never as
/// limits above/below.
#[typst_macros::time(name = "math.scripts", span = elem.span())]
pub fn layout_scripts(
    elem: &Packed<ScriptsElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let mut body = ctx.layout_into_fragment(&elem.body, styles)?;
    body.set_limits(Limits::Never);
    ctx.push(body);
    Ok(())
}
/// Lays out a [`LimitsElem`].
///
/// Forces attachments on the body to be rendered as limits — always, or
/// only in display style, depending on the `inline` setting.
#[typst_macros::time(name = "math.limits", span = elem.span())]
pub fn layout_limits(
    elem: &Packed<LimitsElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let mut body = ctx.layout_into_fragment(&elem.body, styles)?;
    let limits = match elem.inline.get(styles) {
        true => Limits::Always,
        false => Limits::Display,
    };
    body.set_limits(limits);
    ctx.push(body);
    Ok(())
}
/// Get the size to stretch the base to, if the base is a stretch
/// element (possibly wrapped in equations).
fn stretch_size(styles: StyleChain, elem: &Packed<AttachElem>) -> Option<Rel<Abs>> {
    // Peel off wrapping equation elements to reach the innermost base.
    let mut inner = &elem.base;
    while let Some(equation) = inner.to_packed::<EquationElem>() {
        inner = &equation.body;
    }
    let stretch = inner.to_packed::<StretchElem>()?;
    Some(stretch.size.resolve(styles))
}
/// Lay out the attachments.
///
/// Takes the already-laid-out base and the six optional attachment
/// fragments in the order `[tl, t, tr, bl, b, br]` and assembles the
/// final frame, keeping the base's class.
pub fn layout_attachments(
    ctx: &mut MathContext,
    styles: StyleChain,
    base: MathFragment,
    [tl, t, tr, bl, b, br]: [Option<MathFragment>; 6],
) -> SourceResult<()> {
    let class = base.class();
    let (font, size) = base.font(ctx, styles);
    let cramped = styles.get(EquationElem::cramped);
    // Calculate the distance from the base's baseline to the superscripts' and
    // subscripts' baseline.
    let (tx_shift, bx_shift) = if [&tl, &tr, &bl, &br].iter().all(|e| e.is_none()) {
        (Abs::zero(), Abs::zero())
    } else {
        compute_script_shifts(&font, size, cramped, &base, [&tl, &tr, &bl, &br])
    };
    // Calculate the distance from the base's baseline to the top attachment's
    // and bottom attachment's baseline.
    let (t_shift, b_shift) =
        compute_limit_shifts(&font, size, &base, [t.as_ref(), b.as_ref()]);
    // Calculate the final frame height.
    let ascent = base
        .ascent()
        .max(tx_shift + measure!(tr, ascent))
        .max(tx_shift + measure!(tl, ascent))
        .max(t_shift + measure!(t, ascent));
    let descent = base
        .descent()
        .max(bx_shift + measure!(br, descent))
        .max(bx_shift + measure!(bl, descent))
        .max(b_shift + measure!(b, descent));
    let height = ascent + descent;
    // Calculate the vertical position of each element in the final frame.
    let base_y = ascent - base.ascent();
    let tx_y = |tx: &MathFragment| ascent - tx_shift - tx.ascent();
    let bx_y = |bx: &MathFragment| ascent + bx_shift - bx.ascent();
    let t_y = |t: &MathFragment| ascent - t_shift - t.ascent();
    let b_y = |b: &MathFragment| ascent + b_shift - b.ascent();
    // Calculate the distance each limit extends to the left and right of the
    // base's width.
    let ((t_pre_width, t_post_width), (b_pre_width, b_post_width)) =
        compute_limit_widths(&base, [t.as_ref(), b.as_ref()]);
    // `space_after_script` is extra spacing that is at the start before each
    // pre-script, and at the end after each post-script (see the MathConstants
    // table in the OpenType MATH spec).
    let space_after_script = font.math().space_after_script.at(size);
    // Calculate the distance each pre-script extends to the left of the base's
    // width.
    let (tl_pre_width, bl_pre_width) = compute_pre_script_widths(
        &base,
        [tl.as_ref(), bl.as_ref()],
        (tx_shift, bx_shift),
        space_after_script,
    );
    // Calculate the distance each post-script extends to the right of the
    // base's width. Also calculate each post-script's kerning (we need this for
    // its position later).
    let ((tr_post_width, tr_kern), (br_post_width, br_kern)) = compute_post_script_widths(
        &base,
        [tr.as_ref(), br.as_ref()],
        (tx_shift, bx_shift),
        space_after_script,
    );
    // Calculate the final frame width.
    let pre_width = t_pre_width.max(b_pre_width).max(tl_pre_width).max(bl_pre_width);
    let base_width = base.width();
    let post_width = t_post_width.max(b_post_width).max(tr_post_width).max(br_post_width);
    let width = pre_width + base_width + post_width;
    // Calculate the horizontal position of each element in the final frame.
    let base_x = pre_width;
    let tl_x = pre_width - tl_pre_width + space_after_script;
    let bl_x = pre_width - bl_pre_width + space_after_script;
    let tr_x = pre_width + base_width + tr_kern;
    let br_x = pre_width + base_width + br_kern;
    let t_x = pre_width - t_pre_width;
    let b_x = pre_width - b_pre_width;
    // Create the final frame.
    let mut frame = Frame::soft(Size::new(width, height));
    frame.set_baseline(ascent);
    frame.push_frame(Point::new(base_x, base_y), base.into_frame());
    // Pushes an optional attachment at the given x and computed y.
    macro_rules! layout {
        ($e: ident, $x: ident, $y: ident) => {
            if let Some($e) = $e {
                frame.push_frame(Point::new($x, $y(&$e)), $e.into_frame());
            }
        };
    }
    layout!(tl, tl_x, tx_y); // pre-superscript
    layout!(bl, bl_x, bx_y); // pre-subscript
    layout!(tr, tr_x, tx_y); // post-superscript
    layout!(br, br_x, bx_y); // post-subscript
    layout!(t, t_x, t_y); // upper-limit
    layout!(b, b_x, b_y); // lower-limit
    // Done! Note that we retain the class of the base.
    ctx.push(FrameFragment::new(styles, frame).with_class(class));
    Ok(())
}
/// Calculate the distance each post-script extends to the right of the base's
/// width, as well as its kerning value. Requires the distance from the base's
/// baseline to each post-script's baseline to obtain the correct kerning value.
/// Returns 2 tuples of two lengths, each first containing the distance the
/// post-script extends right of the base's width and second containing the
/// post-script's kerning value. The first tuple is for the post-superscript,
/// and the second is for the post-subscript.
fn compute_post_script_widths(
    base: &MathFragment,
    [tr, br]: [Option<&MathFragment>; 2],
    (tr_shift, br_shift): (Abs, Abs),
    space_after_post_script: Abs,
) -> ((Abs, Abs), (Abs, Abs)) {
    let tr_values = tr.map_or_default(|tr| {
        let kern = math_kern(base, tr, tr_shift, Corner::TopRight);
        (space_after_post_script + tr.width() + kern, kern)
    });
    // The base's bounding box already accounts for its italic correction, so we
    // need to shift the post-subscript left by the base's italic correction
    // (see the kerning algorithm as described in the OpenType MATH spec).
    let br_values = br.map_or_default(|br| {
        let kern = math_kern(base, br, br_shift, Corner::BottomRight)
            - base.italics_correction();
        (space_after_post_script + br.width() + kern, kern)
    });
    (tr_values, br_values)
}
/// Calculate the distance each pre-script extends to the left of the base's
/// width. Requires the distance from the base's baseline to each pre-script's
/// baseline to obtain the correct kerning value.
/// Returns two lengths, the first being the distance the pre-superscript
/// extends left of the base's width and the second being the distance the
/// pre-subscript extends left of the base's width.
/// Absent pre-scripts contribute a zero width.
fn compute_pre_script_widths(
    base: &MathFragment,
    [tl, bl]: [Option<&MathFragment>; 2],
    (tl_shift, bl_shift): (Abs, Abs),
    space_before_pre_script: Abs,
) -> (Abs, Abs) {
    let tl_pre_width = tl.map_or_default(|tl| {
        let kern = math_kern(base, tl, tl_shift, Corner::TopLeft);
        space_before_pre_script + tl.width() + kern
    });
    let bl_pre_width = bl.map_or_default(|bl| {
        let kern = math_kern(base, bl, bl_shift, Corner::BottomLeft);
        space_before_pre_script + bl.width() + kern
    });
    (tl_pre_width, bl_pre_width)
}
/// Calculate the distance each limit extends beyond the base's width, in each
/// direction. Can be a negative value if the limit does not extend beyond the
/// base's width, indicating how far into the base's width the limit extends.
/// Returns 2 tuples of two lengths, each first containing the distance the
/// limit extends leftward beyond the base's width and second containing the
/// distance the limit extends rightward beyond the base's width. The first
/// tuple is for the upper-limit, and the second is for the lower-limit.
fn compute_limit_widths(
    base: &MathFragment,
    [t, b]: [Option<&MathFragment>; 2],
) -> ((Abs, Abs), (Abs, Abs)) {
    // The upper- (lower-) limit is shifted to the right (left) of the base's
    // center by half the base's italic correction.
    let delta = base.italics_correction() / 2.0;
    // Half of how much wider the limit is than the base.
    let overhang = |limit: &MathFragment| (limit.width() - base.width()) / 2.0;
    let t_widths = match t {
        Some(t) => {
            let half = overhang(t);
            (half - delta, half + delta)
        }
        None => Default::default(),
    };
    let b_widths = match b {
        Some(b) => {
            let half = overhang(b);
            (half + delta, half - delta)
        }
        None => Default::default(),
    };
    (t_widths, b_widths)
}
/// Calculate the distance from the base's baseline to each limit's baseline.
/// Returns two lengths, the first being the distance to the upper-limit's
/// baseline and the second being the distance to the lower-limit's baseline.
/// Absent limits yield a zero shift.
fn compute_limit_shifts(
    font: &Font,
    font_size: Abs,
    base: &MathFragment,
    [t, b]: [Option<&MathFragment>; 2],
) -> (Abs, Abs) {
    // `upper_gap_min` and `lower_gap_min` give gaps to the descender and
    // ascender of the limits respectively, whereas `upper_rise_min` and
    // `lower_drop_min` give gaps to each limit's baseline (see the
    // MathConstants table in the OpenType MATH spec).
    let t_shift = t.map_or_default(|t| {
        let upper_gap_min = font.math().upper_limit_gap_min.at(font_size);
        let upper_rise_min = font.math().upper_limit_baseline_rise_min.at(font_size);
        base.ascent() + upper_rise_min.max(upper_gap_min + t.descent())
    });
    let b_shift = b.map_or_default(|b| {
        let lower_gap_min = font.math().lower_limit_gap_min.at(font_size);
        let lower_drop_min = font.math().lower_limit_baseline_drop_min.at(font_size);
        base.descent() + lower_drop_min.max(lower_gap_min + b.ascent())
    });
    (t_shift, b_shift)
}
/// Calculate the distance from the base's baseline to each script's baseline.
/// Returns two lengths, the first being the distance to the superscripts'
/// baseline and the second being the distance to the subscripts' baseline.
fn compute_script_shifts(
    font: &Font,
    font_size: Abs,
    cramped: bool,
    base: &MathFragment,
    [tl, tr, bl, br]: [&Option<MathFragment>; 4],
) -> (Abs, Abs) {
    // Fetch the relevant MathConstants, resolved at the font size.
    let sup_shift_up = (if cramped {
        font.math().superscript_shift_up_cramped
    } else {
        font.math().superscript_shift_up
    })
    .at(font_size);
    let sup_bottom_min = font.math().superscript_bottom_min.at(font_size);
    let sup_bottom_max_with_sub =
        font.math().superscript_bottom_max_with_subscript.at(font_size);
    let sup_drop_max = font.math().superscript_baseline_drop_max.at(font_size);
    let gap_min = font.math().sub_superscript_gap_min.at(font_size);
    let sub_shift_down = font.math().subscript_shift_down.at(font_size);
    let sub_top_max = font.math().subscript_top_max.at(font_size);
    let sub_drop_min = font.math().subscript_baseline_drop_min.at(font_size);
    let mut shift_up = Abs::zero();
    let mut shift_down = Abs::zero();
    let is_text_like = base.is_text_like();
    if tl.is_some() || tr.is_some() {
        // For frame fragments, measure from the stored base ascent rather
        // than the frame's full ascent.
        let ascent = match &base {
            MathFragment::Frame(frame) => frame.base_ascent,
            _ => base.ascent(),
        };
        shift_up = shift_up
            .max(sup_shift_up)
            .max(if is_text_like { Abs::zero() } else { ascent - sup_drop_max })
            .max(sup_bottom_min + measure!(tl, descent))
            .max(sup_bottom_min + measure!(tr, descent));
    }
    if bl.is_some() || br.is_some() {
        let descent = match &base {
            MathFragment::Frame(frame) => frame.base_descent,
            _ => base.descent(),
        };
        shift_down = shift_down
            .max(sub_shift_down)
            .max(if is_text_like { Abs::zero() } else { descent + sub_drop_min })
            .max(measure!(bl, ascent) - sub_top_max)
            .max(measure!(br, ascent) - sub_top_max);
    }
    // Enforce the minimum gap between each superscript/subscript pair:
    // first raise the superscript (up to its allowed bottom maximum), then
    // split any remainder evenly between raising and lowering.
    for (sup, sub) in [(tl, bl), (tr, br)] {
        if let (Some(sup), Some(sub)) = (&sup, &sub) {
            let sup_bottom = shift_up - sup.descent();
            let sub_top = sub.ascent() - shift_down;
            let gap = sup_bottom - sub_top;
            if gap >= gap_min {
                continue;
            }
            let increase = gap_min - gap;
            let sup_only =
                (sup_bottom_max_with_sub - sup_bottom).clamp(Abs::zero(), increase);
            let rest = (increase - sup_only) / 2.0;
            shift_up += sup_only + rest;
            shift_down += rest;
        }
    }
    (shift_up, shift_down)
}
/// Calculate the kerning value for a script with respect to the base. A
/// positive value means shifting the script further away from the base, whereas
/// a negative value means shifting the script closer to the base. Requires the
/// distance from the base's baseline to the script's baseline, as well as the
/// script's corner (tl, tr, bl, br).
fn math_kern(base: &MathFragment, script: &MathFragment, shift: Abs, pos: Corner) -> Abs {
    // This process is described under the MathKernInfo table in the OpenType
    // MATH spec.
    let (corr_height_top, corr_height_bot) = match pos {
        // Calculate two correction heights for superscripts:
        // - The distance from the superscript's baseline to the top of the
        //   base's bounding box.
        // - The distance from the base's baseline to the bottom of the
        //   superscript's bounding box.
        Corner::TopLeft | Corner::TopRight => {
            (base.ascent() - shift, shift - script.descent())
        }
        // Calculate two correction heights for subscripts:
        // - The distance from the base's baseline to the top of the
        //   subscript's bounding box.
        // - The distance from the subscript's baseline to the bottom of the
        //   base's bounding box.
        Corner::BottomLeft | Corner::BottomRight => {
            (script.ascent() - shift, shift - base.descent())
        }
    };
    // Calculate the sum of kerning values for each correction height: the
    // base's kern at its corner plus the script's kern at the opposite corner.
    let summed_kern = |height| {
        let base_kern = base.kern_at_height(pos, height);
        let attach_kern = script.kern_at_height(pos.inv(), height);
        base_kern + attach_kern
    };
    // Take the smaller kerning amount (and so the larger value). Note that
    // there is a bug in the spec (as of 2024-08-15): it says to take the
    // minimum of the two sums, but as the kerning value is usually negative it
    // really means the smaller kern. The current wording of the spec could
    // result in glyphs colliding.
    summed_kern(corr_height_top).max(summed_kern(corr_height_bot))
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/accent.rs | crates/typst-layout/src/math/accent.rs | use typst_library::diag::SourceResult;
use typst_library::foundations::{Packed, StyleChain, SymbolElem};
use typst_library::layout::{Abs, Em, Frame, Point, Rel, Size};
use typst_library::math::{Accent, AccentElem};
use typst_syntax::Span;
use super::{
FrameFragment, MathContext, MathFragment, style_cramped, style_dtls, style_flac,
};
/// How much the accent can be shorter than the base. Passed as the
/// `short_fall` when the accent glyph is stretched horizontally over the base
/// in `place_accent`.
const ACCENT_SHORT_FALL: Em = Em::new(0.5);
/// Lays out an [`AccentElem`].
///
/// Prepares the styles for base and accent (dotless/cramped/flattened
/// variants), lays out the base, delegates the actual positioning to
/// [`place_accent`], and pushes the result while carrying over the base's
/// spacing-relevant metrics.
#[typst_macros::time(name = "math.accent", span = elem.span())]
pub fn layout_accent(
    elem: &Packed<AccentElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let top_accent = !elem.accent.is_bottom();

    // Try to replace the base glyph with its dotless variant. Only relevant
    // for top accents and when the user has not disabled it.
    let dtls = style_dtls();
    let base_styles =
        if top_accent && elem.dotless.get(styles) { styles.chain(&dtls) } else { styles };

    // The base of an accent is always laid out cramped.
    let cramped = style_cramped();
    let base_styles = base_styles.chain(&cramped);
    let base = ctx.layout_into_fragment(&elem.base, base_styles)?;

    // Try to replace the accent glyph with its flattened variant when the
    // base is taller than the font's flattened-accent threshold.
    let (font, size) = base.font(ctx, base_styles);
    let flattened_base_height = font.math().flattened_accent_base_height.at(size);
    let flac = style_flac();
    let accent_styles = if top_accent && base.ascent() > flattened_base_height {
        styles.chain(&flac)
    } else {
        styles
    };

    // Preserve class to preserve automatic spacing. All base metrics are
    // captured here because `base` is moved into `place_accent` below.
    let base_class = base.class();
    let base_attach = base.accent_attach();
    let base_italics_correction = base.italics_correction();
    let base_text_like = base.is_text_like();
    let base_ascent = match &base {
        MathFragment::Frame(frame) => frame.base_ascent,
        _ => base.ascent(),
    };
    let base_descent = match &base {
        MathFragment::Frame(frame) => frame.base_descent,
        _ => base.descent(),
    };

    let frame = place_accent(
        ctx,
        base,
        base_styles,
        elem.accent,
        accent_styles,
        elem.size.resolve(styles),
        ACCENT_SHORT_FALL,
        // The accented frame keeps exactly the base's width.
        true,
        elem.span(),
    )?;

    // Re-attach the base's metrics so surrounding spacing and attachments
    // behave as if the accent were not there.
    ctx.push(
        FrameFragment::new(styles, frame)
            .with_class(base_class)
            .with_base_ascent(base_ascent)
            .with_base_descent(base_descent)
            .with_italics_correction(base_italics_correction)
            .with_accent_attach(base_attach)
            .with_text_like(base_text_like),
    );

    Ok(())
}
/// Lays out an accent glyph above (or below) a base fragment and combines the
/// two into a single frame.
///
/// The accent is stretched horizontally towards `stretch_width` (relative to
/// the base's width) minus `short_fall`, aligned via the accent attachment
/// points, and placed so that its ink touches or overlaps the base according
/// to the font's accent metrics. Returns the combined frame with its baseline
/// set to the base's baseline.
#[allow(clippy::too_many_arguments)]
pub fn place_accent(
    ctx: &mut MathContext,
    base: MathFragment,
    base_styles: StyleChain,
    accent: Accent,
    accent_styles: StyleChain,
    stretch_width: Rel<Abs>,
    short_fall: Em,
    // If this is true, the final frame will derive its width solely from the
    // base. If it's false, a large accent can make the final width exceed the
    // base width.
    force_body_width: bool,
    span: Span,
) -> SourceResult<Frame> {
    let top_accent = !accent.is_bottom();
    let base_attach = base.accent_attach();
    let (font, size) = base.font(ctx, base_styles);

    // Lay out the accent character itself as a symbol.
    let mut accent = ctx.layout_into_fragment(
        &SymbolElem::packed(accent.0).spanned(span),
        accent_styles,
    )?;

    // Forcing the accent to be at least as large as the base makes it too wide
    // in many cases.
    let stretch_width = stretch_width.relative_to(base.width());
    let short_fall = short_fall.at(size);
    accent.stretch_horizontal(ctx, stretch_width, short_fall);
    let accent_attach = accent.accent_attach().0;
    let accent = accent.into_frame();

    // Calculate the width of the final frame and the horizontal positions of
    // base and accent, lining up their attachment points.
    let (width, base_x, accent_x) = {
        let base_attach = if top_accent { base_attach.0 } else { base_attach.1 };
        if force_body_width {
            (base.width(), Abs::zero(), base_attach - accent_attach)
        } else {
            // How far the accent sticks out before/after the base.
            let pre_width = accent_attach - base_attach;
            let post_width =
                (accent.width() - accent_attach) - (base.width() - base_attach);
            let width =
                pre_width.max(Abs::zero()) + base.width() + post_width.max(Abs::zero());
            if pre_width < Abs::zero() {
                (width, Abs::zero(), -pre_width)
            } else {
                (width, pre_width, Abs::zero())
            }
        }
    };

    let (gap, accent_pos, base_pos) = if top_accent {
        // Descent is negative because the accent's ink bottom is above the
        // baseline. Therefore, the default gap is the accent's negated descent
        // minus the accent base height. Only if the base is very small, we
        // need a larger gap so that the accent doesn't move too low.
        let accent_base_height = font.math().accent_base_height.at(size);
        let gap = -accent.descent() - base.ascent().min(accent_base_height);
        let accent_pos = Point::with_x(accent_x);
        let base_pos = Point::new(base_x, accent.height() + gap);
        (gap, accent_pos, base_pos)
    } else {
        // Bottom accent: its ink top overlaps the base's bottom edge.
        let gap = -accent.ascent();
        let accent_pos = Point::new(accent_x, base.height() + gap);
        let base_pos = Point::with_x(base_x);
        (gap, accent_pos, base_pos)
    };

    let size = Size::new(width, accent.height() + gap + base.height());
    // The combined frame keeps the base's baseline.
    let baseline = base_pos.y + base.ascent();

    let mut frame = Frame::soft(size);
    frame.set_baseline(baseline);
    frame.push_frame(accent_pos, accent);
    frame.push_frame(base_pos, base.into_frame());
    Ok(frame)
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/math/mat.rs | crates/typst-layout/src/math/mat.rs | use typst_library::diag::{SourceResult, bail, warning};
use typst_library::foundations::{Content, Packed, Resolve, StyleChain, SymbolElem};
use typst_library::layout::{
Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size,
};
use typst_library::math::{Augment, AugmentOffsets, CasesElem, MatElem, VecElem};
use typst_library::text::TextElem;
use typst_library::visualize::{FillRule, FixedStroke, Geometry, LineCap, Shape};
use typst_syntax::Span;
use unicode_math_class::MathClass;
use super::{
AlignmentResult, DELIM_SHORT_FALL, FrameFragment, GlyphFragment, LeftRightAlternator,
MathContext, alignments, style_for_denominator,
};
const VERTICAL_PADDING: Ratio = Ratio::new(0.1);
const DEFAULT_STROKE_THICKNESS: Em = Em::new(0.05);
/// Lays out a [`VecElem`].
#[typst_macros::time(name = "math.vec", span = elem.span())]
pub fn layout_vec(
    elem: &Packed<VecElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();

    // A vector is laid out as a single-column body.
    let cells: Vec<&Content> = elem.children.iter().collect();
    let body = layout_body(
        ctx,
        styles,
        &[cells],
        elem.align.resolve(styles),
        LeftRightAlternator::Right,
        None,
        Axes::with_y(elem.gap.resolve(styles)),
        span,
        "elements",
    )?;

    // Wrap the body in the configured pair of delimiters.
    let delim = elem.delim.get(styles);
    layout_delimiters(ctx, styles, body, delim.open(), delim.close(), span)
}
/// Lays out a [`CasesElem`].
#[typst_macros::time(name = "math.cases", span = elem.span())]
pub fn layout_cases(
    elem: &Packed<CasesElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();

    // The branches form a single start-aligned column.
    let branches: Vec<&Content> = elem.children.iter().collect();
    let body = layout_body(
        ctx,
        styles,
        &[branches],
        FixedAlignment::Start,
        LeftRightAlternator::None,
        None,
        Axes::with_y(elem.gap.resolve(styles)),
        span,
        "branches",
    )?;

    // Cases get a brace on one side only: normally an opening delimiter on
    // the left, or a closing one on the right when reversed.
    let delim = elem.delim.get(styles);
    let (open, close) = match elem.reverse.get(styles) {
        true => (None, delim.close()),
        false => (delim.open(), None),
    };

    layout_delimiters(ctx, styles, body, open, close, span)
}
/// Lays out a [`MatElem`].
///
/// Validates augmentation line offsets against the matrix dimensions,
/// transposes the row-major input into columns, lays out the body, and wraps
/// it in the configured delimiters.
#[typst_macros::time(name = "math.mat", span = elem.span())]
pub fn layout_mat(
    elem: &Packed<MatElem>,
    ctx: &mut MathContext,
    styles: StyleChain,
) -> SourceResult<()> {
    let span = elem.span();
    let rows = &elem.rows;
    let nrows = rows.len();
    // Column count is taken from the first row.
    let ncols = rows.first().map_or(0, |row| row.len());

    let augment = elem.augment.resolve(styles);
    if let Some(aug) = &augment {
        // Offsets may be negative (counting from the end), so both directions
        // must be range-checked.
        for &offset in &aug.hline.0 {
            if offset > nrows as isize || offset.unsigned_abs() > nrows {
                bail!(
                    span,
                    "cannot draw a horizontal line at offset {offset} \
                     in a matrix with {nrows} rows",
                );
            }
        }

        for &offset in &aug.vline.0 {
            if offset > ncols as isize || offset.unsigned_abs() > ncols {
                bail!(
                    span,
                    "cannot draw a vertical line at offset {offset} \
                     in a matrix with {ncols} columns",
                );
            }
        }
    }

    // Transpose rows of the matrix into columns.
    // NOTE(review): this assumes every row has exactly `ncols` entries; the
    // `unwrap` would panic on a ragged matrix — presumably validated upstream,
    // confirm against `MatElem` construction.
    let mut row_iters: Vec<_> = rows.iter().map(|i| i.iter()).collect();
    let columns: Vec<Vec<_>> = (0..ncols)
        .map(|_| row_iters.iter_mut().map(|i| i.next().unwrap()).collect())
        .collect();

    let frame = layout_body(
        ctx,
        styles,
        &columns,
        elem.align.resolve(styles),
        LeftRightAlternator::Right,
        augment,
        Axes::new(elem.column_gap.resolve(styles), elem.row_gap.resolve(styles)),
        span,
        "cells",
    )?;

    let delim = elem.delim.get(styles);
    layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
}
/// Layout the inner contents of a matrix, vector, or cases.
///
/// `columns` is the cell content in column-major order; `augment` optionally
/// describes horizontal/vertical augmentation lines; `gap` is the
/// column/row gap; `children` names the cell kind ("cells", "elements",
/// "branches") for warning messages. Returns the finished body frame
/// (without delimiters).
#[allow(clippy::too_many_arguments)]
fn layout_body(
    ctx: &mut MathContext,
    styles: StyleChain,
    columns: &[Vec<&Content>],
    align: FixedAlignment,
    alternator: LeftRightAlternator,
    augment: Option<Augment<Abs>>,
    gap: Axes<Rel<Abs>>,
    span: Span,
    children: &str,
) -> SourceResult<Frame> {
    let nrows = columns.first().map_or(0, |col| col.len());
    let ncols = columns.len();
    // An empty body produces an empty frame.
    if ncols == 0 || nrows == 0 {
        return Ok(Frame::soft(Size::zero()));
    }

    // Resolve relative gaps against the region size.
    let gap = gap.zip_map(ctx.region.size, Rel::relative_to);
    let half_gap = gap * 0.5;

    // We provide a default stroke thickness that scales
    // with font size to ensure that augmentation lines
    // look correct by default at all matrix sizes.
    // The line cap is also set to square because it looks more "correct".
    let default_stroke_thickness = DEFAULT_STROKE_THICKNESS.resolve(styles);
    let default_stroke = FixedStroke {
        thickness: default_stroke_thickness,
        paint: styles.get_ref(TextElem::fill).as_decoration(),
        cap: LineCap::Square,
        ..Default::default()
    };

    let (mut hline, mut vline, stroke) = match augment {
        Some(augment) => {
            // We need to get stroke here for ownership.
            let stroke = augment.stroke.unwrap_or_default().unwrap_or(default_stroke);
            (augment.hline, augment.vline, stroke)
        }
        _ => (AugmentOffsets::default(), AugmentOffsets::default(), default_stroke),
    };

    // Before the full matrix body can be laid out, the
    // individual cells must first be independently laid out
    // so we can ensure alignment across rows and columns.
    let mut cols = vec![vec![]; ncols];

    // This variable stores the maximum ascent and descent for each row.
    let mut heights = vec![(Abs::zero(), Abs::zero()); nrows];

    let denom_style = style_for_denominator(styles);
    // We pad ascent and descent with the ascent and descent of the paren
    // to ensure that normal matrices are aligned with others unless they are
    // way too big.
    // This will never panic as a paren will never shape into nothing.
    let paren =
        GlyphFragment::new_char(ctx, styles.chain(&denom_style), '(', Span::detached())
            .unwrap();

    for (column, col) in columns.iter().zip(&mut cols) {
        for (cell, (ascent, descent)) in column.iter().zip(&mut heights) {
            let cell_span = cell.span();
            let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?;

            // We ignore linebreaks in the cells as we can't differentiate
            // alignment points for the whole body from ones for a specific
            // cell, and multiline cells don't quite make sense at the moment.
            if cell.is_multiline() {
                ctx.engine.sink.warn(warning!(
                   cell_span,
                   "linebreaks are ignored in {}", children;
                   hint: "use commas instead to separate each line"
                ));
            }

            ascent.set_max(cell.ascent().max(paren.ascent()));
            descent.set_max(cell.descent().max(paren.descent()));

            col.push(cell);
        }
    }

    // Normalize negative augmentation offsets: they count from the end.
    for line in hline.0.iter_mut() {
        if *line < 0 {
            *line += nrows as isize;
        }
    }

    for line in vline.0.iter_mut() {
        if *line < 0 {
            *line += ncols as isize;
        }
    }

    // For each row, combine maximum ascent and descent into a row height.
    // Sum the row heights, then add the total height of the gaps between rows.
    let mut total_height =
        heights.iter().map(|&(a, b)| a + b).sum::<Abs>() + gap.y * (nrows - 1) as f64;

    // Lines at the very top/bottom edge need extra room.
    if hline.0.contains(&0) {
        total_height += gap.y;
    }

    if hline.0.contains(&(nrows as isize)) {
        total_height += gap.y;
    }

    // Width starts at zero because it can't be calculated until later
    let mut frame = Frame::soft(Size::new(Abs::zero(), total_height));

    let mut x = Abs::zero();

    // A vertical line at offset 0 sits in front of the first column.
    if vline.0.contains(&0) {
        frame.push(
            Point::with_x(x + half_gap.x),
            line_item(total_height, true, stroke.clone(), span),
        );
        x += gap.x;
    }

    for (index, col) in cols.into_iter().enumerate() {
        let AlignmentResult { points, width: rcol } = alignments(&col);

        let mut y = if hline.0.contains(&0) { gap.y } else { Abs::zero() };

        for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) {
            let cell = cell.into_line_frame(&points, alternator);
            let pos = Point::new(
                if points.is_empty() {
                    x + align.position(rcol - cell.width())
                } else {
                    x
                },
                // Align each cell's baseline within its row.
                y + ascent - cell.ascent(),
            );

            frame.push_frame(pos, cell);

            y += ascent + descent + gap.y;
        }

        // Advance to the end of the column
        x += rcol;

        // If a vertical line should be inserted after this column
        if vline.0.contains(&(index as isize + 1)) {
            frame.push(
                Point::with_x(x + half_gap.x),
                line_item(total_height, true, stroke.clone(), span),
            );
        }

        // Advance to the start of the next column
        x += gap.x;
    }

    // Drop the trailing gap unless a vertical line sits after the last column.
    let total_width = if !(vline.0.contains(&(ncols as isize))) { x - gap.x } else { x };

    // This allows the horizontal lines to be laid out
    for line in hline.0 {
        let offset = if line == 0 {
            gap.y
        } else {
            (heights[0..line as usize].iter().map(|&(a, b)| a + b).sum::<Abs>()
                + gap.y * (line - 1) as f64)
                + half_gap.y
        };

        frame.push(
            Point::with_y(offset),
            line_item(total_width, false, stroke.clone(), span),
        );
    }

    frame.size_mut().x = total_width;

    Ok(frame)
}
/// Builds a stroked line shape of the given `length`, running vertically or
/// horizontally, attributed to `span`.
fn line_item(length: Abs, vertical: bool, stroke: FixedStroke, span: Span) -> FrameItem {
    // A line is represented by the vector from its origin to its endpoint.
    let geometry = Geometry::Line(if vertical {
        Point::with_y(length)
    } else {
        Point::with_x(length)
    });

    FrameItem::Shape(
        Shape {
            geometry,
            fill: None,
            fill_rule: FillRule::default(),
            stroke: Some(stroke),
        },
        span,
    )
}
/// Layout the outer wrapper around the body of a vector or matrix.
///
/// Centers the body on the math axis and pushes (in order) the stretched
/// opening delimiter, the body, and the stretched closing delimiter.
fn layout_delimiters(
    ctx: &mut MathContext,
    styles: StyleChain,
    mut frame: Frame,
    left: Option<char>,
    right: Option<char>,
    span: Span,
) -> SourceResult<()> {
    let short_fall = DELIM_SHORT_FALL.resolve(styles);
    let axis = ctx.font().math().axis_height.resolve(styles);
    let height = frame.height();
    // Delimiters are stretched slightly beyond the body height.
    let target = height + VERTICAL_PADDING.of(height);
    frame.set_baseline(height / 2.0 + axis);

    if let Some(c) = left {
        let mut open =
            ctx.layout_into_fragment(&SymbolElem::packed(c).spanned(span), styles)?;
        open.stretch_vertical(ctx, target, short_fall);
        open.center_on_axis();
        open.set_class(MathClass::Opening);
        ctx.push(open);
    }

    ctx.push(FrameFragment::new(styles, frame));

    if let Some(c) = right {
        let mut close =
            ctx.layout_into_fragment(&SymbolElem::packed(c).spanned(span), styles)?;
        close.stretch_vertical(ctx, target, short_fall);
        close.center_on_axis();
        close.set_class(MathClass::Closing);
        ctx.push(close);
    }

    Ok(())
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
typst/typst | https://github.com/typst/typst/blob/a87f4b15ca86a0b2f98948d8f393608070ed731e/crates/typst-layout/src/pages/collect.rs | crates/typst-layout/src/pages/collect.rs | use rustc_hash::FxHashSet;
use typst_library::foundations::StyleChain;
use typst_library::introspection::{Locator, SplitLocator, Tag, TagElem};
use typst_library::layout::{PagebreakElem, Parity};
use typst_library::routines::Pair;
/// An item in page layout, produced by [`collect`].
pub enum Item<'a> {
    /// A page run containing content. All runs will be layouted in parallel.
    /// Carries the run's children, the styles active at the start of the run,
    /// and the locator for introspection.
    Run(&'a [Pair<'a>], StyleChain<'a>, Locator<'a>),
    /// Tags in between pages. These will be prepended to the first start of
    /// the next page, or appended at the very end of the final page if there is
    /// no next page.
    Tags(&'a [Pair<'a>]),
    /// An instruction to possibly add a page to bring the page number parity to
    /// the desired state. Can only be done at the end, sequentially, because it
    /// requires knowledge of the concrete page number.
    Parity(Parity, StyleChain<'a>, Locator<'a>),
}
/// Slices up the children into logical parts, processing styles and handling
/// things like tags and weak pagebreaks.
///
/// `children` is a flat list of flow-level items and pagebreaks; `initial`
/// holds the styles in effect before the first child. Returns the sequence of
/// page-level [`Item`]s to lay out.
pub fn collect<'a>(
    mut children: &'a mut [Pair<'a>],
    locator: &mut SplitLocator<'a>,
    mut initial: StyleChain<'a>,
) -> Vec<Item<'a>> {
    // The collected page-level items.
    let mut items: Vec<Item<'a>> = vec![];
    // When this is true, an empty page should be added to `pages` at the end.
    let mut staged_empty_page = true;

    // The `children` are a flat list of flow-level items and pagebreaks. This
    // loops splits it up into pagebreaks and consecutive slices of
    // non-pagebreaks. From these pieces, we build page items that we can then
    // layout in parallel.
    while let Some(&(elem, styles)) = children.first() {
        if let Some(pagebreak) = elem.to_packed::<PagebreakElem>() {
            // Add a blank page if we encounter a strong pagebreak and there was
            // a staged empty page.
            let strong = !pagebreak.weak.get(styles);
            if strong && staged_empty_page {
                let locator = locator.next(&elem.span());
                items.push(Item::Run(&[], initial, locator));
            }

            // Add an instruction to adjust the page parity if requested.
            if let Some(parity) = pagebreak.to.get(styles) {
                let locator = locator.next(&elem.span());
                items.push(Item::Parity(parity, styles, locator));
            }

            // The initial styles for the next page are ours unless this is a
            // "boundary" pagebreak. Such a pagebreak is generated at the end of
            // the scope of a page set rule to ensure a page boundary. Its
            // styles correspond to the styles _before_ the page set rule, so we
            // don't want to apply it to a potential empty page.
            if !pagebreak.boundary.get(styles) {
                initial = styles;
            }

            // Stage an empty page after a strong pagebreak.
            staged_empty_page |= strong;

            // Advance to the next child.
            children = &mut children[1..];
        } else {
            // Find the end of the consecutive non-pagebreak run.
            let end =
                children.iter().take_while(|(c, _)| !c.is::<PagebreakElem>()).count();

            // Migrate start tags without accompanying end tags from before a
            // pagebreak to after it.
            let end = migrate_unterminated_tags(children, end);
            // Everything was migrated past the pagebreak; re-enter the loop,
            // which will now see the pagebreak first.
            if end == 0 {
                continue;
            }

            // Advance to the rest of the children.
            let (group, rest) = children.split_at_mut(end);
            children = rest;

            // If all that is left now are tags, then we don't want to add a
            // page just for them (since no group would have been detected in a
            // tagless layout and tags should never affect the layout). For this
            // reason, we remember them in a `PageItem::Tags` and later insert
            // them at the _very start_ of the next page, even before the
            // header.
            //
            // We don't do this if all that's left is end boundary pagebreaks
            // and if an empty page is still staged, since then we can just
            // conceptually replace that final page with us.
            if group.iter().all(|(c, _)| c.is::<TagElem>())
                && !(staged_empty_page
                    && children.iter().all(|&(c, s)| {
                        c.to_packed::<PagebreakElem>().is_some_and(|c| c.boundary.get(s))
                    }))
            {
                items.push(Item::Tags(group));
                continue;
            }

            // Record a page run and then disregard a staged empty page because
            // we have real content now.
            let locator = locator.next(&elem.span());
            items.push(Item::Run(group, initial, locator));
            staged_empty_page = false;
        }
    }

    // Flush a staged empty page.
    if staged_empty_page {
        items.push(Item::Run(&[], initial, locator.next(&())));
    }

    items
}
/// Migrates trailing start tags without accompanying end tags from before
/// a pagebreak to after it. Returns the position right after the last
/// non-migrated tag.
///
/// This is important because we want the positions of introspectable elements
/// that technically started before a pagebreak, but have no visible content
/// yet, to be after the pagebreak. A typical case where this happens is `show
/// heading: it => pagebreak() + it`.
fn migrate_unterminated_tags(children: &mut [Pair], mid: usize) -> usize {
    // Compute the range from before the first trailing tag to after the last
    // following pagebreak.
    let (before, after) = children.split_at(mid);
    let start = mid - before.iter().rev().take_while(|&(c, _)| c.is::<TagElem>()).count();
    let end = mid + after.iter().take_while(|&(c, _)| c.is::<PagebreakElem>()).count();

    // Determine the set of tag locations which we won't migrate (because they
    // are terminated).
    let excluded: FxHashSet<_> = children[start..mid]
        .iter()
        .filter_map(|(c, _)| match c.to_packed::<TagElem>()?.tag {
            Tag::Start(..) => None,
            Tag::End(loc, ..) => Some(loc),
        })
        .collect();

    // A key function that partitions the area of interest into three groups:
    // Excluded tags (-1) | Pagebreaks (0) | Migrated tags (1).
    let key = |(c, _): &Pair| match c.to_packed::<TagElem>() {
        Some(elem) => {
            if excluded.contains(&elem.tag.location()) {
                -1
            } else {
                1
            }
        }
        None => 0,
    };

    // Partition the children using a *stable* sort. While it would be possible
    // to write a more efficient direct algorithm for this, the sort version is
    // less likely to have bugs and this is absolutely not on a hot path.
    // Stability matters: it preserves the relative order within each group.
    children[start..end].sort_by_key(key);

    // Compute the new end index, right before the pagebreaks.
    start + children[start..end].iter().take_while(|pair| key(pair) == -1).count()
}
| rust | Apache-2.0 | a87f4b15ca86a0b2f98948d8f393608070ed731e | 2026-01-04T15:31:59.400510Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.