file_path stringlengths 3 280 | file_language stringclasses 66
values | content stringlengths 1 1.04M | repo_name stringlengths 5 92 | repo_stars int64 0 154k | repo_description stringlengths 0 402 | repo_primary_language stringclasses 108
values | developer_username stringlengths 1 25 | developer_name stringlengths 0 30 | developer_company stringlengths 0 82 |
|---|---|---|---|---|---|---|---|---|---|
crates/swc_ecma_compat_es2019/src/optional_catch_binding.rs | Rust | use swc_ecma_ast::*;
use swc_ecma_utils::private_ident;
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
// Zero-sized visitor whose only job is to materialize a binding for
// binding-less `catch {}` clauses (ES2019 "optional catch binding").
struct OptionalCatchBinding;
// Compiles ES2019 `try {} catch {}` down to classic `try {} catch (e) {}`
// by injecting a fresh, hygienic identifier into every binding-less clause.
pub fn optional_catch_binding() -> impl Pass {
visit_mut_pass(OptionalCatchBinding)
}
#[swc_trace]
impl VisitMut for OptionalCatchBinding {
    noop_visit_mut_type!(fail);

    /// Gives every binding-less `catch {}` clause a fresh private `e`
    /// binding; clauses that already carry a parameter are left alone.
    /// Children are visited first so nested catch clauses are handled too.
    fn visit_mut_catch_clause(&mut self, cc: &mut CatchClause) {
        cc.visit_mut_children_with(self);
        if cc.param.is_none() {
            cc.param = Some(private_ident!("e").into());
        }
    }
}
#[cfg(test)]
mod tests {
use swc_common::Mark;
use swc_ecma_ast::Pass;
use swc_ecma_transforms_base::resolver;
use swc_ecma_transforms_testing::test;
use swc_ecma_visit::visit_mut_pass;
use crate::optional_catch_binding::OptionalCatchBinding;
// Runs the resolver first so the injected `e` binding stays hygienic
// (renamed on collision with user code), then the transform itself.
pub fn tr() -> impl Pass {
(
resolver(Mark::new(), Mark::new(), false),
visit_mut_pass(OptionalCatchBinding),
)
}
// Basic case: a binding is added to an empty `catch {}` (issue #411).
test!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
issue_411,
"try {} catch {}"
);
// The injected binding must not capture a free `e` used in the body.
test!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
catch_binding_name_collision_1,
"try { throw new Error(); } catch { log(e); }"
);
// Likewise when `e` is declared in the enclosing scope.
test!(
::swc_ecma_parser::Syntax::default(),
|_| tr(),
catch_binding_name_collision_2,
"var e; try {} catch { log(e); }"
);
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2019/tests/__swc_snapshots__/src/optional_catch_binding.rs/catch_binding_name_collision_1.js | JavaScript | try {
throw new Error();
} catch (e1) {
log(e);
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2019/tests/__swc_snapshots__/src/optional_catch_binding.rs/catch_binding_name_collision_2.js | JavaScript | var e;
try {} catch (e1) {
log(e);
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2019/tests/__swc_snapshots__/src/optional_catch_binding.rs/issue_411.js | JavaScript | try {} catch (e) {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2020/src/export_namespace_from.rs | Rust | use swc_atoms::Atom;
use swc_ecma_ast::*;
use swc_ecma_utils::private_ident;
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut};
use swc_trace_macro::swc_trace;
// Compiles ES2020 `export * as ns from "mod"` into an
// `import * as _ns from "mod"` plus a plain named re-export of `_ns`.
pub fn export_namespace_from() -> impl Pass {
visit_mut_pass(ExportNamespaceFrom)
}
// Stateless visitor implementing the transform above.
struct ExportNamespaceFrom;
#[swc_trace]
impl VisitMut for ExportNamespaceFrom {
noop_visit_mut_type!(fail);
// Rewrites, at the module level, every value re-export that contains a
// namespace specifier; all other items are passed through untouched.
fn visit_mut_module_items(&mut self, items: &mut Vec<ModuleItem>) {
// Fast bail-out: count the exports that actually need rewriting so the
// common no-op module avoids a rebuild of the item list.
let count = items
.iter()
.filter(|m| {
matches!(m, ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(NamedExport {
specifiers,
src: Some(..),
type_only: false,
..
})) if specifiers.iter().any(|s| s.is_namespace()))
})
.count();
if count == 0 {
return;
}
// Each rewritten export expands into at most 3 items (import + export
// of the bridge + remaining original specifiers), hence the headroom.
let mut stmts = Vec::<ModuleItem>::with_capacity(items.len() + count);
for item in items.drain(..) {
match item {
ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(NamedExport {
span,
specifiers,
src: Some(src),
type_only: false,
with,
})) if specifiers.iter().any(|s| s.is_namespace()) => {
let mut origin_specifiers = Vec::new();
let mut import_specifiers = Vec::new();
let mut export_specifiers = Vec::new();
// Split specifiers: namespace ones get a private "bridge" ident
// (`import * as _ns`) that is then re-exported under the
// original name; default/named ones stay on the original export.
for s in specifiers.into_iter() {
match s {
ExportSpecifier::Namespace(ExportNamespaceSpecifier { span, name }) => {
let local_bridge =
private_ident!(format!("_{}", normalize_name(&name)));
import_specifiers.push(ImportSpecifier::Namespace(
ImportStarAsSpecifier {
span,
local: local_bridge.clone(),
},
));
export_specifiers.push(ExportSpecifier::Named(
ExportNamedSpecifier {
span,
orig: local_bridge.into(),
exported: Some(name),
is_type_only: false,
},
))
}
ExportSpecifier::Default(..) | ExportSpecifier::Named(..) => {
origin_specifiers.push(s);
}
}
}
// `import * as _ns from "mod"` — brings the namespace object in.
stmts.push(
ImportDecl {
span,
specifiers: import_specifiers,
src: src.clone(),
type_only: false,
with: with.clone(),
phase: Default::default(),
}
.into(),
);
// `export { _ns as ns }` — local re-export of the bridge ident.
stmts.push(
NamedExport {
span,
specifiers: export_specifiers,
src: None,
type_only: false,
with: None,
}
.into(),
);
// Any non-namespace specifiers keep their original `from` source.
if !origin_specifiers.is_empty() {
stmts.push(
NamedExport {
span,
specifiers: origin_specifiers,
src: Some(src),
type_only: false,
with,
}
.into(),
);
}
}
_ => {
stmts.push(item);
}
}
}
*items = stmts;
}
}
// Returns the textual name of a module-export name regardless of whether it
// is spelled as an identifier or a string literal
// (`export * as ns from ...` vs `export * as "ns" from ...`); used to
// derive the private bridge identifier `_<name>`.
fn normalize_name(module_export_name: &ModuleExportName) -> &Atom {
match module_export_name {
ModuleExportName::Ident(Ident { sym: name, .. })
| ModuleExportName::Str(Str { value: name, .. }) => name,
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2020/src/lib.rs | Rust | use serde::Deserialize;
use swc_common::Mark;
use swc_ecma_ast::Pass;
pub use self::{
export_namespace_from::export_namespace_from, nullish_coalescing::nullish_coalescing,
optional_chaining::optional_chaining,
};
mod export_namespace_from;
pub mod nullish_coalescing;
pub mod optional_chaining;
// Bundles every ES2020 lowering this crate provides, in order:
// nullish coalescing (`??`), optional chaining (`?.`), then
// export-namespace-from. `unresolved_mark` must come from the resolver
// pass that is expected to run beforehand.
pub fn es2020(config: Config, unresolved_mark: Mark) -> impl Pass {
(
nullish_coalescing(config.nullish_coalescing),
optional_chaining(config.optional_chaining, unresolved_mark),
export_namespace_from(),
)
}
// Combined configuration for the es2020 pass; `flatten` merges the
// sub-pass options into a single flat JSON object when deserializing.
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
#[serde(flatten)]
pub nullish_coalescing: nullish_coalescing::Config,
#[serde(flatten)]
pub optional_chaining: optional_chaining::Config,
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2020/src/nullish_coalescing.rs | Rust | use std::mem::take;
use serde::Deserialize;
use swc_common::{util::take::Take, Span, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_utils::{alias_ident_for_simple_assign_tatget, alias_if_required, StmtLike};
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
// Compiles ES2020 nullish coalescing (`a ?? b`, `a ??= b`) into explicit
// null/undefined checks.
pub fn nullish_coalescing(c: Config) -> impl Pass + 'static {
visit_mut_pass(NullishCoalescing {
c,
..Default::default()
})
}
#[derive(Debug, Default)]
struct NullishCoalescing {
// Temporary `var` declarators created while lowering an expression;
// flushed in front of the statement that produced them.
vars: Vec<VarDeclarator>,
c: Config,
}
#[derive(Debug, Clone, Copy, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
/// Assume `document.all` does not exist, allowing the shorter loose
/// `!= null` test instead of the strict two-part check.
#[serde(default)]
pub no_document_all: bool,
}
#[swc_trace]
impl NullishCoalescing {
// Shared body of `visit_mut_module_items` / `visit_mut_stmts`: visits each
// statement and, whenever the visit queued temporaries in `self.vars`,
// emits a `var` declaration for them *before* the statement that needs
// them (they must be in scope when the rewritten expression runs).
fn visit_mut_stmt_like<T>(&mut self, stmts: &mut Vec<T>)
where
T: VisitMutWith<Self> + StmtLike,
{
let mut buf = Vec::with_capacity(stmts.len() + 2);
for mut stmt in stmts.take() {
stmt.visit_mut_with(self);
if !self.vars.is_empty() {
buf.push(T::from(
VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
decls: take(&mut self.vars),
declare: false,
..Default::default()
}
.into(),
));
}
buf.push(stmt);
}
*stmts = buf
}
}
#[swc_trace]
impl VisitMut for NullishCoalescing {
noop_visit_mut_type!(fail);
/// Prevents #1123
// Save/restore the pending-vars buffer around nested blocks so
// temporaries are declared in the block that uses them.
fn visit_mut_block_stmt(&mut self, s: &mut BlockStmt) {
let old_vars = self.vars.take();
s.visit_mut_children_with(self);
self.vars = old_vars;
}
/// Prevents #1123
fn visit_mut_switch_case(&mut self, s: &mut SwitchCase) {
// Prevents #6328
// The `case` test expression belongs to the outer scope, so it is
// visited before the buffer is swapped for the consequent statements.
s.test.visit_mut_with(self);
let old_vars = self.vars.take();
s.cons.visit_mut_with(self);
self.vars = old_vars;
}
fn visit_mut_module_items(&mut self, n: &mut Vec<ModuleItem>) {
self.visit_mut_stmt_like(n)
}
fn visit_mut_stmts(&mut self, n: &mut Vec<Stmt>) {
self.visit_mut_stmt_like(n)
}
// Core lowering: rewrites `a ?? b` and `a ??= b` into explicit
// null/undefined tests via `make_cond`, introducing a temporary only when
// the left-hand side has possible side effects and therefore must not be
// evaluated twice.
fn visit_mut_expr(&mut self, e: &mut Expr) {
e.visit_mut_children_with(self);
match e {
Expr::Bin(BinExpr {
span,
left,
op: op!("??"),
right,
}) => {
//
// `alias_if_required` hands back either the expression itself (when
// it is already a plain reference) or a fresh alias ident.
let (l, aliased) = alias_if_required(left, "ref");
if aliased {
self.vars.push(VarDeclarator {
span: DUMMY_SP,
name: l.clone().into(),
init: None,
definite: false,
});
}
// With an alias the test operand becomes `(ref = <left>)`;
// otherwise the reference is reused directly.
let var_expr = if aliased {
AssignExpr {
span: DUMMY_SP,
op: op!("="),
left: l.clone().into(),
right: left.take(),
}
.into()
} else {
l.clone().into()
};
*e = make_cond(self.c, *span, &l, var_expr, right.take());
}
Expr::Assign(ref mut assign @ AssignExpr { op: op!("??="), .. }) => {
match &mut assign.left {
// Simple identifier target: reading the ident twice is safe,
// so no temporary is needed.
AssignTarget::Simple(SimpleAssignTarget::Ident(i)) => {
*e = AssignExpr {
span: assign.span,
op: op!("="),
left: i.clone().into(),
right: Box::new(make_cond(
self.c,
assign.span,
&Ident::from(&*i),
Expr::Ident(Ident::from(&*i)),
assign.right.take(),
)),
}
.into();
}
// Other simple targets (member expressions): the target is
// evaluated once into a `refs` temporary.
AssignTarget::Simple(left) => {
let alias = alias_ident_for_simple_assign_tatget(left, "refs");
self.vars.push(VarDeclarator {
span: DUMMY_SP,
name: alias.clone().into(),
init: None,
definite: false,
});
// TODO: Check for computed.
let right_expr = AssignExpr {
span: assign.span,
left: left.clone().into(),
op: op!("="),
right: assign.right.take(),
}
.into();
let var_expr = AssignExpr {
span: DUMMY_SP,
op: op!("="),
left: alias.clone().into(),
right: left.take().into(),
}
.into();
*e = AssignExpr {
span: assign.span,
op: op!("="),
left: alias.clone().into(),
right: Box::new(make_cond(
self.c,
assign.span,
&alias,
var_expr,
right_expr,
)),
}
.into();
}
_ => {}
}
}
_ => {}
}
}
// Arrow bodies: an expression body cannot host the hoisted `var`s, so it
// is promoted to a block `{ var ...; return <expr>; }` when needed.
fn visit_mut_block_stmt_or_expr(&mut self, n: &mut BlockStmtOrExpr) {
let vars = self.vars.take();
n.visit_mut_children_with(self);
if !self.vars.is_empty() {
if let BlockStmtOrExpr::Expr(expr) = n {
// expr
// { var decl = init; return expr; }
let stmts = vec![
VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
decls: self.vars.take(),
declare: false,
..Default::default()
}
.into(),
Stmt::Return(ReturnStmt {
span: DUMMY_SP,
arg: Some(expr.take()),
}),
];
*n = BlockStmtOrExpr::BlockStmt(BlockStmt {
span: DUMMY_SP,
stmts,
..Default::default()
});
}
}
self.vars = vars;
}
}
// Builds the replacement for `<var_expr> ?? <init>`.
//
// Loose mode (`no_document_all`): `<var_expr> != null ? <alias> : <init>`,
// relying on loose equality to also catch `undefined` (only `document.all`
// breaks that assumption). Strict mode emits the spec-faithful
// `<var_expr> !== null && <alias> !== void 0 ? <alias> : <init>`; the second
// operand reads the alias, which by then holds the evaluated value.
#[tracing::instrument(level = "info", skip_all)]
fn make_cond(c: Config, span: Span, alias: &Ident, var_expr: Expr, init: Box<Expr>) -> Expr {
if c.no_document_all {
CondExpr {
span,
test: BinExpr {
span: DUMMY_SP,
left: Box::new(var_expr),
op: op!("!="),
right: Box::new(Expr::Lit(Lit::Null(Null { span: DUMMY_SP }))),
}
.into(),
cons: alias.clone().into(),
alt: init,
}
} else {
CondExpr {
span,
test: BinExpr {
span: DUMMY_SP,
left: Box::new(Expr::Bin(BinExpr {
span: DUMMY_SP,
left: Box::new(var_expr),
op: op!("!=="),
right: Box::new(Expr::Lit(Lit::Null(Null { span: DUMMY_SP }))),
})),
op: op!("&&"),
right: Box::new(Expr::Bin(BinExpr {
span: DUMMY_SP,
left: Box::new(Expr::Ident(alias.clone())),
op: op!("!=="),
right: Expr::undefined(DUMMY_SP),
})),
}
.into(),
cons: alias.clone().into(),
alt: init,
}
}
.into()
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2020/src/optional_chaining.rs | Rust | use serde::Deserialize;
use swc_common::Mark;
use swc_ecma_ast::Pass;
use swc_ecma_compat_es2022::optional_chaining_impl::optional_chaining_impl;
use swc_ecma_visit::visit_mut_pass;
// ES2020 optional chaining (`a?.b`, `a?.()`): thin wrapper that forwards to
// the shared es2022 implementation, translating this pass's config into the
// implementation's own config type.
pub fn optional_chaining(c: Config, unresolved_mark: Mark) -> impl Pass {
visit_mut_pass(optional_chaining_impl(
swc_ecma_compat_es2022::optional_chaining_impl::Config {
no_document_all: c.no_document_all,
pure_getter: c.pure_getter,
},
unresolved_mark,
))
}
#[derive(Debug, Clone, Copy, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
/// Assume `document.all` does not exist, allowing looser null checks.
#[serde(default)]
pub no_document_all: bool,
/// Assume property getters are side-effect free, so repeated reads are
/// safe and fewer temporaries are needed.
#[serde(default)]
pub pure_getter: bool,
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2021/src/lib.rs | Rust | use swc_ecma_ast::Pass;
pub use self::logical_assignments::logical_assignments;
mod logical_assignments;
// Bundles every ES2021 lowering this crate provides — currently only the
// logical assignment operators (`&&=`, `||=`, `??=`).
pub fn es2021() -> impl Pass {
logical_assignments()
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2021/src/logical_assignments.rs | Rust | use std::mem;
use swc_common::{util::take::Take, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_transforms_base::perf::Parallel;
use swc_ecma_utils::{alias_ident_for, prepend_stmt};
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
// Compiles ES2021 logical assignment operators (`&&=`, `||=`, `??=`) into
// their expanded `lhs && (lhs = rhs)`-style equivalents.
pub fn logical_assignments() -> impl Pass {
visit_mut_pass(Operators::default())
}
#[derive(Debug, Default)]
struct Operators {
// Temporaries created while expanding an assignment; hoisted into a
// `var` declaration at the top of the enclosing statement list.
vars: Vec<VarDeclarator>,
}
impl Operators {
// Memoizes a computed property key so its expression is evaluated exactly
// once: returns `([_ref = <expr>], [_ref])` — the first form for the read
// position, the second for the write position. The backing `var _ref`
// declarator is queued in `self.vars` for later hoisting.
fn memorize_prop(&mut self, c: ComputedPropName) -> (ComputedPropName, ComputedPropName) {
let alias = alias_ident_for(&c.expr, "_ref");
self.vars.push(VarDeclarator {
span: DUMMY_SP,
name: alias.clone().into(),
init: None,
definite: false,
});
(
ComputedPropName {
span: c.span,
expr: AssignExpr {
span: DUMMY_SP,
left: alias.clone().into(),
op: op!("="),
right: c.expr,
}
.into(),
},
ComputedPropName {
span: c.span,
expr: Box::new(alias.into()),
},
)
}
}
#[swc_trace]
impl Parallel for Operators {
    /// Each parallel worker starts with an empty declarator buffer.
    fn create(&self) -> Self {
        Self::default()
    }

    /// Folds a finished worker's queued declarators back into this one.
    fn merge(&mut self, other: Self) {
        for var in other.vars {
            self.vars.push(var);
        }
    }
}
#[swc_trace]
impl VisitMut for Operators {
noop_visit_mut_type!(fail);
// Expands `lhs op= rhs` (for `&&=`, `||=`, `??=`) into
// `<lhs-read> op (<lhs-write> = rhs)`, memoizing the object and any
// computed key so each is evaluated at most once.
fn visit_mut_expr(&mut self, e: &mut Expr) {
e.visit_mut_children_with(self);
if let Expr::Assign(AssignExpr {
span,
op: op @ (op!("&&=") | op!("||=") | op!("??=")),
left: AssignTarget::Simple(left),
right,
}) = e
{
// Build two copies of the target: one for the read (left operand of
// the binary expression) and one for the write (inner assignment).
let (left_expr, r_assign_target) = match &mut *left {
// `super[expr] op= rhs`: only the computed key needs memoizing.
SimpleAssignTarget::SuperProp(SuperPropExpr {
span,
obj,
prop: SuperProp::Computed(c),
}) => {
let (left, right) = self.memorize_prop(c.take());
(
Box::new(
SuperPropExpr {
span: *span,
obj: *obj,
prop: SuperProp::Computed(left),
}
.into(),
),
Box::new(
SuperPropExpr {
span: *span,
obj: *obj,
prop: SuperProp::Computed(right),
}
.into(),
),
)
}
// `obj.prop op= rhs` / `obj[expr] op= rhs`: memoize the object
// (unless it is `this`) and the computed key.
SimpleAssignTarget::Member(m) => {
let (left_obj, right_obj) = match *m.obj.take() {
// TODO: local vars
obj @ Expr::This(_) => (obj.clone().into(), obj.into()),
obj => {
let alias = alias_ident_for(&obj, "_ref");
self.vars.push(VarDeclarator {
span: DUMMY_SP,
name: alias.clone().into(),
init: None,
definite: false,
});
(
AssignExpr {
span: DUMMY_SP,
op: op!("="),
left: alias.clone().into(),
right: obj.into(),
}
.into(),
alias.into(),
)
}
};
let (left_prop, right_prop) = match m.prop.take() {
MemberProp::Computed(c) => {
let (left, right) = self.memorize_prop(c);
(left.into(), right.into())
}
prop => (prop.clone(), prop),
};
(
MemberExpr {
span: DUMMY_SP,
obj: left_obj,
prop: left_prop,
}
.into(),
MemberExpr {
span: DUMMY_SP,
obj: right_obj,
prop: right_prop,
}
.into(),
)
}
// Plain identifiers: reading twice is safe, just duplicate.
_ => {
let expr: Box<Expr> = left.take().into();
(expr.clone(), expr)
}
};
let right = AssignExpr {
span: DUMMY_SP,
op: op!("="),
left: r_assign_target.try_into().unwrap(),
right: right.take(),
}
.into();
// Map the compound operator to its plain logical counterpart.
let op = match *op {
op!("??=") => op!("??"),
op!("&&=") => op!("&&"),
op!("||=") => op!("||"),
_ => unreachable!(),
};
*e = BinExpr {
span: *span,
op,
left: left_expr,
right,
}
.into();
}
}
/// [swc_ecma_ast::ModuleItem] is the top level Item in the current
/// implementation of JavaScript until the proposal for
/// [module-declarations] and [module-expressions] are officially added.
///
/// [module-declarations]: https://github.com/tc39/proposal-module-declarations.
/// [module-expressions]: https://github.com/tc39/proposal-module-expressions
fn visit_mut_module_items(&mut self, n: &mut Vec<ModuleItem>) {
// Scope the buffer to this item list, then prepend one `var`
// declaration holding every temporary created inside it.
let vars = self.vars.take();
n.visit_mut_children_with(self);
let vars = mem::replace(&mut self.vars, vars);
if !vars.is_empty() {
prepend_stmt(
n,
VarDecl {
kind: VarDeclKind::Var,
decls: vars,
..Default::default()
}
.into(),
)
}
}
// Same hoisting strategy for plain statement lists (function bodies etc.).
fn visit_mut_stmts(&mut self, n: &mut Vec<Stmt>) {
let vars = self.vars.take();
n.visit_mut_children_with(self);
let vars = mem::replace(&mut self.vars, vars);
if !vars.is_empty() {
prepend_stmt(
n,
VarDecl {
kind: VarDeclKind::Var,
decls: vars,
..Default::default()
}
.into(),
)
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/class_properties/class_name_tdz.rs | Rust | use swc_common::DUMMY_SP;
use swc_ecma_ast::*;
use swc_ecma_transforms_base::helper;
use swc_ecma_utils::ExprFactory;
use swc_ecma_visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
// Rewrites reads of the class's own name where the binding is in its
// temporal dead zone, so the lowered output still throws at runtime like
// the original class-field initializer would.
pub(super) struct ClassNameTdzFolder<'a> {
pub class_name: &'a Ident,
}
#[swc_trace]
impl VisitMut for ClassNameTdzFolder<'_> {
noop_visit_mut_type!(fail);
fn visit_mut_expr(&mut self, expr: &mut Expr) {
match expr {
Expr::Ident(i) => {
//
// NOTE(review): the comparison is by symbol text only, not by
// syntax context, so a same-named but unrelated binding would
// presumably also be rewritten — confirm against the resolver.
if i.sym == self.class_name.sym {
// Replace `Name` with `(classNameTdzError("Name"), Name)`:
// the helper reports the TDZ violation, the trailing ident
// keeps the expression's type/value shape intact.
*expr = SeqExpr {
span: DUMMY_SP,
exprs: vec![
Box::new(Expr::Call(CallExpr {
span: DUMMY_SP,
callee: helper!(class_name_tdz_error),
args: vec![Str {
span: i.span,
raw: None,
value: i.sym.clone(),
}
.as_arg()],
..Default::default()
})),
Box::new(Expr::Ident(i.clone())),
],
}
.into();
}
}
_ => {
expr.visit_mut_children_with(self);
}
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/class_properties/member_init.rs | Rust | use swc_common::{Span, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_transforms_base::helper;
use swc_ecma_utils::{prop_name_to_expr, prop_name_to_expr_value, quote_ident, ExprFactory};
use swc_trace_macro::swc_trace;
use super::Config;
// One pending class-member initialization, recorded while lowering a class
// body and later rendered into constructor statements (`into_init`) or
// post-class statements (`into_init_static`).
pub(super) enum MemberInit {
PubProp(PubProp),
PrivProp(PrivProp),
PrivMethod(PrivMethod),
PrivAccessor(PrivAccessor),
StaticBlock(Box<Expr>),
}
// Public field: `name = value`.
pub(super) struct PubProp {
pub span: Span,
pub name: PropName,
pub value: Box<Expr>,
}
// Private field: `#name = value` (name is the generated backing ident).
pub(super) struct PrivProp {
pub span: Span,
pub name: Ident,
pub value: Box<Expr>,
}
// Private method: `#name() {}` lowered to a standalone function.
pub(super) struct PrivMethod {
pub span: Span,
pub name: Ident,
// only used in loose mode
pub fn_name: Ident,
}
// Private accessor pair: getter and/or setter for `#name`.
pub(super) struct PrivAccessor {
pub span: Span,
pub name: Ident,
pub getter: Option<Ident>,
pub setter: Option<Ident>,
}
// Accumulates `MemberInit`s for one class, merging accessor pairs.
pub(super) struct MemberInitRecord {
c: Config,
pub record: Vec<MemberInit>,
}
#[swc_trace]
impl MemberInitRecord {
pub fn new(c: Config) -> Self {
Self {
c,
record: Vec::new(),
}
}
// Records one member init. Returns `false` when the entry was merged into
// an existing private-accessor record (getter and setter for the same
// `#name` collapse into one entry) instead of being appended.
pub fn push(&mut self, member: MemberInit) -> bool {
// there shouldn't be many class field, so n^2 should be fine
if let MemberInit::PrivAccessor(accessor) = member {
if let Some(MemberInit::PrivAccessor(previous)) =
self.record.iter_mut().find(|item| matches!(item, MemberInit::PrivAccessor(PrivAccessor { name, .. }) if name.sym == accessor.name.sym))
{
previous.getter = previous.getter.take().or(accessor.getter);
previous.setter = previous.setter.take().or(accessor.setter);
false
} else {
self.record.push(MemberInit::PrivAccessor(accessor));
true
}
} else {
self.record.push(member);
true
}
}
// Renders the record into expressions for the constructor (`this`-based).
// Method/accessor installs ("normal") are emitted before field-value
// initializers ("value") so fields may reference private methods.
pub fn into_init(self) -> Vec<Box<Expr>> {
let mut normal_init = Vec::new();
let mut value_init = Vec::new();
for init in self.record {
match init {
MemberInit::PrivMethod(PrivMethod {
span,
name,
fn_name,
}) => {
// `private_as_properties` installs members via
// `Object.defineProperty`; otherwise the WeakSet/WeakMap
// based runtime helpers are used.
let (callee, args) = if self.c.private_as_properties {
(
obj_def_prop(),
vec![
ThisExpr { span: DUMMY_SP }.as_arg(),
name.as_arg(),
get_method_desc(Box::new(fn_name.into())).as_arg(),
],
)
} else {
(
helper!(class_private_method_init),
vec![ThisExpr { span: DUMMY_SP }.as_arg(), name.as_arg()],
)
};
normal_init.push(
CallExpr {
span,
callee,
args,
..Default::default()
}
.into(),
)
}
MemberInit::PrivProp(PrivProp { span, name, value }) => value_init.push(
CallExpr {
span,
callee: if self.c.private_as_properties {
obj_def_prop()
} else {
helper!(class_private_field_init)
},
args: vec![
ThisExpr { span: DUMMY_SP }.as_arg(),
name.as_arg(),
get_value_desc(value).as_arg(),
],
..Default::default()
}
.into(),
),
MemberInit::PrivAccessor(PrivAccessor {
span,
name,
getter,
setter,
}) => normal_init.push(
CallExpr {
span,
callee: if self.c.private_as_properties {
obj_def_prop()
} else {
helper!(class_private_field_init)
},
args: vec![
ThisExpr { span: DUMMY_SP }.as_arg(),
name.as_arg(),
get_accessor_desc(getter, setter).as_arg(),
],
..Default::default()
}
.into(),
),
MemberInit::PubProp(PubProp { span, name, value }) => value_init.push(
// `set_public_fields` (loose mode) uses a plain assignment;
// spec mode uses the `_defineProperty` helper.
if self.c.set_public_fields {
let this = ThisExpr { span: DUMMY_SP };
Expr::from(AssignExpr {
span,
left: match name {
PropName::Ident(id) => this.make_member(id).into(),
_ => this.computed_member(prop_name_to_expr(name)).into(),
},
op: op!("="),
right: value,
})
} else {
CallExpr {
span,
callee: helper!(define_property),
args: vec![
ThisExpr { span: DUMMY_SP }.as_arg(),
prop_name_to_expr_value(name).as_arg(),
value.as_arg(),
],
..Default::default()
}
.into()
}
.into(),
),
// Static blocks never run in the constructor.
MemberInit::StaticBlock(..) => unreachable!(),
}
}
normal_init.extend(value_init);
normal_init
}
// Renders the record into statements executed after the class declaration,
// targeting the class object itself (static members). Same normal-before-
// value ordering as `into_init`.
pub fn into_init_static(self, class_ident: Ident) -> Vec<Stmt> {
let mut normal_init = Vec::new();
let mut value_init = Vec::new();
for value in self.record {
match value {
MemberInit::PubProp(PubProp { span, name, value }) => value_init.push(
ExprStmt {
span,
expr: (if self.c.set_public_fields {
let class = class_ident.clone();
Expr::from(AssignExpr {
span,
left: match name {
PropName::Ident(id) => class.make_member(id).into(),
_ => class.computed_member(prop_name_to_expr(name)).into(),
},
op: op!("="),
right: value,
})
} else {
CallExpr {
span,
callee: helper!(define_property),
args: vec![
class_ident.clone().as_arg(),
prop_name_to_expr_value(name).as_arg(),
value.as_arg(),
],
..Default::default()
}
.into()
})
.into(),
}
.into(),
),
MemberInit::PrivProp(PrivProp { span, name, value }) => {
// Without `private_as_properties`, a static private field is
// lowered to a module-level `var #name-backing = { ... }`.
value_init.push(if self.c.private_as_properties {
ExprStmt {
span,
expr: CallExpr {
span,
callee: obj_def_prop(),
args: vec![
class_ident.clone().as_arg(),
name.as_arg(),
get_value_desc(value).as_arg(),
],
..Default::default()
}
.into(),
}
.into()
} else {
VarDecl {
span,
kind: VarDeclKind::Var,
decls: vec![VarDeclarator {
span,
name: name.into(),
init: Some(Expr::Object(get_value_desc(value)).into()),
definite: false,
}],
..Default::default()
}
.into()
})
}
MemberInit::PrivAccessor(PrivAccessor {
span,
name,
getter,
setter,
}) => normal_init.push(if self.c.private_as_properties {
ExprStmt {
span,
expr: CallExpr {
span,
callee: obj_def_prop(),
args: vec![
class_ident.clone().as_arg(),
name.as_arg(),
get_accessor_desc(getter, setter).as_arg(),
],
..Default::default()
}
.into(),
}
.into()
} else {
VarDecl {
span,
kind: VarDeclKind::Var,
decls: vec![VarDeclarator {
span,
name: name.into(),
init: Some(Expr::Object(get_accessor_desc(getter, setter)).into()),
definite: false,
}],
..Default::default()
}
.into()
}),
MemberInit::PrivMethod(PrivMethod {
span,
name,
fn_name,
}) => {
// Static private methods only need installation in
// `private_as_properties` mode; otherwise they are handled
// elsewhere and must never reach this point.
if self.c.private_as_properties {
normal_init.push(
ExprStmt {
span,
expr: CallExpr {
span,
callee: obj_def_prop(),
args: vec![
class_ident.clone().as_arg(),
name.as_arg(),
get_method_desc(Box::new(fn_name.into())).as_arg(),
],
..Default::default()
}
.into(),
}
.into(),
)
} else {
unreachable!()
}
}
MemberInit::StaticBlock(expr) => value_init.push(expr.into_stmt()),
}
}
normal_init.extend(value_init);
normal_init
}
}
// Builds the property descriptor `{ writable: true, value: <value> }`
// passed to `Object.defineProperty` / the private-field-init helper.
fn get_value_desc(value: Box<Expr>) -> ObjectLit {
ObjectLit {
span: DUMMY_SP,
props: vec![
// writable: true
PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
key: PropName::Ident(quote_ident!("writable")),
value: true.into(),
}))),
// value: value,
PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
key: PropName::Ident(quote_ident!("value")),
value,
}))),
],
}
}
// Builds the descriptor `{ get: <getter|undefined>, set: <setter|undefined> }`
// for a private accessor pair; a missing half becomes `void 0`.
fn get_accessor_desc(getter: Option<Ident>, setter: Option<Ident>) -> ObjectLit {
ObjectLit {
span: DUMMY_SP,
props: vec![
PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
key: PropName::Ident(quote_ident!("get")),
value: getter
.map(|id| Box::new(id.into()))
.unwrap_or_else(|| Expr::undefined(DUMMY_SP)),
}))),
PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
key: PropName::Ident(quote_ident!("set")),
value: setter
.map(|id| Box::new(id.into()))
.unwrap_or_else(|| Expr::undefined(DUMMY_SP)),
}))),
],
}
}
// Builds the descriptor `{ value: <fn> }` used to install a private method.
fn get_method_desc(value: Box<Expr>) -> ObjectLit {
ObjectLit {
span: DUMMY_SP,
props: vec![
// value: value,
PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp {
key: PropName::Ident(quote_ident!("value")),
value,
}))),
],
}
}
/// Builds the callee `Object.defineProperty`, used when private members are
/// compiled as ordinary properties (`private_as_properties` mode).
fn obj_def_prop() -> Callee {
    let object = quote_ident!("Object");
    let define_property = quote_ident!("defineProperty");
    object.make_member(define_property).as_callee()
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/class_properties/mod.rs | Rust | use rustc_hash::FxHashMap;
use swc_common::{
errors::HANDLER, source_map::PURE_SP, util::take::Take, Mark, Span, Spanned, SyntaxContext,
DUMMY_SP,
};
use swc_ecma_ast::*;
use swc_ecma_transforms_base::{helper, perf::Check};
use swc_ecma_transforms_classes::super_field::SuperFieldAccessFolder;
use swc_ecma_transforms_macros::fast_path;
use swc_ecma_utils::{
alias_ident_for, alias_if_required, constructor::inject_after_super,
default_constructor_with_span, is_literal, prepend_stmt, private_ident, quote_ident,
replace_ident, ExprFactory, ModuleItemLike, StmtLike,
};
use swc_ecma_visit::{
noop_visit_mut_type, noop_visit_type, visit_mut_pass, Visit, VisitMut, VisitMutWith, VisitWith,
};
use swc_trace_macro::swc_trace;
use self::{
class_name_tdz::ClassNameTdzFolder,
member_init::{MemberInit, MemberInitRecord, PrivAccessor, PrivMethod, PrivProp, PubProp},
private_field::{
dup_private_method, visit_private_in_expr, BrandCheckHandler, Private,
PrivateAccessVisitor, PrivateKind, PrivateRecord,
},
this_in_static::{NewTargetInProp, ThisInStaticFolder},
used_name::UsedNameCollector,
};
mod class_name_tdz;
mod member_init;
mod private_field;
mod this_in_static;
mod used_name;
///
///
///
///
/// # Impl note
///
/// We use custom helper to handle export default class
// Entry point: lowers ES2022 class fields and private members to
// constructor assignments plus runtime helper calls.
// `unresolved_mark` marks unresolved (global) identifiers, as produced by
// the resolver pass that is expected to run before this one.
pub fn class_properties(config: Config, unresolved_mark: Mark) -> impl Pass {
visit_mut_pass(ClassProperties {
c: config,
private: PrivateRecord::new(),
extra: ClassExtra::default(),
unresolved_mark,
})
}
#[derive(Debug, Default, Clone, Copy)]
pub struct Config {
/// Compile private members as ordinary properties instead of
/// WeakMap/WeakSet-backed helpers (loose mode).
pub private_as_properties: bool,
/// Use plain `this.x = v` assignments instead of `_defineProperty`.
pub set_public_fields: bool,
/// Assume the superclass reference never changes at runtime.
pub constant_super: bool,
/// Assume `document.all` does not exist (looser null checks).
pub no_document_all: bool,
/// Assume getters are pure, permitting repeated property reads.
pub pure_getter: bool,
}
// Visitor state for one traversal; `private` tracks the stack of private
// member tables for nested classes, `extra` buffers hoisted output.
struct ClassProperties {
extra: ClassExtra,
c: Config,
private: PrivateRecord,
unresolved_mark: Mark,
}
// Declarators and statements hoisted out of a class while lowering it;
// flushed around the class itself by `prepend_with` / `merge_with`.
#[derive(Default)]
struct ClassExtra {
lets: Vec<VarDeclarator>,
vars: Vec<VarDeclarator>,
stmts: Vec<Stmt>,
}
#[swc_trace]
impl ClassExtra {
// Emits the buffered output around an existing statement list:
// declarations are prepended (vars first, then lets, so the `let` block
// ends up on top), trailing statements are appended.
fn prepend_with<T: StmtLike>(self, stmts: &mut Vec<T>) {
if !self.vars.is_empty() {
prepend_stmt(
stmts,
T::from(Stmt::from(VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
decls: self.vars,
..Default::default()
})),
)
}
if !self.lets.is_empty() {
prepend_stmt(
stmts,
T::from(Stmt::from(VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Let,
decls: self.lets,
..Default::default()
})),
)
}
stmts.extend(self.stmts.into_iter().map(|stmt| stmt.into()))
}
// Appends the buffered output around the lowered class item itself:
// declarations, then the class, then the trailing statements (e.g. static
// initializers that must run after the class exists).
fn merge_with<T: StmtLike>(self, stmts: &mut Vec<T>, class: T) {
if !self.vars.is_empty() {
stmts.push(T::from(Stmt::from(VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
decls: self.vars,
..Default::default()
})))
}
if !self.lets.is_empty() {
stmts.push(T::from(
VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Let,
decls: self.lets,
..Default::default()
}
.into(),
));
}
stmts.push(class);
stmts.extend(self.stmts.into_iter().map(|stmt| stmt.into()))
}
}
impl Take for ClassExtra {
// `Take` lets `self.extra.take()` move the buffers out, leaving this
// empty placeholder behind.
fn dummy() -> Self {
Self::default()
}
}
#[swc_trace]
#[fast_path(ShouldWork)]
impl VisitMut for ClassProperties {
noop_visit_mut_type!(fail);
fn visit_mut_module_items(&mut self, n: &mut Vec<ModuleItem>) {
self.visit_mut_stmt_like(n);
self.extra.take().prepend_with(n)
}
// Inner statement lists get their own `extra` scope so hoisted output
// lands in the closest enclosing block.
fn visit_mut_stmts(&mut self, n: &mut Vec<Stmt>) {
let old = self.extra.take();
self.visit_mut_stmt_like(n);
self.extra.take().prepend_with(n);
self.extra = old;
}
// Arrow with a bare class expression body: promote the body to a block so
// the hoisted declarations have somewhere to live, then return the class.
fn visit_mut_block_stmt_or_expr(&mut self, body: &mut BlockStmtOrExpr) {
match body {
BlockStmtOrExpr::Expr(expr) if expr.is_class() => {
let ClassExpr { ident, class } = expr.take().class().unwrap();
let mut stmts = Vec::new();
let ident = ident.unwrap_or_else(|| private_ident!("_class"));
let (decl, extra) = self.visit_mut_class_as_decl(ident.clone(), class);
extra.merge_with(&mut stmts, decl.into());
stmts.push(
ReturnStmt {
span: DUMMY_SP,
arg: Some(ident.into()),
}
.into(),
);
*body = BlockStmtOrExpr::BlockStmt(BlockStmt {
span: DUMMY_SP,
stmts,
..Default::default()
});
}
_ => body.visit_mut_children_with(self),
};
}
// Class expressions: lower as a declaration, then flatten every hoisted
// declaration/statement into a comma (sequence) expression so the result
// can still be used in expression position.
fn visit_mut_expr(&mut self, expr: &mut Expr) {
if let Expr::Class(ClassExpr {
ident: orig_ident,
class,
}) = expr
{
let ident = private_ident!(orig_ident
.clone()
.map(|id| format!("_{}", id.sym))
.unwrap_or_else(|| "_class".into()));
let (decl, ClassExtra { lets, vars, stmts }) =
self.visit_mut_class_as_decl(ident.clone(), class.take());
let class = ClassExpr {
ident: orig_ident.clone(),
class: decl.class,
}
.into();
// Nothing was hoisted — keep the plain class expression.
if vars.is_empty() && lets.is_empty() && stmts.is_empty() {
*expr = class;
return;
}
let mut exprs = Vec::new();
// Declarators keep their declaration hoisted to the enclosing
// scope while their initializer becomes an assignment in the
// sequence, preserving evaluation order.
for mut var in vars {
let init = var.init.take();
if let Some(init) = init {
exprs.push(
AssignExpr {
span: var.span,
op: op!("="),
left: var.name.clone().try_into().unwrap(),
right: init,
}
.into(),
)
}
self.extra.vars.push(var);
}
for mut var in lets {
let init = var.init.take();
if let Some(init) = init {
exprs.push(
AssignExpr {
span: var.span,
op: op!("="),
left: var.name.clone().try_into().unwrap(),
right: init,
}
.into(),
)
}
self.extra.lets.push(var);
}
let mut extra_value = false;
// If trailing statements (static inits) exist, the class value must
// be captured in a temporary so it can be re-yielded at the end of
// the sequence after those statements run.
if !stmts.is_empty() {
extra_value = true;
self.extra.vars.push(VarDeclarator {
span: DUMMY_SP,
name: ident.clone().into(),
init: None,
definite: false,
});
exprs.push(
AssignExpr {
span: DUMMY_SP,
left: ident.clone().into(),
op: op!("="),
right: class.into(),
}
.into(),
);
} else {
exprs.push(class.into());
}
for mut stmt in stmts {
// Inside the hoisted statements, references to the original
// class name must point at the temporary instead.
if let Some(orig_ident) = orig_ident {
replace_ident(&mut stmt, orig_ident.clone().into(), &ident);
}
match stmt {
Stmt::Expr(e) => exprs.push(e.expr),
Stmt::Decl(Decl::Var(v)) => {
for mut decl in v.decls {
let init = decl.init.take();
if let Some(init) = init {
exprs.push(
AssignExpr {
span: decl.span,
op: op!("="),
left: decl.name.clone().try_into().unwrap(),
right: init,
}
.into(),
)
}
self.extra.vars.push(decl)
}
}
_ => self.extra.stmts.push(stmt),
}
}
if extra_value {
exprs.push(Box::new(ident.into()))
}
*expr = SeqExpr {
span: DUMMY_SP,
exprs,
}
.into()
} else {
expr.visit_mut_children_with(self);
};
}
}
#[swc_trace]
impl ClassProperties {
// Walks a statement/module-item list and lowers every class declaration it
// finds, splicing each class's hoisted output (`ClassExtra`) around the
// class in place. Handles the export wrappers (`export class`,
// `export default class`) specially so the export survives the rewrite.
fn visit_mut_stmt_like<T>(&mut self, stmts: &mut Vec<T>)
where
T: StmtLike + ModuleItemLike + VisitMutWith<Self> + From<Stmt>,
{
let mut buf = Vec::with_capacity(stmts.len());
for stmt in stmts.drain(..) {
match T::try_into_stmt(stmt) {
Err(node) => match node.try_into_module_decl() {
Ok(mut decl) => {
match decl {
// `export default class` becomes a plain class decl
// plus `export { <ident> as default }`.
ModuleDecl::ExportDefaultDecl(ExportDefaultDecl {
span,
decl: DefaultDecl::Class(ClassExpr { ident, class }),
..
}) => {
let ident = ident.unwrap_or_else(|| private_ident!("_class"));
let (decl, extra) =
self.visit_mut_class_as_decl(ident.clone(), class);
extra.merge_with(&mut buf, T::from(decl.into()));
buf.push(
match T::try_from_module_decl(
NamedExport {
span,
specifiers: vec![ExportNamedSpecifier {
span: DUMMY_SP,
orig: ModuleExportName::Ident(ident),
exported: Some(ModuleExportName::Ident(
private_ident!("default"),
)),
is_type_only: false,
}
.into()],
src: None,
type_only: false,
with: None,
}
.into(),
) {
Ok(t) => t,
Err(..) => unreachable!(),
},
);
}
// `export class C` keeps its export wrapper around the
// lowered declaration.
ModuleDecl::ExportDecl(ExportDecl {
span,
decl:
Decl::Class(ClassDecl {
ident,
declare: false,
class,
}),
..
}) => {
let (decl, extra) = self.visit_mut_class_as_decl(ident, class);
extra.merge_with(
&mut buf,
match T::try_from_module_decl(
ExportDecl {
span,
decl: decl.into(),
}
.into(),
) {
Ok(t) => t,
Err(..) => unreachable!(),
},
)
}
// Any other module decl: recurse normally.
_ => {
decl.visit_mut_children_with(self);
buf.push(match T::try_from_module_decl(decl) {
Ok(t) => t,
Err(..) => unreachable!(),
})
}
};
}
Err(..) => unreachable!(),
},
Ok(mut stmt) => {
// Fold class
match stmt {
Stmt::Decl(Decl::Class(ClassDecl {
ident,
class,
declare: false,
})) => {
let (decl, extra) = self.visit_mut_class_as_decl(ident, class);
extra.merge_with(&mut buf, T::from(decl.into()))
}
_ => {
stmt.visit_mut_children_with(self);
buf.push(T::from(stmt))
}
}
}
}
}
*stmts = buf;
}
}
#[swc_trace]
impl ClassProperties {
    /// Lowers a single class.
    ///
    /// Responsibilities (in order):
    /// 1. Collect all private names declared by the class into a fresh
    ///    [`Private`] scope (with duplicate-name diagnostics) and push it on
    ///    `self.private`.
    /// 2. Visit nested children first so inner classes are lowered before
    ///    this one.
    /// 3. Rewrite `#x in obj` brand checks via [`BrandCheckHandler`].
    /// 4. Split class members: instance property/private-prop initializers go
    ///    into the constructor, static initializers into statements after the
    ///    class, private methods become sibling function declarations backed
    ///    by `WeakSet`/`WeakMap` (or loose property keys when
    ///    `private_as_properties` is set).
    ///
    /// Returns the rewritten [`ClassDecl`] plus the hoisted `var`s, `let`s
    /// and trailing statements ([`ClassExtra`]) the caller must splice in.
    fn visit_mut_class_as_decl(
        &mut self,
        class_ident: Ident,
        mut class: Box<Class>,
    ) -> (ClassDecl, ClassExtra) {
        // Create one mark per class; it scopes the synthesized `_name`
        // idents for this class's private members.
        let private = Private {
            mark: Mark::fresh(Mark::root()),
            class_name: class_ident.clone(),
            ident: {
                // Pre-scan the body to register every private name, emitting
                // an error for duplicates (a getter/setter pair is allowed).
                let mut private_map = FxHashMap::default();

                for member in class.body.iter() {
                    match member {
                        ClassMember::PrivateMethod(method) => {
                            if let Some(kind) = private_map.get_mut(&method.key.name) {
                                if dup_private_method(kind, method) {
                                    let error =
                                        format!("duplicate private name #{}.", method.key.name);

                                    HANDLER.with(|handler| {
                                        handler.struct_span_err(method.key.span, &error).emit()
                                    });
                                } else {
                                    // Complete the accessor pair.
                                    match method.kind {
                                        MethodKind::Getter => kind.has_getter = true,
                                        MethodKind::Setter => kind.has_setter = true,
                                        MethodKind::Method => unreachable!(),
                                    }
                                }
                            } else {
                                private_map.insert(
                                    method.key.name.clone(),
                                    PrivateKind {
                                        is_method: true,
                                        is_static: method.is_static,
                                        has_getter: method.kind == MethodKind::Getter,
                                        has_setter: method.kind == MethodKind::Setter,
                                    },
                                );
                            }
                        }
                        ClassMember::PrivateProp(prop) => {
                            if private_map.contains_key(&prop.key.name) {
                                let error = format!("duplicate private name #{}.", prop.key.name);

                                HANDLER.with(|handler| {
                                    handler.struct_span_err(prop.key.span, &error).emit()
                                });
                            } else {
                                private_map.insert(
                                    prop.key.name.clone(),
                                    PrivateKind {
                                        is_method: false,
                                        is_static: prop.is_static,
                                        has_getter: false,
                                        has_setter: false,
                                    },
                                );
                            };
                        }

                        _ => (),
                    };
                }

                private_map
            },
        };
        self.private.push(private);

        // we must collect outer class's private first
        class.visit_mut_children_with(self);

        let has_super = class.super_class.is_some();

        // Initializers injected into the constructor (instance members).
        let mut constructor_inits = MemberInitRecord::new(self.c);
        // Hoisted `var` declarations (weak collections, aliases, ...).
        let mut vars = Vec::new();
        // Hoisted `let` declarations (computed-key evaluation order).
        let mut lets = Vec::new();
        // Initializers emitted after the class (static members).
        let mut extra_inits = MemberInitRecord::new(self.c);
        // Private methods lowered to sibling function declarations.
        let mut private_method_fn_decls = Vec::new();
        // The surviving class members.
        let mut members = Vec::new();
        let mut constructor = None;
        let mut used_names = Vec::new();
        let mut used_key_names = Vec::new();

        // Alias for the super class expression, created lazily when a static
        // initializer needs to reference `super`.
        let mut super_ident = None;

        class.body.visit_mut_with(&mut BrandCheckHandler {
            private: &self.private,
        });

        // Computed method keys must be evaluated in source order relative to
        // field initializers; when any member forces extraction, method keys
        // are hoisted into `let` bindings too.
        let should_create_vars_for_method_names = class.body.iter().any(|m| match m {
            ClassMember::Constructor(_)
            | ClassMember::PrivateMethod(_)
            | ClassMember::TsIndexSignature(_)
            | ClassMember::Empty(_) => false,

            ClassMember::Method(m) => contains_super(&m.key),

            ClassMember::ClassProp(_)
            | ClassMember::AutoAccessor(_)
            | ClassMember::PrivateProp(_)
            | ClassMember::StaticBlock(_) => true,
        });

        for member in class.body {
            match member {
                ClassMember::Empty(..) | ClassMember::TsIndexSignature(..) => members.push(member),

                ClassMember::Method(method) => {
                    // we handle computed key here to preserve the execution order
                    let key = match method.key {
                        PropName::Computed(ComputedPropName {
                            span: c_span,
                            mut expr,
                        }) if should_create_vars_for_method_names && !is_literal(&*expr) => {
                            vars.extend(visit_private_in_expr(
                                &mut expr,
                                &self.private,
                                self.c,
                                self.unresolved_mark,
                            ));

                            expr.visit_mut_with(&mut ClassNameTdzFolder {
                                class_name: &class_ident,
                            });
                            let ident = alias_ident_for(&expr, "tmp");
                            // Handle computed property
                            lets.push(VarDeclarator {
                                span: DUMMY_SP,
                                name: ident.clone().into(),
                                init: Some(expr),
                                definite: false,
                            });
                            // We use computed because `classes` pass converts PropName::Ident to
                            // string.
                            PropName::Computed(ComputedPropName {
                                span: c_span,
                                expr: ident.into(),
                            })
                        }
                        _ => method.key,
                    };

                    members.push(ClassMember::Method(ClassMethod { key, ..method }))
                }

                ClassMember::ClassProp(mut prop) => {
                    let prop_span = prop.span();
                    prop.key.visit_mut_with(&mut ClassNameTdzFolder {
                        class_name: &class_ident,
                    });

                    if !prop.is_static {
                        prop.key.visit_with(&mut UsedNameCollector {
                            used_names: &mut used_key_names,
                        });

                        prop.value.visit_with(&mut UsedNameCollector {
                            used_names: &mut used_names,
                        });
                    }

                    // Hoist a non-literal computed key so it is evaluated at
                    // class-definition time, not at initialization time.
                    match &mut prop.key {
                        PropName::Computed(key) if !is_literal(&key.expr) => {
                            vars.extend(visit_private_in_expr(
                                &mut key.expr,
                                &self.private,
                                self.c,
                                self.unresolved_mark,
                            ));
                            let (ident, aliased) = if let Expr::Ident(i) = &*key.expr {
                                // Force an alias when the key ident collides with a
                                // name used by an initializer value.
                                if used_key_names.contains(&i.sym) {
                                    (alias_ident_for(&key.expr, "_ref"), true)
                                } else {
                                    alias_if_required(&key.expr, "_ref")
                                }
                            } else {
                                alias_if_required(&key.expr, "_ref")
                            };
                            if aliased {
                                // Handle computed property
                                lets.push(VarDeclarator {
                                    span: DUMMY_SP,
                                    name: ident.clone().into(),
                                    init: Some(key.expr.take()),
                                    definite: false,
                                });
                            }
                            *key.expr = ident.into();
                        }
                        _ => (),
                    };

                    // Missing initializer means `undefined`.
                    let mut value = prop.value.unwrap_or_else(|| Expr::undefined(prop_span));
                    value.visit_mut_with(&mut NewTargetInProp);

                    vars.extend(visit_private_in_expr(
                        &mut value,
                        &self.private,
                        self.c,
                        self.unresolved_mark,
                    ));

                    if prop.is_static {
                        // Static initializers may use `super`; alias the super
                        // class expression once so it stays reachable.
                        if let (Some(super_class), None) = (&mut class.super_class, &super_ident) {
                            let (ident, aliased) = alias_if_required(&*super_class, "_ref");
                            super_ident = Some(ident.clone());

                            if aliased {
                                vars.push(VarDeclarator {
                                    span: DUMMY_SP,
                                    name: ident.clone().into(),
                                    init: None,
                                    definite: false,
                                });

                                let span = super_class.span();

                                **super_class = AssignExpr {
                                    span,
                                    op: op!("="),
                                    left: ident.into(),
                                    right: super_class.take(),
                                }
                                .into()
                            }
                        }

                        value.visit_mut_with(&mut SuperFieldAccessFolder {
                            class_name: &class_ident,
                            constructor_this_mark: None,
                            is_static: true,
                            folding_constructor: false,
                            in_injected_define_property_call: false,
                            in_nested_scope: false,
                            this_alias_mark: None,
                            constant_super: self.c.constant_super,
                            super_class: &super_ident,
                            in_pat: false,
                        });
                        // `this` in a static initializer refers to the class itself.
                        value.visit_mut_with(&mut ThisInStaticFolder {
                            ident: class_ident.clone(),
                        });
                    }

                    let init = MemberInit::PubProp(PubProp {
                        span: prop_span,
                        name: prop.key,
                        value,
                    });

                    if prop.is_static {
                        extra_inits.push(init);
                    } else {
                        constructor_inits.push(init);
                    }
                }
                ClassMember::PrivateProp(mut prop) => {
                    let prop_span = prop.span();
                    let ident = Ident::new(
                        format!("_{}", prop.key.name).into(),
                        // We use `self.mark` for private variables.
                        prop.key.span,
                        SyntaxContext::empty().apply_mark(self.private.cur_mark()),
                    );

                    if let Some(value) = &mut prop.value {
                        value.visit_mut_with(&mut NewTargetInProp);

                        if prop.is_static {
                            value.visit_mut_with(&mut SuperFieldAccessFolder {
                                class_name: &class_ident,
                                constructor_this_mark: None,
                                is_static: true,
                                folding_constructor: false,
                                in_injected_define_property_call: false,
                                in_nested_scope: false,
                                this_alias_mark: None,
                                constant_super: self.c.constant_super,
                                super_class: &super_ident,
                                in_pat: false,
                            });
                        }

                        vars.extend(visit_private_in_expr(
                            &mut *value,
                            &self.private,
                            self.c,
                            self.unresolved_mark,
                        ));
                    }

                    prop.value.visit_with(&mut UsedNameCollector {
                        used_names: &mut used_names,
                    });

                    if prop.is_static {
                        prop.value.visit_mut_with(&mut ThisInStaticFolder {
                            ident: class_ident.clone(),
                        });
                    }

                    let value = prop.value.unwrap_or_else(|| Expr::undefined(prop_span));

                    // Placeholder keys mark lowered static blocks; emit the
                    // value as a bare static statement.
                    if prop.is_static && prop.key.span.is_placeholder() {
                        let init = MemberInit::StaticBlock(value);
                        extra_inits.push(init);
                        continue;
                    }

                    let init = MemberInit::PrivProp(PrivProp {
                        span: prop_span,
                        name: ident.clone(),
                        value,
                    });

                    let span = PURE_SP;

                    if self.c.private_as_properties {
                        // Loose mode: back the private prop by a unique property key.
                        vars.push(VarDeclarator {
                            span: DUMMY_SP,
                            definite: false,
                            name: ident.clone().into(),
                            init: Some(
                                CallExpr {
                                    span,
                                    callee: helper!(class_private_field_loose_key),
                                    args: vec![ident.sym.as_arg()],
                                    ..Default::default()
                                }
                                .into(),
                            ),
                        });
                    } else if !prop.is_static {
                        // Spec mode: instance private props live in a WeakMap.
                        vars.push(VarDeclarator {
                            span: DUMMY_SP,
                            definite: false,
                            name: ident.into(),
                            init: Some(
                                NewExpr {
                                    span,
                                    callee: Box::new(quote_ident!("WeakMap").into()),
                                    args: Some(Default::default()),
                                    ..Default::default()
                                }
                                .into(),
                            ),
                        });
                    };

                    if prop.is_static {
                        extra_inits.push(init);
                    } else {
                        constructor_inits.push(init);
                    };
                }

                ClassMember::Constructor(c) => {
                    constructor = Some(c);
                }

                ClassMember::PrivateMethod(mut method) => {
                    let is_static = method.is_static;
                    let prop_span = method.span;
                    // Name of the sibling function declaration the method
                    // body is moved into.
                    let fn_name = Ident::new(
                        match method.kind {
                            MethodKind::Getter => format!("get_{}", method.key.name).into(),
                            MethodKind::Setter => format!("set_{}", method.key.name).into(),
                            MethodKind::Method => {
                                // Avoid emitting a reserved word as a function name.
                                if method.key.name.is_reserved_in_any() {
                                    format!("__{}", method.key.name).into()
                                } else {
                                    method.key.name.clone()
                                }
                            }
                        },
                        method.span,
                        SyntaxContext::empty().apply_mark(self.private.cur_mark()),
                    );

                    // Weak collection (or loose key) backing the brand check.
                    let weak_coll_var = Ident::new(
                        format!("_{}", method.key.name).into(),
                        // We use `self.mark` for private variables.
                        method.key.span,
                        SyntaxContext::empty().apply_mark(self.private.cur_mark()),
                    );
                    method.function.visit_with(&mut UsedNameCollector {
                        used_names: &mut used_names,
                    });

                    // Register the init record; `Some(...)` means a backing
                    // variable still has to be declared below.
                    let extra_collect = match (method.kind, is_static) {
                        (MethodKind::Getter | MethodKind::Setter, false) => {
                            let is_getter = method.kind == MethodKind::Getter;

                            let inserted =
                                constructor_inits.push(MemberInit::PrivAccessor(PrivAccessor {
                                    span: prop_span,
                                    name: weak_coll_var.clone(),
                                    getter: if is_getter {
                                        Some(fn_name.clone())
                                    } else {
                                        None
                                    },
                                    setter: if !is_getter {
                                        Some(fn_name.clone())
                                    } else {
                                        None
                                    },
                                }));

                            if inserted {
                                Some(quote_ident!("WeakMap"))
                            } else {
                                None
                            }
                        }
                        (MethodKind::Getter | MethodKind::Setter, true) => {
                            let is_getter = method.kind == MethodKind::Getter;

                            let inserted =
                                extra_inits.push(MemberInit::PrivAccessor(PrivAccessor {
                                    span: prop_span,
                                    name: weak_coll_var.clone(),
                                    getter: if is_getter {
                                        Some(fn_name.clone())
                                    } else {
                                        None
                                    },
                                    setter: if !is_getter {
                                        Some(fn_name.clone())
                                    } else {
                                        None
                                    },
                                }));

                            if inserted && self.c.private_as_properties {
                                Some(IdentName::default())
                            } else {
                                None
                            }
                        }
                        (MethodKind::Method, false) => {
                            constructor_inits.push(MemberInit::PrivMethod(PrivMethod {
                                span: prop_span,
                                name: weak_coll_var.clone(),
                                fn_name: if self.c.private_as_properties {
                                    fn_name.clone()
                                } else {
                                    Ident::dummy()
                                },
                            }));

                            Some(quote_ident!("WeakSet"))
                        }
                        (MethodKind::Method, true) => {
                            if self.c.private_as_properties {
                                extra_inits.push(MemberInit::PrivMethod(PrivMethod {
                                    span: prop_span,
                                    name: weak_coll_var.clone(),
                                    fn_name: fn_name.clone(),
                                }));
                                Some(Default::default())
                            } else {
                                None
                            }
                        }
                    };

                    if let Some(extra) = extra_collect {
                        let span = PURE_SP;
                        vars.push(VarDeclarator {
                            span: DUMMY_SP,
                            definite: false,
                            name: weak_coll_var.clone().into(),
                            init: Some(Box::new(if self.c.private_as_properties {
                                CallExpr {
                                    span,
                                    callee: helper!(class_private_field_loose_key),
                                    args: vec![weak_coll_var.sym.as_arg()],
                                    ..Default::default()
                                }
                                .into()
                            } else {
                                NewExpr {
                                    span,
                                    callee: extra.into(),
                                    args: Some(Default::default()),
                                    ..Default::default()
                                }
                                .into()
                            })),
                        })
                    };

                    method.function.visit_mut_with(&mut SuperFieldAccessFolder {
                        class_name: &class_ident,
                        constructor_this_mark: None,
                        is_static,
                        folding_constructor: false,
                        in_injected_define_property_call: false,
                        in_nested_scope: false,
                        this_alias_mark: None,
                        constant_super: self.c.constant_super,
                        super_class: &super_ident,
                        in_pat: false,
                    });

                    private_method_fn_decls.push(
                        FnDecl {
                            ident: fn_name,
                            function: method.function,
                            declare: false,
                        }
                        .into(),
                    )
                }

                ClassMember::StaticBlock(..) => {
                    unreachable!("static_blocks pass should remove this")
                }
                ClassMember::AutoAccessor(..) => {
                    unreachable!("auto_accessor pass should remove this")
                }
            }
        }

        let constructor =
            self.process_constructor(class.span, constructor, has_super, constructor_inits);
        if let Some(c) = constructor {
            members.push(ClassMember::Constructor(c));
        }

        // Rewrite private accesses inside the extracted method bodies.
        private_method_fn_decls.visit_mut_with(&mut PrivateAccessVisitor {
            private: &self.private,
            vars: Vec::new(),
            private_access_type: Default::default(),
            c: self.c,
            unresolved_mark: self.unresolved_mark,
        });

        let mut extra_stmts = extra_inits.into_init_static(class_ident.clone());

        extra_stmts.extend(private_method_fn_decls);

        // ... and inside the remaining class members.
        members.visit_mut_with(&mut PrivateAccessVisitor {
            private: &self.private,
            vars: Vec::new(),
            private_access_type: Default::default(),
            c: self.c,
            unresolved_mark: self.unresolved_mark,
        });

        self.private.pop();

        (
            ClassDecl {
                ident: class_ident,
                declare: false,
                class: Class {
                    body: members,
                    ..*class
                }
                .into(),
            },
            ClassExtra {
                vars,
                lets,
                stmts: extra_stmts,
            },
        )
    }

    /// # Legacy support.
    ///
    /// ## Why is this required?
    ///
    /// Hygiene data of
    ///
    ///```ts
    /// class A {
    ///     b = this.a;
    ///     constructor(a){
    ///         this.a = a;
    ///     }
    /// }
    /// ```
    ///
    /// is
    ///
    ///```ts
    /// class A0 {
    ///     constructor(a1){
    ///         this.a0 = a0;
    ///         this.b0 = this.a0;
    ///     }
    /// }
    /// ```
    ///
    /// which is valid only for es2020 properties.
    ///
    /// Legacy proposal which is used by typescript requires different hygiene.
    #[allow(clippy::vec_box)]
    fn process_constructor(
        &mut self,
        class_span: Span,
        constructor: Option<Constructor>,
        has_super: bool,
        constructor_exprs: MemberInitRecord,
    ) -> Option<Constructor> {
        // Synthesize a default constructor only when there are initializers
        // that need a home.
        let constructor = constructor.or_else(|| {
            if constructor_exprs.record.is_empty() {
                None
            } else {
                Some(default_constructor_with_span(has_super, class_span))
            }
        });

        if let Some(mut c) = constructor {
            let constructor_exprs = constructor_exprs.into_init();

            // Prepend properties
            inject_after_super(&mut c, constructor_exprs);
            Some(c)
        } else {
            None
        }
    }
}
/// Quick pre-pass probe: flips `found` when the AST contains any class
/// member kind this transform would need to rewrite, so the pass can be
/// skipped entirely otherwise.
#[derive(Default)]
struct ShouldWork {
    // Set to `true` on the first relevant node; never reset.
    found: bool,
}

#[swc_trace]
impl Visit for ShouldWork {
    noop_visit_type!(fail);

    fn visit_class_method(&mut self, _: &ClassMethod) {
        self.found = true;
    }

    fn visit_class_prop(&mut self, _: &ClassProp) {
        self.found = true;
    }

    fn visit_private_prop(&mut self, _: &PrivateProp) {
        self.found = true;
    }

    fn visit_private_method(&mut self, _: &PrivateMethod) {
        self.found = true;
    }

    fn visit_constructor(&mut self, _: &Constructor) {
        self.found = true;
    }
}

impl Check for ShouldWork {
    fn should_handle(&self) -> bool {
        self.found
    }
}
// TODO: remove — candidate for replacement by a shared `contains_super` util.
/// Detects a `super` reference in the *immediate* scope: nested functions
/// and constructors are deliberately not entered, because they capture their
/// own `super` binding.
struct SuperVisitor {
    // Set to `true` when a `super` token is seen.
    found: bool,
}

impl Visit for SuperVisitor {
    noop_visit_type!(fail);

    /// Don't recurse into constructor
    fn visit_constructor(&mut self, _: &Constructor) {}

    /// Don't recurse into fn
    fn visit_fn_decl(&mut self, _: &FnDecl) {}

    /// Don't recurse into fn
    fn visit_fn_expr(&mut self, _: &FnExpr) {}

    /// Don't recurse into fn
    fn visit_function(&mut self, _: &Function) {}

    /// Don't recurse into fn (but the key is still in the outer scope)
    fn visit_getter_prop(&mut self, n: &GetterProp) {
        n.key.visit_with(self);
    }

    /// Don't recurse into fn (key and params stay in the outer scope)
    fn visit_method_prop(&mut self, n: &MethodProp) {
        n.key.visit_with(self);
        n.function.visit_with(self);
    }

    /// Don't recurse into fn (key and params stay in the outer scope)
    fn visit_setter_prop(&mut self, n: &SetterProp) {
        n.key.visit_with(self);
        n.param.visit_with(self);
    }

    fn visit_super(&mut self, _: &Super) {
        self.found = true;
    }
}
/// Returns `true` when `body` references `super` directly (per
/// `SuperVisitor`'s rules: nested functions/constructors are not entered).
fn contains_super<N>(body: &N) -> bool
where
    N: VisitWith<SuperVisitor>,
{
    let mut checker = SuperVisitor { found: false };
    body.visit_with(&mut checker);

    checker.found
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/class_properties/private_field.rs | Rust | use std::iter;
use rustc_hash::FxHashMap;
use swc_atoms::Atom;
use swc_common::{errors::HANDLER, util::take::Take, Mark, Span, Spanned, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_transforms_base::helper;
use swc_ecma_utils::{alias_ident_for, alias_if_required, prepend_stmt, quote_ident, ExprFactory};
use swc_ecma_visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
use super::Config;
use crate::optional_chaining_impl::optional_chaining_impl;
/// Private-name scope for a single class: the hygiene mark used for the
/// synthesized `_name` idents, the class name, and the kind of every private
/// member declared by the class.
pub(super) struct Private {
    pub mark: Mark,
    pub class_name: Ident,
    pub ident: FxHashMap<Atom, PrivateKind>,
}

/// Stack of [`Private`] scopes; the innermost (current) class is last.
pub(super) struct PrivateRecord(Vec<Private>);
#[swc_trace]
impl PrivateRecord {
pub fn new() -> Self {
PrivateRecord(Vec::new())
}
pub fn curr_class(&self) -> &Ident {
&self.0.last().unwrap().class_name
}
pub fn cur_mark(&self) -> Mark {
self.0.last().unwrap().mark
}
pub fn push(&mut self, p: Private) {
self.0.push(p)
}
pub fn pop(&mut self) {
self.0.pop();
}
pub fn get(&self, span: Span, name: &Atom) -> (Mark, PrivateKind, &Ident) {
for p in self.0.iter().rev() {
if let Some(kind) = p.ident.get(name) {
return (p.mark, *kind, &p.class_name);
}
}
let error = format!("private name #{} is not defined.", name);
HANDLER.with(|handler| handler.struct_span_err(span, &error).emit());
(Mark::root(), PrivateKind::default(), &self.0[0].class_name)
}
}
/// What kind of private member a name refers to.
///
/// For accessor pairs, `is_method` is `true` and `has_getter`/`has_setter`
/// record which halves exist.
#[derive(Copy, Clone, PartialEq, Default, Eq)]
pub(super) struct PrivateKind {
    pub is_static: bool,
    pub is_method: bool,
    pub has_getter: bool,
    pub has_setter: bool,
}
impl PrivateKind {
    /// A private method/accessor with no setter: assignments must error.
    fn is_readonly(&self) -> bool {
        !self.has_setter && self.is_method
    }

    /// Only a setter exists (a plain private method can still be read).
    fn is_writeonly(&self) -> bool {
        self.is_method && self.has_setter && !self.has_getter
    }

    /// A plain private method: neither accessor flag is set.
    fn is_method(&self) -> bool {
        self.is_method && !(self.has_getter || self.has_setter)
    }
}
/// Rewrites `#name in obj` brand-check expressions (see the `VisitMut`
/// impl below) using the private-name scopes in `private`.
pub(super) struct BrandCheckHandler<'a> {
    pub private: &'a PrivateRecord,
}
#[swc_trace]
impl VisitMut for BrandCheckHandler<'_> {
    noop_visit_mut_type!(fail);

    /// Rewrites `#name in obj`:
    /// - `obj` is the current class itself  -> `Class === Class`
    /// - `#name` is static                  -> `obj === Class`
    /// - otherwise (instance member)        -> `_name.has(obj)`
    fn visit_mut_expr(&mut self, e: &mut Expr) {
        e.visit_mut_children_with(self);

        match e {
            Expr::Bin(BinExpr {
                span,
                op: op!("in"),
                left,
                right,
            }) if left.is_private_name() => {
                let n = left.as_private_name().unwrap();

                if let Expr::Ident(right) = &**right {
                    let curr_class = self.private.curr_class();

                    // `#x in CurrentClass` is trivially a self-comparison.
                    if curr_class.sym == right.sym && curr_class.ctxt == right.ctxt {
                        *e = BinExpr {
                            span: *span,
                            op: op!("==="),
                            left: curr_class.clone().into(),
                            right: right.clone().into(),
                        }
                        .into();
                        return;
                    }
                }

                let (mark, kind, class_name) = self.private.get(n.span, &n.name);

                // Unresolved private name: an error was already emitted.
                if mark == Mark::root() {
                    return;
                }

                if kind.is_static {
                    *e = BinExpr {
                        span: *span,
                        op: op!("==="),
                        left: right.take(),
                        right: class_name.clone().into(),
                    }
                    .into();
                    return;
                }

                // Instance member: test membership in the backing weak
                // collection `_name`.
                let weak_coll_ident = Ident::new(
                    format!("_{}", n.name).into(),
                    n.span,
                    SyntaxContext::empty().apply_mark(mark),
                );

                *e = CallExpr {
                    span: *span,
                    callee: weak_coll_ident.make_member(quote_ident!("has")).as_callee(),
                    args: vec![right.take().as_arg()],
                    ..Default::default()
                }
                .into();
            }

            _ => {}
        }
    }
}
/// How a private member is being accessed; selects which runtime helper the
/// visitor emits for the access.
///
/// The manual `impl Default` boilerplate is replaced by the standard
/// `#[derive(Default)]` + `#[default]` attribute (stable since Rust 1.62);
/// the default remains [`PrivateAccessType::Get`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub(super) enum PrivateAccessType {
    /// Plain read access (the default).
    #[default]
    Get,
    /// Write through a destructuring pattern.
    DestructureSet,
    /// `++` / `--` update expression.
    Update,
}
/// Rewrites accesses to private members (`obj.#x`, `obj.#x = v`, `obj.#m()`,
/// `#x` in patterns, ...) into the appropriate runtime helper calls.
pub(super) struct PrivateAccessVisitor<'a> {
    // `var` declarations created for temporaries; hoisted into the enclosing
    // function/constructor by the `take_vars!` overrides.
    pub vars: Vec<VarDeclarator>,
    // Private-name scopes of the enclosing classes.
    pub private: &'a PrivateRecord,
    // Read / destructure-write / update context for the node being visited.
    pub private_access_type: PrivateAccessType,
    pub c: Config,
    pub unresolved_mark: Mark,
}
// Generates a visitor override that scopes `self.vars` to the node: any
// temporaries collected while visiting the body are prepended to it as a
// single `var` declaration, and the outer `vars` list is restored afterward.
macro_rules! take_vars {
    ($name:ident, $T:tt) => {
        fn $name(&mut self, f: &mut $T) {
            let old_var = self.vars.take();

            // Nothing to hoist into a body-less node (e.g. declarations).
            if f.body.is_none() {
                return;
            }

            f.visit_mut_children_with(self);

            if !self.vars.is_empty() {
                prepend_stmt(
                    &mut f.body.as_mut().unwrap().stmts,
                    VarDecl {
                        span: DUMMY_SP,
                        kind: VarDeclKind::Var,
                        decls: self.vars.take(),
                        ..Default::default()
                    }
                    .into(),
                )
            }

            self.vars = old_var;
        }
    };
}
// Note: `super.#name` is invalid syntax, so `super` never appears as the
// object of a private access here.
#[swc_trace]
impl VisitMut for PrivateAccessVisitor<'_> {
    noop_visit_mut_type!(fail);

    take_vars!(visit_mut_function, Function);

    take_vars!(visit_mut_constructor, Constructor);

    fn visit_mut_expr(&mut self, e: &mut Expr) {
        // Optional chains that end in a private access (`a?.#x`, `a?.#m()`)
        // are desugared first so the remaining logic only ever sees plain
        // member expressions.
        if let Expr::OptChain(opt) = e {
            let is_private_access = match &*opt.base {
                OptChainBase::Member(MemberExpr {
                    prop: MemberProp::PrivateName(..),
                    ..
                }) => true,
                OptChainBase::Call(OptCall { callee, .. }) => matches!(
                    &**callee,
                    Expr::Member(MemberExpr {
                        prop: MemberProp::PrivateName(..),
                        ..
                    })
                ),
                _ => false,
            };

            if is_private_access {
                let mut v = optional_chaining_impl(
                    crate::optional_chaining_impl::Config {
                        no_document_all: self.c.no_document_all,
                        pure_getter: self.c.pure_getter,
                    },
                    self.unresolved_mark,
                );
                e.visit_mut_with(&mut v);
                assert!(!e.is_opt_chain(), "optional chaining should be removed");
                self.vars.extend(v.take_vars());
            }
        }

        // Loose mode: every private access becomes a property lookup via the
        // `classPrivateFieldLooseBase` helper; no further classification needed.
        if self.c.private_as_properties {
            if let Expr::Member(MemberExpr {
                span,
                obj,
                prop: MemberProp::PrivateName(n),
            }) = e
            {
                obj.visit_mut_children_with(self);

                let (mark, _, _) = self.private.get(n.span, &n.name);
                let ident = Ident::new(
                    format!("_{}", n.name).into(),
                    n.span,
                    SyntaxContext::empty().apply_mark(mark),
                );

                *e = CallExpr {
                    callee: helper!(class_private_field_loose_base),
                    span: *span,
                    args: vec![obj.take().as_arg(), ident.clone().as_arg()],
                    ..Default::default()
                }
                .computed_member(ident)
                .into();
            } else {
                e.visit_mut_children_with(self)
            }

            return;
        }

        match e {
            // `obj.#x++` / `--obj.#x`: flag the inner member access as an update.
            Expr::Update(UpdateExpr { arg, .. }) if arg.is_member() => {
                let old_access_type = self.private_access_type;
                self.private_access_type = PrivateAccessType::Update;
                arg.visit_mut_with(self);
                self.private_access_type = old_access_type;
            }

            // `obj.#x = v` and compound assignments (`obj.#x += v`, ...).
            Expr::Assign(AssignExpr {
                span,
                left,
                op,
                right,
            }) if left.as_simple().is_some() && left.as_simple().unwrap().is_member() => {
                let mut left: MemberExpr = left.take().expect_simple().expect_member();
                left.visit_mut_with(self);
                right.visit_mut_with(self);

                let n = match &left.prop {
                    MemberProp::PrivateName(n) => n.clone(),
                    // Not a private member after all: re-assemble unchanged.
                    _ => {
                        *e = AssignExpr {
                            span: *span,
                            left: left.into(),
                            op: *op,
                            right: right.take(),
                        }
                        .into();
                        return;
                    }
                };

                let obj = left.obj.clone();

                let (mark, kind, class_name) = self.private.get(n.span, &n.name);
                if mark == Mark::root() {
                    return;
                }

                let ident = Ident::new(
                    format!("_{}", n.name).into(),
                    n.span,
                    SyntaxContext::empty().apply_mark(mark),
                );

                let var = alias_ident_for(&obj, "_ref");

                // For compound assignments the object is aliased so it is
                // evaluated only once (once for the read, once for the write).
                let this = if matches!(*obj, Expr::This(..)) {
                    Box::new(ThisExpr { span: DUMMY_SP }.into())
                } else if *op == op!("=") {
                    obj
                } else {
                    self.vars.push(VarDeclarator {
                        span: DUMMY_SP,
                        name: var.clone().into(),
                        init: None,
                        definite: false,
                    });
                    Box::new(
                        AssignExpr {
                            span: obj.span(),
                            left: var.clone().into(),
                            op: op!("="),
                            right: obj,
                        }
                        .into(),
                    )
                };

                // Compound `op=` is expanded to `get(...) op rhs`.
                let value = if *op == op!("=") {
                    right.take()
                } else {
                    let left = Box::new(self.visit_mut_private_get(&mut left, Some(var)).0);
                    Box::new(
                        BinExpr {
                            span: DUMMY_SP,
                            left,
                            op: op.to_update().unwrap(),
                            right: right.take(),
                        }
                        .into(),
                    )
                };

                if kind.is_static {
                    *e = CallExpr {
                        span: DUMMY_SP,
                        callee: helper!(class_static_private_field_spec_set),
                        args: vec![
                            this.as_arg(),
                            class_name.clone().as_arg(),
                            ident.as_arg(),
                            value.as_arg(),
                        ],
                        ..Default::default()
                    }
                    .into();
                } else if kind.is_readonly() {
                    // Writing a setter-less private member: evaluate the
                    // operands for their side effects, then throw.
                    let err = CallExpr {
                        span: DUMMY_SP,
                        callee: helper!(read_only_error),
                        args: vec![format!("#{}", n.name).as_arg()],
                        ..Default::default()
                    }
                    .into();

                    *e = SeqExpr {
                        span: *span,
                        exprs: vec![this, value, err],
                    }
                    .into();
                } else {
                    let set = helper!(class_private_field_set);

                    *e = CallExpr {
                        span: DUMMY_SP,
                        callee: set,
                        args: vec![this.as_arg(), ident.as_arg(), value.as_arg()],
                        ..Default::default()
                    }
                    .into();
                }
            }

            // Destructuring assignment: private accesses on the left-hand
            // side are writes.
            Expr::Assign(AssignExpr {
                left: AssignTarget::Pat(left),
                right,
                ..
            }) => {
                right.visit_mut_with(self);

                let old_access_type = self.private_access_type;
                self.private_access_type = PrivateAccessType::DestructureSet;
                left.visit_mut_with(self);
                self.private_access_type = old_access_type;
            }

            // Actually this is a call and we should bind `this`.
            Expr::TaggedTpl(TaggedTpl { span, tag, tpl, .. }) if tag.is_member() => {
                let mut tag = tag.take().member().unwrap();
                tag.visit_mut_with(self);
                tpl.visit_mut_with(self);

                let (expr, this) = self.visit_mut_private_get(&mut tag, None);

                if let Some(this) = this {
                    *e = TaggedTpl {
                        span: *span,
                        tag: CallExpr {
                            span: DUMMY_SP,
                            callee: expr.make_member(quote_ident!("bind")).as_callee(),
                            args: vec![this.as_arg()],
                            ..Default::default()
                        }
                        .into(),
                        tpl: tpl.take(),
                        ..Default::default()
                    }
                    .into();
                } else {
                    *e = TaggedTpl {
                        span: *span,
                        tag: Box::new(expr),
                        tpl: tpl.take(),
                        ..Default::default()
                    }
                    .into();
                }
            }

            // `obj.#m(...)` — the receiver must be preserved, so the helper
            // result is invoked via `.call(this, ...)`.
            Expr::Call(CallExpr {
                span,
                callee: Callee::Expr(callee),
                args,
                ..
            }) if callee.is_member() => {
                let mut callee = callee.take().member().unwrap();
                callee.visit_mut_with(self);
                args.visit_mut_with(self);

                let (expr, this) = self.visit_mut_private_get(&mut callee, None);

                if let Some(this) = this {
                    *e = CallExpr {
                        span: *span,
                        callee: expr.make_member(quote_ident!("call")).as_callee(),
                        args: iter::once(this.as_arg()).chain(args.take()).collect(),
                        ..Default::default()
                    }
                    .into();
                } else {
                    *e = CallExpr {
                        span: *span,
                        callee: expr.as_callee(),
                        args: args.take(),
                        ..Default::default()
                    }
                    .into();
                }
            }

            // Plain read `obj.#x`.
            Expr::Member(member_expr) => {
                member_expr.visit_mut_children_with(self);
                *e = self.visit_mut_private_get(member_expr, None).0;
            }
            _ => e.visit_mut_children_with(self),
        };
    }

    // Mirror of the opt-chain / loose-mode handling above, for assignment
    // targets that are not general expressions.
    fn visit_mut_simple_assign_target(&mut self, e: &mut SimpleAssignTarget) {
        if let SimpleAssignTarget::OptChain(opt) = e {
            let is_private_access = match &*opt.base {
                OptChainBase::Member(MemberExpr {
                    prop: MemberProp::PrivateName(..),
                    ..
                }) => true,
                OptChainBase::Call(OptCall { callee, .. }) => matches!(
                    &**callee,
                    Expr::Member(MemberExpr {
                        prop: MemberProp::PrivateName(..),
                        ..
                    })
                ),
                _ => false,
            };

            if is_private_access {
                let mut v = optional_chaining_impl(
                    crate::optional_chaining_impl::Config {
                        no_document_all: self.c.no_document_all,
                        pure_getter: self.c.pure_getter,
                    },
                    self.unresolved_mark,
                );
                e.visit_mut_with(&mut v);
                assert!(!e.is_opt_chain(), "optional chaining should be removed");
                self.vars.extend(v.take_vars());
            }
        }

        if self.c.private_as_properties {
            if let SimpleAssignTarget::Member(MemberExpr {
                span,
                obj,
                prop: MemberProp::PrivateName(n),
            }) = e
            {
                obj.visit_mut_children_with(self);

                let (mark, _, _) = self.private.get(n.span, &n.name);
                let ident = Ident::new(
                    format!("_{}", n.name).into(),
                    n.span,
                    SyntaxContext::empty().apply_mark(mark),
                );

                *e = CallExpr {
                    callee: helper!(class_private_field_loose_base),
                    span: *span,
                    args: vec![obj.take().as_arg(), ident.clone().as_arg()],
                    ..Default::default()
                }
                .computed_member(ident)
                .into();
            } else {
                e.visit_mut_children_with(self)
            }

            return;
        }

        e.visit_mut_children_with(self)
    }

    fn visit_mut_pat(&mut self, p: &mut Pat) {
        // A private member used as a binding target in a pattern is a write.
        if let Pat::Expr(expr) = &p {
            if let Expr::Member(me) = &**expr {
                if let MemberProp::PrivateName(..) = &me.prop {
                    let old_access_type = self.private_access_type;
                    self.private_access_type = PrivateAccessType::DestructureSet;
                    p.visit_mut_children_with(self);
                    self.private_access_type = old_access_type;

                    return;
                }
            }
        }

        // Anything nested deeper (e.g. default values) is a plain read.
        self.private_access_type = Default::default();
        p.visit_mut_children_with(self);
    }
}
/// Runs a one-shot [`PrivateAccessVisitor`] over `expr` and returns the
/// temporary `var` declarations it produced (the caller is responsible for
/// hoisting them).
pub(super) fn visit_private_in_expr(
    expr: &mut Expr,
    private: &PrivateRecord,
    config: Config,
    unresolved_mark: Mark,
) -> Vec<VarDeclarator> {
    let mut visitor = PrivateAccessVisitor {
        vars: Vec::new(),
        private,
        private_access_type: Default::default(),
        c: config,
        unresolved_mark,
    };

    expr.visit_mut_with(&mut visitor);

    visitor.vars
}
#[swc_trace]
impl PrivateAccessVisitor<'_> {
    /// Lowers a read of `obj.#name` into the matching runtime helper call.
    ///
    /// Returns `(expr, thisObject)` — `thisObject` is the receiver a caller
    /// needs to re-bind when the result is invoked (method calls, tagged
    /// templates).
    ///
    /// - `obj_alias`: If alias is already declared, this method will use
    ///   `obj_alias` instead of declaring a new one.
    fn visit_mut_private_get(
        &mut self,
        e: &mut MemberExpr,
        obj_alias: Option<Ident>,
    ) -> (Expr, Option<Expr>) {
        let is_alias_initialized = obj_alias.is_some();

        let n = match &e.prop {
            MemberProp::PrivateName(n) => n,
            // Not a private access; hand the expression back untouched.
            _ => return (e.take().into(), None),
        };

        let mut obj = e.obj.take();
        let (mark, kind, class_name) = self.private.get(n.span, &n.name);
        // Unresolved name: an error was already reported by `get`.
        if mark == Mark::root() {
            return (Expr::dummy(), None);
        }

        // Ident of the extracted private method function.
        let method_name = Ident::new(
            if n.name.is_reserved_in_any() {
                format!("__{}", n.name).into()
            } else {
                n.name.clone()
            },
            n.span,
            SyntaxContext::empty().apply_mark(mark),
        );
        // Ident of the backing weak collection / loose key.
        let ident = Ident::new(
            format!("_{}", n.name).into(),
            n.span,
            SyntaxContext::empty().apply_mark(mark),
        );

        if kind.is_static {
            // Static members: helpers take the class itself as receiver.
            match self.private_access_type {
                PrivateAccessType::DestructureSet => {
                    let set = helper!(class_static_private_field_destructure);

                    return (
                        CallExpr {
                            span: DUMMY_SP,
                            callee: set,
                            args: vec![
                                obj.clone().as_arg(),
                                class_name.clone().as_arg(),
                                ident.as_arg(),
                            ],
                            ..Default::default()
                        }
                        .make_member(quote_ident!("value"))
                        .into(),
                        Some(*obj),
                    );
                }
                PrivateAccessType::Update => {
                    let set = helper!(class_static_private_field_update);

                    return (
                        CallExpr {
                            span: DUMMY_SP,
                            callee: set,
                            args: vec![
                                obj.clone().as_arg(),
                                class_name.clone().as_arg(),
                                ident.as_arg(),
                            ],
                            ..Default::default()
                        }
                        .make_member(quote_ident!("value"))
                        .into(),
                        Some(*obj),
                    );
                }
                _ => {}
            }

            if kind.is_method() {
                let h = helper!(class_static_private_method_get);

                return (
                    CallExpr {
                        span: DUMMY_SP,
                        callee: h,
                        args: vec![
                            obj.as_arg(),
                            class_name.clone().as_arg(),
                            method_name.as_arg(),
                        ],
                        ..Default::default()
                    }
                    .into(),
                    Some(class_name.clone().into()),
                );
            }

            let get = helper!(class_static_private_field_spec_get);

            (
                CallExpr {
                    span: DUMMY_SP,
                    callee: get,
                    args: vec![obj.as_arg(), class_name.clone().as_arg(), ident.as_arg()],
                    ..Default::default()
                }
                .into(),
                Some(class_name.clone().into()),
            )
        } else {
            // Instance members.
            match self.private_access_type {
                PrivateAccessType::DestructureSet => {
                    let set = helper!(class_private_field_destructure);

                    (
                        CallExpr {
                            span: DUMMY_SP,
                            callee: set,
                            args: vec![obj.clone().as_arg(), ident.as_arg()],
                            ..Default::default()
                        }
                        .make_member(quote_ident!("value"))
                        .into(),
                        Some(*obj),
                    )
                }
                PrivateAccessType::Update => {
                    let set = helper!(class_private_field_update);

                    (
                        CallExpr {
                            span: DUMMY_SP,
                            callee: set,
                            args: vec![obj.clone().as_arg(), ident.as_arg()],
                            ..Default::default()
                        }
                        .make_member(quote_ident!("value"))
                        .into(),
                        Some(*obj),
                    )
                }
                // Reading a setter-only accessor: evaluate the object for
                // side effects, then throw.
                PrivateAccessType::Get if kind.is_writeonly() => {
                    let helper = helper!(write_only_error);
                    let expr = Box::new(
                        CallExpr {
                            span: DUMMY_SP,
                            callee: helper,
                            args: vec![format!("#{}", n.name).as_arg()],
                            ..Default::default()
                        }
                        .into(),
                    );

                    (
                        SeqExpr {
                            span: DUMMY_SP,
                            exprs: vec![obj.clone(), expr],
                        }
                        .into(),
                        Some(*obj),
                    )
                }
                PrivateAccessType::Get => {
                    let get = if self.c.private_as_properties {
                        helper!(class_private_field_loose_base)
                    } else if kind.is_method() {
                        helper!(class_private_method_get)
                    } else {
                        helper!(class_private_field_get)
                    };

                    match &*obj {
                        // `this.#x` needs no alias: `this` has no side effects.
                        Expr::This(this) => (
                            if kind.is_method() && !self.c.private_as_properties {
                                CallExpr {
                                    span: DUMMY_SP,
                                    callee: get,
                                    args: vec![
                                        obj.clone().as_arg(),
                                        ident.as_arg(),
                                        method_name.as_arg(),
                                    ],
                                    ..Default::default()
                                }
                                .into()
                            } else {
                                CallExpr {
                                    span: DUMMY_SP,
                                    callee: get,
                                    args: vec![this.as_arg(), ident.as_arg()],
                                    ..Default::default()
                                }
                                .into()
                            },
                            Some(Expr::This(*this)),
                        ),
                        _ => {
                            // Arbitrary object: alias it so it is evaluated
                            // exactly once (it doubles as the `this` receiver).
                            let mut aliased = false;
                            let var = obj_alias.unwrap_or_else(|| {
                                let (var, a) = alias_if_required(&obj, "_ref");
                                if a {
                                    aliased = true;
                                    self.vars.push(VarDeclarator {
                                        span: DUMMY_SP,
                                        name: var.clone().into(),
                                        init: None,
                                        definite: false,
                                    });
                                }
                                var
                            });

                            let first_arg = if is_alias_initialized {
                                var.clone().as_arg()
                            } else if aliased {
                                AssignExpr {
                                    span: DUMMY_SP,
                                    left: var.clone().into(),
                                    op: op!("="),
                                    right: obj.take(),
                                }
                                .as_arg()
                            } else {
                                var.clone().as_arg()
                            };

                            let args = if kind.is_method() {
                                vec![first_arg, ident.as_arg(), method_name.as_arg()]
                            } else {
                                vec![first_arg, ident.as_arg()]
                            };

                            (
                                CallExpr {
                                    span: DUMMY_SP,
                                    callee: get,
                                    args,
                                    ..Default::default()
                                }
                                .into(),
                                Some(var.into()),
                            )
                        }
                    }
                }
            }
        }
    }
}
/// Reports whether `method` illegally re-declares a private name already
/// recorded as `kind`. The only legal duplication is a getter and a setter
/// (with matching staticness) completing each other in the same class scope.
pub(super) fn dup_private_method(kind: &PrivateKind, method: &PrivateMethod) -> bool {
    if !kind.is_method || kind.is_static != method.is_static || method.kind == MethodKind::Method {
        return true;
    }

    // A getter may join an existing lone setter, and vice versa; everything
    // else is a duplicate.
    match (method.kind, kind.has_getter, kind.has_setter) {
        (MethodKind::Getter, false, true) | (MethodKind::Setter, true, false) => false,
        _ => true,
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/class_properties/this_in_static.rs | Rust | use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
/// Replaces `this` with `ident` (see the `VisitMut` impl below); used when a
/// static initializer is moved outside the class body, where `this` would no
/// longer refer to the class.
pub(super) struct ThisInStaticFolder {
    pub ident: Ident,
}
#[swc_trace]
impl VisitMut for ThisInStaticFolder {
    noop_visit_mut_type!(fail);

    // Constructors keep their own `this`; computed keys are handled
    // elsewhere, so don't descend.
    fn visit_mut_constructor(&mut self, _: &mut Constructor) {}

    // Functions introduce a fresh `this` binding; stop here too.
    fn visit_mut_function(&mut self, _: &mut Function) {}

    fn visit_mut_expr(&mut self, e: &mut Expr) {
        e.visit_mut_children_with(self);

        if matches!(e, Expr::This(..)) {
            *e = Expr::from(self.ident.clone());
        }
    }
}
/// Rewrites `new.target` to `undefined` (see the `VisitMut` impl below).
pub(super) struct NewTargetInProp;
#[swc_trace]
impl VisitMut for NewTargetInProp {
noop_visit_mut_type!(fail);
// once again, for computed props
fn visit_mut_constructor(&mut self, _: &mut Constructor) {}
fn visit_mut_expr(&mut self, e: &mut Expr) {
e.visit_mut_children_with(self);
if let Expr::MetaProp(MetaPropExpr {
span,
kind: MetaPropKind::NewTarget,
}) = e
{
*e = *Expr::undefined(*span);
}
}
fn visit_mut_function(&mut self, _: &mut Function) {}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/class_properties/used_name.rs | Rust | use swc_atoms::Atom;
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use swc_trace_macro::swc_trace;
/// Collects every identifier appearing in expression position into
/// `used_names`, without descending into nested function-like scopes.
pub(super) struct UsedNameCollector<'a> {
    pub used_names: &'a mut Vec<Atom>,
}

#[swc_trace]
impl Visit for UsedNameCollector<'_> {
    noop_visit_type!(fail);

    // Function-like scopes introduce their own bindings, so names used
    // inside them are deliberately ignored.
    fn visit_arrow_expr(&mut self, _: &ArrowExpr) {}

    fn visit_function(&mut self, _: &Function) {}

    fn visit_setter_prop(&mut self, _: &SetterProp) {}

    fn visit_getter_prop(&mut self, _: &GetterProp) {}

    fn visit_method_prop(&mut self, _: &MethodProp) {}

    fn visit_constructor(&mut self, _: &Constructor) {}

    fn visit_expr(&mut self, expr: &Expr) {
        if let Expr::Ident(i) = expr {
            self.used_names.push(i.sym.clone());
        } else {
            expr.visit_children_with(self);
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/lib.rs | Rust | #![allow(clippy::vec_box)]
use swc_common::Mark;
use swc_ecma_ast::Pass;
use swc_ecma_compat_common::regexp::{self, regexp};
pub use self::{
class_properties::class_properties, private_in_object::private_in_object,
static_blocks::static_blocks,
};
pub mod class_properties;
pub mod optional_chaining_impl;
pub mod private_in_object;
pub mod static_blocks;
/// Builds the full ES2022 down-leveling pass: regex features introduced up
/// to ES2022, class static blocks, class properties / private members, and
/// `#x in obj` brand checks.
///
/// Pass order matters: static blocks are rewritten into private props so
/// that the class-properties pass can lower them.
pub fn es2022(config: Config, unresolved_mark: Mark) -> impl Pass {
    (
        // Flags mark which regex features must be compiled away; `sticky`,
        // `unicode` and `unicode sets` belong to other ES versions.
        regexp(regexp::Config {
            dot_all_regex: true,
            has_indices: true,
            lookbehind_assertion: true,
            named_capturing_groups_regex: true,
            sticky_regex: false,
            unicode_property_regex: true,
            unicode_regex: false,
            unicode_sets_regex: false,
        }),
        static_blocks(),
        class_properties(config.class_properties, unresolved_mark),
        private_in_object(),
    )
}

/// Configuration for [`es2022`]; currently only the class-properties pass
/// is configurable.
#[derive(Debug, Clone, Default)]
pub struct Config {
    pub class_properties: class_properties::Config,
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/optional_chaining_impl.rs | Rust | use std::mem;
use swc_common::{util::take::Take, Mark, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_utils::{alias_ident_for, prepend_stmt, quote_ident, ExprFactory, StmtLike};
use swc_ecma_visit::{noop_visit_mut_type, VisitMut, VisitMutWith};
/// Not a public API and may break any time. Don't use it directly.
pub fn optional_chaining_impl(c: Config, unresolved_mark: Mark) -> OptionalChaining {
OptionalChaining {
c,
unresolved_ctxt: SyntaxContext::empty().apply_mark(unresolved_mark),
..Default::default()
}
}
/// Visitor state for lowering optional chaining (`a?.b`).
#[derive(Default)]
pub struct OptionalChaining {
    // Memo variables created while rewriting; hoisted as a `var` at the
    // nearest statement list (see `visit_mut_stmt_like`).
    vars: Vec<VarDeclarator>,
    // Context used to recognize unresolved (global) identifiers.
    unresolved_ctxt: SyntaxContext,
    c: Config,
}

impl OptionalChaining {
    /// Drains the memo variables accumulated so far, leaving the visitor
    /// ready for the next statement list.
    pub fn take_vars(&mut self) -> Vec<VarDeclarator> {
        mem::take(&mut self.vars)
    }
}

/// Not a public API and may break any time. Don't use it directly.
#[derive(Debug, Clone, Copy, Default)]
pub struct Config {
    // When set, emits loose `x == null` tests (target has no `document.all`).
    pub no_document_all: bool,
    // When set, member reads on simple objects are assumed side-effect free
    // and are not memoized for call contexts.
    pub pure_getter: bool,
}
impl VisitMut for OptionalChaining {
    noop_visit_mut_type!(fail);

    /// An expression-bodied arrow may need to become a block body so that
    /// the memo `var` injected by the rewrite has somewhere to live.
    fn visit_mut_block_stmt_or_expr(&mut self, expr: &mut BlockStmtOrExpr) {
        if let BlockStmtOrExpr::Expr(e) = expr {
            // Temporarily wrap the expression as `{ return <e>; }` so the
            // statement-list visitor can prepend the memo declaration.
            let mut stmt = BlockStmt {
                span: DUMMY_SP,
                stmts: vec![Stmt::Return(ReturnStmt {
                    span: DUMMY_SP,
                    arg: Some(e.take()),
                })],
                ..Default::default()
            };
            stmt.visit_mut_with(self);
            // If there are optional chains in this expression, then the visitor will have
            // injected an VarDecl statement and we need to transform into a
            // block. If not, then we can keep the expression.
            match &mut stmt.stmts[..] {
                [Stmt::Return(ReturnStmt { arg: Some(e), .. })] => {
                    *expr = BlockStmtOrExpr::Expr(e.take())
                }
                _ => *expr = BlockStmtOrExpr::BlockStmt(stmt),
            }
        } else {
            expr.visit_mut_children_with(self);
        }
    }

    /// Entry point of the rewrite: flattens each optional chain (`gather`)
    /// and rebuilds it as nested conditionals (`construct`).
    fn visit_mut_expr(&mut self, e: &mut Expr) {
        match e {
            // foo?.bar -> foo == null ? void 0 : foo.bar
            Expr::OptChain(v) => {
                let data = self.gather(v.take(), Vec::new());
                *e = self.construct(data, false);
            }
            Expr::Unary(UnaryExpr {
                arg,
                op: op!("delete"),
                ..
            }) => {
                match &mut **arg {
                    // delete foo?.bar -> foo == null ? true : delete foo.bar
                    Expr::OptChain(v) => {
                        let data = self.gather(v.take(), Vec::new());
                        *e = self.construct(data, true);
                    }
                    _ => e.visit_mut_children_with(self),
                }
            }
            e => e.visit_mut_children_with(self),
        }
    }

    fn visit_mut_pat(&mut self, n: &mut Pat) {
        // The default initializer of an assignment pattern must not leak the memo
        // variable into the enclosing scope.
        // function(a, b = a?.b) {} -> function(a, b = (() => var _a; …)()) {}
        let Pat::Assign(a) = n else {
            n.visit_mut_children_with(self);
            return;
        };

        // Park any memos belonging to the enclosing scope while the default
        // value is rewritten, then restore them afterwards.
        let uninit = self.vars.take();
        a.right.visit_mut_with(self);

        // If we found an optional chain, we need to transform into an arrow IIFE to
        // capture the memo variable.
        if !self.vars.is_empty() {
            let stmts = vec![
                Stmt::Decl(Decl::Var(Box::new(VarDecl {
                    kind: VarDeclKind::Var,
                    decls: mem::take(&mut self.vars),
                    ..Default::default()
                }))),
                Stmt::Return(ReturnStmt {
                    span: DUMMY_SP,
                    arg: Some(a.right.take()),
                }),
            ];
            a.right = CallExpr {
                span: DUMMY_SP,
                callee: ArrowExpr {
                    span: DUMMY_SP,
                    params: Vec::new(),
                    body: Box::new(BlockStmtOrExpr::BlockStmt(BlockStmt {
                        span: DUMMY_SP,
                        stmts,
                        ..Default::default()
                    })),
                    is_async: false,
                    is_generator: false,
                    ..Default::default()
                }
                .as_callee(),
                args: Vec::new(),
                ..Default::default()
            }
            .into();
        }
        self.vars = uninit;

        a.left.visit_mut_with(self);
    }

    fn visit_mut_module_items(&mut self, n: &mut Vec<ModuleItem>) {
        self.visit_mut_stmt_like(n);
    }

    fn visit_mut_stmts(&mut self, n: &mut Vec<Stmt>) {
        self.visit_mut_stmt_like(n);
    }
}
/// Either a cached reference to an intermediate chain value or the raw
/// expression itself (used when re-evaluating it is known to be safe).
#[derive(Debug, Clone)]
enum Memo {
    Cache(Ident),
    Raw(Box<Expr>),
}

impl Memo {
    /// Unwraps the memo into a plain expression node.
    fn into_expr(self) -> Expr {
        match self {
            Memo::Cache(ident) => ident.into(),
            Memo::Raw(boxed) => *boxed,
        }
    }
}

/// One flattened link of an optional chain. The `Opt*` variants carry the
/// memo that backs their null/undefined test.
#[derive(Debug)]
enum Gathering {
    Call(CallExpr),
    Member(MemberExpr),
    OptCall(CallExpr, Memo),
    OptMember(MemberExpr, Memo),
}
impl OptionalChaining {
    /// Transforms the left-nested structure into a flat vec. The obj/callee
    /// of every node in the chain will be Invalid, to be replaced with a
    /// constructed node in the construct step.
    /// The top member/call will be first, and the deepest obj/callee will be
    /// last.
    fn gather(
        &mut self,
        v: OptChainExpr,
        mut chain: Vec<Gathering>,
    ) -> (Expr, usize, Vec<Gathering>) {
        let mut current = v;
        // Number of optional (`?.`) links; sizes the conditional buffer.
        let mut count = 0;
        loop {
            let OptChainExpr {
                optional, mut base, ..
            } = current;
            if optional {
                count += 1;
            }

            let next;
            match &mut *base {
                OptChainBase::Member(m) => {
                    next = m.obj.take();
                    // Computed keys may themselves contain optional chains.
                    m.prop.visit_mut_with(self);
                    chain.push(if optional {
                        Gathering::OptMember(m.take(), self.memoize(&next, false))
                    } else {
                        Gathering::Member(m.take())
                    });
                }
                OptChainBase::Call(c) => {
                    next = c.callee.take();
                    c.args.visit_mut_with(self);
                    // I don't know why c is an OptCall instead of a CallExpr.
                    chain.push(if optional {
                        Gathering::OptCall(c.take().into(), self.memoize(&next, true))
                    } else {
                        Gathering::Call(c.take().into())
                    });
                }
            }

            match *next {
                Expr::OptChain(next) => {
                    current = next;
                }
                mut base => {
                    // Reached the head of the chain; rewrite any chains it
                    // contains and stop.
                    base.visit_mut_children_with(self);
                    return (base, count, chain);
                }
            }
        }
    }

    /// Constructs a rightward nested conditional expression out of our
    /// flattened chain.
    fn construct(&mut self, data: (Expr, usize, Vec<Gathering>), is_delete: bool) -> Expr {
        let (mut current, count, chain) = data;
        // Stores partially constructed CondExprs for us to assemble later on.
        let mut committed_cond = Vec::with_capacity(count);
        // Stores the memo used to construct an optional chain, so that it can be used
        // as the this context of an optional call:
        // foo?.bar?.() ->
        // (_foo = foo) == null
        //     ? void 0
        //     : (_foo_bar = _foo.bar) == null
        //         ? void 0 : _foo_bar.call(_foo)
        let mut ctx = None;

        // In the first pass, we construct a "current" node and several committed
        // CondExprs. The conditionals will have an invalid alt, waiting for the
        // second pass to properly construct them.
        // We reverse iterate so that we can construct a rightward conditional
        // `(_a = a) == null ? void 0 : (_a_b = _a.b) == null ? void 0 : _a_b.c`
        // instead of a leftward one
        // `(_a_b = (_a = a) == null ? void 0 : _a.b) == null ? void 0 : _a_b.c`
        for v in chain.into_iter().rev() {
            current = match v {
                Gathering::Call(mut c) => {
                    c.callee = current.as_callee();
                    ctx = None;
                    c.into()
                }
                Gathering::Member(mut m) => {
                    m.obj = Box::new(current);
                    ctx = None;
                    m.into()
                }
                Gathering::OptCall(mut c, memo) => {
                    // Whether the call needs an explicit `this` argument
                    // (i.e. it is `obj.method?.()` rather than `fn?.()`).
                    let mut call = false;
                    // foo.bar?.() -> (_foo_bar == null) ? void 0 : _foo_bar.call(foo)
                    match &mut current {
                        Expr::Member(m) => {
                            call = true;
                            // Reuse the memo of the preceding optional link
                            // as `this`, or memoize the member's object now.
                            let this = ctx.unwrap_or_else(|| {
                                let this = self.memoize(&m.obj, true);
                                match &this {
                                    Memo::Cache(i) => {
                                        m.obj = AssignExpr {
                                            span: DUMMY_SP,
                                            op: op!("="),
                                            left: i.clone().into(),
                                            right: m.obj.take(),
                                        }
                                        .into();
                                        this
                                    }
                                    Memo::Raw(_) => this,
                                }
                            });
                            c.args.insert(0, this.into_expr().as_arg());
                        }
                        Expr::SuperProp(s) => {
                            call = true;
                            c.args.insert(0, ThisExpr { span: s.obj.span }.as_arg());
                        }
                        _ => {}
                    }
                    committed_cond.push(CondExpr {
                        span: DUMMY_SP,
                        test: init_and_eq_null_or_undefined(&memo, current, self.c.no_document_all),
                        cons: if is_delete {
                            // `delete` on a short-circuited chain yields `true`.
                            true.into()
                        } else {
                            Expr::undefined(DUMMY_SP)
                        },
                        alt: Take::dummy(),
                    });
                    c.callee = if call {
                        memo.into_expr()
                            .make_member(quote_ident!("call"))
                            .as_callee()
                    } else {
                        memo.into_expr().as_callee()
                    };
                    ctx = None;
                    c.into()
                }
                Gathering::OptMember(mut m, memo) => {
                    committed_cond.push(CondExpr {
                        span: DUMMY_SP,
                        test: init_and_eq_null_or_undefined(&memo, current, self.c.no_document_all),
                        cons: if is_delete {
                            true.into()
                        } else {
                            Expr::undefined(DUMMY_SP)
                        },
                        alt: Take::dummy(),
                    });
                    // Remember this memo: a following optional call can use
                    // it as the `this` context.
                    ctx = Some(memo.clone());
                    m.obj = memo.into_expr().into();
                    m.into()
                }
            };
        }

        // At this point, `current` is the right-most expression `_a_b.c` in `a?.b?.c`
        if is_delete {
            current = UnaryExpr {
                span: DUMMY_SP,
                op: op!("delete"),
                arg: Box::new(current),
            }
            .into();
        }

        // We now need to reverse iterate the conditionals to construct out tree.
        for mut cond in committed_cond.into_iter().rev() {
            cond.alt = Box::new(current);
            current = cond.into()
        }

        current
    }

    /// Decides whether `expr` must be cached in a temp variable before the
    /// nullish test (it is re-evaluated otherwise).
    fn should_memo(&self, expr: &Expr, is_call: bool) -> bool {
        // Member chains off `this`/idents/non-computed props are considered
        // re-evaluable when the user promised pure getters.
        fn is_simple_member(e: &Expr) -> bool {
            match e {
                Expr::This(..) => true,
                Expr::Ident(_) => true,
                Expr::SuperProp(s) if !s.prop.is_computed() => true,
                Expr::Member(m) if !m.prop.is_computed() => is_simple_member(&m.obj),
                _ => false,
            }
        }

        match expr {
            // Locally bound identifiers cannot change between the test and
            // the access, so no memo is needed.
            Expr::Ident(i) if i.ctxt != self.unresolved_ctxt => false,
            _ => {
                if is_call && self.c.pure_getter {
                    !is_simple_member(expr)
                } else {
                    true
                }
            }
        }
    }

    /// Produces a [`Memo`] for `expr`: a fresh hoisted temp variable when
    /// caching is required, or the expression itself otherwise.
    fn memoize(&mut self, expr: &Expr, is_call: bool) -> Memo {
        if self.should_memo(expr, is_call) {
            let memo = alias_ident_for(expr, "_this");
            // Declared without an initializer; the assignment happens inside
            // the nullish test (`(_x = expr) == null`).
            self.vars.push(VarDeclarator {
                span: DUMMY_SP,
                name: memo.clone().into(),
                init: None,
                definite: false,
            });
            Memo::Cache(memo)
        } else {
            Memo::Raw(Box::new(expr.to_owned()))
        }
    }

    /// Visits a statement list and prepends a single `var` declaring all
    /// memo temporaries created while rewriting its statements.
    fn visit_mut_stmt_like<T>(&mut self, stmts: &mut Vec<T>)
    where
        T: Send + Sync + StmtLike + VisitMutWith<Self>,
        Vec<T>: VisitMutWith<Self>,
    {
        // Save memos belonging to an enclosing list while this one runs.
        let uninit = self.vars.take();
        for stmt in stmts.iter_mut() {
            stmt.visit_mut_with(self);
        }

        if !self.vars.is_empty() {
            prepend_stmt(
                stmts,
                T::from(
                    VarDecl {
                        span: DUMMY_SP,
                        declare: false,
                        kind: VarDeclKind::Var,
                        decls: mem::take(&mut self.vars),
                        ..Default::default()
                    }
                    .into(),
                ),
            );
        }
        self.vars = uninit;
    }
}
/// Builds the nullish test for one chain link.
///
/// With `no_document_all` the loose `(memo = init) == null` suffices;
/// otherwise the strict form `(memo = init) === null || memo === void 0`
/// is emitted, because `document.all == null` is loosely true even though
/// `document.all` is callable.
fn init_and_eq_null_or_undefined(i: &Memo, init: Expr, no_document_all: bool) -> Box<Expr> {
    // For a cached memo the test doubles as the assignment of the evaluated
    // object; a raw memo repeats the expression verbatim (safe because it
    // was judged side-effect free in `should_memo`).
    let lhs = match i {
        Memo::Cache(i) => AssignExpr {
            span: DUMMY_SP,
            op: op!("="),
            left: i.clone().into(),
            right: Box::new(init),
        }
        .into(),
        Memo::Raw(e) => e.to_owned(),
    };

    if no_document_all {
        return BinExpr {
            span: DUMMY_SP,
            left: lhs,
            op: op!("=="),
            right: Box::new(Lit::Null(Null { span: DUMMY_SP }).into()),
        }
        .into();
    }

    let null_cmp = BinExpr {
        span: DUMMY_SP,
        left: lhs,
        op: op!("==="),
        right: Box::new(Lit::Null(Null { span: DUMMY_SP }).into()),
    }
    .into();

    // Second comparison reuses the memo variable (already assigned by the
    // first comparison) or the raw expression again.
    let left_expr = match i {
        Memo::Cache(i) => Box::new(i.clone().into()),
        Memo::Raw(e) => e.to_owned(),
    };
    let void_cmp = BinExpr {
        span: DUMMY_SP,
        left: left_expr,
        op: op!("==="),
        right: Expr::undefined(DUMMY_SP),
    }
    .into();

    BinExpr {
        span: DUMMY_SP,
        left: null_cmp,
        op: op!("||"),
        right: void_cmp,
    }
    .into()
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/private_in_object.rs | Rust | use std::{
borrow::Cow,
mem::{replace, take},
};
use rustc_hash::FxHashSet;
use swc_atoms::Atom;
use swc_common::{pass::CompilerPass, util::take::Take, Mark, Spanned, SyntaxContext, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_utils::{
default_constructor_with_span, prepend_stmt, private_ident, quote_ident, ExprFactory,
};
use swc_ecma_visit::{
noop_visit_mut_type, noop_visit_type, visit_mut_pass, Visit, VisitMut, VisitMutWith, VisitWith,
};
/// https://github.com/tc39/proposal-private-fields-in-in
///
/// Lowers `#priv in obj` brand checks into membership tests against a
/// hoisted `WeakSet` (or a direct `===` comparison for static private
/// methods of a named class).
pub fn private_in_object() -> impl Pass {
    visit_mut_pass(PrivateInObject::default())
}
/// Controls how brand-check variables are emitted. A class declaration only
/// needs hoisted `var`s; a class expression additionally needs the
/// initializers to run as expressions right before the class value is
/// produced.
#[derive(Debug)]
enum Mode {
    ClassExpr {
        vars: Vec<VarDeclarator>,
        init_exprs: Vec<Box<Expr>>,
    },
    ClassDecl {
        vars: Vec<VarDeclarator>,
    },
}

impl Default for Mode {
    fn default() -> Self {
        Mode::ClassDecl { vars: Vec::new() }
    }
}
impl Mode {
    /// Registers a brand-check variable `n` with an optional initializer.
    ///
    /// For class declarations the initializer sits directly on the hoisted
    /// `var`; for class expressions the `var` is declared uninitialized and
    /// the assignment is queued to run before the class expression itself.
    fn push_var(&mut self, n: Ident, init: Option<Box<Expr>>) {
        match self {
            Mode::ClassExpr { vars, init_exprs } => {
                vars.push(VarDeclarator {
                    span: DUMMY_SP,
                    name: n.clone().into(),
                    init: None,
                    definite: Default::default(),
                });

                if let Some(init) = init {
                    // Emitted later as part of a sequence expression
                    // preceding the class expression.
                    init_exprs.push(
                        AssignExpr {
                            span: DUMMY_SP,
                            op: op!("="),
                            left: n.into(),
                            right: init,
                        }
                        .into(),
                    );
                }
            }
            Mode::ClassDecl { vars } => {
                vars.push(VarDeclarator {
                    span: DUMMY_SP,
                    name: n.into(),
                    init,
                    definite: Default::default(),
                });
            }
        }
    }
}
/// Visitor state for the `#x in obj` lowering.
#[derive(Default)]
struct PrivateInObject {
    // Hoisted `var` declarators, flushed at the nearest statement list.
    vars: Vec<VarDeclarator>,
    // Expressions that must run before the expression currently being
    // rewritten (brand-check WeakSet initializers for class expressions).
    prepend_exprs: Vec<Box<Expr>>,
    // Brand-check variables already declared, to avoid duplicates.
    injected_vars: FxHashSet<Id>,
    // State of the innermost class being visited.
    cls: ClassData,
}

/// Per-class bookkeeping; swapped in/out when entering nested classes.
#[derive(Default)]
struct ClassData {
    // Binding identifier of the class, if it has one.
    ident: Option<Ident>,
    // How brand-check vars/initializers are emitted for this class.
    vars: Mode,
    /// [Mark] for the current class.
    ///
    /// This is modified by the class visitor.
    mark: Mark,
    // All private names declared by this class.
    privates: FxHashSet<Atom>,
    /// Name of private methods.
    methods: Vec<Atom>,
    /// Name of private statics.
    statics: Vec<Atom>,
    // `WeakSet#add(this)` calls to append to the constructor body.
    constructor_exprs: Vec<Box<Expr>>,
    // Private names that actually appear on the left of an `in` check.
    names_used_for_brand_checks: FxHashSet<Atom>,
}
impl CompilerPass for PrivateInObject {
    /// Name reported by the pass infrastructure (timings / diagnostics).
    fn name(&self) -> Cow<'static, str> {
        "private-in-object".into()
    }
}
impl PrivateInObject {
    /// Derives the identifier of the WeakSet backing a brand check on `n`.
    ///
    /// All non-static private *methods* of a named class share a single
    /// brand (named after the class), since every instance is registered
    /// once in the constructor; anything else gets a per-name WeakSet.
    fn var_name_for_brand_check(&self, n: &PrivateName) -> Ident {
        let is_static = self.cls.statics.contains(&n.name);
        let span = n.span;
        // Hygiene context tied to this class so identical names in nested
        // classes do not collide.
        let ctxt = SyntaxContext::empty().apply_mark(self.cls.mark);

        if !is_static && self.cls.methods.contains(&n.name) {
            if let Some(cls_name) = &self.cls.ident {
                return Ident::new(format!("_brand_check_{}", cls_name.sym).into(), span, ctxt);
            }
        }

        Ident::new(format!("_brand_check_{}", n.name).into(), span, ctxt)
    }
}
impl VisitMut for PrivateInObject {
    noop_visit_mut_type!(fail);

    /// Records the class's private members, rewrites its body, and then
    /// appends the queued `WeakSet#add(this)` calls to the constructor
    /// (synthesizing a default constructor if the class lacks one).
    fn visit_mut_class(&mut self, n: &mut Class) {
        {
            // Pre-scan this class body (skipping nested classes) for the
            // private names used in `#x in y` brand checks.
            n.visit_children_with(&mut ClassAnalyzer {
                brand_check_names: &mut self.cls.names_used_for_brand_checks,
                ignore_class: true,
            })
        }

        // Collect all private names; methods and statics are tracked
        // separately because they choose different lowering strategies.
        for m in &n.body {
            match m {
                ClassMember::PrivateMethod(m) => {
                    self.cls.privates.insert(m.key.name.clone());
                    self.cls.methods.push(m.key.name.clone());
                    if m.is_static {
                        self.cls.statics.push(m.key.name.clone());
                    }
                }
                ClassMember::PrivateProp(m) => {
                    self.cls.privates.insert(m.key.name.clone());
                    if m.is_static {
                        self.cls.statics.push(m.key.name.clone());
                    }
                }
                _ => {}
            }
        }

        n.visit_mut_children_with(self);

        if !self.cls.constructor_exprs.is_empty() {
            let has_constructor = n
                .body
                .iter()
                .any(|m| matches!(m, ClassMember::Constructor(_)));

            if !has_constructor {
                let has_super = n.super_class.is_some();
                n.body
                    .push(ClassMember::Constructor(default_constructor_with_span(
                        has_super, n.span,
                    )));
            }

            for m in &mut n.body {
                if let ClassMember::Constructor(Constructor {
                    body: Some(body), ..
                }) = m
                {
                    for expr in take(&mut self.cls.constructor_exprs) {
                        body.stmts.push(
                            ExprStmt {
                                span: DUMMY_SP,
                                expr,
                            }
                            .into(),
                        );
                    }
                }
            }
        }
    }

    /// Swaps in fresh per-class state; brand-check vars of a class
    /// declaration are hoisted as plain `var`s.
    fn visit_mut_class_decl(&mut self, n: &mut ClassDecl) {
        let old_cls = take(&mut self.cls);
        self.cls.mark = Mark::fresh(Mark::root());
        self.cls.ident = Some(n.ident.clone());
        self.cls.vars = Mode::ClassDecl {
            vars: Default::default(),
        };

        n.visit_mut_children_with(self);

        match &mut self.cls.vars {
            Mode::ClassDecl { vars } => {
                self.vars.extend(take(vars));
            }
            _ => {
                unreachable!()
            }
        }

        self.cls = old_cls;
    }

    /// Like `visit_mut_class_decl`, but brand-check initializers must run
    /// as expressions right before the class expression is evaluated.
    fn visit_mut_class_expr(&mut self, n: &mut ClassExpr) {
        let old_cls = take(&mut self.cls);
        self.cls.mark = Mark::fresh(Mark::root());
        self.cls.ident.clone_from(&n.ident);
        self.cls.vars = Mode::ClassExpr {
            vars: Default::default(),
            init_exprs: Default::default(),
        };

        n.visit_mut_children_with(self);

        match &mut self.cls.vars {
            Mode::ClassExpr { vars, init_exprs } => {
                self.vars.extend(take(vars));
                self.prepend_exprs.extend(take(init_exprs));
            }
            _ => {
                unreachable!()
            }
        }

        self.cls = old_cls;
    }

    /// Default values of assignment patterns must not leak prepended
    /// expressions into the enclosing scope; when the default contains a
    /// brand check, it is wrapped in an arrow IIFE.
    fn visit_mut_assign_pat(&mut self, p: &mut AssignPat) {
        p.left.visit_mut_with(self);

        {
            let mut buf = FxHashSet::default();
            let mut v = ClassAnalyzer {
                brand_check_names: &mut buf,
                ignore_class: false,
            };
            p.right.visit_with(&mut v);

            if buf.is_empty() {
                // No brand checks inside the default value; rewrite in place.
                p.right.visit_mut_with(self);
            } else {
                // Wrap as `(() => { return <default>; })()` so injected vars
                // stay local to the IIFE.
                let mut bs = BlockStmt {
                    span: DUMMY_SP,
                    stmts: Vec::new(),
                    ..Default::default()
                };
                bs.stmts.push(
                    ReturnStmt {
                        span: DUMMY_SP,
                        arg: Some(p.right.take()),
                    }
                    .into(),
                );
                bs.visit_mut_with(self);

                p.right = CallExpr {
                    span: DUMMY_SP,
                    callee: ArrowExpr {
                        span: DUMMY_SP,
                        params: Default::default(),
                        body: Box::new(BlockStmtOrExpr::BlockStmt(bs)),
                        is_async: false,
                        is_generator: false,
                        ..Default::default()
                    }
                    .as_callee(),
                    args: Default::default(),
                    ..Default::default()
                }
                .into();
            }
        }
    }

    /// Rewrites `#name in expr` and splices any expressions queued while
    /// visiting children (class-expression WeakSet initializers) into a
    /// sequence expression.
    fn visit_mut_expr(&mut self, e: &mut Expr) {
        let prev_prepend_exprs = take(&mut self.prepend_exprs);

        e.visit_mut_children_with(self);

        let mut prepend_exprs = replace(&mut self.prepend_exprs, prev_prepend_exprs);

        if !prepend_exprs.is_empty() {
            match e {
                Expr::Seq(e) => {
                    e.exprs = prepend_exprs.into_iter().chain(e.exprs.take()).collect();
                }
                _ => {
                    prepend_exprs.push(Box::new(e.take()));
                    *e = SeqExpr {
                        span: DUMMY_SP,
                        exprs: prepend_exprs,
                    }
                    .into();
                }
            }
            return;
        }

        match e {
            Expr::Bin(BinExpr {
                span,
                op: op!("in"),
                left,
                right,
            }) if left.is_private_name() => {
                let left = left.take().expect_private_name();

                let is_static = self.cls.statics.contains(&left.name);
                let is_method = self.cls.methods.contains(&left.name);

                if let Some(cls_ident) = self.cls.ident.clone() {
                    // `#staticMethod in x` is simply `x === Class`: the
                    // method lives on the class object itself.
                    if is_static && is_method {
                        *e = BinExpr {
                            span: *span,
                            op: op!("==="),
                            left: cls_ident.into(),
                            right: right.take(),
                        }
                        .into();
                        return;
                    }
                }

                let var_name = self.var_name_for_brand_check(&left);

                // First use of this brand: declare the WeakSet and, for
                // methods, register instances in the constructor.
                if self.cls.privates.contains(&left.name)
                    && self.injected_vars.insert(var_name.to_id())
                {
                    self.cls.vars.push_var(
                        var_name.clone(),
                        Some(
                            NewExpr {
                                span: DUMMY_SP,
                                callee: Box::new(quote_ident!("WeakSet").into()),
                                args: Some(Default::default()),
                                ..Default::default()
                            }
                            .into(),
                        ),
                    );

                    if is_method {
                        self.cls.constructor_exprs.push(
                            CallExpr {
                                span: DUMMY_SP,
                                callee: var_name
                                    .clone()
                                    .make_member(quote_ident!("add"))
                                    .as_callee(),
                                args: vec![ThisExpr { span: DUMMY_SP }.as_arg()],
                                ..Default::default()
                            }
                            .into(),
                        );
                    }
                }

                // `#x in y` -> `_brand_check_x.has(y)`
                *e = CallExpr {
                    span: *span,
                    callee: var_name.make_member(quote_ident!("has")).as_callee(),
                    args: vec![right.take().as_arg()],
                    ..Default::default()
                }
                .into();
            }
            _ => {}
        }
    }

    fn visit_mut_module_items(&mut self, ns: &mut Vec<ModuleItem>) {
        ns.visit_mut_children_with(self);

        // Flush hoisted brand-check vars at the top of the module.
        if !self.vars.is_empty() {
            prepend_stmt(
                ns,
                VarDecl {
                    span: DUMMY_SP,
                    kind: VarDeclKind::Var,
                    declare: Default::default(),
                    decls: take(&mut self.vars),
                    ..Default::default()
                }
                .into(),
            );
        }
    }

    /// A private *field* used in a brand check registers instances through
    /// its own initializer rather than through the constructor.
    fn visit_mut_private_prop(&mut self, n: &mut PrivateProp) {
        n.visit_mut_children_with(self);

        if self.cls.names_used_for_brand_checks.contains(&n.key.name) {
            let var_name = self.var_name_for_brand_check(&n.key);

            match &mut n.value {
                Some(init) => {
                    // `#x = init` -> `#x = (_tmp = init, _brand.add(this), _tmp)`
                    // preserving the initializer's evaluation order.
                    let init_span = init.span();

                    let tmp = private_ident!("_tmp");
                    self.cls.vars.push_var(tmp.clone(), None);

                    let assign = AssignExpr {
                        span: DUMMY_SP,
                        op: op!("="),
                        left: tmp.clone().into(),
                        right: init.take(),
                    }
                    .into();
                    let add_to_checker = CallExpr {
                        span: DUMMY_SP,
                        callee: var_name.make_member(quote_ident!("add")).as_callee(),
                        args: vec![ThisExpr { span: DUMMY_SP }.as_arg()],
                        ..Default::default()
                    }
                    .into();

                    *init = SeqExpr {
                        span: init_span,
                        exprs: vec![assign, add_to_checker, Box::new(tmp.into())],
                    }
                    .into();
                }
                None => {
                    // `#x;` -> `#x = void _brand.add(this);` keeping the
                    // field's value `undefined`.
                    n.value = Some(
                        UnaryExpr {
                            span: DUMMY_SP,
                            op: op!("void"),
                            arg: Box::new(
                                CallExpr {
                                    span: DUMMY_SP,
                                    callee: var_name.make_member(quote_ident!("add")).as_callee(),
                                    args: vec![ThisExpr { span: DUMMY_SP }.as_arg()],
                                    ..Default::default()
                                }
                                .into(),
                            ),
                        }
                        .into(),
                    )
                }
            }
        }
    }

    /// Only computed keys contain expressions worth rewriting.
    fn visit_mut_prop_name(&mut self, n: &mut PropName) {
        if let PropName::Computed(_) = n {
            n.visit_mut_children_with(self);
        }
    }

    fn visit_mut_stmts(&mut self, s: &mut Vec<Stmt>) {
        s.visit_mut_children_with(self);

        // Flush hoisted brand-check vars at the top of this statement list.
        if !self.vars.is_empty() {
            prepend_stmt(
                s,
                VarDecl {
                    span: DUMMY_SP,
                    kind: VarDeclKind::Var,
                    declare: Default::default(),
                    decls: take(&mut self.vars),
                    ..Default::default()
                }
                .into(),
            );
        }
    }
}
/// Read-only scan collecting the private names that appear on the left of
/// an `in` brand check.
struct ClassAnalyzer<'a> {
    brand_check_names: &'a mut FxHashSet<Atom>,
    ignore_class: bool,
}

impl Visit for ClassAnalyzer<'_> {
    noop_visit_type!(fail);

    fn visit_bin_expr(&mut self, n: &BinExpr) {
        n.visit_children_with(self);

        if n.op != op!("in") {
            return;
        }

        if let Expr::PrivateName(name) = &*n.left {
            self.brand_check_names.insert(name.name.clone());
        }
    }

    /// With `ignore_class` set, nested classes are not descended into —
    /// their private names belong to their own scope.
    fn visit_class(&mut self, n: &Class) {
        if self.ignore_class {
            return;
        }

        n.visit_children_with(self)
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es2022/src/static_blocks.rs | Rust | use rustc_hash::FxHashSet;
use swc_atoms::Atom;
use swc_common::{source_map::PLACEHOLDER_SP, util::take::Take};
use swc_ecma_ast::*;
use swc_ecma_utils::ExprFactory;
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith};
use swc_trace_macro::swc_trace;
// Visitor lowering `static { ... }` class blocks into static private
// properties whose initializer executes the block's statements.
struct ClassStaticBlock;

/// https://github.com/tc39/proposal-class-static-block
pub fn static_blocks() -> impl Pass {
    visit_mut_pass(ClassStaticBlock)
}
#[swc_trace]
impl ClassStaticBlock {
    /// Converts one `static { ... }` block into a static [`PrivateProp`]
    /// named `private_id`, whose initializer runs the block's statements.
    fn transform_static_block(
        &mut self,
        mut static_block: StaticBlock,
        private_id: Atom,
    ) -> PrivateProp {
        let mut stmts = static_block.body.stmts.take();
        let span = static_block.span;

        // We special-case the single expression case to avoid the iife, since it's
        // common.
        let value = if stmts.len() == 1 && stmts[0].is_expr() {
            stmts[0].take().expr().map(|expr_stmt| expr_stmt.expr)
        } else {
            // General case: wrap the statements in `(() => { ... })()`.
            static_block.body.stmts = stmts;
            let expr = CallExpr {
                callee: ArrowExpr {
                    body: Box::new(BlockStmtOrExpr::BlockStmt(static_block.body)),
                    ..Default::default()
                }
                .as_callee(),
                ..Default::default()
            }
            .into();
            Some(Box::new(expr))
        };

        PrivateProp {
            span,
            is_static: true,
            key: PrivateName {
                span: PLACEHOLDER_SP,
                name: private_id,
            },
            value,
            ..Default::default()
        }
    }
}
#[swc_trace]
impl VisitMut for ClassStaticBlock {
    noop_visit_mut_type!(fail);

    fn visit_mut_class(&mut self, class: &mut Class) {
        class.visit_mut_children_with(self);

        // Existing private names must not collide with the generated ones.
        let mut private_names = FxHashSet::default();
        for member in &class.body {
            if let ClassMember::PrivateProp(private_property) = member {
                private_names.insert(private_property.key.name.clone());
            }
        }

        // Counter shared across all static blocks of this class so each
        // gets a distinct private name (`_`, `_2`, ...).
        let mut count = 0;
        for member in class.body.iter_mut() {
            if let ClassMember::StaticBlock(static_block) = member {
                // Empty blocks have no observable effect; drop them.
                if static_block.body.stmts.is_empty() {
                    *member = ClassMember::dummy();
                    continue;
                }
                let static_block_private_id = generate_uid(&private_names, &mut count);
                *member = self
                    .transform_static_block(static_block.take(), static_block_private_id)
                    .into();
            };
        }
    }
}
/// Picks a private-field name (`_`, `_2`, `_3`, ...) that does not collide
/// with any existing private name of the class. `i` carries the counter
/// across multiple static blocks of the same class.
fn generate_uid(deny_list: &FxHashSet<Atom>, i: &mut u32) -> Atom {
    *i += 1;

    let mut uid: Atom = match *i {
        1 => "_".into(),
        n => format!("_{n}").into(),
    };

    // Bump the counter past any name already taken by the user.
    while deny_list.contains(&uid) {
        *i += 1;
        uid = format!("_{}", *i).into();
    }

    uid
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/src/lib.rs | Rust | use swc_ecma_ast::Pass;
pub use self::{
member_expr_lits::member_expression_literals, prop_lits::property_literals,
reserved_word::reserved_words,
};
mod member_expr_lits;
mod prop_lits;
mod reserved_word;
/// Make output es3-compatible.
///
/// `preserve_import` keeps the identifier `import` unrenamed (useful when a
/// later pass still needs to recognize it).
pub fn es3(preserve_import: bool) -> impl Pass {
    (
        property_literals(),
        member_expression_literals(),
        reserved_words(preserve_import),
    )
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/src/member_expr_lits.rs | Rust | use swc_ecma_ast::*;
use swc_ecma_utils::is_valid_ident;
use swc_ecma_visit::{fold_pass, standard_only_fold, Fold, FoldWith};
use swc_trace_macro::swc_trace;
/// babel: `transform-member-expression-literals`
///
/// # Input
/// ```js
/// obj["foo"] = "isValid";
///
/// obj.const = "isKeyword";
/// obj["var"] = "isKeyword";
/// ```
///
/// # Output
/// ```js
/// obj.foo = "isValid";
///
/// obj["const"] = "isKeyword";
/// obj["var"] = "isKeyword";
/// ```
pub fn member_expression_literals() -> impl Pass {
    fold_pass(MemberExprLit)
}

// Stateless folder implementing the transform above.
#[derive(Default, Clone, Copy)]
struct MemberExprLit;
#[swc_trace]
impl Fold for MemberExprLit {
    standard_only_fold!();

    fn fold_member_expr(&mut self, e: MemberExpr) -> MemberExpr {
        let e = e.fold_children_with(self);

        match e.prop {
            MemberProp::Ident(i) => {
                // ES3 reserves a larger keyword set, and invalid identifiers
                // can never follow `.`; either way the property must become
                // a computed string access (`obj["const"]`).
                let needs_quoting = i.sym.is_reserved()
                    || i.sym.is_reserved_in_strict_mode(true)
                    || i.sym.is_reserved_in_es3()
                    || !is_valid_ident(&i.sym);

                let prop = if needs_quoting {
                    MemberProp::Computed(ComputedPropName {
                        span: i.span,
                        expr: Lit::Str(Str {
                            span: i.span,
                            raw: None,
                            value: i.sym,
                        })
                        .into(),
                    })
                } else {
                    MemberProp::Ident(IdentName::new(i.sym, i.span))
                };

                MemberExpr { prop, ..e }
            }
            prop => MemberExpr { prop, ..e },
        }
    }
}
#[cfg(test)]
mod tests {
    use swc_ecma_transforms_testing::test;

    use super::*;

    // Snapshot tests; expected outputs live under `__swc_snapshots__`.
    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| fold_pass(MemberExprLit),
        basic,
        r#"obj["foo"] = "isValid";
obj.const = "isKeyword";
obj["var"] = "isKeyword";"#,
        ok_if_code_eq
    );

    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| fold_pass(MemberExprLit),
        issue_206,
        "const number = foo[bar1][baz1]"
    );

    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| fold_pass(MemberExprLit),
        issue_211,
        "_query[idx]=$this.attr('data-ref');"
    );
}
crates/swc_ecma_compat_es3/src/prop_lits.rs | Rust | use swc_ecma_ast::*;
use swc_ecma_utils::is_valid_ident;
use swc_ecma_visit::{fold_pass, standard_only_fold, Fold, FoldWith};
use swc_trace_macro::swc_trace;
/// babel: `transform-property-literals`
///
/// # Input
/// ```js
/// var foo = {
///     // changed
///     "bar": function () {},
///     "1": function () {},
///
///     // not changed
///     "default": 1,
///     [a]: 2,
///     foo: 1
/// };
/// ```
///
/// # Output
/// ```js
/// var foo = {
///     bar: function () {},
///     1: function () {},
///
///     "default": 1,
///     [a]: 2,
///     foo: 1
/// };
/// ```
pub fn property_literals() -> impl Pass {
    fold_pass(PropertyLiteral)
}

// Stateless folder implementing the transform above.
struct PropertyLiteral;
#[swc_trace]
impl Fold for PropertyLiteral {
    standard_only_fold!();

    fn fold_prop_name(&mut self, n: PropName) -> PropName {
        match n.fold_children_with(self) {
            // Quoted keys that are plain, non-reserved identifiers can be
            // unquoted (`"bar"` -> `bar`); everything else stays a string.
            PropName::Str(Str {
                raw, value, span, ..
            }) => {
                if !value.is_reserved() && is_valid_ident(&value) {
                    PropName::Ident(IdentName::new(value, span))
                } else {
                    PropName::Str(Str { span, raw, value })
                }
            }
            // Identifier keys ES3 cannot parse (reserved words, or names
            // containing `-` / `.`) must become string keys.
            PropName::Ident(IdentName { sym, span, .. }) => {
                if sym.is_reserved() || sym.contains('-') || sym.contains('.') {
                    PropName::Str(Str {
                        span,
                        raw: None,
                        value: sym,
                    })
                } else {
                    PropName::Ident(IdentName { span, sym })
                }
            }
            other => other,
        }
    }
}
#[cfg(test)]
mod tests {
    use swc_ecma_transforms_testing::test;

    use super::*;

    // Snapshot tests; expected outputs live under `__swc_snapshots__`.
    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| fold_pass(PropertyLiteral),
        babel_basic,
        r#"var foo = {
// changed
"bar": function () {},
"1": function () {},
// not changed
"default": 1,
[a]: 2,
foo: 1
};"#,
        ok_if_code_eq
    );

    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| fold_pass(PropertyLiteral),
        str_lit,
        r#"'use strict';
var x = {
'foo.bar': true
};"#,
        ok_if_code_eq
    );
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/src/reserved_word.rs | Rust | use swc_common::{util::take::Take, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith};
/// babel: `@babel/plugin-transform-reserved-words`
///
/// Some words were reserved in ES3 as potential future keywords but were not
/// reserved in ES5 and later. This plugin, to be used when targeting ES3
/// environments, renames variables from that set of words.
///
/// # Input
/// ```js
/// var abstract = 1;
/// var x = abstract + 1;
/// ```
///
/// # Output
/// ```js
/// var _abstract = 1;
/// var x = _abstract + 1;
/// ```
pub fn reserved_words(preserve_import: bool) -> impl Pass {
    visit_mut_pass(ReservedWord { preserve_import })
}

struct ReservedWord {
    // When set, the identifier `import` is never renamed.
    pub preserve_import: bool,
}
impl VisitMut for ReservedWord {
noop_visit_mut_type!(fail);
fn visit_mut_module_items(&mut self, n: &mut Vec<ModuleItem>) {
let mut extra_exports = Vec::new();
n.iter_mut().for_each(|module_item| {
match module_item {
ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(ExportDecl {
decl: decl @ Decl::Fn(..) | decl @ Decl::Class(..),
..
})) => {
let ident = match decl {
Decl::Class(d) => d.ident.clone(),
Decl::Fn(d) => d.ident.clone(),
_ => {
unreachable!()
}
};
if !ident.is_reserved_in_es3() {
return;
}
*module_item = decl.take().into();
let mut orig = ident.clone();
orig.visit_mut_with(self);
extra_exports.push(
ExportNamedSpecifier {
span: DUMMY_SP,
orig: orig.into(),
exported: Some(ident.into()),
is_type_only: false,
}
.into(),
);
}
ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(ExportDecl {
decl: Decl::Var(var),
..
})) => {
if var.decls.iter().all(|var| {
if let Pat::Ident(i) = &var.name {
!i.sym.is_reserved_in_es3()
} else {
true
}
}) {
return;
}
for var in &var.decls {
let ident = Ident::from(var.name.clone().expect_ident());
if !ident.is_reserved_in_es3() {
return;
}
let mut orig = ident.clone();
orig.visit_mut_with(self);
extra_exports.push(
ExportNamedSpecifier {
span: DUMMY_SP,
orig: orig.into(),
exported: Some(ident.into()),
is_type_only: false,
}
.into(),
);
}
*module_item = var.take().into();
}
_ => {}
}
module_item.visit_mut_with(self);
});
if !extra_exports.is_empty() {
let module_item = NamedExport {
span: DUMMY_SP,
specifiers: extra_exports,
src: None,
type_only: false,
with: None,
}
.into();
n.push(module_item);
}
}
fn visit_mut_export_named_specifier(&mut self, n: &mut ExportNamedSpecifier) {
if matches!(&n.orig, ModuleExportName::Ident(ident) if ident.is_reserved_in_es3()) {
n.exported.get_or_insert_with(|| n.orig.clone());
n.orig.visit_mut_with(self);
}
}
fn visit_mut_named_export(&mut self, n: &mut NamedExport) {
if n.src.is_none() {
n.visit_mut_children_with(self);
}
}
fn visit_mut_ident(&mut self, i: &mut Ident) {
if self.preserve_import && i.sym == *"import" {
return;
}
if i.is_reserved_in_es3() {
i.sym = format!("_{}", i.sym).into()
}
}
fn visit_mut_import_named_specifier(&mut self, s: &mut ImportNamedSpecifier) {
if s.local.is_reserved_in_es3() {
s.imported.get_or_insert_with(|| s.local.clone().into());
s.local.visit_mut_with(self);
}
}
fn visit_mut_member_expr(&mut self, e: &mut MemberExpr) {
e.obj.visit_mut_with(self);
if let MemberProp::Computed(c) = &mut e.prop {
c.visit_mut_with(self);
}
}
fn visit_mut_prop_name(&mut self, _: &mut PropName) {}
}
#[cfg(test)]
mod tests {
    use swc_ecma_transforms_testing::test;

    use super::*;

    // Convenience wrapper for snapshot tests with default options.
    macro_rules! identical {
        ($name:ident, $src:literal) => {
            test!(
                ::swc_ecma_parser::Syntax::default(),
                |_| reserved_words(false),
                $name,
                $src
            );
        };
    }

    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| reserved_words(false),
        babel_issue_6477,
        r#"
function utf8CheckByte(byte) {
if (byte <= 0x7F) return 0;
else if (byte >> 5 === 0x06) return 2;
else if (byte >> 4 === 0x0E) return 3;
else if (byte >> 3 === 0x1E) return 4;
return -1;
}
"#
    );

    identical!(export_as_default, "export { Foo as default }");

    test!(
        ::swc_ecma_parser::Syntax::default(),
        |_| reserved_words(false),
        issue_7164,
        r#"
import { int } from './a.js'
console.log(int)
export { int };
"#
    );

    test!(
        Default::default(),
        |_| reserved_words(false),
        issue_7237,
        r#"
export function char() {
console.log("char====char");
return "";
}
"#
    );
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/member_expr_lits.rs/basic.js | JavaScript | obj["foo"] = "isValid";
obj["const"] = "isKeyword";
obj["var"] = "isKeyword";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/member_expr_lits.rs/issue_206.js | JavaScript | const number = foo[bar1][baz1];
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/member_expr_lits.rs/issue_211.js | JavaScript | _query[idx] = $this.attr('data-ref');
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/prop_lits.rs/babel_basic.js | JavaScript | var foo = {
// changed
bar: function() {},
"1": function() {},
// not changed
"default": 1,
[a]: 2,
foo: 1
};
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/prop_lits.rs/str_lit.js | JavaScript | 'use strict';
var x = {
'foo.bar': true
};
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/reserved_word.rs/babel_issue_6477.js | JavaScript | function utf8CheckByte(_byte) {
if (_byte <= 0x7F) return 0;
else if (_byte >> 5 === 0x06) return 2;
else if (_byte >> 4 === 0x0E) return 3;
else if (_byte >> 3 === 0x1E) return 4;
return -1;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/reserved_word.rs/export_as_default.js | JavaScript | export { Foo as default };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/reserved_word.rs/issue_7164.js | JavaScript | import { int as _int } from './a.js';
console.log(_int);
export { _int as int };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_compat_es3/tests/__swc_snapshots__/src/reserved_word.rs/issue_7237.js | JavaScript | function _char() {
console.log("char====char");
return "";
}
export { _char as char };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_ext_transforms/src/jest.rs | Rust | use phf::phf_set;
use swc_common::util::take::Take;
use swc_ecma_ast::*;
use swc_ecma_utils::{prepend_stmts, StmtLike};
use swc_ecma_visit::{noop_visit_mut_type, visit_mut_pass, VisitMut, VisitMutWith};
static HOIST_METHODS: phf::Set<&str> = phf_set![
"mock",
"unmock",
"enableAutomock",
"disableAutomock",
"deepUnmock"
];
/// Returns the Jest hoisting pass: bare calls to the mock helpers listed in
/// `HOIST_METHODS` (via the global `jest` object or bindings imported from
/// `@jest/globals`) are moved to the front of their statement list.
pub fn jest() -> impl Pass {
    visit_mut_pass(Jest::default())
}
#[derive(Default)]
struct Jest {
    // Ids of bindings imported from `@jest/globals` whose (original) names
    // match `HOIST_METHODS`; direct calls through them are hoisted too.
    imported: Vec<Id>,
}
impl Jest {
    /// Shared implementation for module bodies and statement blocks.
    ///
    /// First visits children (so nested blocks are processed), then
    /// partitions the list into hoistable mock calls and everything else,
    /// and finally re-assembles it with the hoisted calls prepended.
    /// Relative order is preserved within each partition.
    fn visit_mut_stmt_like<T>(&mut self, orig: &mut Vec<T>)
    where
        T: StmtLike + VisitMutWith<Self>,
    {
        for item in &mut *orig {
            item.visit_mut_with(self);
        }
        let items = orig.take();
        let mut new = Vec::with_capacity(items.len());
        let mut hoisted = Vec::with_capacity(8);
        items.into_iter().for_each(|item| {
            match item.try_into_stmt() {
                Ok(stmt) => match &stmt {
                    // Only a bare expression statement that is a call can be
                    // hoisted, e.g. `jest.mock('./foo');`.
                    Stmt::Expr(ExprStmt { expr, .. }) => match &**expr {
                        Expr::Call(CallExpr {
                            callee: Callee::Expr(callee),
                            ..
                        }) => {
                            if self.should_hoist(callee) {
                                hoisted.push(T::from(stmt))
                            } else {
                                new.push(T::from(stmt))
                            }
                        }
                        _ => new.push(T::from(stmt)),
                    },
                    _ => new.push(T::from(stmt)),
                },
                // Non-statement items (imports/exports) are never hoisted.
                Err(node) => new.push(node),
            };
        });
        prepend_stmts(&mut new, hoisted.into_iter());
        *orig = new;
    }
    /// Whether a call with callee `e` should be hoisted: either a binding
    /// collected from `@jest/globals` (see `visit_mut_module_items`) or a
    /// member access on the global `jest` object whose property name is in
    /// `HOIST_METHODS`.
    fn should_hoist(&self, e: &Expr) -> bool {
        match e {
            Expr::Ident(i) => self.imported.iter().any(|imported| *imported == i.to_id()),
            Expr::Member(
                callee @ MemberExpr {
                    prop: MemberProp::Ident(prop),
                    ..
                },
            ) => is_global_jest(&callee.obj) && HOIST_METHODS.contains(&*prop.sym),
            _ => false,
        }
    }
}
impl VisitMut for Jest {
    noop_visit_mut_type!();
    /// Collects hoistable bindings imported from `@jest/globals`, then runs
    /// the shared statement-hoisting logic over the module body.
    fn visit_mut_module_items(&mut self, items: &mut Vec<ModuleItem>) {
        for item in items.iter() {
            if let ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl {
                specifiers, src, ..
            })) = item
            {
                if src.value == "@jest/globals" {
                    for s in specifiers {
                        match s {
                            // `import { mock } from '@jest/globals'`
                            ImportSpecifier::Named(ImportNamedSpecifier {
                                local,
                                imported: None,
                                is_type_only: false,
                                ..
                            }) => {
                                if HOIST_METHODS.contains(&*local.sym) {
                                    self.imported.push(local.to_id());
                                }
                            }
                            // `import { mock as m } from '@jest/globals'`:
                            // match against the original (imported) name.
                            ImportSpecifier::Named(ImportNamedSpecifier {
                                local,
                                imported: Some(exported),
                                is_type_only: false,
                                ..
                            }) => {
                                if HOIST_METHODS.contains(exported.atom()) {
                                    self.imported.push(local.to_id());
                                }
                            }
                            _ => {}
                        }
                    }
                }
            }
        }
        self.visit_mut_stmt_like(items)
    }
    fn visit_mut_stmts(&mut self, stmts: &mut Vec<Stmt>) {
        self.visit_mut_stmt_like(stmts)
    }
}
/// Whether `e` ultimately resolves to the global `jest` object, looking
/// through member accesses and call chains (e.g. `jest.requireActual(...).x`).
fn is_global_jest(e: &Expr) -> bool {
    match e {
        Expr::Ident(ident) => ident.sym == *"jest",
        Expr::Member(member) => is_global_jest(&member.obj),
        Expr::Call(call) => match &call.callee {
            Callee::Expr(inner) => is_global_jest(inner),
            _ => false,
        },
        _ => false,
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_ext_transforms/src/lib.rs | Rust | pub mod jest;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/benches/lexer.rs | Rust | extern crate swc_malloc;
use codspeed_criterion_compat::{black_box, criterion_group, criterion_main, Bencher, Criterion};
use swc_common::FileName;
use swc_ecma_fast_parser::{token::TokenType, JscTarget, Lexer, Syntax};
/// Runs the fast lexer over `src` to EOF once per criterion iteration.
///
/// Uses `testing::run_test` to obtain a source map, counts tokens so an
/// accidentally-empty run trips the assert, and routes each token through
/// `black_box` so the optimizer cannot discard the lexing work.
fn bench_module(b: &mut Bencher, syntax: Syntax, src: &'static str) {
    let _ = ::testing::run_test(false, |cm, _| {
        let fm = cm.new_source_file(FileName::Anon.into(), src.into());
        b.iter(|| {
            let mut count = 0;
            let mut lexer = Lexer::new(&fm.src, JscTarget::EsNext, syntax, None);
            loop {
                if lexer.current.token_type == TokenType::EOF {
                    break;
                }
                count += 1;
                let token = lexer.next_token();
                // A lex error aborts the benchmark with the source location.
                black_box(token).unwrap_or_else(|err| {
                    let loc = cm.lookup_char_pos(err.span.lo);
                    panic!("{err:?}: {loc:?}");
                });
            }
            assert_ne!(count, 0);
        });
        Ok(())
    });
}
fn bench_files(c: &mut Criterion) {
c.bench_function("es/fast-lexer/angular", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/angular-1.2.5.js"),
)
});
c.bench_function("es/fast-lexer/backbone", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/backbone-1.1.0.js"),
)
});
c.bench_function("es/fast-lexer/jquery", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/jquery-1.9.1.js"),
)
});
c.bench_function("es/fast-lexer/jquery mobile", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/jquery.mobile-1.4.2.js"),
)
});
c.bench_function("es/fast-lexer/mootools", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/mootools-1.4.5.js"),
)
});
c.bench_function("es/fast-lexer/underscore", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/underscore-1.5.2.js"),
)
});
c.bench_function("es/fast-lexer/three", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/three-0.138.3.js"),
)
});
c.bench_function("es/fast-lexer/yui", |b| {
bench_module(
b,
Default::default(),
include_str!("../../swc_ecma_parser/benches/files/yui-3.12.0.js"),
)
});
}
criterion_group!(benches, bench_files);
criterion_main!(benches);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/error.rs | Rust | //! Error types for the ECMAScript/TypeScript parser
use std::fmt;
use swc_common::Span;
/// Result type for parser operations
pub type Result<T> = std::result::Result<T, Error>;
/// Parser error
///
/// Pairs an [`ErrorKind`] with the source span it was raised at, so callers
/// can report a precise location.
#[derive(Debug)]
pub struct Error {
    /// Type of error
    pub kind: ErrorKind,
    /// Source span where the error occurred
    pub span: Span,
}
/// Types of parser errors
#[derive(Debug)]
pub enum ErrorKind {
/// Unexpected token encountered
UnexpectedToken {
expected: Option<&'static str>,
got: String,
},
/// Unexpected end of file
UnexpectedEof { expected: Option<&'static str> },
/// Invalid numeric literal
InvalidNumber { reason: &'static str },
/// Invalid string literal (unterminated, invalid escape sequence, etc.)
InvalidString { reason: &'static str },
/// Invalid regular expression
InvalidRegExp { reason: &'static str },
/// Invalid template literal
InvalidTemplate { reason: &'static str },
/// Invalid identifier
InvalidIdentifier { reason: &'static str },
/// Invalid assignment target
InvalidAssignmentTarget,
/// Invalid destructuring pattern
InvalidDestructuringPattern,
/// Invalid use of await (outside async function)
InvalidAwait,
/// Invalid use of yield (outside generator function)
InvalidYield,
/// Invalid use of super
InvalidSuper,
/// Invalid use of new.target
InvalidNewTarget,
/// Invalid use of import.meta
InvalidImportMeta,
/// Unexpected keyword in this position
UnexpectedKeyword { keyword: &'static str },
/// Unexpected reserved word
UnexpectedReservedWord { word: String },
/// Duplicate binding
DuplicateBinding { name: String },
/// General parser error
General { message: String },
/// Unterminated string literal
UnterminatedString,
/// Invalid hex escape sequence in string
InvalidHexEscape,
/// Invalid unicode escape sequence in string
InvalidUnicodeEscape,
/// Invalid BigInt literal
InvalidBigInt,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.kind {
ErrorKind::UnexpectedToken { expected, got } => {
if let Some(expected) = expected {
write!(f, "Expected {}, got {}", expected, got)
} else {
write!(f, "Unexpected token {}", got)
}
}
ErrorKind::UnexpectedEof { expected } => {
if let Some(expected) = expected {
write!(f, "Unexpected end of file, expected {}", expected)
} else {
write!(f, "Unexpected end of file")
}
}
ErrorKind::InvalidNumber { reason } => {
write!(f, "Invalid numeric literal: {}", reason)
}
ErrorKind::InvalidString { reason } => {
write!(f, "Invalid string literal: {}", reason)
}
ErrorKind::InvalidRegExp { reason } => {
write!(f, "Invalid regular expression: {}", reason)
}
ErrorKind::InvalidTemplate { reason } => {
write!(f, "Invalid template literal: {}", reason)
}
ErrorKind::InvalidIdentifier { reason } => {
write!(f, "Invalid identifier: {}", reason)
}
ErrorKind::InvalidAssignmentTarget => {
write!(f, "Invalid assignment target")
}
ErrorKind::InvalidDestructuringPattern => {
write!(f, "Invalid destructuring pattern")
}
ErrorKind::InvalidAwait => {
write!(f, "await is only valid in async functions")
}
ErrorKind::InvalidYield => {
write!(f, "yield is only valid in generator functions")
}
ErrorKind::InvalidSuper => {
write!(f, "Invalid use of super")
}
ErrorKind::InvalidNewTarget => {
write!(f, "new.target can only be used in functions")
}
ErrorKind::InvalidImportMeta => {
write!(f, "import.meta can only be used in modules")
}
ErrorKind::UnexpectedKeyword { keyword } => {
write!(f, "Unexpected keyword '{}'", keyword)
}
ErrorKind::UnexpectedReservedWord { word } => {
write!(f, "Unexpected reserved word '{}'", word)
}
ErrorKind::DuplicateBinding { name } => {
write!(f, "Duplicate binding '{}'", name)
}
ErrorKind::General { message } => {
write!(f, "{}", message)
}
ErrorKind::UnterminatedString => {
write!(f, "Unterminated string literal")
}
ErrorKind::InvalidHexEscape => {
write!(f, "Invalid hexadecimal escape sequence")
}
ErrorKind::InvalidUnicodeEscape => {
write!(f, "Invalid unicode escape sequence")
}
ErrorKind::InvalidBigInt => {
write!(f, "Invalid BigInt literal")
}
}
}
}
impl std::error::Error for Error {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/common.rs | Rust | //! Common helpers for the lexer
//!
//! This module contains shared functionality used across different lexer
//! modules.
use super::Lexer;
use crate::error::{Error, ErrorKind, Result};
impl Lexer<'_> {
/// Read a hexadecimal escape sequence of specified length
pub(super) fn read_hex_escape(&mut self, len: usize) -> Result<u32> {
let mut result = 0u32;
for _ in 0..len {
let digit = match self.cursor.peek() {
Some(b'0'..=b'9') => self.cursor.peek().unwrap() - b'0',
Some(b'a'..=b'f') => self.cursor.peek().unwrap() - b'a' + 10,
Some(b'A'..=b'F') => self.cursor.peek().unwrap() - b'A' + 10,
_ => {
let span = self.span();
return Err(Error {
kind: ErrorKind::InvalidString {
reason: "Invalid hexadecimal escape sequence",
},
span,
});
}
};
result = (result << 4) | (digit as u32);
self.cursor.advance();
}
Ok(result)
}
/// Read a Unicode escape sequence
pub(super) fn read_unicode_escape(&mut self) -> Result<char> {
match self.cursor.peek() {
// Unicode code point escape: \u{HHHHHH}
Some(b'{') => {
self.cursor.advance();
let mut codepoint = 0u32;
let mut digit_count = 0;
loop {
match self.cursor.peek() {
Some(b'}') => {
self.cursor.advance();
break;
}
Some(b'0'..=b'9') => {
let digit = self.cursor.peek().unwrap() - b'0';
codepoint = (codepoint << 4) | (digit as u32);
self.cursor.advance();
digit_count += 1;
}
Some(b'a'..=b'f') => {
let digit = self.cursor.peek().unwrap() - b'a' + 10;
codepoint = (codepoint << 4) | (digit as u32);
self.cursor.advance();
digit_count += 1;
}
Some(b'A'..=b'F') => {
let digit = self.cursor.peek().unwrap() - b'A' + 10;
codepoint = (codepoint << 4) | (digit as u32);
self.cursor.advance();
digit_count += 1;
}
_ => {
let span = self.span();
return Err(Error {
kind: ErrorKind::InvalidString {
reason: "Invalid Unicode escape sequence",
},
span,
});
}
}
// Too many digits or value is too large
if digit_count > 6 || codepoint > 0x10ffff {
let span = self.span();
return Err(Error {
kind: ErrorKind::InvalidString {
reason: "Unicode codepoint must be less than or equal to 0x10FFFF",
},
span,
});
}
}
if digit_count == 0 {
let span = self.span();
return Err(Error {
kind: ErrorKind::InvalidString {
reason: "Empty Unicode escape sequence",
},
span,
});
}
std::char::from_u32(codepoint).ok_or_else(|| Error {
kind: ErrorKind::InvalidString {
reason: "Invalid Unicode codepoint",
},
span: self.span(),
})
}
// Regular 4-digit Unicode escape: \uHHHH
_ => {
let codepoint = self.read_hex_escape(4)?;
std::char::from_u32(codepoint).ok_or_else(|| Error {
kind: ErrorKind::InvalidString {
reason: "Invalid Unicode codepoint",
},
span: self.span(),
})
}
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/cursor.rs | Rust | //! Byte-level cursor for fast input traversal
//!
//! This cursor operates directly on UTF-8 bytes for maximum performance.
use assume::assume;
use swc_common::BytePos;
use wide::u8x16;
use crate::util::unlikely;
/// High-performance cursor for traversing input bytes
#[repr(C)] // Ensure predictable memory layout for better cache behavior
pub struct Cursor<'a> {
/// Input source as bytes
input: &'a [u8],
/// Current position in bytes
pos: u32,
/// Length of the input in bytes
len: u32,
}
impl<'a> Cursor<'a> {
/// Create a new cursor from a string
#[inline(always)]
pub fn new(input: &'a str) -> Self {
let bytes = input.as_bytes();
Self {
input: bytes,
pos: 0,
len: bytes.len() as u32,
}
}
/// Get the current position as BytePos
#[inline(always)]
pub fn pos(&self) -> BytePos {
BytePos(self.pos)
}
/// Check if the cursor is at the end of the input
#[inline(always)]
pub fn is_eof(&self) -> bool {
self.pos >= self.len
}
/// Peek at the current byte without advancing
#[inline(always)]
pub fn peek(&self) -> Option<u8> {
if unlikely(self.is_eof()) {
None
} else {
// SAFETY: We've checked that pos < len
Some(unsafe { *self.input.get_unchecked(self.pos as usize) })
}
}
/// Peek at a byte at a specific offset from the current position
#[inline(always)]
pub fn peek_at(&self, offset: u32) -> Option<u8> {
let target_pos = self.pos + offset;
if unlikely(target_pos >= self.len) {
None
} else {
// SAFETY: We've checked that target_pos < len
Some(unsafe { *self.input.get_unchecked(target_pos as usize) })
}
}
/// Peek at multiple bytes without advancing
#[inline(always)]
pub fn peek_n(&self, n: u32) -> &[u8] {
let end = (self.pos + n).min(self.len);
// SAFETY: We've ensured end <= len
unsafe { self.input.get_unchecked(self.pos as usize..end as usize) }
}
/// Advance the cursor by one byte
#[inline(always)]
pub fn advance(&mut self) {
assume!(unsafe: !self.is_eof());
self.pos += 1;
}
/// Advance the cursor by n bytes
#[inline(always)]
pub fn advance_n(&mut self, n: u32) {
assume!(unsafe: self.pos + n <= self.len);
self.pos += n;
}
/// Advance until the predicate returns false or EOF is reached
#[inline]
pub fn advance_while<F>(&mut self, mut predicate: F) -> u32
where
F: FnMut(u8) -> bool,
{
let start = self.pos;
self.advance_while_scalar(&mut predicate);
self.pos - start
}
/// Scalar (non-SIMD) implementation of advance_while
#[inline]
fn advance_while_scalar<F>(&mut self, predicate: &mut F)
where
F: FnMut(u8) -> bool,
{
const BATCH_SIZE: u32 = 32;
// Process in batches if we have more than BATCH_SIZE bytes
while self.pos + BATCH_SIZE <= self.len {
let mut should_stop = false;
// Check all bytes in the batch
for i in 0..BATCH_SIZE {
// SAFETY: We've verified bounds above
let byte = unsafe { *self.input.get_unchecked((self.pos + i) as usize) };
if !predicate(byte) {
should_stop = true;
break;
}
}
if should_stop {
// Found stopping byte, switch to byte-by-byte
break;
}
// Skip the entire batch
self.pos += BATCH_SIZE;
}
// Byte-by-byte for the remainder
while let Some(byte) = self.peek() {
if !predicate(byte) {
break;
}
self.advance();
}
}
/// Get slice from the current position to the end
#[inline(always)]
pub fn rest(&self) -> &'a [u8] {
// SAFETY: pos is always <= len
unsafe { self.input.get_unchecked(self.pos as usize..) }
}
/// Get a slice of the input
#[inline(always)]
pub fn slice(&self, start: u32, end: u32) -> &'a [u8] {
let real_start = start.min(self.len);
let real_end = end.min(self.len);
// SAFETY: We've validated bounds
unsafe {
self.input
.get_unchecked(real_start as usize..real_end as usize)
}
}
/// Get the current position
#[inline(always)]
pub fn position(&self) -> u32 {
self.pos
}
/// Reset the cursor to a specific position
#[inline(always)]
pub fn reset_to(&mut self, pos: BytePos) {
self.pos = pos.0;
}
/// Find the next occurrence of a byte
#[inline]
pub fn find_byte(&self, byte: u8) -> Option<u32> {
// If we're at or near EOF, use the standard implementation
if unlikely(self.pos + 16 > self.len) {
return self.find_byte_scalar(byte);
}
// SIMD implementation using wide crate
self.find_byte_simd(byte)
}
/// SIMD-accelerated implementation of find_byte
#[inline]
fn find_byte_simd(&self, byte: u8) -> Option<u32> {
let input = &self.input[self.pos as usize..];
let mut position = 0u32;
// Process 16 bytes at a time
while position + 16 <= input.len() as u32 {
// Create a vector with our pattern
let needle = u8x16::splat(byte);
// Create a vector with current chunk of data
let mut data = [0u8; 16];
data.copy_from_slice(&input[position as usize..(position + 16) as usize]);
let chunk = u8x16::new(data);
// Compare for equality
let mask = chunk.cmp_eq(needle);
// Converting to array to check byte-by-byte (no move_mask available)
let mask_array = mask.to_array();
// Check for any matches
#[allow(clippy::needless_range_loop)]
for i in 0..16 {
if mask_array[i] != 0 {
return Some(self.pos + position + i as u32);
}
}
position += 16;
}
// Handle the remainder with the scalar implementation
if position < input.len() as u32 {
return input[position as usize..]
.iter()
.position(|&b| b == byte)
.map(|pos| self.pos + position + pos as u32);
}
None
}
/// Standard fallback implementation
#[inline]
fn find_byte_scalar(&self, byte: u8) -> Option<u32> {
self.input[self.pos as usize..]
.iter()
.position(|&b| b == byte)
.map(|pos| self.pos + pos as u32)
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/identifier.rs | Rust | //! Identifier processing for the lexer
//!
//! This module handles the parsing of ECMAScript/TypeScript identifiers.
use swc_atoms::Atom;
use super::Lexer;
use crate::{
error::Result,
token::{keyword_to_token_type, Token, TokenType, TokenValue},
};
/// Fast mapping from ASCII to check if a character is valid for identifier
/// start or continuation using bit flags
static IDENT_CHAR: [u8; 128] = {
let mut table = [0u8; 128];
// Mark identifier start characters (a-z, A-Z, _, $)
let mut i = 0;
while i < 26 {
table[(b'a' + i) as usize] |= 3; // Both start and continue
table[(b'A' + i) as usize] |= 3; // Both start and continue
i += 1;
}
table[b'_' as usize] |= 3; // Both start and continue
table[b'$' as usize] |= 3; // Both start and continue
// Mark digits (0-9) as continue only
i = 0;
while i < 10 {
table[(b'0' + i) as usize] |= 2; // Continue only
i += 1;
}
table
};
/// Direct lookup tables for common 2-6 letter keywords
/// These give a huge performance benefit by allowing direct match without any
/// loops Each table is specific to a keyword length for maximum performance
// 2-letter keywords (do, if, in, of)
static KEYWORDS_LEN2: [(u16, TokenType); 4] = [
(u16::from_be_bytes(*b"do"), TokenType::Do),
(u16::from_be_bytes(*b"if"), TokenType::If),
(u16::from_be_bytes(*b"in"), TokenType::In),
(u16::from_be_bytes(*b"of"), TokenType::Of),
];
// 3-letter keywords (for, let, new, try, var)
static KEYWORDS_LEN3: [(u32, TokenType); 5] = [
(
(u32::from_be_bytes([0, 0, 0, 0]) | u32::from_be_bytes(*b"for\0")) >> 8,
TokenType::For,
),
(
(u32::from_be_bytes([0, 0, 0, 0]) | u32::from_be_bytes(*b"let\0")) >> 8,
TokenType::Let,
),
(
(u32::from_be_bytes([0, 0, 0, 0]) | u32::from_be_bytes(*b"new\0")) >> 8,
TokenType::New,
),
(
(u32::from_be_bytes([0, 0, 0, 0]) | u32::from_be_bytes(*b"try\0")) >> 8,
TokenType::Try,
),
(
(u32::from_be_bytes([0, 0, 0, 0]) | u32::from_be_bytes(*b"var\0")) >> 8,
TokenType::Var,
),
];
// 4-letter keywords (case, else, this, true, null, void, with)
static KEYWORDS_LEN4: [(u32, TokenType); 7] = [
(u32::from_be_bytes(*b"case"), TokenType::Case),
(u32::from_be_bytes(*b"else"), TokenType::Else),
(u32::from_be_bytes(*b"null"), TokenType::Null),
(u32::from_be_bytes(*b"this"), TokenType::This),
(u32::from_be_bytes(*b"true"), TokenType::True),
(u32::from_be_bytes(*b"void"), TokenType::Void),
(u32::from_be_bytes(*b"with"), TokenType::With),
];
// 5-letter keywords (async, await, break, class, const, false, super, throw,
// while, yield)
static KEYWORDS_LEN5: [(u64, TokenType); 10] = [
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"async\0\0\0")) >> 24,
TokenType::Async,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"await\0\0\0")) >> 24,
TokenType::Await,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"break\0\0\0")) >> 24,
TokenType::Break,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"class\0\0\0")) >> 24,
TokenType::Class,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"const\0\0\0")) >> 24,
TokenType::Const,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"false\0\0\0")) >> 24,
TokenType::False,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"super\0\0\0")) >> 24,
TokenType::Super,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"throw\0\0\0")) >> 24,
TokenType::Throw,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"while\0\0\0")) >> 24,
TokenType::While,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"yield\0\0\0")) >> 24,
TokenType::Yield,
),
];
// 6-letter keywords (delete, export, import, return, static, switch, typeof)
static KEYWORDS_LEN6: [(u64, TokenType); 7] = [
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"delete\0\0")) >> 16,
TokenType::Delete,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"export\0\0")) >> 16,
TokenType::Export,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"import\0\0")) >> 16,
TokenType::Import,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"return\0\0")) >> 16,
TokenType::Return,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"static\0\0")) >> 16,
TokenType::Static,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"switch\0\0")) >> 16,
TokenType::Switch,
),
(
(u64::from_be_bytes([0, 0, 0, 0, 0, 0, 0, 0]) | u64::from_be_bytes(*b"typeof\0\0")) >> 16,
TokenType::TypeOf,
),
];
/// Direct keyword lookup table for longer keywords
/// This allows for much faster keyword checks without going through the PHF map
/// Each entry contains the keyword and its corresponding TokenType (or None if
/// not a keyword)
struct KeywordEntry {
keyword: &'static str,
token_type: Option<TokenType>,
}
static KEYWORD_LOOKUP: [KeywordEntry; 25] = [
// Common longer keywords (length > 6)
KeywordEntry {
keyword: "function",
token_type: Some(TokenType::Function),
},
KeywordEntry {
keyword: "continue",
token_type: Some(TokenType::Continue),
},
KeywordEntry {
keyword: "debugger",
token_type: Some(TokenType::Debugger),
},
KeywordEntry {
keyword: "default",
token_type: Some(TokenType::Default),
},
KeywordEntry {
keyword: "finally",
token_type: Some(TokenType::Finally),
},
KeywordEntry {
keyword: "extends",
token_type: Some(TokenType::Extends),
},
KeywordEntry {
keyword: "catch",
token_type: Some(TokenType::Catch),
},
// TypeScript-specific common keywords
KeywordEntry {
keyword: "interface",
token_type: Some(TokenType::Interface),
},
KeywordEntry {
keyword: "type",
token_type: Some(TokenType::Type),
},
KeywordEntry {
keyword: "public",
token_type: Some(TokenType::Public),
},
KeywordEntry {
keyword: "private",
token_type: Some(TokenType::Private),
},
KeywordEntry {
keyword: "protected",
token_type: Some(TokenType::Protected),
},
KeywordEntry {
keyword: "abstract",
token_type: Some(TokenType::Abstract),
},
KeywordEntry {
keyword: "implements",
token_type: Some(TokenType::Implements),
},
KeywordEntry {
keyword: "readonly",
token_type: Some(TokenType::Readonly),
},
KeywordEntry {
keyword: "namespace",
token_type: Some(TokenType::Namespace),
},
KeywordEntry {
keyword: "declare",
token_type: Some(TokenType::Declare),
},
KeywordEntry {
keyword: "keyof",
token_type: Some(TokenType::Keyof),
},
KeywordEntry {
keyword: "enum",
token_type: Some(TokenType::Enum),
},
KeywordEntry {
keyword: "instanceof",
token_type: Some(TokenType::InstanceOf),
},
KeywordEntry {
keyword: "constructor",
token_type: Some(TokenType::Constructor),
},
KeywordEntry {
keyword: "undefined",
token_type: Some(TokenType::Undefined),
},
KeywordEntry {
keyword: "boolean",
token_type: Some(TokenType::Boolean),
},
KeywordEntry {
keyword: "number",
token_type: Some(TokenType::Number),
},
KeywordEntry {
keyword: "string",
token_type: Some(TokenType::String),
},
];
/// Fast keyword lookup by length and first character
/// This is a nested table indexed by [length][first_char] that contains indices
/// into KEYWORD_LOOKUP where length is capped at 16 chars and first_char is a
/// lowercase ASCII letter
///
/// A value of 255 indicates no entry
static KEYWORD_INDEX: [[u8; 26]; 16] = {
let mut table = [[255u8; 26]; 16];
// Initialize with 255 (meaning no entry)
let mut i = 0;
while i < KEYWORD_LOOKUP.len() {
let entry = &KEYWORD_LOOKUP[i];
let word = entry.keyword;
let len = word.len();
if len > 0 && len <= 16 {
let first_char = word.as_bytes()[0];
if first_char >= b'a' && first_char <= b'z' {
// Index by (length-1) and (first_char - 'a')
table[len - 1][(first_char - b'a') as usize] = i as u8;
}
}
i += 1;
}
table
};
impl Lexer<'_> {
/// Read an identifier or keyword
#[inline(always)]
pub(super) fn read_identifier(&mut self) -> Result<Token> {
let start_pos = self.start_pos;
// Skip the first character (already verified as identifier start)
self.cursor.advance();
// Read as many identifier continue chars as possible
self.cursor.advance_while(Self::is_identifier_continue);
// Extract the identifier text
let span = self.span();
let ident_start = start_pos.0;
let ident_end = self.cursor.position();
let ident_bytes = self.cursor.slice(ident_start, ident_end);
let ident_str = unsafe { std::str::from_utf8_unchecked(ident_bytes) };
let had_line_break_bool: bool = self.had_line_break.into();
// Ultra-fast path for common 2-6 letter keywords using direct table lookup
let len = ident_bytes.len();
// Only process if first byte is an ASCII lowercase letter (all keywords start
// with a-z)
if len > 0 && ident_bytes[0] >= b'a' && ident_bytes[0] <= b'z' {
match len {
// Direct lookup for 2-letter keywords
2 => {
let word_bytes =
unsafe { [*ident_bytes.get_unchecked(0), *ident_bytes.get_unchecked(1)] };
let word_value = u16::from_be_bytes(word_bytes);
for &(keyword_value, token_type) in &KEYWORDS_LEN2 {
if word_value == keyword_value {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
}
}
// Direct lookup for 3-letter keywords
3 => {
let word_bytes = unsafe {
[
*ident_bytes.get_unchecked(0),
*ident_bytes.get_unchecked(1),
*ident_bytes.get_unchecked(2),
0,
]
};
let word_value = (u32::from_be_bytes(word_bytes)) >> 8;
for &(keyword_value, token_type) in &KEYWORDS_LEN3 {
if word_value == keyword_value {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
}
}
// Direct lookup for 4-letter keywords
4 => {
let word_bytes = unsafe {
[
*ident_bytes.get_unchecked(0),
*ident_bytes.get_unchecked(1),
*ident_bytes.get_unchecked(2),
*ident_bytes.get_unchecked(3),
]
};
let word_value = u32::from_be_bytes(word_bytes);
for &(keyword_value, token_type) in &KEYWORDS_LEN4 {
if word_value == keyword_value {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
}
}
// Direct lookup for 5-letter keywords
5 => {
let word_bytes = unsafe {
[
*ident_bytes.get_unchecked(0),
*ident_bytes.get_unchecked(1),
*ident_bytes.get_unchecked(2),
*ident_bytes.get_unchecked(3),
*ident_bytes.get_unchecked(4),
0,
0,
0,
]
};
let word_value = (u64::from_be_bytes(word_bytes)) >> 24;
for &(keyword_value, token_type) in &KEYWORDS_LEN5 {
if word_value == keyword_value {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
}
}
// Direct lookup for 6-letter keywords
6 => {
let word_bytes = unsafe {
[
*ident_bytes.get_unchecked(0),
*ident_bytes.get_unchecked(1),
*ident_bytes.get_unchecked(2),
*ident_bytes.get_unchecked(3),
*ident_bytes.get_unchecked(4),
*ident_bytes.get_unchecked(5),
0,
0,
]
};
let word_value = (u64::from_be_bytes(word_bytes)) >> 16;
for &(keyword_value, token_type) in &KEYWORDS_LEN6 {
if word_value == keyword_value {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
}
}
// Fast path for longer keywords using the lookup table
7..=16 => {
// Get index in KEYWORD_LOOKUP using our index table
let first_char_idx = unsafe { (*ident_bytes.get_unchecked(0) - b'a') as usize };
let lookup_idx = unsafe {
*KEYWORD_INDEX
.get_unchecked(len - 1)
.get_unchecked(first_char_idx)
};
if lookup_idx != 255 {
// Check if the word matches the entry
let entry = unsafe { KEYWORD_LOOKUP.get_unchecked(lookup_idx as usize) };
if entry.keyword == ident_str {
if let Some(token_type) = entry.token_type {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
}
}
}
_ => {}
}
}
// Fallback path: Check in the PHF map if this is a keyword
// Only runs for potential keywords not in our direct lookup tables
if let Some(token_type) = keyword_to_token_type(ident_str) {
return Ok(Token::new(
token_type,
span,
had_line_break_bool,
TokenValue::None,
));
}
// Not a keyword, return as identifier with the word value
Ok(Token::new(
TokenType::Ident,
span,
had_line_break_bool,
TokenValue::Word(Atom::from(ident_str)),
))
}
/// Super fast check: can `ch` begin an ASCII identifier (`a-z`, `A-Z`, `_`, `$`)?
///
/// Bit 0 of each `IDENT_CHAR` entry marks identifier-start bytes.
#[inline(always)]
pub(crate) fn is_ascii_id_start(ch: u8) -> bool {
    if ch >= 128 {
        return false;
    }
    // SAFETY: `ch < 128` here, and `IDENT_CHAR` has an entry for every
    // ASCII byte, so the unchecked index is in bounds.
    let flags = unsafe { *IDENT_CHAR.get_unchecked(ch as usize) };
    flags & 1 != 0
}
/// Super fast check: can `ch` continue an ASCII identifier (id-start bytes plus `0-9`)?
///
/// Bit 1 of each `IDENT_CHAR` entry marks identifier-continue bytes.
#[inline(always)]
pub(crate) fn is_ascii_id_continue(ch: u8) -> bool {
    if ch >= 128 {
        return false;
    }
    // SAFETY: `ch < 128` here, and `IDENT_CHAR` has an entry for every
    // ASCII byte, so the unchecked index is in bounds.
    let flags = unsafe { *IDENT_CHAR.get_unchecked(ch as usize) };
    flags & 2 != 0
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/jsx.rs | Rust | //! JSX syntax processing for the lexer
//!
//! This module handles the parsing of JSX syntax in React-style templates.
use swc_atoms::Atom;
use super::Lexer;
use crate::{
error::Result,
token::{Token, TokenType, TokenValue},
};
impl Lexer<'_> {
    /// Read a JSX token when the lexer is inside JSX context.
    ///
    /// Dispatches on the next byte: tag delimiters (`<`, `>`, `/`),
    /// attribute syntax (`=` and quoted values), embedded expressions
    /// (`{`, which leaves JSX context), and plain JSX text.
    pub(super) fn read_jsx_token(&mut self, had_line_break: bool) -> Result<Token> {
        match self.cursor.peek() {
            // Start of JSX element or fragment
            Some(b'<') => {
                self.cursor.advance();
                // Fragment opening `<>`: the `>` is consumed as part of the
                // tag-start token. Either way the token type is JSXTagStart.
                if self.cursor.peek() == Some(b'>') {
                    self.cursor.advance();
                }
                Ok(Token::new(
                    TokenType::JSXTagStart,
                    self.span(),
                    had_line_break,
                    TokenValue::None,
                ))
            }
            // End of JSX element or fragment
            Some(b'>') => {
                self.cursor.advance();
                Ok(Token::new(
                    TokenType::Gt,
                    self.span(),
                    had_line_break,
                    TokenValue::None,
                ))
            }
            // JSX closing tag or self-closing tag end
            Some(b'/') => {
                self.cursor.advance();
                if self.cursor.peek() == Some(b'>') {
                    self.cursor.advance();
                    // Self-closing tag: `/>`
                    Ok(Token::new(
                        TokenType::JSXTagEnd,
                        self.span(),
                        had_line_break,
                        TokenValue::None,
                    ))
                } else {
                    // Closing tag start: `</…`
                    Ok(Token::new(
                        TokenType::Slash,
                        self.span(),
                        had_line_break,
                        TokenValue::None,
                    ))
                }
            }
            // JSX attribute value start
            Some(b'=') => {
                self.cursor.advance();
                Ok(Token::new(
                    TokenType::Eq,
                    self.span(),
                    had_line_break,
                    TokenValue::None,
                ))
            }
            // JSX quoted attribute value (binds the quote byte directly,
            // avoiding the previous `peek().unwrap()`)
            Some(quote @ (b'"' | b'\'')) => self.read_string(quote),
            // JSX expression in attributes or children
            Some(b'{') => {
                self.cursor.advance();
                self.in_jsx_element = false; // Exit JSX context
                Ok(Token::new(
                    TokenType::LBrace,
                    self.span(),
                    had_line_break,
                    TokenValue::None,
                ))
            }
            // JSX text content (also handles EOF)
            _ => self.read_jsx_text(had_line_break),
        }
    }

    /// Read JSX text content up to the next `<`, `{`, `>` or EOF.
    ///
    /// Whitespace-only runs produce no token (the next token is scanned
    /// instead); otherwise a `JSXText` token carrying both the accumulated
    /// value and the raw source slice is returned.
    fn read_jsx_text(&mut self, had_line_break: bool) -> Result<Token> {
        let start_idx = self.start_pos.0;
        let mut text = String::new();
        // Read until we find <, {, or >
        loop {
            match self.cursor.peek() {
                Some(b'<') | Some(b'{') | Some(b'>') | None => {
                    break;
                }
                Some(_) => {
                    // For performance, read chunks of text at once if possible
                    let start = self.cursor.position();
                    self.cursor
                        .advance_while(|c| c != b'<' && c != b'{' && c != b'>');
                    let end = self.cursor.position();
                    if end > start {
                        let slice = self.cursor.slice(start, end);
                        // SAFETY: the input is valid UTF-8 and the chunk
                        // boundaries fall on ASCII delimiter bytes.
                        text.push_str(unsafe { std::str::from_utf8_unchecked(slice) });
                    }
                }
            }
        }
        // Skip whitespace-only JSX text
        if text.trim().is_empty() {
            // Return either a new token or the next token
            if self.cursor.peek().is_none() {
                return Ok(Token::new(
                    TokenType::EOF,
                    self.span(),
                    had_line_break,
                    TokenValue::None,
                ));
            } else {
                return self.read_jsx_token(had_line_break);
            }
        }
        // Extract the raw text
        let end_idx = self.cursor.position();
        let raw_bytes = self.cursor.slice(start_idx, end_idx);
        // SAFETY: same argument as above — the slice is valid UTF-8.
        let raw_str = unsafe { std::str::from_utf8_unchecked(raw_bytes) };
        let span = self.span();
        Ok(Token::new(
            TokenType::JSXText,
            span,
            // Fix: use the `had_line_break` flag passed in by `next_token`.
            // `self.had_line_break` has already been reset at this point,
            // so reading the field here lost the line-break information
            // (the EOF branch above already used the parameter).
            had_line_break,
            TokenValue::Str {
                value: Atom::from(text),
                raw: Atom::from(raw_str),
            },
        ))
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/mod.rs | Rust | //! High-performance lexer for ECMAScript/TypeScript
//!
//! This lexer is designed for maximum performance and operates at the byte
//! level directly on the input string for optimal throughput.
#![allow(clippy::redundant_closure_call)]
mod common;
mod cursor;
mod identifier;
mod jsx;
mod number;
mod operators;
mod regex;
mod string;
mod template;
#[cfg(test)]
mod tests;
use std::rc::Rc;
use cursor::Cursor;
use swc_common::{BytePos, Span, DUMMY_SP};
use wide::u8x16;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
util::{likely, unlikely},
JscTarget, SingleThreadedComments, Syntax,
};
/// Represents line break detection between tokens.
/// Fits in a single byte so it can be stored inline in the lexer state.
#[derive(Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
enum LineBreak {
    None = 0,
    Present = 1,
}

impl From<bool> for LineBreak {
    #[inline(always)]
    fn from(b: bool) -> Self {
        // A plain branch compiles down to a select/no-op here; no need for
        // the previous `unsafe { transmute(b as u8) }`.
        if b {
            LineBreak::Present
        } else {
            LineBreak::None
        }
    }
}

impl From<LineBreak> for bool {
    #[inline(always)]
    fn from(lb: LineBreak) -> Self {
        // Direct conversion via the `repr(u8)` discriminant, no branching.
        lb as u8 != 0
    }
}
/// High-performance lexer for ECMAScript/TypeScript
///
/// This lexer processes input as UTF-8 bytes for maximum performance.
/// Positions (`start_pos`, spans) are byte offsets into the input.
#[repr(C)] // Ensure predictable memory layout
pub struct Lexer<'a> {
    /// Current token (most recently scanned; `next_token` swaps it out)
    pub current: Token,
    /// Byte-level cursor to the input source
    cursor: Cursor<'a>,
    /// Syntax configuration for the parser
    pub syntax: Syntax,
    /// Target ECMAScript version
    pub target: JscTarget,
    /// Start position of the current token
    start_pos: BytePos,
    /// Comments storage
    pub comments: Option<Rc<SingleThreadedComments>>,
    /// Whether the lexer is in strict mode
    pub strict_mode: bool,
    /// Whether the lexer is in JSX element context
    pub in_jsx_element: bool,
    /// Whether the lexer is in template literal context
    pub in_template: bool,
    /// Whether the lexer is inside a `${…}` expression of a template
    /// literal (raw template text is only scanned when this is false)
    pub in_template_expr: bool,
    /// Whether we had a line break before the current token
    /// (captured and reset at the start of each `next_token` call)
    had_line_break: LineBreak,
}
// Bit flags for character classification - used in lookup tables.
// Each `ASCII_LOOKUP` entry is an OR of these flags.
const CHAR_WHITESPACE: u8 = 0b0000_0001; // space, tab, \n, \r, FF, VT
const CHAR_LINEBREAK: u8 = 0b0000_0010; // \n, \r (always combined with CHAR_WHITESPACE)
const CHAR_ID_START: u8 = 0b0000_0100; // a-z, A-Z, `_`, `$`
const CHAR_ID_CONTINUE: u8 = 0b0000_1000; // identifier-start characters plus 0-9
const CHAR_DIGIT: u8 = 0b0001_0000; // 0-9
const CHAR_HEX_DIGIT: u8 = 0b0010_0000; // 0-9, a-f, A-F
const CHAR_OPERATOR: u8 = 0b0100_0000; // bytes that begin an operator
const CHAR_SPECIAL: u8 = 0b1000_0000; // delimiters, quotes, `#`, `@`, `:`, `~`
// Extended lookup table for faster character checks (ASCII only).
// Built entirely at compile time; each of the 256 entries is an OR of the
// CHAR_* bit flags (non-ASCII bytes keep 0 and take the slow path).
static ASCII_LOOKUP: [u8; 256] = {
    let mut table = [0u8; 256];
    // Mark whitespace characters
    table[b' ' as usize] = CHAR_WHITESPACE;
    table[b'\t' as usize] = CHAR_WHITESPACE;
    table[b'\n' as usize] = CHAR_WHITESPACE | CHAR_LINEBREAK;
    table[b'\r' as usize] = CHAR_WHITESPACE | CHAR_LINEBREAK;
    table[0x0c_usize] = CHAR_WHITESPACE; // Form feed
    table[0x0b_usize] = CHAR_WHITESPACE; // Vertical tab
    // Mark identifier start characters (a-z, A-Z, _, $).
    // `while` loops because `for`/iterators are not allowed in const eval.
    let mut i = 0;
    while i < 26 {
        // The first six letters (a-f / A-F) additionally get the hex-digit
        // flag; multiplying the flag by the bool keeps this const-evaluable
        // without an `if`.
        table[(b'a' + i) as usize] |=
            CHAR_ID_START | CHAR_ID_CONTINUE | (CHAR_HEX_DIGIT * ((i < 6) as u8));
        table[(b'A' + i) as usize] |=
            CHAR_ID_START | CHAR_ID_CONTINUE | (CHAR_HEX_DIGIT * ((i < 6) as u8));
        i += 1;
    }
    table[b'_' as usize] |= CHAR_ID_START | CHAR_ID_CONTINUE;
    table[b'$' as usize] |= CHAR_ID_START | CHAR_ID_CONTINUE;
    // Mark digits (0-9)
    i = 0;
    while i < 10 {
        table[(b'0' + i) as usize] |= CHAR_ID_CONTINUE | CHAR_DIGIT | CHAR_HEX_DIGIT;
        i += 1;
    }
    // Mark common operators
    table[b'+' as usize] |= CHAR_OPERATOR;
    table[b'-' as usize] |= CHAR_OPERATOR;
    table[b'*' as usize] |= CHAR_OPERATOR;
    table[b'/' as usize] |= CHAR_OPERATOR;
    table[b'%' as usize] |= CHAR_OPERATOR;
    table[b'=' as usize] |= CHAR_OPERATOR;
    table[b'<' as usize] |= CHAR_OPERATOR;
    table[b'>' as usize] |= CHAR_OPERATOR;
    table[b'&' as usize] |= CHAR_OPERATOR;
    table[b'|' as usize] |= CHAR_OPERATOR;
    table[b'^' as usize] |= CHAR_OPERATOR;
    table[b'!' as usize] |= CHAR_OPERATOR;
    table[b'~' as usize] |= CHAR_OPERATOR | CHAR_SPECIAL; // Both special char and operator
    table[b'?' as usize] |= CHAR_OPERATOR;
    table[b'.' as usize] |= CHAR_OPERATOR;
    table[b':' as usize] |= CHAR_SPECIAL; // Colon is only a special char, not an operator
    // Mark special characters (frequently used in parsing decisions)
    table[b'{' as usize] |= CHAR_SPECIAL;
    table[b'}' as usize] |= CHAR_SPECIAL;
    table[b'(' as usize] |= CHAR_SPECIAL;
    table[b')' as usize] |= CHAR_SPECIAL;
    table[b'[' as usize] |= CHAR_SPECIAL;
    table[b']' as usize] |= CHAR_SPECIAL;
    table[b';' as usize] |= CHAR_SPECIAL;
    table[b',' as usize] |= CHAR_SPECIAL;
    table[b'"' as usize] |= CHAR_SPECIAL;
    table[b'\'' as usize] |= CHAR_SPECIAL;
    table[b'`' as usize] |= CHAR_SPECIAL;
    table[b'#' as usize] |= CHAR_SPECIAL;
    table[b'@' as usize] |= CHAR_SPECIAL;
    table
};
// Token type dispatch table to avoid large match statements - this stores
// TokenType by character. Only the single-character tokens below are
// populated; every other slot stays `TokenType::Invalid` and is never read,
// because `read_token` only consults this table for exactly these bytes.
static TOKEN_DISPATCH: [TokenType; 128] = {
    let mut table = [TokenType::Invalid; 128];
    // Single-character tokens
    table[b'(' as usize] = TokenType::LParen;
    table[b')' as usize] = TokenType::RParen;
    table[b'{' as usize] = TokenType::LBrace;
    table[b'}' as usize] = TokenType::RBrace;
    table[b'[' as usize] = TokenType::LBracket;
    table[b']' as usize] = TokenType::RBracket;
    table[b';' as usize] = TokenType::Semi;
    table[b',' as usize] = TokenType::Comma;
    table[b':' as usize] = TokenType::Colon;
    table[b'~' as usize] = TokenType::Tilde;
    table[b'@' as usize] = TokenType::At;
    table
};
impl<'a> Lexer<'a> {
/// Create a new lexer over `input` and prime it with the first token.
///
/// After construction, `self.current` always holds a valid token; an
/// error from the priming scan will surface again on the next
/// `next_token` call.
#[inline(always)]
pub fn new(
    input: &'a str,
    target: JscTarget,
    syntax: Syntax,
    comments: Option<Rc<SingleThreadedComments>>,
) -> Self {
    // Placeholder token; replaced by the priming scan below.
    let placeholder = Token::new(TokenType::EOF, DUMMY_SP, false, TokenValue::None);

    let mut lexer = Self {
        cursor: Cursor::new(input),
        current: placeholder,
        syntax,
        target,
        strict_mode: false,
        in_jsx_element: false,
        in_template: false,
        in_template_expr: false,
        comments,
        start_pos: BytePos(0),
        had_line_break: LineBreak::None,
    };

    // Prime the lexer with the first token.
    let _ = lexer.next_token();
    lexer
}
/// Advance the lexer by one token.
///
/// Returns the *previous* `current` token and stores the freshly
/// scanned one in its place.
#[inline(always)]
pub fn next_token(&mut self) -> Result<Token> {
    // Raw template text keeps its whitespace; everywhere else we skip
    // whitespace and comments before scanning.
    if likely(!self.in_template || self.in_template_expr) {
        self.skip_whitespace();
    }

    // Capture the pending line-break flag for this token and reset it.
    let line_break_before = self.had_line_break;
    self.had_line_break = LineBreak::None;

    // The token we are about to scan starts at the current cursor position.
    self.start_pos = self.cursor.pos();

    // JSX element contexts use a dedicated tokenizer.
    if unlikely(self.in_jsx_element) {
        return self.read_jsx_token(line_break_before.into());
    }

    // Scan the next token, synthesizing EOF at end of input.
    let scanned = match self.cursor.peek() {
        Some(byte) => self.read_token(byte, line_break_before.into())?,
        None => Token::new(
            TokenType::EOF,
            self.span(),
            line_break_before.into(),
            TokenValue::None,
        ),
    };

    // Swap in the new token and hand back the old one.
    Ok(std::mem::replace(&mut self.current, scanned))
}
/// Read the next token starting with the given character.
///
/// Precondition: `ch` is the byte at the cursor (not yet consumed) and
/// `self.start_pos` has already been set by `next_token`.
#[inline(always)]
fn read_token(&mut self, ch: u8, had_line_break: bool) -> Result<Token> {
    // Raw template text (outside `${…}`) is scanned by a dedicated routine.
    if unlikely(self.in_template && !self.in_template_expr) {
        return self.read_template_content(had_line_break);
    }
    // Fast path for ASCII tokens using lookup table
    if likely(ch < 128) {
        // SAFETY: `ch < 128` and ASCII_LOOKUP has 256 entries.
        let char_type = unsafe { *ASCII_LOOKUP.get_unchecked(ch as usize) };
        // Fast path for single-character tokens (very common)
        if char_type & CHAR_SPECIAL != 0 {
            match ch {
                // Group frequent tokens together for better branch prediction
                // Use direct table lookup for single-character tokens
                b'{' | b'}' | b'(' | b')' | b'[' | b']' | b';' | b',' | b':' | b'~' | b'@' => {
                    // Special case for closing brace in template: `}` ends a
                    // `${…}` expression and returns to raw template text.
                    if unlikely(ch == b'}' && self.in_template) {
                        // End of template expression
                        self.in_template_expr = false;
                    }
                    // SAFETY: `ch < 128` and TOKEN_DISPATCH has 128 entries.
                    let token_type = unsafe { *TOKEN_DISPATCH.get_unchecked(ch as usize) };
                    self.cursor.advance();
                    Ok(Token::new(
                        token_type,
                        self.span(),
                        had_line_break,
                        TokenValue::None,
                    ))
                }
                // String literals - group together for better branch prediction
                b'"' | b'\'' => self.read_string(ch),
                b'`' => {
                    // Entering a template literal; the back-quote is its own token.
                    self.in_template = true;
                    self.cursor.advance();
                    Ok(Token::new(
                        TokenType::BackQuote,
                        self.span(),
                        had_line_break,
                        TokenValue::None,
                    ))
                }
                // Other special characters that need custom handling
                b'#' => self.read_hash(),
                // This should not happen given our table design, but handle it anyway
                _ => {
                    self.cursor.advance();
                    let span = self.span();
                    Err(Error {
                        kind: ErrorKind::General {
                            message: format!("Unexpected character: '{}'", ch as char),
                        },
                        span,
                    })
                }
            }
        }
        // Check for digits (numeric literals)
        else if char_type & CHAR_DIGIT != 0 {
            self.read_number()
        }
        // Check for operator characters
        else if char_type & CHAR_OPERATOR != 0 {
            // Dispatch to specific operator handlers based on the character
            match ch {
                b'.' => self.read_dot(),
                b'=' => self.read_equals(),
                b'+' => self.read_plus(),
                b'-' => self.read_minus(),
                b'/' => self.read_slash(had_line_break),
                b'<' => self.read_less_than(),
                b'>' => self.read_greater_than(),
                b'!' => self.read_exclamation_mark(),
                b'?' => self.read_question_mark(),
                b'*' => self.read_asterisk(),
                b'%' => self.read_percent(),
                b'|' => self.read_pipe(),
                b'&' => self.read_ampersand(),
                b'^' => self.read_caret(),
                _ => {
                    // This should never happen with our table design
                    self.cursor.advance();
                    let span = self.span();
                    Err(Error {
                        kind: ErrorKind::General {
                            message: format!("Unexpected character: '{}'", ch as char),
                        },
                        span,
                    })
                }
            }
        }
        // Identifier start characters
        else if char_type & CHAR_ID_START != 0 {
            self.read_identifier()
        }
        // Any other ASCII character (error case)
        else {
            self.cursor.advance();
            let span = self.span();
            Err(Error {
                kind: ErrorKind::General {
                    message: format!("Unexpected character: '{}'", ch as char),
                },
                span,
            })
        }
    } else {
        // Non-ASCII character path (less common).
        // NOTE(review): `ch` is only the first byte of a multi-byte UTF-8
        // sequence here, so `ch as char` in the error message below shows a
        // Latin-1 interpretation of that byte, not the real character.
        if Self::is_identifier_start(ch) {
            self.read_identifier()
        } else {
            self.cursor.advance();
            let span = self.span();
            Err(Error {
                kind: ErrorKind::General {
                    message: format!("Unexpected character: '{}'", ch as char),
                },
                span,
            })
        }
    }
}
/// Span covering the token currently being scanned:
/// from `start_pos` (set in `next_token`) up to the cursor's position.
#[inline(always)]
fn span(&self) -> Span {
    let end = self.cursor.pos();
    Span::new(self.start_pos, end)
}
/// Skip whitespace and comments - optimized hot path.
///
/// Sets `self.had_line_break` whenever a line terminator (LF, CR/CRLF,
/// U+2028, U+2029) is crossed; `next_token` captures and resets the flag.
#[inline(always)]
fn skip_whitespace(&mut self) {
    // Process whitespace in SIMD batches when possible
    while !self.cursor.is_eof() {
        // First, handle SIMD optimized whitespace skipping for common ASCII whitespace
        if self.process_whitespace_simd() {
            continue;
        }
        // Fallback to standard processing for comments and special cases
        let ch = match self.cursor.peek() {
            Some(c) => c,
            None => break,
        };
        // Handle ASCII characters
        if likely(ch < 128) {
            // SAFETY: `ch < 128` and ASCII_LOOKUP has 256 entries.
            let char_type = unsafe { *ASCII_LOOKUP.get_unchecked(ch as usize) };
            // Fast path for common whitespace
            if char_type & CHAR_WHITESPACE != 0 {
                // Special handling for line breaks
                if unlikely(char_type & CHAR_LINEBREAK != 0) {
                    if ch == b'\n' {
                        self.cursor.advance();
                        self.had_line_break = LineBreak::Present;
                        continue;
                    } else if ch == b'\r' {
                        self.cursor.advance();
                        // Skip the following \n if it exists (CRLF sequence)
                        if let Some(b'\n') = self.cursor.peek() {
                            self.cursor.advance();
                        }
                        self.had_line_break = LineBreak::Present;
                        continue;
                    }
                } else {
                    // Regular whitespace (space, tab, etc.)
                    self.cursor.advance();
                    continue;
                }
            }
            // Handle comments - uses frequency-based ordering
            if ch == b'/' {
                match self.cursor.peek_at(1) {
                    // Line comment - very common in JS
                    Some(b'/') => {
                        self.cursor.advance_n(2);
                        self.skip_line_comment();
                        continue;
                    }
                    // Block comment - less common
                    Some(b'*') => {
                        self.cursor.advance_n(2);
                        self.skip_block_comment();
                        continue;
                    }
                    // A lone `/` starts a token (division or regex), not a comment
                    _ => break,
                }
            }
            // Not whitespace or comment
            break;
        } else {
            // Handle Unicode whitespace - rare case
            if ch == 0xe2 {
                // Check for line separator (U+2028) and paragraph separator (U+2029)
                let bytes = self.cursor.peek_n(3);
                if bytes.len() == 3
                    && bytes[0] == 0xe2
                    && bytes[1] == 0x80
                    && (bytes[2] == 0xa8 || bytes[2] == 0xa9)
                {
                    self.cursor.advance_n(3);
                    self.had_line_break = LineBreak::Present;
                    continue;
                }
            } else if ch == 0xef {
                // BOM - extremely rare in middle of file
                let bytes = self.cursor.peek_n(3);
                if bytes.len() == 3 && bytes[0] == 0xef && bytes[1] == 0xbb && bytes[2] == 0xbf
                {
                    self.cursor.advance_n(3);
                    continue;
                }
            }
            // Not Unicode whitespace
            break;
        }
    }
}
/// Process whitespace using SIMD acceleration.
/// Returns true if it processed something, false if it found a
/// non-whitespace character (or could not take the SIMD path).
#[inline]
fn process_whitespace_simd(&mut self) -> bool {
    // Need at least 16 bytes to use SIMD.
    // NOTE(review): the second condition compares the cursor's (apparently
    // absolute) position against the *remaining* length — this looks like it
    // disables the SIMD path earlier than intended; confirm against
    // `Cursor::position`/`Cursor::rest` semantics.
    let rest_len = self.cursor.rest().len();
    if rest_len < 16 || self.cursor.position() + 16 > rest_len as u32 {
        return false;
    }
    // Get current 16 bytes and load them directly into SIMD vector
    let input = self.cursor.rest();
    let data = unsafe {
        // SAFETY: We've checked that we have at least 16 bytes
        let mut bytes = [0u8; 16];
        std::ptr::copy_nonoverlapping(input.as_ptr(), bytes.as_mut_ptr(), 16);
        u8x16::new(bytes)
    };
    // Handle special characters separately for better branch prediction
    // SAFETY: `rest_len >= 16`, so index 0 is in bounds.
    let first_byte = unsafe { *input.get_unchecked(0) };
    // Check for special cases that need individual handling
    match first_byte {
        b'\n' => {
            self.cursor.advance();
            self.had_line_break = LineBreak::Present;
            return true;
        }
        b'\r' => {
            self.cursor.advance();
            // Fold a CRLF pair into a single logical line break
            if let Some(b'\n') = self.cursor.peek() {
                self.cursor.advance();
            }
            self.had_line_break = LineBreak::Present;
            return true;
        }
        b'/' => {
            // Check if this could be a comment start
            if let Some(b'/') | Some(b'*') = self.cursor.peek_at(1) {
                return false; // Let the caller handle comments
            }
            return false; // Not a whitespace
        }
        0xe2 => {
            // Check for line separator (U+2028) and paragraph separator (U+2029)
            let bytes = self.cursor.peek_n(3);
            if bytes.len() == 3
                && bytes[0] == 0xe2
                && bytes[1] == 0x80
                && (bytes[2] == 0xa8 || bytes[2] == 0xa9)
            {
                self.cursor.advance_n(3);
                self.had_line_break = LineBreak::Present;
                return true;
            }
            return false;
        }
        _ => {}
    }
    // Create SIMD vectors for common whitespace characters
    let space_vec = u8x16::splat(b' ');
    let tab_vec = u8x16::splat(b'\t');
    let form_feed_vec = u8x16::splat(0x0c); // Form feed
    let vert_tab_vec = u8x16::splat(0x0b); // Vertical tab
    // Fast path for regular whitespace (space, tab, form feed, vertical tab)
    // Compare with our whitespace vectors
    let is_space = data.cmp_eq(space_vec);
    let is_tab = data.cmp_eq(tab_vec);
    let is_ff = data.cmp_eq(form_feed_vec);
    let is_vt = data.cmp_eq(vert_tab_vec);
    // Combine masks for regular whitespace
    let is_basic_ws = is_space | is_tab | is_ff | is_vt;
    // Convert SIMD mask to array to process consecutive whitespace
    let ws_array = is_basic_ws.to_array();
    // If the first byte is whitespace, process consecutive whitespace
    if ws_array[0] != 0 {
        // Count consecutive whitespace characters (stop at the first
        // lane whose comparison mask is zero)
        let mut count = 0;
        for ws_char in ws_array {
            if ws_char == 0 {
                break;
            }
            count += 1;
        }
        // Skip all consecutive basic whitespace characters at once
        if count > 0 {
            self.cursor.advance_n(count);
            return true;
        }
    }
    // No whitespace found
    false
}
/// Skip the remainder of a `//` line comment.
///
/// The `//` itself has already been consumed by the caller. The closing
/// line terminator is consumed and `had_line_break` is set.
#[inline(always)]
fn skip_line_comment(&mut self) {
    // Fast path using find_byte (which uses SIMD internally when available).
    // NOTE(review): this path only looks for `\n`, so a comment terminated by
    // U+2028/U+2029 before the next `\n` is skipped past that terminator —
    // confirm whether that matters for ASI correctness.
    if let Some(newline_pos) = self.cursor.find_byte(b'\n') {
        // Skip to the newline (`find_byte` returns an absolute position,
        // hence the subtraction of the current position)
        let from_cursor = newline_pos - self.cursor.position();
        self.cursor.advance_n(from_cursor);
        self.cursor.advance(); // Skip the newline
        self.had_line_break = LineBreak::Present;
        return;
    }
    // Slower fallback path for when no newline is found
    while let Some(ch) = self.cursor.peek() {
        self.cursor.advance();
        if ch == b'\n' {
            self.had_line_break = LineBreak::Present;
            break;
        } else if ch == b'\r' {
            self.had_line_break = LineBreak::Present;
            // Skip the following \n if it exists (CRLF sequence)
            if let Some(b'\n') = self.cursor.peek() {
                self.cursor.advance();
            }
            break;
        } else if ch == 0xe2 {
            // Check for line separator (U+2028) and paragraph separator (U+2029)
            let bytes = self.cursor.peek_n(2);
            if bytes.len() == 2 && bytes[0] == 0x80 && (bytes[1] == 0xa8 || bytes[1] == 0xa9) {
                self.cursor.advance_n(2); // Already advanced the first byte
                self.had_line_break = LineBreak::Present;
                break;
            }
        }
    }
}
/// Skip a block comment - optimized for faster scanning with chunk-based
/// approach.
///
/// The opening `/*` has already been consumed by the caller. Line breaks
/// inside the comment set `had_line_break` once the comment ends.
#[inline(always)]
fn skip_block_comment(&mut self) {
    let mut had_line_break = false;
    // Use a specialized loop with chunk-based scanning for non-special chars
    'outer: while let Some(ch) = self.cursor.peek() {
        match ch {
            // Check for end of comment
            b'*' => {
                self.cursor.advance();
                if let Some(b'/') = self.cursor.peek() {
                    self.cursor.advance();
                    if had_line_break {
                        self.had_line_break = LineBreak::Present;
                    }
                    return;
                }
            }
            // Handle line breaks
            b'\n' => {
                self.cursor.advance();
                had_line_break = true;
            }
            b'\r' => {
                self.cursor.advance();
                // Skip the following \n if it exists (CRLF sequence)
                if let Some(b'\n') = self.cursor.peek() {
                    self.cursor.advance();
                }
                had_line_break = true;
            }
            // Handle Unicode line breaks
            0xe2 => {
                // Check for line separator (U+2028) and paragraph separator (U+2029)
                let bytes = self.cursor.peek_n(3);
                if bytes.len() == 3
                    && bytes[0] == 0xe2
                    && bytes[1] == 0x80
                    && (bytes[2] == 0xa8 || bytes[2] == 0xa9)
                {
                    self.cursor.advance_n(3);
                    had_line_break = true;
                    continue;
                }
                // 0xe2 that is not a line separator: plain comment content
                self.cursor.advance();
            }
            // Fast path: skip chunks of regular characters
            _ => {
                // Process in larger chunks for better efficiency
                let mut count = 1;
                // Use a much larger chunk size (512) for better throughput
                while count < 512 {
                    match self.cursor.peek_at(count) {
                        // Stop at special characters that need special handling
                        Some(b'*') | Some(b'\n') | Some(b'\r') | Some(0xe2) => break,
                        Some(_) => count += 1,
                        None => {
                            // End of input (cursor advanced here; `break 'outer`
                            // skips the advance below)
                            self.cursor.advance_n(count);
                            break 'outer;
                        }
                    }
                }
                self.cursor.advance_n(count);
            }
        }
    }
    // If we reach here, the comment was not closed (EOF inside comment).
    // NOTE(review): no error is reported for an unterminated block comment —
    // confirm this is diagnosed at the parser layer.
    if had_line_break {
        self.had_line_break = LineBreak::Present;
    }
}
/// Whether `byte` can begin an identifier.
///
/// Non-ASCII bytes are optimistically accepted here; the full Unicode
/// validation happens later in `read_identifier`.
#[inline(always)]
fn is_identifier_start(byte: u8) -> bool {
    if likely(byte < 128) {
        // ASCII fast path via the identifier lookup table
        return Self::is_ascii_id_start(byte);
    }
    true
}
/// Whether `byte` can continue an identifier.
///
/// Non-ASCII bytes are optimistically accepted here; the full Unicode
/// validation happens later in `read_identifier`.
#[inline(always)]
fn is_identifier_continue(byte: u8) -> bool {
    if likely(byte < 128) {
        // ASCII fast path via the identifier lookup table
        return Self::is_ascii_id_continue(byte);
    }
    true
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/number.rs | Rust | //! Number literal processing for the lexer
//!
//! This module handles the parsing of numeric literals in
//! ECMAScript/TypeScript.
use std::borrow::Cow;
use swc_atoms::Atom;
use super::Lexer;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
// Digit value lookup table for fast parsing: maps each byte to its numeric
// value (0-15 for `0-9`, `a-f`, `A-F`), or 255 for every other byte.
// Built at compile time with a single const-evaluable loop.
static DIGIT_VALUES: [u8; 256] = {
    let mut table = [255u8; 256];
    let mut b = 0usize;
    while b < 256 {
        table[b] = match b as u8 {
            c @ b'0'..=b'9' => c - b'0',
            c @ b'a'..=b'f' => c - b'a' + 10,
            c @ b'A'..=b'F' => c - b'A' + 10,
            _ => 255,
        };
        b += 1;
    }
    table
};
impl<'a> Lexer<'a> {
/// Read a numeric literal.
///
/// Handles decimal literals (including leading-dot forms such as `.5`,
/// fractional parts and exponents), binary (`0b`), octal (`0o`) and hex
/// (`0x`) literals, `_` digit separators, and BigInt literals suffixed
/// with `n`.
#[inline]
pub(super) fn read_number(&mut self) -> Result<Token> {
    let start_pos = self.start_pos;
    let start_idx = start_pos.0;
    // Check for leading dot (e.g. .123)
    let starts_with_dot = self.cursor.peek() == Some(b'.');
    if starts_with_dot {
        self.cursor.advance();
        // Make sure it's followed by a digit
        if !matches!(self.cursor.peek(), Some(b'0'..=b'9')) {
            // Just a dot, not a number
            return Ok(Token::new(
                TokenType::Dot,
                self.span(),
                bool::from(self.had_line_break),
                TokenValue::None,
            ));
        }
        // Read digits after the dot
        self.cursor
            .advance_while(|ch| matches!(ch, b'0'..=b'9' | b'_'));
        // Read exponent if present
        if matches!(self.cursor.peek(), Some(b'e') | Some(b'E')) {
            self.cursor.advance();
            // Optional sign
            if matches!(self.cursor.peek(), Some(b'+') | Some(b'-')) {
                self.cursor.advance();
            }
            // Must have at least one digit in exponent
            if !matches!(self.cursor.peek(), Some(b'0'..=b'9')) {
                let span = self.span();
                return Err(Error {
                    // Fix: this fires for `1e` / `1e+`, so the old
                    // "invalid numeric separator" wording was misleading.
                    kind: ErrorKind::InvalidNumber {
                        reason: "expected exponent digit",
                    },
                    span,
                });
            }
            // Read exponent digits
            self.cursor
                .advance_while(|ch| matches!(ch, b'0'..=b'9' | b'_'));
        }
        // Parse as decimal
        let value = self.parse_decimal_number(start_idx, true);
        // Create the token
        let raw_str = self.extract_number_str(start_idx);
        let span = self.span();
        return Ok(Token::new(
            TokenType::Num,
            span,
            bool::from(self.had_line_break),
            TokenValue::Num {
                value,
                raw: Atom::from(raw_str),
            },
        ));
    }
    // First check for a binary, octal, or hex literal
    let mut is_binary = false;
    let mut is_octal = false;
    let mut is_hex = false;
    if !starts_with_dot && self.cursor.peek() == Some(b'0') {
        self.cursor.advance();
        match self.cursor.peek() {
            // Binary literal: 0b or 0B
            Some(b'b') | Some(b'B') => {
                self.cursor.advance();
                is_binary = true;
                // Must have at least one binary digit
                if !matches!(self.cursor.peek(), Some(b'0'..=b'1')) {
                    let span = self.span();
                    return Err(Error {
                        kind: ErrorKind::InvalidNumber {
                            reason: "expected binary digit",
                        },
                        span,
                    });
                }
            }
            // Octal literal: 0o or 0O
            Some(b'o') | Some(b'O') => {
                self.cursor.advance();
                is_octal = true;
                // Must have at least one octal digit
                if !matches!(self.cursor.peek(), Some(b'0'..=b'7')) {
                    let span = self.span();
                    return Err(Error {
                        kind: ErrorKind::InvalidNumber {
                            reason: "expected octal digit",
                        },
                        span,
                    });
                }
            }
            // Hex literal: 0x or 0X
            Some(b'x') | Some(b'X') => {
                self.cursor.advance();
                is_hex = true;
                // Must have at least one hex digit
                if !matches!(
                    self.cursor.peek(),
                    Some(b'0'..=b'9') | Some(b'a'..=b'f') | Some(b'A'..=b'F')
                ) {
                    let span = self.span();
                    return Err(Error {
                        kind: ErrorKind::InvalidNumber {
                            reason: "expected hex digit",
                        },
                        span,
                    });
                }
            }
            // Decimal literal starting with 0
            _ => {}
        }
    }
    // Read the rest of the digits
    if is_binary {
        // Binary literals: 0b[01]+
        self.cursor
            .advance_while(|ch| matches!(ch, b'0'..=b'1' | b'_'));
    } else if is_octal {
        // Octal literals: 0o[0-7]+
        self.cursor
            .advance_while(|ch| matches!(ch, b'0'..=b'7' | b'_'));
    } else if is_hex {
        // Hex literals: 0x[0-9a-fA-F]+
        self.cursor
            .advance_while(|ch| matches!(ch, b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F' | b'_'));
    } else {
        // Decimal literals
        // Read integer part
        if !starts_with_dot {
            self.cursor
                .advance_while(|ch| matches!(ch, b'0'..=b'9' | b'_'));
        }
        // Read fractional part if present (but not `..` range-like sequences)
        if self.cursor.peek() == Some(b'.')
            && (starts_with_dot || !matches!(self.cursor.peek_at(1), Some(b'.')))
        {
            // Consume the dot
            self.cursor.advance();
            // Read decimal digits after the dot
            self.cursor
                .advance_while(|ch| matches!(ch, b'0'..=b'9' | b'_'));
        }
        // Read exponent part if present
        if matches!(self.cursor.peek(), Some(b'e') | Some(b'E')) {
            self.cursor.advance();
            // Optional sign
            if matches!(self.cursor.peek(), Some(b'+') | Some(b'-')) {
                self.cursor.advance();
            }
            // Must have at least one digit in exponent
            if !matches!(self.cursor.peek(), Some(b'0'..=b'9')) {
                let span = self.span();
                return Err(Error {
                    // Fix: same misleading wording as in the leading-dot path.
                    kind: ErrorKind::InvalidNumber {
                        reason: "expected exponent digit",
                    },
                    span,
                });
            }
            // Read exponent digits
            self.cursor
                .advance_while(|ch| matches!(ch, b'0'..=b'9' | b'_'));
        }
    }
    // Check if this is a BigInt literal (ends with n)
    let is_bigint = self.cursor.peek() == Some(b'n');
    if is_bigint {
        self.cursor.advance(); // Consume the 'n'
        // BigInt can't have decimal points or exponents
        if !is_binary && !is_octal && !is_hex {
            let raw_str = self.extract_number_str(start_idx);
            if raw_str.contains('.') || raw_str.contains('e') || raw_str.contains('E') {
                let span = self.span();
                return Err(Error {
                    kind: ErrorKind::InvalidBigInt,
                    span,
                });
            }
        }
        return self.create_bigint_token(start_idx);
    }
    // Parse the number directly for faster processing
    let value = if is_binary {
        self.parse_binary_number(start_idx)
    } else if is_octal {
        self.parse_octal_number(start_idx)
    } else if is_hex {
        self.parse_hex_number(start_idx)
    } else {
        self.parse_decimal_number(start_idx, starts_with_dot)
    };
    // Extract the raw string representation
    let raw_str = self.extract_number_str(start_idx);
    // Create and return the token
    let span = self.span();
    Ok(Token::new(
        TokenType::Num,
        span,
        bool::from(self.had_line_break),
        TokenValue::Num {
            value,
            raw: Atom::from(raw_str),
        },
    ))
}
/// Extract the raw text of the number between `start_idx` and the cursor,
/// with `_` digit separators removed.
///
/// Borrows from the source when there are no separators; otherwise
/// allocates a filtered copy.
#[inline]
fn extract_number_str(&self, start_idx: u32) -> Cow<'a, str> {
    let raw = self.cursor.slice(start_idx, self.cursor.position());
    if !raw.contains(&b'_') {
        // Fast path: no separators, borrow the source text directly.
        // SAFETY: numeric literals are scanned over ASCII bytes only,
        // so the slice is valid UTF-8.
        return Cow::Borrowed(unsafe { std::str::from_utf8_unchecked(raw) });
    }
    // Slow path: drop the `_` separators into a fresh string.
    let cleaned: String = raw
        .iter()
        .filter(|&&byte| byte != b'_')
        .map(|&byte| byte as char)
        .collect();
    Cow::Owned(cleaned)
}
/// Parse the digits of a binary literal (`0b…`) into an `f64`.
///
/// Improvement: the previous version re-sliced the cursor for every single
/// byte (`slice(i, i + 1)` plus an unchecked index); slicing once removes
/// both the redundant calls and the `unsafe` block entirely.
#[inline]
fn parse_binary_number(&self, start_idx: u32) -> f64 {
    // Skip the '0b' prefix; the cursor sits just past the last digit.
    let digits = self.cursor.slice(start_idx + 2, self.cursor.position());
    let mut value: u64 = 0;
    for &byte in digits {
        // `_` separators are skipped.
        if byte != b'_' {
            value = value * 2 + (byte - b'0') as u64;
        }
    }
    value as f64
}
/// Parse the digits of an octal literal (`0o…`) into an `f64`.
///
/// Improvement: the previous version re-sliced the cursor for every single
/// byte (`slice(i, i + 1)` plus an unchecked index); slicing once removes
/// both the redundant calls and the `unsafe` block entirely.
#[inline]
fn parse_octal_number(&self, start_idx: u32) -> f64 {
    // Skip the '0o' prefix; the cursor sits just past the last digit.
    let digits = self.cursor.slice(start_idx + 2, self.cursor.position());
    let mut value: u64 = 0;
    for &byte in digits {
        // `_` separators are skipped.
        if byte != b'_' {
            value = value * 8 + (byte - b'0') as u64;
        }
    }
    value as f64
}
/// Parse the digits of a hexadecimal literal (`0x…`) into an `f64`.
///
/// Improvement: the previous version re-sliced the cursor for every single
/// byte (`slice(i, i + 1)` plus an unchecked index); slicing once removes
/// both the redundant calls and the `unsafe` block entirely.
#[inline]
fn parse_hex_number(&self, start_idx: u32) -> f64 {
    // Skip the '0x' prefix; the cursor sits just past the last digit.
    let digits = self.cursor.slice(start_idx + 2, self.cursor.position());
    let mut value: u64 = 0;
    for &byte in digits {
        // `_` separators are skipped.
        if byte != b'_' {
            // DIGIT_VALUES maps '0'-'9'/'a'-'f'/'A'-'F' to 0-15.
            let digit = DIGIT_VALUES[byte as usize];
            value = value * 16 + digit as u64;
        }
    }
    value as f64
}
/// Parse a decimal number from the scanned source text.
///
/// `starts_with_dot` indicates a `.123`-style literal, which is prefixed
/// with `0` before parsing so Rust's `f64` parser accepts it.
#[inline]
fn parse_decimal_number(&self, start_idx: u32, starts_with_dot: bool) -> f64 {
    // Extract the raw string representation (separators already removed)
    let raw_str = self.extract_number_str(start_idx);
    // Special case for dot-prefixed numbers
    if starts_with_dot {
        // High-performance parsing for .123 format
        // Use a stack-allocated buffer to avoid heap allocation
        const STACK_BUF_SIZE: usize = 32;
        if raw_str.len() < STACK_BUF_SIZE - 1 {
            // Create a stack-allocated buffer with a leading '0'
            let mut buffer = [0u8; STACK_BUF_SIZE];
            buffer[0] = b'0';
            // Fast memcpy of the original bytes (including the dot)
            let src_bytes = raw_str.as_bytes();
            let src_len = src_bytes.len();
            // SAFETY: We've checked that src_len < STACK_BUF_SIZE - 1
            unsafe {
                std::ptr::copy_nonoverlapping(
                    src_bytes.as_ptr(),
                    buffer.as_mut_ptr().add(1),
                    src_len,
                );
                // Parse from the buffer - avoid allocation
                return std::str::from_utf8_unchecked(&buffer[0..src_len + 1])
                    .parse::<f64>()
                    .unwrap_or(f64::NAN);
            }
        } else {
            // Fall back to string with capacity for very long numbers (rare case)
            let mut with_leading_zero = String::with_capacity(raw_str.len() + 1);
            with_leading_zero.push('0');
            with_leading_zero.push_str(&raw_str);
            return with_leading_zero.parse::<f64>().unwrap_or(f64::NAN);
        }
    }
    // Standard case - use Rust's parser.
    // NOTE(review): parse failures fold into NaN instead of surfacing an
    // error — confirm the scanner guarantees only parseable text reaches here.
    raw_str.parse::<f64>().unwrap_or(f64::NAN)
}
/// Create a BigInt token from the scanned literal.
///
/// Called after the trailing `n` suffix has been consumed; the raw text is
/// sliced up to `position() - 1` to exclude that suffix. Returns
/// `ErrorKind::InvalidBigInt` if the digits fail to parse in their radix.
#[inline]
fn create_bigint_token(&self, start_idx: u32) -> Result<Token> {
    use num_bigint::BigInt;
    let end_idx = self.cursor.position();
    let span = self.span();
    // Extract the raw string excluding the 'n' suffix.
    // (Same underscore-stripping as `extract_number_str`, duplicated here
    // because that helper slices all the way to the cursor, which would
    // include the 'n'.)
    let raw_str = {
        let num_slice = self.cursor.slice(start_idx, end_idx - 1);
        if num_slice.contains(&b'_') {
            // Filter out underscores
            let mut result = String::with_capacity(num_slice.len());
            for &byte in num_slice {
                if byte != b'_' {
                    result.push(byte as char);
                }
            }
            Cow::Owned(result)
        } else {
            // Fast path: no underscores.
            // SAFETY: numeric literals are scanned over ASCII bytes only,
            // so the slice is valid UTF-8.
            Cow::Borrowed(unsafe { std::str::from_utf8_unchecked(num_slice) })
        }
    };
    // Parse the BigInt value in the radix indicated by the prefix
    let value = if raw_str.starts_with("0b") || raw_str.starts_with("0B") {
        // Binary
        BigInt::parse_bytes(&raw_str.as_bytes()[2..], 2)
    } else if raw_str.starts_with("0o") || raw_str.starts_with("0O") {
        // Octal
        BigInt::parse_bytes(&raw_str.as_bytes()[2..], 8)
    } else if raw_str.starts_with("0x") || raw_str.starts_with("0X") {
        // Hexadecimal
        BigInt::parse_bytes(&raw_str.as_bytes()[2..], 16)
    } else {
        // Decimal
        BigInt::parse_bytes(raw_str.as_bytes(), 10)
    };
    // Create the token
    if let Some(value) = value {
        Ok(Token::new(
            TokenType::BigInt,
            span,
            bool::from(self.had_line_break),
            TokenValue::BigInt {
                value: Box::new(value),
                raw: Atom::from(raw_str),
            },
        ))
    } else {
        Err(Error {
            kind: ErrorKind::InvalidBigInt,
            span,
        })
    }
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/operators.rs | Rust | //! Operator tokens processing for the lexer
//!
//! This module handles the parsing of operators in ECMAScript/TypeScript.
use swc_atoms::Atom;
use super::Lexer;
use crate::{
error::Result,
token::{Token, TokenType, TokenValue},
};
impl Lexer<'_> {
/// Read a dot token (. or ... or numeric with leading dot)
pub(super) fn read_dot(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '.'
// Check for spread operator '...'
if self.cursor.peek() == Some(b'.') && self.cursor.peek_at(1) == Some(b'.') {
self.cursor.advance_n(2);
return Ok(Token::new(
TokenType::DotDotDot,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Check for numeric literal with leading dot (e.g. .123)
if let Some(b'0'..=b'9') = self.cursor.peek() {
// Backtrack to include the dot in the number
self.cursor.reset_to(self.start_pos); // Reset cursor to start position
return self.read_number();
}
// Just a single dot
Ok(Token::new(
TokenType::Dot,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
/// Read a question mark token (? or ?? or ?. or ??=)
pub(super) fn read_question_mark(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '?'
// Check for nullish coalescing operator '??'
if self.cursor.peek() == Some(b'?') {
self.cursor.advance();
// Check for nullish assignment '??='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::NullishEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Nullish coalescing
return Ok(Token::new(
TokenType::NullishCoalescing,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Check for optional chaining operator '?.'
if self.cursor.peek() == Some(b'.') {
self.cursor.advance();
return Ok(Token::new(
TokenType::OptionalChain,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just a single question mark
Ok(Token::new(
TokenType::QuestionMark,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
/// Read an exclamation mark token (! or != or !==)
pub(super) fn read_exclamation_mark(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '!'
// Check for inequality operator '!='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
// Check for strict inequality '!=='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::NotEqEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Non-strict inequality
return Ok(Token::new(
TokenType::NotEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just a single exclamation mark
Ok(Token::new(
TokenType::Bang,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
/// Read a plus token (+ or ++ or +=)
pub(super) fn read_plus(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '+'
match self.cursor.peek() {
// Increment operator '++'
Some(b'+') => {
self.cursor.advance();
Ok(Token::new(
TokenType::PlusPlus,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Addition assignment '+='
Some(b'=') => {
self.cursor.advance();
Ok(Token::new(
TokenType::PlusEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single plus
_ => Ok(Token::new(
TokenType::Plus,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read a minus token (- or -- or -=)
pub(super) fn read_minus(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '-'
match self.cursor.peek() {
// Decrement operator '--'
Some(b'-') => {
self.cursor.advance();
Ok(Token::new(
TokenType::MinusMinus,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Subtraction assignment '-='
Some(b'=') => {
self.cursor.advance();
Ok(Token::new(
TokenType::MinusEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single minus
_ => Ok(Token::new(
TokenType::Minus,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read an asterisk token (* or ** or *= or **=)
pub(super) fn read_asterisk(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '*'
// Check for exponentiation operator '**'
if self.cursor.peek() == Some(b'*') {
self.cursor.advance();
// Check for exponentiation assignment '**='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::ExpEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just exponentiation
return Ok(Token::new(
TokenType::Exp,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Check for multiplication assignment '*='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::MulEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just a single asterisk
Ok(Token::new(
TokenType::Asterisk,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
/// Read a slash token (/ or /= or start of regex)
pub(super) fn read_slash(&mut self, had_line_break: bool) -> Result<Token> {
self.cursor.advance(); // Skip the initial '/'
// Check for division assignment '/='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::DivEq,
self.span(),
had_line_break,
TokenValue::None,
));
}
// Check if this could be a regex literal
if self.is_regex_start() {
return self.read_regex(had_line_break);
}
// Just a single slash (division operator)
Ok(Token::new(
TokenType::Slash,
self.span(),
had_line_break,
TokenValue::None,
))
}
/// Read a percent token (% or %=)
pub(super) fn read_percent(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '%'
// Check for modulo assignment '%='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::ModEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just a single percent
Ok(Token::new(
TokenType::Percent,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
/// Read a less-than token (< or <= or << or <=)
pub(super) fn read_less_than(&mut self) -> Result<Token> {
let start_pos = self.start_pos;
self.cursor.advance(); // Skip the initial '<'
// Check for JSX mode
if self.in_jsx_element {
self.cursor.reset_to(start_pos);
return self.read_jsx_token(self.had_line_break.into());
}
match self.cursor.peek() {
// Less than or equal '<='
Some(b'=') => {
self.cursor.advance();
Ok(Token::new(
TokenType::LtEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Left shift '<<'
Some(b'<') => {
self.cursor.advance();
// Check for left shift assignment '<<='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::LShift,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just left shift
Ok(Token::new(
TokenType::LShift,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single less-than
_ => Ok(Token::new(
TokenType::Lt,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read a greater-than token (> or >= or >> or >>>)
pub(super) fn read_greater_than(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '>'
match self.cursor.peek() {
// Greater than or equal '>='
Some(b'=') => {
self.cursor.advance();
Ok(Token::new(
TokenType::GtEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Right shift '>>'
Some(b'>') => {
self.cursor.advance();
// Check for zero-fill right shift '>>>'
if self.cursor.peek() == Some(b'>') {
self.cursor.advance();
// Check for zero-fill right shift assignment '>>>='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::ZeroFillRShift,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just zero-fill right shift
return Ok(Token::new(
TokenType::ZeroFillRShift,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Check for right shift assignment '>>='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::RShift,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just right shift
Ok(Token::new(
TokenType::RShift,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single greater-than
_ => Ok(Token::new(
TokenType::Gt,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read an equals token (= or == or === or => or =)
pub(super) fn read_equals(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '='
match self.cursor.peek() {
// Arrow function '=>'
Some(b'>') => {
self.cursor.advance();
Ok(Token::new(
TokenType::Arrow,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Equality operator '=='
Some(b'=') => {
self.cursor.advance();
// Check for strict equality '==='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::EqEqEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just non-strict equality
Ok(Token::new(
TokenType::EqEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single equals
_ => Ok(Token::new(
TokenType::Eq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read a pipe token (| or || or |= or ||=)
pub(super) fn read_pipe(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '|'
match self.cursor.peek() {
// Logical OR operator '||'
Some(b'|') => {
self.cursor.advance();
// Check for logical OR assignment '||='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::LogicalOrEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just logical OR
Ok(Token::new(
TokenType::LogicalOr,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Bitwise OR assignment '|='
Some(b'=') => {
self.cursor.advance();
Ok(Token::new(
TokenType::BitOrEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single pipe
_ => Ok(Token::new(
TokenType::Pipe,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read an ampersand token (& or && or &= or &&=)
pub(super) fn read_ampersand(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '&'
match self.cursor.peek() {
// Logical AND operator '&&'
Some(b'&') => {
self.cursor.advance();
// Check for logical AND assignment '&&='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::LogicalAndEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just logical AND
Ok(Token::new(
TokenType::LogicalAnd,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Bitwise AND assignment '&='
Some(b'=') => {
self.cursor.advance();
Ok(Token::new(
TokenType::BitAndEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
// Just a single ampersand
_ => Ok(Token::new(
TokenType::Ampersand,
self.span(),
self.had_line_break.into(),
TokenValue::None,
)),
}
}
/// Read a caret token (^ or ^=)
pub(super) fn read_caret(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '^'
// Check for bitwise XOR assignment '^='
if self.cursor.peek() == Some(b'=') {
self.cursor.advance();
return Ok(Token::new(
TokenType::BitXorEq,
self.span(),
self.had_line_break.into(),
TokenValue::None,
));
}
// Just a single caret
Ok(Token::new(
TokenType::Caret,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
/// Read a hash token (#)
pub(super) fn read_hash(&mut self) -> Result<Token> {
self.cursor.advance(); // Skip the initial '#'
// Check for shebang at the start of the file
if self.cursor.position() == 1 && self.cursor.peek() == Some(b'!') {
// This is a shebang, read until the end of the line
self.cursor.advance(); // Skip the '!'
let start = self.cursor.position();
while let Some(ch) = self.cursor.peek() {
if ch == b'\n' || ch == b'\r' {
break;
}
self.cursor.advance();
}
let end = self.cursor.position();
let shebang_str =
unsafe { std::str::from_utf8_unchecked(self.cursor.slice(start, end)) };
return Ok(Token::new(
TokenType::Shebang,
self.span(),
self.had_line_break.into(),
TokenValue::Shebang(Atom::from(shebang_str)),
));
}
// Just a hash token (for private fields)
Ok(Token::new(
TokenType::Hash,
self.span(),
self.had_line_break.into(),
TokenValue::None,
))
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/regex.rs | Rust | //! Regular expression literals processing for the lexer
//!
//! This module handles the parsing of RegExp literals in ECMAScript/TypeScript.
use swc_atoms::Atom;
use super::Lexer;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
impl Lexer<'_> {
    /// Read a regular expression literal.
    ///
    /// Assumes the initial '/' has already been consumed by the caller
    /// (`read_slash`). Scans the pattern while tracking character classes
    /// and escapes, then collects the flags, and returns a `Regex` token
    /// carrying the pattern and flag strings.
    pub(super) fn read_regex(&mut self, had_line_break: bool) -> Result<Token> {
        let start_pos = self.start_pos;
        let start_idx = start_pos.0;

        // Read the pattern
        let mut in_class = false; // Whether we're in a character class [...]
        let mut escaped = false; // Whether the previous character was escaped

        // Regular expression pattern
        loop {
            match self.cursor.peek() {
                // End of pattern. A '/' inside a character class or right
                // after a backslash does NOT terminate the pattern.
                Some(b'/') if !in_class && !escaped => {
                    self.cursor.advance();
                    break;
                }

                // End of file (unterminated regex)
                None => {
                    let span = self.span();
                    return Err(Error {
                        kind: ErrorKind::InvalidRegExp {
                            reason: "Unterminated regular expression literal",
                        },
                        span,
                    });
                }

                // Line break (illegal in regex literals)
                Some(b'\n') | Some(b'\r') => {
                    let span = self.span();
                    return Err(Error {
                        kind: ErrorKind::InvalidRegExp {
                            reason: "Line break in regular expression literal",
                        },
                        span,
                    });
                }

                // Start of character class
                Some(b'[') if !escaped => {
                    in_class = true;
                    self.cursor.advance();
                    escaped = false;
                }

                // End of character class
                Some(b']') if in_class && !escaped => {
                    in_class = false;
                    self.cursor.advance();
                    escaped = false;
                }

                // Escape sequence: the next byte is consumed by the
                // catch-all arm below, which also clears the flag.
                Some(b'\\') if !escaped => {
                    self.cursor.advance();
                    escaped = true;
                }

                // Regular character (also reached for an escaped character)
                Some(_) => {
                    self.cursor.advance();
                    escaped = false;
                }
            }
        }

        // Read the flags: every identifier-continue byte after the closing
        // '/' is treated as a flag character.
        let mut flags = String::new();
        while let Some(ch) = self.cursor.peek() {
            if Self::is_identifier_continue(ch) {
                // NOTE(review): `ch` is a raw byte; pushing it as `char`
                // assumes flags are ASCII. A byte >= 0x80 would be mangled
                // (read as Latin-1) — confirm `is_identifier_continue` only
                // accepts ASCII bytes here.
                flags.push(ch as char);
                self.cursor.advance();
            } else {
                break;
            }
        }

        // Validate flags (basic validation): reject repeated flag
        // characters via a small seen-table indexed by byte value.
        let mut seen_flags = [false; 128];
        for ch in flags.bytes() {
            if ch as usize >= seen_flags.len() || seen_flags[ch as usize] {
                let span = self.span();
                return Err(Error {
                    kind: ErrorKind::InvalidRegExp {
                        reason: "Duplicate flag in regular expression",
                    },
                    span,
                });
            }
            seen_flags[ch as usize] = true;
        }

        // Extract the raw regex (both slashes plus the flags)
        let end_idx = self.cursor.position();
        let regex_bytes = self.cursor.slice(start_idx, end_idx);
        // SAFETY: the bytes come straight from the UTF-8 source text.
        let regex_str = unsafe { std::str::from_utf8_unchecked(regex_bytes) };

        // Split into pattern and flags (skip the leading and trailing '/').
        // `rfind` locates the closing slash since flag characters are
        // identifier-continue bytes and can never be '/'.
        let pattern_end = regex_str.rfind('/').unwrap_or(0);
        let pattern = &regex_str[1..pattern_end];

        let span = self.span();
        Ok(Token::new(
            TokenType::Regex,
            span,
            had_line_break,
            TokenValue::Regex {
                exp: Atom::from(pattern),
                flags: Atom::from(flags),
            },
        ))
    }
/// Check if the slash is the start of a regex literal
pub(super) fn is_regex_start(&self) -> bool {
// We generally decide this based on context (whether a slash could be a
// division operator) Usually, a slash starts a regex if the previous
// token can precede an expression and is not a ++ or -- operator (which
// would make the slash a division operator)
self.current.before_expr()
&& self.current.token_type != TokenType::PlusPlus
&& self.current.token_type != TokenType::MinusMinus
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/string.rs | Rust | //! String literals processing for the lexer
//!
//! This module handles the parsing of string literals in ECMAScript/TypeScript.
use swc_atoms::Atom;
use swc_common::Span;
use wide::u8x16;
use super::Lexer;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
// Pre-computed lookup table mapping an escape character (the byte that
// follows a backslash) to its replacement byte. A value of 0 means "no
// simple mapping"; the caller handles those separately.
//
// NOTE(review): the b'0' entry maps to b'\0', which is numerically equal to
// the 0 sentinel, so `read_string` can never distinguish it from "no
// mapping" — '\0' is instead handled by the octal-escape branch there. The
// entry is effectively dead but harmless.
static ESCAPE_LOOKUP: [u8; 128] = {
    let mut table = [0u8; 128];
    table[b'\\' as usize] = b'\\';
    table[b'n' as usize] = b'\n';
    table[b'r' as usize] = b'\r';
    table[b't' as usize] = b'\t';
    table[b'b' as usize] = b'\x08';
    table[b'f' as usize] = b'\x0C';
    table[b'v' as usize] = b'\x0B';
    table[b'\'' as usize] = b'\'';
    table[b'"' as usize] = b'"';
    table[b'`' as usize] = b'`';
    table[b'0' as usize] = b'\0';
    table
};
// Thread-local scratch buffer used by the slow string-parsing path so that
// repeated string literals do not each allocate; see `read_string`.
thread_local! {
    static STRING_BUFFER: std::cell::RefCell<Vec<u8>> = std::cell::RefCell::new(Vec::with_capacity(1024));
}
impl Lexer<'_> {
    /// Read a string literal delimited by `quote` (b'"' or b'\'').
    ///
    /// Two paths:
    /// - fast path: `find_string_end` located the closing quote, so the
    ///   value is sliced directly out of the source with no per-byte work;
    /// - slow path: the literal needs per-byte processing (escape decoding,
    ///   or it may be unterminated), building the value in a local buffer.
    ///
    /// NOTE(review): the fast path copies the text out raw, so it is only
    /// correct if `find_string_end` returns `Some` exclusively for literals
    /// containing no escape sequences — verify that invariant holds in the
    /// scanners it delegates to.
    #[inline]
    pub(super) fn read_string(&mut self, quote: u8) -> Result<Token> {
        let start_pos = self.start_pos;

        // Skip the opening quote
        self.cursor.advance();

        // Fast path: if there are no escape sequences or line terminators, we can
        // directly extract the string without processing each character
        let mut has_escapes = false;

        // Try to find the closing quote.
        // NOTE(review): the position value produced by this `match`
        // expression is never used — `raw_end` below is re-read from the
        // cursor instead.
        match self.find_string_end(quote) {
            Some(end) => {
                // Fast path - no escapes
                let end_pos = self.cursor.position() + end;

                // Skip to the end quote
                self.cursor.advance_n(end);

                // Skip the closing quote
                self.cursor.advance();

                end_pos
            }
            None => {
                // Slower path - contains escapes, line terminators, or unterminated
                has_escapes = true;

                // Process each character using a local buffer
                let mut buffer = Vec::with_capacity(128);
                let mut found_closing_quote = false;

                while let Some(ch) = self.cursor.peek() {
                    // Check for unterminated string.
                    // NOTE(review): unreachable — `peek()` just returned
                    // `Some`, so the cursor cannot be at EOF here.
                    if self.cursor.is_eof() {
                        let span = Span::new(start_pos, self.cursor.pos());
                        return Err(Error {
                            kind: ErrorKind::UnterminatedString,
                            span,
                        });
                    }

                    // Check for closing quote
                    if ch == quote {
                        self.cursor.advance();
                        found_closing_quote = true;
                        break;
                    }

                    // Check for line terminators (not allowed in strings)
                    if ch == b'\r' || ch == b'\n' {
                        let span = Span::new(start_pos, self.cursor.pos());
                        return Err(Error {
                            kind: ErrorKind::UnterminatedString,
                            span,
                        });
                    }

                    // Check for escape sequences
                    if ch == b'\\' {
                        self.cursor.advance();

                        // Get the next character
                        match self.cursor.peek() {
                            Some(b'x') => {
                                // Hexadecimal escape \xNN
                                self.cursor.advance();
                                let hex_value = self.read_hex_escape(2)? as u8;
                                buffer.push(hex_value);
                            }
                            Some(b'u') => {
                                // Unicode escape \uNNNN or \u{NNNNNN};
                                // re-encoded to UTF-8 before buffering
                                self.cursor.advance();
                                let code_point_char = self.read_unicode_escape()?;
                                let mut utf8_buf = [0u8; 4];
                                let utf8_str = code_point_char.encode_utf8(&mut utf8_buf);
                                buffer.extend_from_slice(utf8_str.as_bytes());
                            }
                            Some(escape_char @ 0..=127) => {
                                // Simple ASCII escape: look up the
                                // replacement byte (0 = "no mapping")
                                self.cursor.advance();
                                let replacement = ESCAPE_LOOKUP[escape_char as usize];

                                if replacement != 0 {
                                    buffer.push(replacement);
                                } else if (b'0'..=b'7').contains(&escape_char) {
                                    // Octal escape (legacy)
                                    buffer.push(self.read_octal_escape(escape_char)?);
                                } else {
                                    // Any other character is escaped as itself
                                    buffer.push(escape_char);
                                }
                            }
                            Some(ch) => {
                                // Any other escape sequence (bytes >= 128
                                // are pushed through unchanged)
                                self.cursor.advance();
                                buffer.push(ch);
                            }
                            None => {
                                // Unterminated escape sequence
                                let span = Span::new(start_pos, self.cursor.pos());
                                return Err(Error {
                                    kind: ErrorKind::UnterminatedString,
                                    span,
                                });
                            }
                        }
                    } else {
                        // Regular character
                        buffer.push(ch);
                        self.cursor.advance();
                    }
                }

                if !found_closing_quote {
                    let span = Span::new(start_pos, self.cursor.pos());
                    return Err(Error {
                        kind: ErrorKind::UnterminatedString,
                        span,
                    });
                }

                // Save the buffer in thread_local for reuse; it is read
                // back below when building the token's value
                STRING_BUFFER.with(|tls_buffer| {
                    let mut tls = tls_buffer.borrow_mut();
                    tls.clear();
                    tls.extend_from_slice(&buffer);
                });

                self.cursor.position()
            }
        };

        // Extract the raw string (including quotes)
        let raw_start = start_pos.0;
        let raw_end = self.cursor.position();
        let raw_bytes = self.cursor.slice(raw_start, raw_end);
        let raw_str = unsafe { std::str::from_utf8_unchecked(raw_bytes) };

        // Extract the string value if we used the fast path
        let string_value = if has_escapes {
            // Use the thread-local buffer for the string value
            STRING_BUFFER.with(|buffer| {
                let buffer = buffer.borrow();
                Atom::from(unsafe { std::str::from_utf8_unchecked(&buffer) })
            })
        } else {
            // Direct extraction (excluding quotes)
            let value_bytes = self.cursor.slice(raw_start + 1, raw_end - 1);
            Atom::from(unsafe { std::str::from_utf8_unchecked(value_bytes) })
        };

        // Create token
        let span = Span::new(start_pos, self.cursor.pos());
        Ok(Token::new(
            TokenType::Str,
            span,
            bool::from(self.had_line_break),
            TokenValue::Str {
                value: string_value,
                raw: Atom::from(raw_str),
            },
        ))
    }
/// Find the end of a string without processing escape sequences
#[inline]
fn find_string_end(&self, quote: u8) -> Option<u32> {
let pos = 0;
let rest = self.cursor.rest();
// Try the SIMD implementation first, falling back to standard if needed
self.find_string_end_simd(pos, rest, quote)
.or_else(|| self.find_string_end_standard(pos, rest, quote))
}
    /// SIMD-accelerated scan for the end of a string.
    ///
    /// Processes 16 bytes at a time looking for any byte that needs special
    /// handling (the quote, a backslash, or a line terminator). As soon as
    /// one is found, the scan is handed off to the scalar implementation at
    /// that offset. Returns `None` for inputs too small to vectorize so the
    /// caller falls back to the scalar scan.
    #[inline]
    fn find_string_end_simd(&self, start_pos: u32, rest: &[u8], quote: u8) -> Option<u32> {
        // Safety check for small inputs - process with standard method
        if rest.len() < 32 || start_pos >= rest.len() as u32 {
            return None;
        }

        let mut pos = start_pos;

        // Process in chunks of 16 bytes using SIMD
        while pos + 16 <= rest.len() as u32 {
            // Load 16 bytes
            let chunk_bytes = &rest[pos as usize..(pos + 16) as usize];
            let mut bytes = [0u8; 16];
            bytes.copy_from_slice(chunk_bytes);
            let chunk = u8x16::new(bytes);

            // Create vectors for quick comparison
            let quote_vec = u8x16::splat(quote);
            let backslash_vec = u8x16::splat(b'\\');
            let newline_vec = u8x16::splat(b'\n');
            let carriage_vec = u8x16::splat(b'\r');

            // Check for presence of special characters; each lane of a mask
            // is non-zero where the corresponding byte matched
            let quote_mask = chunk.cmp_eq(quote_vec);
            let backslash_mask = chunk.cmp_eq(backslash_vec);
            let newline_mask = chunk.cmp_eq(newline_vec);
            let carriage_mask = chunk.cmp_eq(carriage_vec);

            // Convert masks to arrays for checking
            let quote_arr = quote_mask.to_array();
            let backslash_arr = backslash_mask.to_array();
            let newline_arr = newline_mask.to_array();
            let carriage_arr = carriage_mask.to_array();

            // Check for any special character that requires detailed processing
            for i in 0..16 {
                if quote_arr[i] != 0
                    || backslash_arr[i] != 0
                    || newline_arr[i] != 0
                    || carriage_arr[i] != 0
                {
                    // We found a character that needs special handling
                    // Process from here using the standard algorithm
                    return self.find_string_end_standard(pos + i as u32, rest, quote);
                }
            }

            // If we get here, the chunk doesn't contain any special characters
            pos += 16;
        }

        // Process remainder with standard algorithm
        if pos < rest.len() as u32 {
            return self.find_string_end_standard(pos, rest, quote);
        }

        None
    }
/// Standard (non-SIMD) implementation of string end finding
#[inline]
fn find_string_end_standard(&self, start_pos: u32, rest: &[u8], quote: u8) -> Option<u32> {
let mut pos = start_pos;
let mut in_escape = false;
// Safety check for empty input
if rest.is_empty() || pos >= rest.len() as u32 {
return None;
}
while pos < rest.len() as u32 {
let ch = unsafe { *rest.get_unchecked(pos as usize) };
if in_escape {
// Skip the escaped character
in_escape = false;
pos += 1;
continue;
}
if ch == b'\\' {
// Mark that we're in an escape sequence
in_escape = true;
pos += 1;
// If we're at the end after a backslash, it's unterminated
if pos >= rest.len() as u32 {
return None;
}
} else if ch == quote {
// Found unescaped quote
return Some(pos);
} else if ch == b'\n' || ch == b'\r' {
// Line terminator in string is an error
return None;
} else {
pos += 1;
}
}
// String is unterminated
None
}
/// Read an octal escape sequence
#[inline]
fn read_octal_escape(&mut self, first: u8) -> Result<u8> {
let mut value = first - b'0';
// Read up to 2 more octal digits
for _ in 0..2 {
match self.cursor.peek() {
Some(c @ b'0'..=b'7') => {
// Ensure we don't overflow u8
let next_value = (value as u16) * 8 + (c - b'0') as u16;
if next_value > 255 {
break;
}
value = next_value as u8;
self.cursor.advance();
}
_ => break,
}
}
Ok(value)
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/template.rs | Rust | //! Template literals processing for the lexer
//!
//! This module handles the parsing of template literals in
//! ECMAScript/TypeScript.
use swc_atoms::Atom;
use super::Lexer;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
impl Lexer<'_> {
    /// Read one template-literal segment starting at the current position.
    ///
    /// Depending on what the cursor sits on, this returns:
    /// - `DollarLBrace` when the segment starts with "${" (switching the
    ///   lexer into template-expression mode),
    /// - `BackQuote` when it starts with the closing backtick (leaving
    ///   template mode),
    /// - otherwise a `Template` token holding the raw text and, when every
    ///   escape was valid, the cooked (escape-decoded) text.
    pub(super) fn read_template_content(&mut self, had_line_break: bool) -> Result<Token> {
        let start_pos = self.start_pos;
        let start_idx = start_pos.0;

        // If it starts with "${", return a DollarLBrace token
        if self.cursor.peek_at(0) == Some(b'$') && self.cursor.peek_at(1) == Some(b'{') {
            self.cursor.advance_n(2);

            // We are now expecting normal javascript syntax
            self.in_template_expr = true;

            return Ok(Token::new(
                TokenType::DollarLBrace,
                self.span(),
                had_line_break,
                TokenValue::None,
            ));
        }

        if self.cursor.peek() == Some(b'`') {
            self.cursor.advance();
            self.in_template = false;
            self.in_template_expr = false;

            return Ok(Token::new(
                TokenType::BackQuote,
                self.span(),
                had_line_break,
                TokenValue::None,
            ));
        }

        // Buffer for the processed template value (with escapes handled)
        let mut value = String::new();

        // Track if we've seen an escape sequence (disables the batched
        // ASCII fast path below)
        let mut has_escapes = false;

        // Flag to indicate if the template was invalid (invalid escapes
        // yield a token with `cooked: None` rather than an error)
        let mut is_invalid = false;

        // Flag to indicate if we found a "${" sequence
        let mut found_dollar_brace = false;

        // Read until the closing backtick or ${
        loop {
            match self.cursor.peek() {
                // End of template
                Some(b'`') => {
                    break;
                }

                // Start of template expression
                Some(b'$') => {
                    if self.cursor.peek_at(1) == Some(b'{') {
                        // We found a "${" - mark the flag and break the loop
                        found_dollar_brace = true;
                        // Don't consume the characters yet: the next call to
                        // this function will emit the DollarLBrace token
                        break;
                    } else {
                        // Just a regular $ character
                        value.push('$');
                        self.cursor.advance();
                    }
                }

                // End of file (unterminated template)
                None => {
                    let span = self.span();
                    return Err(Error {
                        kind: ErrorKind::InvalidTemplate {
                            reason: "Unterminated template literal (eof)",
                        },
                        span,
                    });
                }

                // Escape sequence
                Some(b'\\') => {
                    has_escapes = true;
                    self.cursor.advance();

                    // Process escape sequence
                    match self.cursor.peek() {
                        // Common escape sequences
                        Some(b'n') => {
                            value.push('\n');
                            self.cursor.advance();
                        }
                        Some(b'r') => {
                            value.push('\r');
                            self.cursor.advance();
                        }
                        Some(b't') => {
                            value.push('\t');
                            self.cursor.advance();
                        }
                        Some(b'b') => {
                            value.push('\u{0008}');
                            self.cursor.advance();
                        }
                        Some(b'f') => {
                            value.push('\u{000C}');
                            self.cursor.advance();
                        }
                        Some(b'v') => {
                            value.push('\u{000B}');
                            self.cursor.advance();
                        }
                        Some(b'0') => {
                            // Null character (not followed by another digit)
                            if !matches!(self.cursor.peek_at(1), Some(b'0'..=b'9')) {
                                value.push('\0');
                                self.cursor.advance();
                            } else {
                                // Invalid octal in template: templates do
                                // not allow legacy octal escapes
                                is_invalid = true;
                                value.push('0');
                                self.cursor.advance();
                            }
                        }

                        // Hexadecimal escape (\xHH)
                        Some(b'x') => {
                            self.cursor.advance();
                            match self.read_hex_escape(2) {
                                Ok(hex_val) => {
                                    value.push(std::char::from_u32(hex_val).unwrap_or('\u{FFFD}'));
                                }
                                Err(_) => {
                                    // Invalid escape, but we continue with template
                                    is_invalid = true;
                                    value.push_str("\\x");
                                }
                            }
                        }

                        // Unicode escape (\uHHHH)
                        Some(b'u') => {
                            self.cursor.advance();
                            match self.read_unicode_escape() {
                                Ok(ch) => {
                                    value.push(ch);
                                }
                                Err(_) => {
                                    // Invalid escape, but we continue with template
                                    is_invalid = true;
                                    value.push_str("\\u");
                                }
                            }
                        }

                        // Line continuation
                        Some(b'\r') => {
                            self.cursor.advance();

                            // Skip CRLF
                            if self.cursor.peek() == Some(b'\n') {
                                self.cursor.advance();
                            }
                            // Line continuation, no character added
                        }
                        Some(b'\n') => {
                            self.cursor.advance();
                            // Line continuation, no character added
                        }

                        // Any other character escaped just represents itself
                        Some(ch) => {
                            // In templates, \ before character that doesn't need escaping
                            // is preserved in the cooked value for standard escapes
                            if matches!(ch, b'`' | b'\\' | b'$') {
                                value.push(ch as char);
                            } else {
                                // Raw character for non-standard escapes.
                                // NOTE(review): `ch` is a raw byte; for
                                // bytes >= 0x80 `ch as char` yields the
                                // Latin-1 reading, which would corrupt a
                                // multi-byte UTF-8 sequence — confirm the
                                // inputs reaching here are ASCII-only.
                                value.push('\\');
                                value.push(ch as char);
                            }
                            self.cursor.advance();
                        }

                        // EOF after backslash
                        None => {
                            let span = self.span();
                            return Err(Error {
                                kind: ErrorKind::InvalidTemplate {
                                    reason: "Unterminated template literal",
                                },
                                span,
                            });
                        }
                    }
                }

                // Line breaks are allowed in templates
                Some(b'\n') => {
                    value.push('\n');
                    self.cursor.advance();
                }
                Some(b'\r') => {
                    value.push('\r');
                    self.cursor.advance();

                    // Skip CRLF
                    if self.cursor.peek() == Some(b'\n') {
                        value.push('\n');
                        self.cursor.advance();
                    }
                }

                // Regular character
                Some(ch) => {
                    // For performance reasons, we'll read a batch of regular characters
                    if !has_escapes && ch < 128 {
                        // Fast path for ASCII characters: consume a run of
                        // bytes with no special meaning in one go
                        let start = self.cursor.position();
                        self.cursor.advance_while(|c| {
                            c != b'`'
                                && c != b'\\'
                                && c != b'$'
                                && c != b'\n'
                                && c != b'\r'
                                && c < 128
                        });

                        // Add all these characters at once
                        let end = self.cursor.position();
                        if end > start {
                            let slice = self.cursor.slice(start, end);
                            // SAFETY: every byte in the batch is ASCII
                            // (enforced by the predicate above)
                            value.push_str(unsafe { std::str::from_utf8_unchecked(slice) });
                        }
                    } else {
                        // Slow path for non-ASCII or after an escape.
                        // NOTE(review): pushing a single byte as `char`
                        // mangles non-ASCII (multi-byte UTF-8) content —
                        // confirm template bodies are ASCII here or decode
                        // whole code points instead.
                        value.push(ch as char);
                        self.cursor.advance();
                    }
                }
            }
        }

        // Extract the raw template text, from the token start up to (but
        // not including) the terminating '`' or "${"
        let end_idx = self.cursor.position();
        let raw_bytes = self.cursor.slice(start_idx, end_idx);
        let raw_str = unsafe { std::str::from_utf8_unchecked(raw_bytes) };

        let span = self.span();

        // If we found a "${", return the content before "${"
        if found_dollar_brace {
            return Ok(Token::new(
                TokenType::Template,
                span,
                had_line_break,
                TokenValue::Template {
                    raw: Atom::from(raw_str),
                    cooked: if is_invalid {
                        None
                    } else {
                        Some(Atom::from(value))
                    },
                },
            ));
        }

        // Determine the token type for a regular template
        Ok(Token::new(
            TokenType::Template,
            span,
            had_line_break,
            if is_invalid {
                TokenValue::Template {
                    raw: Atom::from(raw_str),
                    cooked: None, // No cooked value for invalid templates
                }
            } else {
                TokenValue::Template {
                    raw: Atom::from(raw_str),
                    cooked: Some(Atom::from(value)),
                }
            },
        ))
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lexer/tests.rs | Rust | use swc_atoms::Atom;
use crate::{
lexer::Lexer,
token::{TokenType, TokenValue},
JscTarget, Syntax,
};
/// Utility function to verify lexer tokens.
///
/// Feeds `input` through a fresh `Lexer` (targeting ES2020 with default
/// syntax options) and asserts that the emitted token stream matches
/// `expected_tokens` in order. Each entry is a `(type, value)` pair; a `None`
/// value means "don't check this token's payload". After all expected tokens
/// are consumed, the lexer must produce exactly one EOF token.
///
/// Panics (via `assert_eq!` / `panic!`) on the first mismatch, failing the
/// calling test.
fn verify_tokens(input: &str, expected_tokens: Vec<(TokenType, Option<TokenValue>)>) {
    // Create a new lexer
    let mut lexer = Lexer::new(input, JscTarget::Es2020, Syntax::default(), None);

    // Verify each token
    for (i, (expected_type, expected_value)) in expected_tokens.into_iter().enumerate() {
        let token = lexer.next_token().expect("Failed to get next token");

        assert_eq!(
            token.token_type, expected_type,
            "Token #{}: Expected token type {:?}, got {:?}",
            i, expected_type, token.token_type
        );

        // If an expected value is provided, verify it.
        // Payloads are compared variant-by-variant so the failure message can
        // show a domain-specific diff; mismatched variant pairs fall through
        // to the catch-all panic at the bottom of the match.
        if let Some(expected_value) = expected_value {
            match (&expected_value, &token.value) {
                (TokenValue::Word(expected), TokenValue::Word(actual)) => {
                    assert_eq!(
                        expected.as_ref(),
                        actual.as_ref(),
                        "Token #{}: Expected word '{}', got '{}'",
                        i,
                        expected,
                        actual
                    );
                }
                // Numbers: only the parsed value is compared, the raw text is
                // intentionally ignored here.
                (
                    TokenValue::Num {
                        value: expected_val,
                        ..
                    },
                    TokenValue::Num {
                        value: actual_val, ..
                    },
                ) => {
                    assert_eq!(
                        *expected_val, *actual_val,
                        "Token #{}: Expected number {}, got {}",
                        i, expected_val, actual_val
                    );
                }
                // Strings: only the cooked value is compared, not the raw
                // quoted source text.
                (
                    TokenValue::Str {
                        value: expected_val,
                        ..
                    },
                    TokenValue::Str {
                        value: actual_val, ..
                    },
                ) => {
                    assert_eq!(
                        expected_val.as_ref(),
                        actual_val.as_ref(),
                        "Token #{}: Expected string '{}', got '{}'",
                        i,
                        expected_val,
                        actual_val
                    );
                }
                // Regexes: both the pattern and the flag string must match.
                (
                    TokenValue::Regex {
                        exp: expected_exp,
                        flags: expected_flags,
                    },
                    TokenValue::Regex {
                        exp: actual_exp,
                        flags: actual_flags,
                    },
                ) => {
                    assert_eq!(
                        expected_exp.as_ref(),
                        actual_exp.as_ref(),
                        "Token #{}: Expected regex pattern '{}', got '{}'",
                        i,
                        expected_exp,
                        actual_exp
                    );
                    assert_eq!(
                        expected_flags.as_ref(),
                        actual_flags.as_ref(),
                        "Token #{}: Expected regex flags '{}', got '{}'",
                        i,
                        expected_flags,
                        actual_flags
                    );
                }
                (
                    TokenValue::BigInt {
                        value: expected_val,
                        ..
                    },
                    TokenValue::BigInt {
                        value: actual_val, ..
                    },
                ) => {
                    assert_eq!(
                        expected_val.as_ref(),
                        actual_val.as_ref(),
                        "Token #{}: Expected bigint {}, got {}",
                        i,
                        expected_val,
                        actual_val
                    );
                }
                // Templates: the raw text must match, and the cooked values
                // must either both be present and equal, or both be absent
                // (absent = invalid escape sequence in the template).
                (
                    TokenValue::Template {
                        raw: expected_raw,
                        cooked: expected_cooked,
                    },
                    TokenValue::Template {
                        raw: actual_raw,
                        cooked: actual_cooked,
                    },
                ) => {
                    assert_eq!(
                        expected_raw.as_ref(),
                        actual_raw.as_ref(),
                        "Token #{}: Expected template raw '{}', got '{}'",
                        i,
                        expected_raw,
                        actual_raw
                    );
                    match (&expected_cooked, &actual_cooked) {
                        (Some(expected), Some(actual)) => {
                            assert_eq!(
                                expected.as_ref(),
                                actual.as_ref(),
                                "Token #{}: Expected template cooked '{}', got '{}'",
                                i,
                                expected,
                                actual
                            );
                        }
                        (None, None) => {
                            // Both are None - valid for invalid templates
                        }
                        _ => {
                            panic!(
                                "Token #{}: Template cooked value mismatch, expected: {:?}, got: \
                                 {:?}",
                                i, expected_cooked, actual_cooked
                            );
                        }
                    }
                }
                _ => panic!(
                    "Token #{}: Value type mismatch or unsupported value comparison\nexpected: \
                     {:?}\nactual: {:?}\ninput: {:?}",
                    i, expected_value, token.value, input
                ),
            }
        }
    }

    // Verify we've reached EOF: no extra tokens may follow the expected ones.
    let final_token = lexer.next_token().expect("Failed to get final token");
    assert_eq!(
        final_token.token_type,
        TokenType::EOF,
        "Expected final token to be EOF, got {:?}",
        final_token.token_type
    );
}
#[test]
fn test_lexer_variable_declaration() {
    // `const x = 42;` — keyword, identifier, assignment, numeric literal,
    // semicolon.
    verify_tokens(
        "const x = 42;",
        vec![
            (TokenType::Const, None),
            (TokenType::Ident, Some(TokenValue::Word(Atom::from("x")))),
            (TokenType::Eq, None),
            (
                TokenType::Num,
                Some(TokenValue::Num {
                    value: 42.0,
                    raw: "42".into(),
                }),
            ),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_function_declaration() {
    // `function add(a, b) { return a + b; }` — declaration plus a binary
    // expression in the body.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "function add(a, b) { return a + b; }",
        vec![
            (TokenType::Function, None),
            ident("add"),
            (TokenType::LParen, None),
            ident("a"),
            (TokenType::Comma, None),
            ident("b"),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            (TokenType::Return, None),
            ident("a"),
            (TokenType::Plus, None),
            ident("b"),
            (TokenType::Semi, None),
            (TokenType::RBrace, None),
        ],
    );
}
#[test]
fn test_lexer_object_literal() {
    // Object literal mixing string, number and boolean property values.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    let string = |v: &str, r: &str| {
        (
            TokenType::Str,
            Some(TokenValue::Str {
                value: Atom::from(v),
                raw: r.into(),
            }),
        )
    };
    verify_tokens(
        "const obj = { name: 'John', age: 30, isActive: true };",
        vec![
            (TokenType::Const, None),
            ident("obj"),
            (TokenType::Eq, None),
            (TokenType::LBrace, None),
            ident("name"),
            (TokenType::Colon, None),
            string("John", "'John'"),
            (TokenType::Comma, None),
            ident("age"),
            (TokenType::Colon, None),
            (
                TokenType::Num,
                Some(TokenValue::Num {
                    value: 30.0,
                    raw: "30".into(),
                }),
            ),
            (TokenType::Comma, None),
            ident("isActive"),
            (TokenType::Colon, None),
            (TokenType::True, None),
            (TokenType::RBrace, None),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_array_literal() {
    // `[1, 'two', true, null, undefined]` — one element of each literal kind.
    verify_tokens(
        "const arr = [1, 'two', true, null, undefined];",
        vec![
            (TokenType::Const, None),
            (TokenType::Ident, Some(TokenValue::Word(Atom::from("arr")))),
            (TokenType::Eq, None),
            (TokenType::LBracket, None),
            (
                TokenType::Num,
                Some(TokenValue::Num {
                    value: 1.0,
                    raw: "1".into(),
                }),
            ),
            (TokenType::Comma, None),
            (
                TokenType::Str,
                Some(TokenValue::Str {
                    value: Atom::from("two"),
                    raw: "'two'".into(),
                }),
            ),
            (TokenType::Comma, None),
            (TokenType::True, None),
            (TokenType::Comma, None),
            (TokenType::Null, None),
            (TokenType::Comma, None),
            (TokenType::Undefined, None),
            (TokenType::RBracket, None),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_arrow_function() {
    // `(x, y) => x * y` — arrow token followed by a multiplication.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "const multiply = (x, y) => x * y;",
        vec![
            (TokenType::Const, None),
            ident("multiply"),
            (TokenType::Eq, None),
            (TokenType::LParen, None),
            ident("x"),
            (TokenType::Comma, None),
            ident("y"),
            (TokenType::RParen, None),
            (TokenType::Arrow, None),
            ident("x"),
            (TokenType::Asterisk, None),
            ident("y"),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_template_literal() {
    // Template literal with two `${}` interpolations. Per the ECMAScript
    // tokenization the lexer emits: BackQuote, then alternating Template
    // chunks and DollarLBrace/<expr>/RBrace groups, then a closing BackQuote.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    let chunk = |s: &str| {
        (
            TokenType::Template,
            Some(TokenValue::Template {
                raw: s.into(),
                cooked: Some(s.into()),
            }),
        )
    };
    verify_tokens(
        "const greeting = `Hello, ${name}! You have ${messages.length} messages.`;",
        vec![
            (TokenType::Const, None),
            ident("greeting"),
            (TokenType::Eq, None),
            (TokenType::BackQuote, None), // Opening backtick
            chunk("Hello, "),
            (TokenType::DollarLBrace, None), // Start of first expression
            ident("name"),
            (TokenType::RBrace, None), // End of first expression
            chunk("! You have "),
            (TokenType::DollarLBrace, None), // Start of second expression
            ident("messages"),
            (TokenType::Dot, None),
            ident("length"),
            (TokenType::RBrace, None), // End of second expression
            chunk(" messages."),
            (TokenType::BackQuote, None), // Closing backtick
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_conditional_operator() {
    // Ternary `cond ? a : b` with string branches.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    let string = |v: &str, r: &str| {
        (
            TokenType::Str,
            Some(TokenValue::Str {
                value: Atom::from(v),
                raw: r.into(),
            }),
        )
    };
    verify_tokens(
        "const result = isValid ? 'Valid' : 'Invalid';",
        vec![
            (TokenType::Const, None),
            ident("result"),
            (TokenType::Eq, None),
            ident("isValid"),
            (TokenType::QuestionMark, None),
            string("Valid", "'Valid'"),
            (TokenType::Colon, None),
            string("Invalid", "'Invalid'"),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_class_declaration() {
    // Class with a constructor; note `constructor` and `this` are dedicated
    // token types in this lexer, not plain identifiers.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "class Person { constructor(name) { this.name = name; } }",
        vec![
            (TokenType::Class, None),
            ident("Person"),
            (TokenType::LBrace, None),
            (TokenType::Constructor, None),
            (TokenType::LParen, None),
            ident("name"),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            (TokenType::This, None),
            (TokenType::Dot, None),
            ident("name"),
            (TokenType::Eq, None),
            ident("name"),
            (TokenType::Semi, None),
            (TokenType::RBrace, None),
            (TokenType::RBrace, None),
        ],
    );
}
#[test]
fn test_lexer_destructuring_assignment() {
    // Object destructuring (computed key + rest) followed by array
    // destructuring (rest).
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "const { name, age, [key]: value, ...rest } = person; const [first, second, ...others] = \
         items;",
        vec![
            // Object destructuring
            (TokenType::Const, None),
            (TokenType::LBrace, None),
            ident("name"),
            (TokenType::Comma, None),
            ident("age"),
            (TokenType::Comma, None),
            (TokenType::LBracket, None),
            ident("key"),
            (TokenType::RBracket, None),
            (TokenType::Colon, None),
            ident("value"),
            (TokenType::Comma, None),
            (TokenType::DotDotDot, None),
            ident("rest"),
            (TokenType::RBrace, None),
            (TokenType::Eq, None),
            ident("person"),
            (TokenType::Semi, None),
            // Array destructuring
            (TokenType::Const, None),
            (TokenType::LBracket, None),
            ident("first"),
            (TokenType::Comma, None),
            ident("second"),
            (TokenType::Comma, None),
            (TokenType::DotDotDot, None),
            ident("others"),
            (TokenType::RBracket, None),
            (TokenType::Eq, None),
            ident("items"),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_async_await() {
    // async/await through try/catch: Async, Await, Try and Catch are all
    // distinct token types.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "async function fetchData() { try { const response = await fetch(url); return await \
         response.json(); } catch (error) { console.error(error); } }",
        vec![
            // async function declaration
            (TokenType::Async, None),
            (TokenType::Function, None),
            ident("fetchData"),
            (TokenType::LParen, None),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            // try block
            (TokenType::Try, None),
            (TokenType::LBrace, None),
            // const response = await fetch(url);
            (TokenType::Const, None),
            ident("response"),
            (TokenType::Eq, None),
            (TokenType::Await, None),
            ident("fetch"),
            (TokenType::LParen, None),
            ident("url"),
            (TokenType::RParen, None),
            (TokenType::Semi, None),
            // return await response.json();
            (TokenType::Return, None),
            (TokenType::Await, None),
            ident("response"),
            (TokenType::Dot, None),
            ident("json"),
            (TokenType::LParen, None),
            (TokenType::RParen, None),
            (TokenType::Semi, None),
            (TokenType::RBrace, None),
            // catch (error) { console.error(error); }
            (TokenType::Catch, None),
            (TokenType::LParen, None),
            ident("error"),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            ident("console"),
            (TokenType::Dot, None),
            ident("error"),
            (TokenType::LParen, None),
            ident("error"),
            (TokenType::RParen, None),
            (TokenType::Semi, None),
            (TokenType::RBrace, None),
            (TokenType::RBrace, None),
        ],
    );
}
#[test]
fn test_lexer_spread_operator() {
    // Rest parameter in the signature plus spread arguments in a call — both
    // lex as DotDotDot.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "function sum(...numbers) { return Math.max(...numbers, ...moreNumbers); }",
        vec![
            (TokenType::Function, None),
            ident("sum"),
            (TokenType::LParen, None),
            (TokenType::DotDotDot, None),
            ident("numbers"),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            (TokenType::Return, None),
            ident("Math"),
            (TokenType::Dot, None),
            ident("max"),
            (TokenType::LParen, None),
            (TokenType::DotDotDot, None),
            ident("numbers"),
            (TokenType::Comma, None),
            (TokenType::DotDotDot, None),
            ident("moreNumbers"),
            (TokenType::RParen, None),
            (TokenType::Semi, None),
            (TokenType::RBrace, None),
        ],
    );
}
#[test]
fn test_lexer_for_of_loop() {
    // `for (const item of items)` — `of` gets its own token type.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "for (const item of items) { console.log(item); }",
        vec![
            (TokenType::For, None),
            (TokenType::LParen, None),
            (TokenType::Const, None),
            ident("item"),
            (TokenType::Of, None),
            ident("items"),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            ident("console"),
            (TokenType::Dot, None),
            ident("log"),
            (TokenType::LParen, None),
            ident("item"),
            (TokenType::RParen, None),
            (TokenType::Semi, None),
            (TokenType::RBrace, None),
        ],
    );
}
#[test]
fn test_lexer_import_statement() {
    // Default, namespace and named import forms in one source string.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    let module = || {
        (
            TokenType::Str,
            Some(TokenValue::Str {
                value: Atom::from("module"),
                raw: "'module'".into(),
            }),
        )
    };
    verify_tokens(
        "import defaultExport from 'module'; import * as name from 'module'; import { export1, \
         export2 as alias } from 'module';",
        vec![
            // Default import
            (TokenType::Import, None),
            ident("defaultExport"),
            (TokenType::From, None),
            module(),
            (TokenType::Semi, None),
            // Namespace import
            (TokenType::Import, None),
            (TokenType::Asterisk, None),
            (TokenType::As, None),
            ident("name"),
            (TokenType::From, None),
            module(),
            (TokenType::Semi, None),
            // Named imports
            (TokenType::Import, None),
            (TokenType::LBrace, None),
            ident("export1"),
            (TokenType::Comma, None),
            ident("export2"),
            (TokenType::As, None),
            ident("alias"),
            (TokenType::RBrace, None),
            (TokenType::From, None),
            module(),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_export_statement() {
    // Named declaration export, default export, export list, and re-export.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "export const value = 42; export default function() {}; export { name1, name2 as alias \
         }; export * from 'module';",
        vec![
            // export const value = 42;
            (TokenType::Export, None),
            (TokenType::Const, None),
            ident("value"),
            (TokenType::Eq, None),
            (
                TokenType::Num,
                Some(TokenValue::Num {
                    value: 42.0,
                    raw: "42".into(),
                }),
            ),
            (TokenType::Semi, None),
            // export default function() {};
            (TokenType::Export, None),
            (TokenType::Default, None),
            (TokenType::Function, None),
            (TokenType::LParen, None),
            (TokenType::RParen, None),
            (TokenType::LBrace, None),
            (TokenType::RBrace, None),
            (TokenType::Semi, None),
            // export { name1, name2 as alias };
            (TokenType::Export, None),
            (TokenType::LBrace, None),
            ident("name1"),
            (TokenType::Comma, None),
            ident("name2"),
            (TokenType::As, None),
            ident("alias"),
            (TokenType::RBrace, None),
            (TokenType::Semi, None),
            // export * from 'module';
            (TokenType::Export, None),
            (TokenType::Asterisk, None),
            (TokenType::From, None),
            (
                TokenType::Str,
                Some(TokenValue::Str {
                    value: Atom::from("module"),
                    raw: "'module'".into(),
                }),
            ),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_regular_expressions() {
    // Regex literals with no flags, a `g` flag, and an `i` flag.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    let regex = |exp: &str, flags: &str| {
        (
            TokenType::Regex,
            Some(TokenValue::Regex {
                exp: exp.into(),
                flags: flags.into(),
            }),
        )
    };
    verify_tokens(
        "const pattern1 = /[a-z]+/; const pattern2 = /\\d+/g; const pattern3 = /^test$/i;",
        vec![
            (TokenType::Const, None),
            ident("pattern1"),
            (TokenType::Eq, None),
            regex("[a-z]+", ""),
            (TokenType::Semi, None),
            (TokenType::Const, None),
            ident("pattern2"),
            (TokenType::Eq, None),
            regex("\\d+", "g"),
            (TokenType::Semi, None),
            (TokenType::Const, None),
            ident("pattern3"),
            (TokenType::Eq, None),
            regex("^test$", "i"),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_optional_chaining() {
    // `?.` chains across property access, a call, and a further property.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "const value = obj?.prop?.method?.()?.nested;",
        vec![
            (TokenType::Const, None),
            ident("value"),
            (TokenType::Eq, None),
            ident("obj"),
            (TokenType::OptionalChain, None),
            ident("prop"),
            (TokenType::OptionalChain, None),
            ident("method"),
            (TokenType::OptionalChain, None),
            (TokenType::LParen, None),
            (TokenType::RParen, None),
            (TokenType::OptionalChain, None),
            ident("nested"),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
fn test_lexer_nullish_coalescing() {
    // `??` between three identifiers.
    let ident = |s: &str| (TokenType::Ident, Some(TokenValue::Word(Atom::from(s))));
    verify_tokens(
        "const value = first ?? second ?? defaultValue;",
        vec![
            (TokenType::Const, None),
            ident("value"),
            (TokenType::Eq, None),
            ident("first"),
            (TokenType::NullishCoalescing, None),
            ident("second"),
            (TokenType::NullishCoalescing, None),
            ident("defaultValue"),
            (TokenType::Semi, None),
        ],
    );
}
#[test]
#[allow(clippy::excessive_precision)]
fn test_lexer_number_literals() {
    // A numeric literal source must lex to exactly one Num token whose parsed
    // value and raw text both match the source.
    let num = |src: &str, value: f64| {
        verify_tokens(
            src,
            vec![(
                TokenType::Num,
                Some(TokenValue::Num {
                    value,
                    raw: Atom::from(src),
                }),
            )],
        );
    };
    // A BigInt literal lexes to one BigInt token; its `raw` excludes the `n`
    // suffix.
    let bigint = |src: &str, value: i64| {
        verify_tokens(
            src,
            vec![(
                TokenType::BigInt,
                Some(TokenValue::BigInt {
                    value: Box::new(num_bigint::BigInt::from(value)),
                    raw: Atom::from(src.strip_suffix('n').unwrap()),
                }),
            )],
        );
    };

    // Decimal integers, with and without numeric separators.
    num("123", 123.0);
    num("1_000_000", 1000000.0);
    // Floating point, leading-dot form, and separators.
    num("123.456", 123.456);
    num(".456", 0.456);
    num("123_456.789_012", 123456.789012);
    // Exponential notation: plain, with separators, and signed exponents.
    num("1.23e10", 1.23e10);
    num("1_234.5_67e1_0", 1234.567e10);
    num("1.23e+10", 1.23e10);
    num("1.23e-10", 1.23e-10);
    // Binary, octal and hexadecimal radix prefixes, with separators.
    num("0b1010", 10.0);
    num("0b1010_1010", 170.0);
    num("0o755", 493.0);
    num("0o7_5_5", 493.0);
    num("0xABCD", 43981.0);
    num("0xA_BCD", 43981.0);
    // BigInt literals in every radix.
    bigint("123n", 123);
    bigint("1_234_567n", 1234567);
    bigint("0b1010n", 10);
    bigint("0o755n", 493);
    bigint("0xABCDn", 43981);
    // Extremes and zero.
    num("9007199254740991", 9007199254740991.0); // Max safe integer
    num("1.7976931348623157e+308", 1.7976931348623157e+308); // Close to max double
    num("0", 0.0);
}
#[test]
fn test_lexer_number_edge_cases() {
    let num = |src: &str, value: f64| {
        (
            TokenType::Num,
            Some(TokenValue::Num {
                value,
                raw: Atom::from(src),
            }),
        )
    };

    // A dot followed by a non-digit is a Dot token, not a number.
    verify_tokens(
        ".abc",
        vec![
            (TokenType::Dot, None),
            (TokenType::Ident, Some(TokenValue::Word(Atom::from("abc")))),
        ],
    );
    // `123..` lexes as a number followed by two separate dots.
    verify_tokens(
        "123..",
        vec![
            num("123", 123.0),
            (TokenType::Dot, None),
            (TokenType::Dot, None),
        ],
    );
    // Zero written with a leading decimal point.
    verify_tokens(".0", vec![num(".0", 0.0)]);
    // Leading zeros are accepted as decimal.
    verify_tokens("0123", vec![num("0123", 123.0)]);
    // Smallest positive double, in scientific notation.
    verify_tokens(
        "4.94065645841247e-324",
        vec![num("4.94065645841247e-324", 4.94065645841247e-324)],
    );
}
#[test]
#[should_panic]
fn test_lexer_invalid_binary_number() {
    // `0b` with no digits after the radix prefix must be rejected: the lexer
    // errors out inside `verify_tokens`, which panics before any comparison.
    verify_tokens(
        "0b",
        vec![(
            TokenType::Num,
            Some(TokenValue::Num {
                value: 0.0,
                raw: Atom::from("0b"),
            }),
        )],
    );
}
#[test]
#[should_panic]
fn test_lexer_invalid_octal_number() {
    // `0o` with no digits after the radix prefix must be rejected by the
    // lexer, so this call is expected to panic.
    verify_tokens(
        "0o",
        vec![(
            TokenType::Num,
            Some(TokenValue::Num {
                value: 0.0,
                raw: Atom::from("0o"),
            }),
        )],
    );
}
#[test]
#[should_panic]
fn test_lexer_invalid_hex_number() {
    // `0x` with no digits after the radix prefix must be rejected by the
    // lexer, so this call is expected to panic.
    verify_tokens(
        "0x",
        vec![(
            TokenType::Num,
            Some(TokenValue::Num {
                value: 0.0,
                raw: Atom::from("0x"),
            }),
        )],
    );
}
#[test]
#[should_panic]
fn test_lexer_invalid_bigint_with_decimal() {
    // A BigInt suffix on a fractional literal (`123.456n`) is illegal
    // JavaScript, so the lexer must reject it and this call panics.
    verify_tokens(
        "123.456n",
        vec![(
            TokenType::BigInt,
            Some(TokenValue::BigInt {
                value: Box::new(num_bigint::BigInt::from(123)),
                raw: Atom::from("123.456"),
            }),
        )],
    );
}
#[test]
#[should_panic]
fn test_lexer_invalid_exponent() {
    // `123e` has an exponent marker with no digits, which is a lexer error,
    // so this call is expected to panic.
    verify_tokens(
        "123e",
        vec![(
            TokenType::Num,
            Some(TokenValue::Num {
                value: 123.0,
                raw: Atom::from("123e"),
            }),
        )],
    );
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/lib.rs | Rust | //! High-performance ECMAScript/TypeScript parser
//!
//! This parser is designed for maximum performance and memory efficiency,
//! operating at the byte level for optimal throughput.
#![cfg_attr(feature = "nightly", allow(internal_features))]
#![cfg_attr(feature = "nightly", feature(core_intrinsics))]
mod error;
mod lexer;
// mod parser;
pub mod token;
mod util;
pub use error::{Error, ErrorKind, Result};
pub use lexer::Lexer;
// pub use parser::Parser;
// /// Parse source code into an ECMAScript/TypeScript AST
// pub fn parse_file(
// source_map: &SourceMap,
// handler: &Handler,
// fm: &swc_common::SourceFile,
// target: JscTarget,
// syntax: Syntax,
// is_module: bool,
// comments: Option<&mut SingleThreadedComments>,
// ) -> Result<Program> {
// let lexer = Lexer::new(fm.src.as_ref(), target, syntax,
// comments.clone()); let mut parser = Parser::new(lexer, handler, syntax);
// if is_module {
// parser.parse_module()
// } else {
// parser.parse_script()
// }
// }
/// Target ECMAScript version
///
/// Passed to [`Lexer::new`]; variants are ordered from oldest (`Es3`) to
/// newest (`EsNext`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum JscTarget {
    Es3,
    Es5,
    Es2015,
    Es2016,
    Es2017,
    Es2018,
    Es2019,
    Es2020,
    Es2021,
    Es2022,
    EsNext,
}
/// Syntax configuration for the parser
///
/// Construct via [`Syntax::default`] — which yields plain JavaScript with
/// dynamic import and private methods/fields enabled — and toggle individual
/// feature flags as needed.
#[derive(Debug, Clone, Copy)]
pub struct Syntax {
    /// Enable parsing of JSX syntax
    pub jsx: bool,

    /// Enable parsing of TypeScript syntax
    pub typescript: bool,

    /// Enable parsing of decorators
    pub decorators: bool,

    /// Enable parsing of dynamic imports
    pub dynamic_import: bool,

    /// Enable parsing of private methods
    pub private_methods: bool,

    /// Enable parsing of private fields
    pub private_fields: bool,
}
impl Default for Syntax {
fn default() -> Self {
Self {
jsx: false,
typescript: false,
decorators: false,
dynamic_import: true,
private_methods: true,
private_fields: true,
}
}
}
/// Single-threaded source comments storage
///
/// Placeholder type: the real comment-collection implementation is omitted
/// while the parser module is under construction (the commented-out
/// `parse_file` above shows where it would be threaded through the lexer).
#[derive(Debug, Default, Clone)]
pub struct SingleThreadedComments {
    // Comments implementation omitted for brevity
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/array.rs | Rust | //! Array expression parser implementation
//!
//! This module provides the implementation for parsing array expressions,
//! which are enclosed by square brackets and can contain multiple elements.
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{error::Result, token::TokenType};
impl<'a> Parser<'a> {
    /// Parse an array expression: [elem1, elem2, ...spread]
    ///
    /// Assumes the current token is `[`; consumes tokens through the matching
    /// `]` and returns an `ArrayLit`. Elisions (bare commas) become `None`
    /// elements, i.e. holes; a trailing comma is allowed and does not add a
    /// hole.
    pub(crate) fn parse_array_expression(&mut self) -> Result<ast::Expr> {
        let start_span = self.cur_token.span;
        self.expect(TokenType::LBracket)?; // Expect '['

        let mut elements = Vec::new();

        // Parse the elements
        while !self.is_token_type(TokenType::RBracket) {
            // Handle elision (hole): a bare comma records a `None` element.
            if self.is_token_type(TokenType::Comma) {
                elements.push(None);
                self.next_token(); // Skip ','
                continue;
            }

            // Check for spread element
            let is_spread = if self.is_token_type(TokenType::Ellipsis) {
                self.next_token(); // Skip '...'
                true
            } else {
                false
            };

            // Parse the element expression
            let expr = self.parse_assignment_expression()?;

            // Create the element
            let element = if is_spread {
                Some(ast::ExprOrSpread {
                    // NOTE(review): the '...' token has already been consumed,
                    // so this records the start of the spread *argument*, not
                    // of the '...' itself — confirm this matches what
                    // `ExprOrSpread::spread` expects in this AST revision.
                    spread: Some(expr.span().lo),
                    expr: Box::new(expr),
                })
            } else {
                Some(ast::ExprOrSpread {
                    spread: None,
                    expr: Box::new(expr),
                })
            };

            elements.push(element);

            // Check for comma or end of elements
            if self.is_token_type(TokenType::Comma) {
                self.next_token(); // Skip ','

                // Handle trailing comma: ']' right after ',' ends the list
                // without adding a hole.
                if self.is_token_type(TokenType::RBracket) {
                    break;
                }
            } else {
                break;
            }
        }

        let end_span = self.cur_token.span;
        self.expect(TokenType::RBracket)?; // Expect ']'

        // Create the array expression spanning '[' through ']'
        Ok(ast::Expr::Array(ast::ArrayLit {
            span: start_span.merge_with(end_span),
            elems: elements,
        }))
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/binary.rs | Rust | //! Binary expression parser implementation
//!
//! This module handles parsing of binary expressions like a + b, a * b, etc.
//! It uses the Pratt parsing algorithm for handling operator precedence.
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{error::Result, token::TokenType};
impl<'a> Parser<'a> {
/// Parse a binary expression with a given minimum precedence
pub(crate) fn parse_binary_expression(&mut self, min_precedence: u8) -> Result<ast::Expr> {
// Parse the left-hand side expression
let mut left = self.parse_unary_expression()?;
// Process operators with precedence >= min_precedence
loop {
let current_token = self.cur_token.token_type;
let precedence = self.get_binary_precedence(current_token);
// If the current token is not a binary operator or its precedence is lower
// than the minimum precedence, break out of the loop
if precedence == 0 || precedence < min_precedence {
break;
}
// Save the operator and its span
let op = self.token_to_binary_op(current_token);
let op_span = self.cur_token.span;
// Skip the operator token
self.next_token();
// Parse the right-hand side expression with a higher precedence
// to ensure correct associativity
let right = self.parse_binary_expression(precedence + 1)?;
// Create the binary expression
left = ast::Expr::Bin(ast::BinExpr {
span: left.span().merge_with(right.span()),
op,
left: Box::new(left),
right: Box::new(right),
});
}
Ok(left)
}
/// Get the precedence of a binary operator
fn get_binary_precedence(&self, token_type: TokenType) -> u8 {
match token_type {
// Multiplicative operators (*, /, %)
TokenType::Mul | TokenType::Div | TokenType::Mod => 13,
// Additive operators (+, -)
TokenType::Add | TokenType::Sub => 12,
// Bitwise shift operators (<<, >>, >>>)
TokenType::LShift | TokenType::RShift | TokenType::ZeroFillRShift => 11,
// Relational operators (<, >, <=, >=, instanceof, in)
TokenType::Lt
| TokenType::Gt
| TokenType::LtEq
| TokenType::GtEq
| TokenType::InstanceOf
| TokenType::In => 10,
// Equality operators (==, !=, ===, !==)
TokenType::EqEq | TokenType::NotEq | TokenType::EqEqEq | TokenType::NotEqEq => 9,
// Bitwise AND operator (&)
TokenType::BitAnd => 8,
// Bitwise XOR operator (^)
TokenType::BitXor => 7,
// Bitwise OR operator (|)
TokenType::BitOr => 6,
// Logical AND operator (&&)
TokenType::And => 5,
// Logical OR operator (||)
TokenType::Or => 4,
// Nullish coalescing operator (??)
TokenType::NullishCoalescing => 3,
// Not a binary operator
_ => 0,
}
}
}
impl<'a> Parser<'a> {
/// Convert a token type to a binary operator
fn token_to_binary_op(&self, token_type: TokenType) -> ast::BinaryOp {
match token_type {
TokenType::EqEq => ast::BinaryOp::EqEq,
TokenType::NotEq => ast::BinaryOp::NotEq,
TokenType::EqEqEq => ast::BinaryOp::EqEqEq,
TokenType::NotEqEq => ast::BinaryOp::NotEqEq,
TokenType::Lt => ast::BinaryOp::Lt,
TokenType::LtEq => ast::BinaryOp::LtEq,
TokenType::Gt => ast::BinaryOp::Gt,
TokenType::GtEq => ast::BinaryOp::GtEq,
TokenType::LShift => ast::BinaryOp::LShift,
TokenType::RShift => ast::BinaryOp::RShift,
TokenType::ZeroFillRShift => ast::BinaryOp::ZeroFillRShift,
TokenType::Add => ast::BinaryOp::Add,
TokenType::Sub => ast::BinaryOp::Sub,
TokenType::Mul => ast::BinaryOp::Mul,
TokenType::Div => ast::BinaryOp::Div,
TokenType::Mod => ast::BinaryOp::Mod,
TokenType::BitOr => ast::BinaryOp::BitOr,
TokenType::BitXor => ast::BinaryOp::BitXor,
TokenType::BitAnd => ast::BinaryOp::BitAnd,
TokenType::In => ast::BinaryOp::In,
TokenType::InstanceOf => ast::BinaryOp::InstanceOf,
TokenType::Exp => ast::BinaryOp::Exp,
TokenType::And => ast::BinaryOp::LogicalAnd,
TokenType::Or => ast::BinaryOp::LogicalOr,
TokenType::NullishCoalescing => ast::BinaryOp::NullishCoalescing,
_ => unreachable!("Not a binary operator: {:?}", token_type),
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/call.rs | Rust | //! Call expression parser implementation
//!
//! This module provides the implementation for parsing call expressions,
//! including function calls, constructor calls (new operator),
//! and optional chaining calls.
use swc_common::Span;
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::TokenType,
};
impl<'a> Parser<'a> {
    /// Parse a call expression: callee(arg1, arg2)
    ///
    /// The callee has already been parsed by the caller; the current token
    /// must be '('.
    pub(crate) fn parse_call_expression(&mut self, callee: ast::Expr) -> Result<ast::Expr> {
        self.expect(TokenType::LParen)?; // Expect '('
        // A call that follows an optional member access (a?.b(...)) is itself
        // marked optional.
        let optional = match &callee {
            ast::Expr::Member(member) => member.optional,
            _ => false,
        };
        // Parse the arguments
        let args = self.parse_arguments()?;
        let end_span = self.cur_token.span;
        self.expect(TokenType::RParen)?; // Expect ')'
        // Create the call expression; it spans from the callee through ')'.
        Ok(ast::Expr::Call(ast::CallExpr {
            span: callee.span().merge_with(end_span),
            callee: ast::Callee::Expr(Box::new(callee)),
            args,
            type_args: None,
            optional,
        }))
    }

    /// Parse a new expression: new Constructor(arg1, arg2)
    ///
    /// Also recognizes the `new.target` meta property.
    pub(crate) fn parse_new_expression(&mut self) -> Result<ast::Expr> {
        let start_span = self.cur_token.span;
        self.expect(TokenType::New)?; // Expect 'new'
        // Check for new.target
        if self.is_token_type(TokenType::Dot) {
            self.next_token(); // Skip '.'
            if self.is_token_identifier_eq("target") {
                self.next_token(); // Skip 'target'
                // Create the new.target meta property
                return Ok(ast::Expr::MetaProp(ast::MetaPropExpr {
                    span: start_span.merge_with(self.prev_token.span),
                    kind: ast::MetaPropKind::NewTarget,
                }));
            } else {
                return Err(self.error(ErrorKind::UnexpectedToken {
                    expected: Some("'target'"),
                    got: format!("{}", self.cur_token.token_type),
                }));
            }
        }
        // Parse the constructor expression.
        // NOTE(review): this parses a full left-hand-side expression, which
        // normally consumes call arguments itself; confirm that the callee
        // parse cannot swallow the `(...)` checked for below.
        let constructor = self.parse_left_hand_side_expression()?;
        // Parse the arguments if present
        let args = if self.is_token_type(TokenType::LParen) {
            self.next_token(); // Skip '('
            let args = self.parse_arguments()?;
            self.expect(TokenType::RParen)?; // Expect ')'
            args
        } else {
            // NOTE(review): argument-less `new Foo` still yields
            // `args: Some(vec![])` below, indistinguishable from `new Foo()` —
            // confirm whether downstream consumers need `None` here.
            Vec::new()
        };
        // Create the new expression; it ends at the last argument when there
        // is one, otherwise at the constructor itself.
        Ok(ast::Expr::New(ast::NewExpr {
            span: start_span.merge_with(match args.last() {
                Some(arg) => arg.expr.span(),
                None => constructor.span(),
            }),
            callee: Box::new(constructor),
            args: Some(args),
            type_args: None,
        }))
    }

    /// Parse arguments for a call expression: (arg1, arg2)
    ///
    /// Stops before the closing ')' (the caller consumes it). Supports
    /// spread arguments and a trailing comma.
    pub(crate) fn parse_arguments(&mut self) -> Result<Vec<ast::ExprOrSpread>> {
        let mut args = Vec::new();
        // Parse the arguments
        while !self.is_token_type(TokenType::RParen) {
            // Check for a spread argument, remembering the position of the
            // '...' token itself rather than the start of the spread
            // expression (the previous code recorded the expression start,
            // which points past the '...').
            let spread_pos = if self.is_token_type(TokenType::Ellipsis) {
                let dot3_span = self.cur_token.span;
                self.next_token(); // Skip '...'
                Some(dot3_span.lo)
            } else {
                None
            };
            // Parse the argument expression
            let expr = self.parse_assignment_expression()?;
            // Create the argument
            args.push(ast::ExprOrSpread {
                spread: spread_pos,
                expr: Box::new(expr),
            });
            // Check for comma or end of arguments
            if self.is_token_type(TokenType::Comma) {
                self.next_token(); // Skip ','
                // Handle trailing comma
                if self.is_token_type(TokenType::RParen) {
                    break;
                }
            } else {
                break;
            }
        }
        Ok(args)
    }
}
impl<'a> Parser<'a> {
/// Parse a chain of call expressions and member expressions
pub(crate) fn parse_call_chain(&mut self, callee: ast::Expr) -> Result<ast::Expr> {
let mut expr = callee;
loop {
match self.cur_token.token_type {
// Function call: expr(args)
TokenType::LParen => {
expr = self.parse_call_expression(expr)?;
}
// Member access: expr.prop
TokenType::Dot => {
self.next_token(); // Skip '.'
expr = self.parse_property_access(expr, false)?;
}
// Optional chaining: expr?.prop or expr?.(args)
TokenType::QuestionDot => {
self.next_token(); // Skip '?.'
// Check for property access, computed member, or call
match self.cur_token.token_type {
// Property access: expr?.prop
TokenType::Ident => {
expr = self.parse_property_access(expr, true)?;
}
// Computed member: expr?.[expr]
TokenType::LBracket => {
expr = self.parse_computed_member(expr, true)?;
}
// Call expression: expr?.(args)
TokenType::LParen => {
// Make the callee an optional member expression
if !matches!(expr, ast::Expr::Member(_)) {
// Convert to an optional member expression
expr = ast::Expr::Member(ast::MemberExpr {
span: expr.span(),
obj: Box::new(expr.clone()),
prop: ast::MemberProp::Ident(ast::Ident {
span: expr.span(),
sym: "".into(),
optional: false,
}),
computed: false,
optional: true,
});
}
expr = self.parse_call_expression(expr)?;
}
// Invalid expression
_ => {
return Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("identifier, '[', or '('"),
got: format!("{}", self.cur_token.token_type),
}));
}
}
}
// Computed member: expr[prop]
TokenType::LBracket => {
expr = self.parse_computed_member(expr, false)?;
}
// End of call chain
_ => {
break;
}
}
}
Ok(expr)
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/function.rs | Rust | //! Function expression parser implementation
//!
//! This module provides the implementation for parsing function expressions,
//! including normal functions, arrow functions, generator functions,
//! and async functions.
use swc_common::Span;
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::TokenType,
};
impl<'a> Parser<'a> {
/// Parse a function expression: function [name](params) { body }
pub(crate) fn parse_function_expression(
&mut self,
is_async: bool,
is_generator: bool,
) -> Result<ast::Expr> {
let start_span = self.cur_token.span;
self.expect(TokenType::Function)?; // Expect 'function'
// Check for generator function
let is_generator = if self.is_token_type(TokenType::Mul) {
self.next_token(); // Skip '*'
true
} else {
is_generator
};
// Parse the function name if present (optional)
let ident = if self.is_token_identifier() {
Some(self.parse_binding_identifier()?.id)
} else {
None
};
// Create a new scope for the function
self.enter_scope(super::super::ScopeKind::Function);
// Remember we're in a function
let prev_in_function = self.in_function;
let prev_in_generator = self.in_generator;
let prev_in_async = self.in_async;
self.in_function = true;
self.in_generator = is_generator;
self.in_async = is_async;
// Parse function parameters and body
let (params, body) = self.parse_function_params_and_body()?;
// Restore previous function state
self.in_function = prev_in_function;
self.in_generator = prev_in_generator;
self.in_async = prev_in_async;
// Exit the function scope
self.exit_scope();
// Create the function expression
Ok(ast::Expr::Fn(ast::FnExpr {
ident,
function: ast::Function {
params,
decorators: Vec::new(),
span: start_span.merge_with(body.span),
body: Some(body),
is_generator,
is_async,
type_params: None,
return_type: None,
},
}))
}
/// Parse an arrow function: (param1, param2) => body
pub(crate) fn parse_arrow_function_expression(&mut self, is_async: bool) -> Result<ast::Expr> {
let start_span = self.cur_token.span;
// Create a new scope for the arrow function
self.enter_scope(super::super::ScopeKind::Function);
// Remember we're in a function
let prev_in_function = self.in_function;
let prev_in_async = self.in_async;
self.in_function = true;
self.in_async = is_async;
// Parse the parameters
let params = match self.cur_token.token_type {
// Single parameter without parentheses: param => body
TokenType::Ident => {
let binding_ident = self.parse_binding_identifier()?;
vec![ast::Param {
span: binding_ident.id.span,
decorators: Vec::new(),
pat: ast::Pat::Ident(binding_ident),
}]
}
// Parameters with parentheses: (param1, param2) => body
TokenType::LParen => {
self.next_token(); // Skip '('
let mut params = Vec::new();
if !self.is_token_type(TokenType::RParen) {
loop {
// Check for rest parameter
let is_rest = if self.is_token_type(TokenType::Ellipsis) {
self.next_token(); // Skip '...'
true
} else {
false
};
// Parse the parameter pattern
let pat = self.parse_binding_pattern()?;
// Create the parameter
let param = if is_rest {
ast::Param {
span: pat.span(),
decorators: Vec::new(),
pat: ast::Pat::Rest(ast::RestPat {
span: pat.span(),
arg: Box::new(pat),
type_ann: None,
}),
}
} else {
ast::Param {
span: pat.span(),
decorators: Vec::new(),
pat,
}
};
params.push(param);
// Rest parameter must be the last parameter
if is_rest {
if !self.is_token_type(TokenType::RParen) {
return Err(self.error(ErrorKind::General {
message: "Rest parameter must be the last parameter".into(),
}));
}
break;
}
// Check for comma or end of parameters
if self.is_token_type(TokenType::Comma) {
self.next_token(); // Skip ','
// Handle trailing comma
if self.is_token_type(TokenType::RParen) {
break;
}
} else {
break;
}
}
}
self.expect(TokenType::RParen)?; // Expect ')'
params
}
// Invalid parameter
_ => {
return Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("identifier or parameter list"),
got: format!("{}", self.cur_token.token_type),
}));
}
};
// Parse the arrow token
self.expect(TokenType::Arrow)?; // Expect '=>'
// Parse the arrow function body
let (body, span) = match self.cur_token.token_type {
// Block body: => { statements }
TokenType::LBrace => {
let block = self.parse_block_stmt()?;
let body = ast::BlockStmtOrExpr::BlockStmt(block.clone());
(body, block.span)
}
// Expression body: => expression
_ => {
let expr = self.parse_assignment_expression()?;
let span = expr.span();
let body = ast::BlockStmtOrExpr::Expr(Box::new(expr));
(body, span)
}
};
// Restore previous function state
self.in_function = prev_in_function;
self.in_async = prev_in_async;
// Exit the arrow function scope
self.exit_scope();
// Create the arrow function expression
Ok(ast::Expr::Arrow(ast::ArrowExpr {
span: start_span.merge_with(span),
params,
body,
is_async,
is_generator: false, // Arrow functions cannot be generators
return_type: None,
type_params: None,
}))
}
/// Try to parse an arrow function starting from an identifier
fn try_parse_arrow_function_from_ident(
&mut self,
ident: ast::Ident,
is_async: bool,
) -> Result<Option<ast::Expr>> {
// Check if the next token is an arrow
if !self.is_token_type(TokenType::Arrow) {
return Ok(None);
}
// We have an arrow, save state to restore if we fail
let state = self.save_state();
// Create a new scope for the arrow function
self.enter_scope(super::super::ScopeKind::Function);
// Remember we're in a function
let prev_in_function = self.in_function;
let prev_in_async = self.in_async;
self.in_function = true;
self.in_async = is_async;
// Create the parameter from the identifier
let binding_ident = ast::BindingIdent {
id: ident.clone(),
type_ann: None,
};
let params = vec![ast::Param {
span: ident.span,
decorators: Vec::new(),
pat: ast::Pat::Ident(binding_ident),
}];
self.next_token(); // Skip '=>'
// Parse the arrow function body
let (body, span) = match self.cur_token.token_type {
// Block body: => { statements }
TokenType::LBrace => {
match self.parse_block_stmt() {
Ok(block) => {
let body = ast::BlockStmtOrExpr::BlockStmt(block.clone());
(body, block.span)
}
Err(_) => {
// Restore state and exit early
self.restore_state(state);
self.in_function = prev_in_function;
self.in_async = prev_in_async;
self.exit_scope();
return Ok(None);
}
}
}
// Expression body: => expression
_ => {
match self.parse_assignment_expression() {
Ok(expr) => {
let span = expr.span();
let body = ast::BlockStmtOrExpr::Expr(Box::new(expr));
(body, span)
}
Err(_) => {
// Restore state and exit early
self.restore_state(state);
self.in_function = prev_in_function;
self.in_async = prev_in_async;
self.exit_scope();
return Ok(None);
}
}
}
};
// Restore previous function state
self.in_function = prev_in_function;
self.in_async = prev_in_async;
// Exit the arrow function scope
self.exit_scope();
// Create the arrow function expression
Ok(Some(ast::Expr::Arrow(ast::ArrowExpr {
span: ident.span.merge_with(span),
params,
body,
is_async,
is_generator: false, // Arrow functions cannot be generators
return_type: None,
type_params: None,
})))
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/member.rs | Rust | //! Member expression parser implementation
//!
//! This module provides the implementation for parsing member expressions,
//! including property access, computed member access, and optional chaining.
use swc_common::Span;
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::TokenType,
};
impl<'a> Parser<'a> {
    /// Parse a member expression: obj.prop, obj[expr], obj?.prop
    ///
    /// Repeatedly extends `object` while the upcoming token continues the
    /// member chain; the first token that does not ends the loop.
    pub(crate) fn parse_member_expression(&mut self, object: ast::Expr) -> Result<ast::Expr> {
        let mut current = object;
        loop {
            current = match self.cur_token.token_type {
                // Plain property access: obj.prop
                TokenType::Dot => {
                    self.next_token(); // Skip '.'
                    self.parse_property_access(current, false)?
                }
                // Computed member access: obj[expr]
                TokenType::LBracket => self.parse_computed_member(current, false)?,
                // Optional chaining: obj?.prop or obj?.[expr]
                TokenType::QuestionDot => {
                    self.next_token(); // Skip '?.'
                    if self.is_token_type(TokenType::Ident) {
                        self.parse_property_access(current, true)?
                    } else if self.is_token_type(TokenType::LBracket) {
                        self.parse_computed_member(current, true)?
                    } else {
                        return Err(self.error(ErrorKind::UnexpectedToken {
                            expected: Some("identifier or '['"),
                            got: format!("{}", self.cur_token.token_type),
                        }));
                    }
                }
                // Anything else ends the member chain.
                _ => return Ok(current),
            };
        }
    }

    /// Parse property access: obj.prop
    pub(crate) fn parse_property_access(
        &mut self,
        object: ast::Expr,
        optional: bool,
    ) -> Result<ast::Expr> {
        // The property name must be an identifier.
        if !self.is_token_identifier() {
            return Err(self.error(ErrorKind::UnexpectedToken {
                expected: Some("property name"),
                got: format!("{}", self.cur_token.token_type),
            }));
        }
        let name = self.parse_identifier_name()?;
        let span = object.span().merge_with(name.span);
        // Build the (possibly optional) member expression.
        Ok(ast::Expr::Member(ast::MemberExpr {
            span,
            obj: Box::new(object),
            prop: ast::MemberProp::Ident(name),
            computed: false,
            optional,
        }))
    }

    /// Parse computed member access: obj[expr]
    pub(crate) fn parse_computed_member(
        &mut self,
        object: ast::Expr,
        optional: bool,
    ) -> Result<ast::Expr> {
        let bracket_start = self.cur_token.span;
        self.expect(TokenType::LBracket)?; // Expect '['
        // The key expression between the brackets.
        let key = self.parse_expression()?;
        let bracket_end = self.cur_token.span;
        self.expect(TokenType::RBracket)?; // Expect ']'
        // Build the computed member expression; the property span covers the
        // brackets themselves.
        Ok(ast::Expr::Member(ast::MemberExpr {
            span: object.span().merge_with(bracket_end),
            obj: Box::new(object),
            prop: ast::MemberProp::Computed(ast::ComputedPropName {
                span: bracket_start.merge_with(bracket_end),
                expr: Box::new(key),
            }),
            computed: true,
            optional,
        }))
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/mod.rs | Rust | //! Expression parser module
//!
//! This module contains implementations for parsing JavaScript expressions.
use swc_ecma_ast as ast;
use super::Parser;
use crate::{
error::{ErrorKind, Result},
token::TokenType,
};
// Sub-modules
mod array;
mod binary;
mod call;
mod function;
mod member;
mod object;
mod primary;
mod unary;
impl<'a> Parser<'a> {
    /// Parse an expression (sequence expression)
    ///
    /// Parses one assignment expression and folds any comma-separated
    /// continuations into a single `SeqExpr`; a lone expression is returned
    /// unwrapped.
    pub(crate) fn parse_expression(&mut self) -> Result<ast::Expr> {
        // Start with an assignment expression
        let mut exprs = vec![self.parse_assignment_expression()?];
        // Check for comma operator (sequence expression)
        while self.is_token_type(TokenType::Comma) {
            self.next_token(); // Skip ','
            // Parse the next expression
            let expr = self.parse_assignment_expression()?;
            exprs.push(expr);
        }
        // If there's only one expression, return it directly
        // Otherwise, create a sequence expression
        if exprs.len() == 1 {
            Ok(exprs.remove(0))
        } else {
            // `exprs` holds at least two elements here, so first()/last()
            // cannot be None.
            let span = exprs
                .first()
                .unwrap()
                .span()
                .merge_with(exprs.last().unwrap().span());
            Ok(ast::Expr::Seq(ast::SeqExpr {
                span,
                exprs: exprs.into_iter().map(Box::new).collect(),
            }))
        }
    }
    /// Parse an assignment expression
    ///
    /// Order of attempts: (1) parenthesized arrow-function parameter list,
    /// (2) async arrow function, (3) single-identifier arrow function,
    /// (4) conditional expression optionally followed by an assignment
    /// operator. Failed arrow-function probes rewind the lexer and re-parse.
    pub(crate) fn parse_assignment_expression(&mut self) -> Result<ast::Expr> {
        // First check for arrow function with parenthesized parameters
        if self.is_token_type(TokenType::LParen) {
            let start = self.lexer.get_pos();
            let lparen_token = self.cur_token;
            // Try to parse as arrow function parameters
            let mut error_occurred = false;
            let mut params = Vec::new();
            self.next_token(); // Skip '('
            // Parse parameters
            if !self.is_token_type(TokenType::RParen) {
                loop {
                    // Check for rest parameter
                    let is_rest = if self.is_token_type(TokenType::Ellipsis) {
                        self.next_token(); // Skip '...'
                        true
                    } else {
                        false
                    };
                    // Try to parse as a pattern
                    match self.parse_binding_pattern() {
                        Ok(pattern) => {
                            if is_rest {
                                params.push(ast::Pat::Rest(ast::RestPat {
                                    span: pattern.span(),
                                    arg: Box::new(pattern),
                                    type_ann: None,
                                }));
                                // Rest parameter must be the last parameter
                                if !self.is_token_type(TokenType::RParen) {
                                    error_occurred = true;
                                    break;
                                }
                            } else {
                                params.push(pattern);
                            }
                        }
                        Err(_) => {
                            error_occurred = true;
                            break;
                        }
                    }
                    // Check for comma or end of parameters
                    if self.is_token_type(TokenType::Comma) {
                        self.next_token(); // Skip ','
                        // Handle trailing comma
                        if self.is_token_type(TokenType::RParen) {
                            break;
                        }
                    } else {
                        break;
                    }
                }
            }
            // If no error occurred and the next token is '=>', parse as arrow function
            if !error_occurred && self.is_token_type(TokenType::RParen) {
                self.next_token(); // Skip ')'
                if self.is_token_type(TokenType::Arrow) {
                    return self.parse_arrow_function_expression(false, params);
                }
            }
            // Not an arrow function, reset and continue as normal assignment
            // NOTE(review): `start` was captured while cur_token was '(' and
            // the '(' is restored before next_token() re-lexes — this relies
            // on get_pos()/reset_pos() bracketing exactly one token; confirm
            // against the lexer's contract that the '(' is not lost here.
            self.lexer.reset_pos(start);
            self.cur_token = lparen_token;
            self.next_token(); // Re-consume the token
        }
        // Check for async arrow function
        if self.is_token_type(TokenType::Async) && !self.cur_token.had_line_break {
            let start = self.lexer.get_pos();
            let async_token = self.cur_token;
            self.next_token(); // Skip 'async'
            // If the next token is '(', try to parse as arrow function parameters
            if self.is_token_type(TokenType::LParen) {
                let mut error_occurred = false;
                let mut params = Vec::new();
                self.next_token(); // Skip '('
                // Parse parameters
                if !self.is_token_type(TokenType::RParen) {
                    loop {
                        // Check for rest parameter
                        let is_rest = if self.is_token_type(TokenType::Ellipsis) {
                            self.next_token(); // Skip '...'
                            true
                        } else {
                            false
                        };
                        // Try to parse as a pattern
                        match self.parse_binding_pattern() {
                            Ok(pattern) => {
                                if is_rest {
                                    params.push(ast::Pat::Rest(ast::RestPat {
                                        span: pattern.span(),
                                        arg: Box::new(pattern),
                                        type_ann: None,
                                    }));
                                    // Rest parameter must be the last parameter
                                    if !self.is_token_type(TokenType::RParen) {
                                        error_occurred = true;
                                        break;
                                    }
                                } else {
                                    params.push(pattern);
                                }
                            }
                            Err(_) => {
                                error_occurred = true;
                                break;
                            }
                        }
                        // Check for comma or end of parameters
                        if self.is_token_type(TokenType::Comma) {
                            self.next_token(); // Skip ','
                            // Handle trailing comma
                            if self.is_token_type(TokenType::RParen) {
                                break;
                            }
                        } else {
                            break;
                        }
                    }
                }
                // If no error occurred and the next token is '=>', parse as async arrow
                // function
                if !error_occurred && self.is_token_type(TokenType::RParen) {
                    self.next_token(); // Skip ')'
                    if self.is_token_type(TokenType::Arrow) {
                        return self.parse_arrow_function_expression(true, params);
                    }
                }
            }
            // Check for async arrow function with single parameter
            else if self.is_token_identifier() {
                let ident = self.parse_identifier_name()?;
                if self.is_token_type(TokenType::Arrow) {
                    // Single parameter async arrow function
                    let params = vec![ast::Pat::Ident(ast::BindingIdent {
                        id: ident,
                        type_ann: None,
                    })];
                    return self.parse_arrow_function_expression(true, params);
                }
            }
            // Not an async arrow function, reset and continue as normal assignment
            // NOTE(review): same reset_pos/next_token assumption as the
            // parenthesized case above — confirm the 'async' token is
            // correctly re-established.
            self.lexer.reset_pos(start);
            self.cur_token = async_token;
            self.next_token(); // Re-consume the token
        }
        // Check for single-parameter arrow function
        if self.is_token_identifier() && self.peek_token().token_type == TokenType::Arrow {
            let ident = self.parse_identifier_name()?;
            // Single parameter arrow function
            let params = vec![ast::Pat::Ident(ast::BindingIdent {
                id: ident,
                type_ann: None,
            })];
            return self.parse_arrow_function_expression(false, params);
        }
        // Parse conditional expression
        let expr = self.parse_conditional_expression()?;
        // Check for assignment operators
        if self.is_token_type(TokenType::Assign)
            || self.is_token_type(TokenType::AddAssign)
            || self.is_token_type(TokenType::SubAssign)
            || self.is_token_type(TokenType::MulAssign)
            || self.is_token_type(TokenType::DivAssign)
            || self.is_token_type(TokenType::ModAssign)
            || self.is_token_type(TokenType::ExpAssign)
            || self.is_token_type(TokenType::BitAndAssign)
            || self.is_token_type(TokenType::BitOrAssign)
            || self.is_token_type(TokenType::BitXorAssign)
            || self.is_token_type(TokenType::LShiftAssign)
            || self.is_token_type(TokenType::RShiftAssign)
            || self.is_token_type(TokenType::ZeroFillRShiftAssign)
            || self.is_token_type(TokenType::NullishAssign)
            || self.is_token_type(TokenType::AndAssign)
            || self.is_token_type(TokenType::OrAssign)
        {
            // Assignment expression
            let op = match self.cur_token.token_type {
                TokenType::Assign => ast::AssignOp::Assign,
                TokenType::AddAssign => ast::AssignOp::AddAssign,
                TokenType::SubAssign => ast::AssignOp::SubAssign,
                TokenType::MulAssign => ast::AssignOp::MulAssign,
                TokenType::DivAssign => ast::AssignOp::DivAssign,
                TokenType::ModAssign => ast::AssignOp::ModAssign,
                TokenType::ExpAssign => ast::AssignOp::ExpAssign,
                TokenType::BitAndAssign => ast::AssignOp::BitAndAssign,
                TokenType::BitOrAssign => ast::AssignOp::BitOrAssign,
                TokenType::BitXorAssign => ast::AssignOp::BitXorAssign,
                TokenType::LShiftAssign => ast::AssignOp::LShiftAssign,
                TokenType::RShiftAssign => ast::AssignOp::RShiftAssign,
                TokenType::ZeroFillRShiftAssign => ast::AssignOp::ZeroFillRShiftAssign,
                TokenType::NullishAssign => ast::AssignOp::NullishAssign,
                TokenType::AndAssign => ast::AssignOp::AndAssign,
                TokenType::OrAssign => ast::AssignOp::OrAssign,
                _ => unreachable!("Not an assignment operator"),
            };
            self.next_token(); // Skip operator
            // Convert expression to pattern if possible
            // NOTE(review): `as_pat` is assumed to return Result — confirm
            // its failure cases cover everything that is not a valid
            // assignment target.
            let left = match expr.as_pat() {
                Ok(pat) => pat,
                Err(_) => {
                    return Err(self.error(ErrorKind::General {
                        message: "Invalid left-hand side in assignment".into(),
                    }));
                }
            };
            // Parse the right-hand side
            let right = self.parse_assignment_expression()?;
            // Create the assignment expression
            let span = left.span().merge_with(right.span());
            return Ok(ast::Expr::Assign(ast::AssignExpr {
                span,
                op,
                left,
                right: Box::new(right),
            }));
        }
        // Not an assignment, return the conditional expression
        Ok(expr)
    }
    /// Parse a conditional expression: test ? consequent : alternate
    pub(crate) fn parse_conditional_expression(&mut self) -> Result<ast::Expr> {
        // Parse binary expression first
        let expr = self.parse_binary_expression(0)?;
        // Check for conditional operator
        if self.is_token_type(TokenType::Question) {
            let test_span = expr.span();
            self.next_token(); // Skip '?'
            // Parse consequent expression
            let consequent = self.parse_assignment_expression()?;
            self.expect(TokenType::Colon)?; // Expect ':'
            // Parse alternate expression
            let alternate = self.parse_assignment_expression()?;
            // Create the conditional expression
            let span = test_span.merge_with(alternate.span());
            Ok(ast::Expr::Cond(ast::CondExpr {
                span,
                test: Box::new(expr),
                cons: Box::new(consequent),
                alt: Box::new(alternate),
            }))
        } else {
            // Not a conditional expression, return the binary expression
            Ok(expr)
        }
    }
    /// Parse a sequence expression: expr1, expr2, expr3
    ///
    /// NOTE(review): this duplicates the comma-folding logic of
    /// `parse_expression` above (differing only in the single-element case
    /// shape) — consider consolidating.
    pub(crate) fn parse_sequence_expression(&mut self) -> Result<ast::Expr> {
        // Start with an assignment expression
        let mut expr = self.parse_assignment_expression()?;
        // Check for comma operator (sequence expression)
        if self.is_token_type(TokenType::Comma) {
            let start_span = expr.span();
            let mut exprs = vec![expr];
            while self.is_token_type(TokenType::Comma) {
                self.next_token(); // Skip ','
                // Parse the next expression
                let expr = self.parse_assignment_expression()?;
                exprs.push(expr);
            }
            // Create the sequence expression
            let end_span = exprs.last().unwrap().span();
            expr = ast::Expr::Seq(ast::SeqExpr {
                span: start_span.merge_with(end_span),
                exprs: exprs.into_iter().map(Box::new).collect(),
            });
        }
        Ok(expr)
    }
    /// Parse a yield expression: yield [expr]
    pub(crate) fn parse_yield_expression(&mut self) -> Result<ast::Expr> {
        // Only allowed in generator functions
        if !self.in_generator {
            return Err(self.error(ErrorKind::General {
                message: "'yield' is only allowed in generator functions".into(),
            }));
        }
        let start_span = self.cur_token.span;
        self.expect(TokenType::Yield)?; // Expect 'yield'
        // Check for yield delegate (yield*)
        let delegate = if self.is_token_type(TokenType::Mul) {
            self.next_token(); // Skip '*'
            true
        } else {
            false
        };
        // Parse argument if needed: a yield argument is present unless ASI
        // applies or the next token clearly terminates the expression.
        let arg = if !self.can_insert_semicolon()
            && !self.is_token_type(TokenType::RBrace)
            && !self.is_token_type(TokenType::RParen)
            && !self.is_token_type(TokenType::RBracket)
            && !self.is_token_type(TokenType::Colon)
            && !self.is_token_type(TokenType::Comma)
        {
            Some(Box::new(self.parse_assignment_expression()?))
        } else {
            None
        };
        // Create the yield expression
        let span = start_span.merge_with(if let Some(ref arg) = arg {
            arg.span()
        } else {
            start_span
        });
        Ok(ast::Expr::Yield(ast::YieldExpr {
            span,
            arg,
            delegate,
        }))
    }
    /// Parse an arrow function expression: (params) => body
    ///
    /// The parameter list has already been parsed by the caller; the current
    /// token must be '=>'.
    ///
    /// NOTE(review): a method with this name but a different signature also
    /// exists in expr/function.rs, and this variant adds a `ctxt` field to
    /// `ArrowExpr` that the other omits — confirm which AST shape and which
    /// method is intended to survive.
    pub(crate) fn parse_arrow_function_expression(
        &mut self,
        is_async: bool,
        params: Vec<ast::Pat>,
    ) -> Result<ast::Expr> {
        self.expect(TokenType::Arrow)?; // Expect '=>'
        // Remember we're in a function
        let prev_in_function = self.in_function;
        self.in_function = true;
        // Remember async state
        let prev_in_async = self.in_async;
        self.in_async = is_async;
        // Create a new scope for the arrow function
        self.enter_scope(super::ScopeKind::Function);
        // Parse the function body
        // NOTE(review): `is_expression` is bound but never read afterwards.
        let (body, is_expression) = if self.is_token_type(TokenType::LBrace) {
            // Block body: () => { statements }
            let body_block = self.parse_block_stmt()?;
            (ast::BlockStmtOrExpr::BlockStmt(body_block), false)
        } else {
            // Expression body: () => expression
            let expr = self.parse_assignment_expression()?;
            (ast::BlockStmtOrExpr::Expr(Box::new(expr)), true)
        };
        // Exit the function scope
        self.exit_scope();
        // Restore previous function state
        self.in_function = prev_in_function;
        self.in_async = prev_in_async;
        // Create the arrow function expression; with no parameters the span
        // falls back to the previous token.
        let start_span = params
            .first()
            .map(|p| p.span())
            .unwrap_or_else(|| self.prev_token.span);
        let end_span = match &body {
            ast::BlockStmtOrExpr::BlockStmt(block) => block.span,
            ast::BlockStmtOrExpr::Expr(expr) => expr.span(),
        };
        Ok(ast::Expr::Arrow(ast::ArrowExpr {
            span: start_span.merge_with(end_span),
            params,
            body,
            is_async,
            is_generator: false,
            type_params: None,
            return_type: None,
            ctxt: Default::default(),
        }))
    }
    /// Parse a JSX expression (stub implementation)
    ///
    /// Always errors: either JSX is disabled, or parsing is unimplemented.
    pub(crate) fn parse_jsx_expression(&mut self) -> Result<ast::Expr> {
        // This is a stub implementation, actual JSX parsing would be more complex
        if !self.syntax.jsx {
            return Err(self.error(ErrorKind::General {
                message: "JSX syntax is not enabled".into(),
            }));
        }
        Err(self.error(ErrorKind::General {
            message: "JSX parsing is not fully implemented".into(),
        }))
    }
    /// Parse a TypeScript as expression: expr as Type
    ///
    /// `expr` has already been parsed; the current token must be the `as`
    /// contextual keyword.
    pub(crate) fn parse_ts_as_expression(&mut self, expr: ast::Expr) -> Result<ast::Expr> {
        if !self.syntax.typescript {
            return Err(self.error(ErrorKind::General {
                message: "TypeScript syntax is not enabled".into(),
            }));
        }
        // Expect 'as' keyword
        if !self.is_token_identifier_eq("as") {
            return Err(self.error(ErrorKind::UnexpectedToken {
                expected: Some("'as'"),
                got: format!("{}", self.cur_token.token_type),
            }));
        }
        self.next_token(); // Skip 'as'
        // Parse the type
        let type_ann = self.parse_ts_type()?;
        // Create the as expression
        let span = expr.span().merge_with(type_ann.span());
        Ok(ast::Expr::TsAs(ast::TsAsExpr {
            span,
            expr: Box::new(expr),
            type_ann: Box::new(type_ann),
        }))
    }
    /// Parse a TypeScript non-null expression: expr!
    pub(crate) fn parse_ts_non_null_expression(&mut self, expr: ast::Expr) -> Result<ast::Expr> {
        if !self.syntax.typescript {
            return Err(self.error(ErrorKind::General {
                message: "TypeScript syntax is not enabled".into(),
            }));
        }
        self.expect(TokenType::Bang)?; // Expect '!'
        // Create the non-null expression; it ends at the '!' token.
        let span = expr.span().merge_with(self.prev_token.span);
        Ok(ast::Expr::TsNonNull(ast::TsNonNullExpr {
            span,
            expr: Box::new(expr),
        }))
    }
    /// Parse a TypeScript type assertion: <Type>expr
    pub(crate) fn parse_ts_type_assertion(&mut self) -> Result<ast::Expr> {
        if !self.syntax.typescript {
            return Err(self.error(ErrorKind::General {
                message: "TypeScript syntax is not enabled".into(),
            }));
        }
        let start_span = self.cur_token.span;
        self.expect(TokenType::Lt)?; // Expect '<'
        // Parse the type
        let type_ann = self.parse_ts_type()?;
        self.expect(TokenType::Gt)?; // Expect '>'
        // Parse the expression
        let expr = self.parse_unary_expression()?;
        // Create the type assertion
        let span = start_span.merge_with(expr.span());
        Ok(ast::Expr::TsTypeAssertion(ast::TsTypeAssertion {
            span,
            expr: Box::new(expr),
            type_ann: Box::new(type_ann),
        }))
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/object.rs | Rust | //! Object expression parser implementation
//!
//! This module provides the implementation for parsing object expressions,
//! which are enclosed by curly braces and can contain multiple properties.
use swc_common::{Span, Spanned};
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
impl<'a> Parser<'a> {
/// Parse an object expression: { key: value, method() {}, ...spread }
pub(crate) fn parse_object_expression(&mut self) -> Result<ast::Expr> {
let start_span = self.cur_token.span;
self.expect(TokenType::LBrace)?; // Expect '{'
let mut properties = Vec::new();
// Parse the properties
while !self.is_token_type(TokenType::RBrace) {
// Parse the property
let prop = self.parse_object_property()?;
properties.push(prop);
// Check for comma or end of properties
if self.is_token_type(TokenType::Comma) {
self.next_token(); // Skip ','
// Handle trailing comma
if self.is_token_type(TokenType::RBrace) {
break;
}
} else {
break;
}
}
let end_span = self.cur_token.span;
self.expect(TokenType::RBrace)?; // Expect '}'
// Create the object expression
Ok(ast::Expr::Object(ast::ObjectLit {
span: start_span.merge_with(end_span),
props: properties,
}))
}
    /// Parse a single object-literal member.
    ///
    /// Handles, in order:
    /// - spread elements (`...expr`),
    /// - `async` / `*` method modifiers,
    /// - `get` / `set` accessor keywords,
    /// - identifier / string / number / computed (`[expr]`) keys,
    /// - shorthand properties (`{ key }`),
    /// - method definitions (`{ key() {} }`),
    /// - plain key/value pairs (`{ key: value }`).
    pub(crate) fn parse_object_property(&mut self) -> Result<ast::PropOrSpread> {
        // Check for spread element
        if self.is_token_type(TokenType::Ellipsis) {
            let start_span = self.cur_token.span;
            self.next_token(); // Skip '...'
            // Parse the spread argument
            let arg = self.parse_assignment_expression()?;
            // Create the spread element
            // NOTE(review): upstream swc_ecma_ast's `SpreadElement::dot3_token`
            // is a `Span`, but a `.lo` position is stored here — confirm this
            // matches the AST version in use.
            return Ok(ast::PropOrSpread::Spread(ast::SpreadElement {
                dot3_token: start_span.lo,
                expr: Box::new(arg),
            }));
        }
        // Check for async method: 'async' only acts as a modifier when it is
        // directly followed (no line break) by something that can start a
        // property key; otherwise it is treated as an ordinary key itself.
        let is_async = if self.is_token_type(TokenType::Async) && !self.peek_token().had_line_break
        {
            // Look ahead to determine if this is an async method
            match self.peek_token().token_type {
                TokenType::LBracket | TokenType::Ident | TokenType::Str | TokenType::Num => {
                    self.next_token(); // Skip 'async'
                    true
                }
                _ => false,
            }
        } else {
            false
        };
        // Check for generator method
        let is_generator = if self.is_token_type(TokenType::Mul) {
            self.next_token(); // Skip '*'
            true
        } else {
            false
        };
        // Check for getter or setter: like 'async', 'get'/'set' only count as
        // accessor keywords when followed on the same line by a property key.
        let method_kind = if self.is_token_identifier_eq("get") && !self.peek_token().had_line_break
        {
            // Look ahead to determine if this is a getter
            match self.peek_token().token_type {
                TokenType::LBracket | TokenType::Ident | TokenType::Str | TokenType::Num => {
                    self.next_token(); // Skip 'get'
                    ast::MethodKind::Getter
                }
                _ => ast::MethodKind::Method,
            }
        } else if self.is_token_identifier_eq("set") && !self.peek_token().had_line_break {
            // Look ahead to determine if this is a setter
            match self.peek_token().token_type {
                TokenType::LBracket | TokenType::Ident | TokenType::Str | TokenType::Num => {
                    self.next_token(); // Skip 'set'
                    ast::MethodKind::Setter
                }
                _ => ast::MethodKind::Method,
            }
        } else {
            ast::MethodKind::Method
        };
        // Parse the property key
        let key_span = self.cur_token.span;
        // NOTE(review): `is_computed` is written in the LBracket arm below but
        // never read afterwards — either use it or remove it.
        let mut is_computed = false;
        let key = match self.cur_token.token_type {
            // Identifier property
            TokenType::Ident => {
                let id = self.parse_identifier_name()?;
                // Check for shorthand property: { key } instead of { key: key }
                // Only legal when no modifier was consumed and the key is not
                // followed by ':' (key/value) or '(' (method).
                if !is_async
                    && !is_generator
                    && method_kind == ast::MethodKind::Method
                    && !self.is_token_type(TokenType::Colon)
                    && !self.is_token_type(TokenType::LParen)
                {
                    // Create the shorthand property
                    return Ok(ast::PropOrSpread::Prop(Box::new(ast::Prop::Shorthand(
                        ast::Ident {
                            span: id.span,
                            sym: id.sym,
                            optional: false,
                            ctxt: Default::default(),
                        },
                    ))));
                }
                ast::PropName::Ident(id)
            }
            // String property
            // NOTE(review): this matches `TokenValue::String(..)`, while other
            // parser files match `TokenValue::Str(..)` / `TokenValue::Str { .. }`
            // — confirm the canonical token-value shape.
            TokenType::Str => {
                let str_lit = match &self.cur_token.value {
                    TokenValue::String(s) => ast::Str {
                        span: self.cur_token.span,
                        value: s.clone().into(),
                        raw: None,
                    },
                    _ => unreachable!("Expected string literal"),
                };
                self.next_token(); // Skip string
                ast::PropName::Str(str_lit)
            }
            // Numeric property
            TokenType::Num => {
                let num_lit = match &self.cur_token.value {
                    TokenValue::Number(n) => ast::Number {
                        span: self.cur_token.span,
                        value: *n,
                        raw: None,
                    },
                    _ => unreachable!("Expected number literal"),
                };
                self.next_token(); // Skip number
                ast::PropName::Num(num_lit)
            }
            // Computed property: [expr]
            TokenType::LBracket => {
                let start_span = self.cur_token.span;
                self.next_token(); // Skip '['
                // Parse the computed key expression
                let expr = self.parse_assignment_expression()?;
                let end_span = self.cur_token.span;
                self.expect(TokenType::RBracket)?; // Expect ']'
                is_computed = true;
                ast::PropName::Computed(ast::ComputedPropName {
                    span: start_span.merge_with(end_span),
                    expr: Box::new(expr),
                })
            }
            // Invalid property key
            _ => {
                return Err(self.error(ErrorKind::UnexpectedToken {
                    expected: Some("property name"),
                    got: format!("{}", self.cur_token.token_type),
                }));
            }
        };
        // Check for method definition: { method() {} }
        if self.is_token_type(TokenType::LParen) {
            // Remember we're in a function: save the surrounding context so it
            // can be restored after the method body is parsed.
            let prev_in_function = self.in_function;
            let prev_in_generator = self.in_generator;
            let prev_in_async = self.in_async;
            self.in_function = true;
            self.in_generator = is_generator;
            self.in_async = is_async;
            // Create a new scope for the method
            self.enter_scope(super::super::ScopeKind::Function);
            self.next_token(); // Skip '('
            // Parse the parameters
            let mut params = Vec::new();
            if !self.is_token_type(TokenType::RParen) {
                loop {
                    // Check for rest parameter
                    let is_rest = if self.is_token_type(TokenType::Ellipsis) {
                        self.next_token(); // Skip '...'
                        true
                    } else {
                        false
                    };
                    // Parse the parameter pattern
                    let pat = self.parse_binding_pattern()?;
                    // Create the parameter; a rest parameter wraps the pattern
                    // in `Pat::Rest`.
                    let param = if is_rest {
                        ast::Param {
                            span: pat.span(),
                            decorators: Vec::new(),
                            pat: ast::Pat::Rest(ast::RestPat {
                                span: pat.span(),
                                arg: Box::new(pat),
                                type_ann: None,
                            }),
                        }
                    } else {
                        ast::Param {
                            span: pat.span(),
                            decorators: Vec::new(),
                            pat,
                        }
                    };
                    params.push(param);
                    // Rest parameter must be the last parameter
                    if is_rest {
                        if !self.is_token_type(TokenType::RParen) {
                            return Err(self.error(ErrorKind::General {
                                message: "Rest parameter must be the last parameter".into(),
                            }));
                        }
                        break;
                    }
                    // Check for comma or end of parameters
                    if self.is_token_type(TokenType::Comma) {
                        self.next_token(); // Skip ','
                        // Handle trailing comma
                        if self.is_token_type(TokenType::RParen) {
                            break;
                        }
                    } else {
                        break;
                    }
                }
            }
            self.expect(TokenType::RParen)?; // Expect ')'
            // Parse the method body
            self.expect(TokenType::LBrace)?; // Expect '{'
            let body = self.parse_block_stmt()?;
            // Exit the method scope
            self.exit_scope();
            // Restore previous function state
            self.in_function = prev_in_function;
            self.in_generator = prev_in_generator;
            self.in_async = prev_in_async;
            // Create the method definition
            let function = ast::Function {
                params,
                decorators: Vec::new(),
                span: key_span.merge_with(body.span),
                body: Some(body),
                is_generator,
                is_async,
                type_params: None,
                return_type: None,
                ctxt: Default::default(),
            };
            // NOTE(review): `MethodProp` here carries a `kind` field and an
            // unboxed `function`; upstream swc_ecma_ast boxes `function` and
            // models getters/setters as separate `Prop::Getter`/`Prop::Setter`
            // variants — confirm against the AST version in use.
            return Ok(ast::PropOrSpread::Prop(Box::new(ast::Prop::Method(
                ast::MethodProp {
                    key,
                    function,
                    kind: method_kind,
                },
            ))));
        }
        // Regular property: { key: value }
        self.expect(TokenType::Colon)?; // Expect ':'
        // Parse the property value
        let value = self.parse_assignment_expression()?;
        // Create the property
        Ok(ast::PropOrSpread::Prop(Box::new(ast::Prop::KeyValue(
            ast::KeyValueProp {
                key,
                value: Box::new(value),
            },
        ))))
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/primary.rs | Rust | //! Primary expression parser implementation
//!
//! This module handles parsing of the most basic expressions:
//! - Literals (string, number, boolean, null, regex)
//! - Identifiers
//! - This expressions
//! - Parenthesized expressions
//! - Template literals
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{ErrorKind, Result},
token::{TokenType, TokenValue},
};
impl<'a> Parser<'a> {
    /// Parse a primary expression (literal, identifier, this, parenthesized,
    /// etc.)
    ///
    /// This is the bottom of the expression grammar: the current token is
    /// dispatched by type to the matching specialized parser. Arm order
    /// matters for the guarded arms (`async`, JSX). Any token that cannot
    /// start an expression is an "expected expression" error.
    pub(crate) fn parse_primary_expression(&mut self) -> Result<ast::Expr> {
        match self.cur_token.token_type {
            // Literals
            TokenType::Str
            | TokenType::Num
            | TokenType::BigInt
            | TokenType::True
            | TokenType::False
            | TokenType::Null
            | TokenType::RegExp => self.parse_literal(),
            // Identifiers
            TokenType::Ident => self.parse_identifier_expression(),
            // This expression
            TokenType::This => self.parse_this_expression(),
            // Parenthesized expression
            TokenType::LParen => self.parse_parenthesized_expression(),
            // Array literal
            TokenType::LBracket => Ok(ast::Expr::Array(self.parse_array_expression()?)),
            // Object literal
            // NOTE(review): `parse_object_expression` (expr/object.rs) returns
            // `ast::Expr`, so wrapping it in `ast::Expr::Object` here will not
            // type-check — confirm which signature is intended.
            TokenType::LBrace => Ok(ast::Expr::Object(self.parse_object_expression()?)),
            // Function expression
            TokenType::Function => Ok(ast::Expr::Fn(self.parse_function_expression(false, false)?)),
            // Template literal (untagged; tagged templates are assembled
            // where the tag expression is parsed)
            TokenType::Template => self.parse_template_literal(None),
            // New expression or new.target
            TokenType::New => self.parse_new_expression(),
            // Async function or async arrow function; only treated as a
            // modifier when the line-break and lookahead checks pass
            TokenType::Async if !self.cur_token.had_line_break && self.is_async_function() => {
                self.next_token(); // Skip 'async'
                // Check if it's an async function expression
                if self.is_token_type(TokenType::Function) {
                    Ok(ast::Expr::Fn(self.parse_function_expression(true, false)?))
                } else {
                    // It's an async arrow function
                    Ok(ast::Expr::Arrow(
                        self.parse_arrow_function_expression(true)?,
                    ))
                }
            }
            // Class expression
            TokenType::Class => self.parse_class_expression(),
            // JSX fragment or element (only if JSX is enabled in the syntax
            // configuration)
            TokenType::JSXFragment if self.syntax.jsx => self.parse_jsx_fragment(),
            TokenType::JSXTagStart if self.syntax.jsx => self.parse_jsx_element(),
            // Super expression
            TokenType::Super => self.parse_super_expression(),
            // Unexpected token
            _ => Err(self.error(ErrorKind::UnexpectedToken {
                expected: Some("expression"),
                got: format!("{}", self.cur_token.token_type),
            })),
        }
    }
    /// Parse a literal expression (string, number, boolean, null, regex)
    ///
    /// The current token must already be one of the literal token types
    /// (callers dispatch on token type first); any other token is a caller
    /// bug and hits `unreachable!`.
    ///
    /// NOTE(review): another inherent `parse_literal` is defined in
    /// parser/mod.rs (using tuple-style `TokenValue` variants, unlike the
    /// struct-style variants matched here); Rust forbids duplicate inherent
    /// methods on one type, so only one of the two can be kept — confirm
    /// which is canonical.
    pub(crate) fn parse_literal(&mut self) -> Result<ast::Expr> {
        let span = self.cur_token.span;
        let expr = match self.cur_token.token_type {
            // String literal: keep both cooked value and raw source text
            TokenType::Str => {
                let (value, raw) = match &self.cur_token.value {
                    TokenValue::Str { value, raw } => (value.clone(), raw.clone()),
                    _ => unreachable!(),
                };
                ast::Expr::Lit(ast::Lit::Str(ast::Str {
                    span,
                    value,
                    raw: Some(raw),
                }))
            }
            // Number literal
            TokenType::Num => {
                let (value, raw) = match &self.cur_token.value {
                    TokenValue::Num { value, raw } => (*value, raw.clone()),
                    _ => unreachable!(),
                };
                ast::Expr::Lit(ast::Lit::Num(ast::Number {
                    span,
                    value,
                    raw: Some(raw),
                }))
            }
            // BigInt literal
            TokenType::BigInt => {
                let (value, raw) = match &self.cur_token.value {
                    TokenValue::BigInt { value, raw } => (value.clone(), raw.clone()),
                    _ => unreachable!(),
                };
                ast::Expr::Lit(ast::Lit::BigInt(ast::BigInt {
                    span,
                    value,
                    raw: Some(raw),
                }))
            }
            // Boolean literal
            TokenType::True => ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: true })),
            TokenType::False => ast::Expr::Lit(ast::Lit::Bool(ast::Bool { span, value: false })),
            // Null literal
            TokenType::Null => ast::Expr::Lit(ast::Lit::Null(ast::Null { span })),
            // RegExp literal
            TokenType::RegExp => {
                let (pattern, flags) = match &self.cur_token.value {
                    TokenValue::RegExp { pattern, flags } => (pattern.clone(), flags.clone()),
                    _ => unreachable!(),
                };
                ast::Expr::Lit(ast::Lit::Regex(ast::Regex {
                    span,
                    exp: pattern,
                    flags,
                }))
            }
            // Unreachable by construction: callers dispatch on token type
            _ => unreachable!(),
        };
        self.next_token(); // Skip the literal
        Ok(expr)
    }
/// Parse an identifier expression
pub(crate) fn parse_identifier_expression(&mut self) -> Result<ast::Expr> {
let ident = self.parse_identifier_name()?;
Ok(ast::Expr::Ident(ident))
}
/// Parse a this expression
pub(crate) fn parse_this_expression(&mut self) -> Result<ast::Expr> {
let span = self.cur_token.span;
self.next_token(); // Skip 'this'
Ok(ast::Expr::This(ast::ThisExpr { span }))
}
/// Parse a parenthesized expression
pub(crate) fn parse_parenthesized_expression(&mut self) -> Result<ast::Expr> {
let start_span = self.cur_token.span;
self.next_token(); // Skip '('
// Check for empty parentheses (should be an error)
if self.is_token_type(TokenType::RParen) {
return Err(self.error(ErrorKind::General {
message: "Empty parentheses are not allowed".into(),
}));
}
// Parse the expression inside the parentheses
let expr = self.parse_expression()?;
let end_span = self.cur_token.span;
self.expect(TokenType::RParen)?; // Expect ')'
// Wrap the expression in a ParenExpr node
Ok(ast::Expr::Paren(ast::ParenExpr {
span: start_span.merge_with(end_span),
expr: Box::new(expr),
}))
}
/// Parse a template literal
pub(crate) fn parse_template_literal(
&mut self,
tag: Option<Box<ast::Expr>>,
) -> Result<ast::Expr> {
let start_span = self.cur_token.span;
let is_tagged = tag.is_some();
// Process the template parts
let mut quasis = Vec::new();
let mut expressions = Vec::new();
// If it's a no-substitution template (just a single quasi)
if !self.cur_token.template_has_substitutions() {
// Extract the raw and cooked values
let (raw, cooked) = match &self.cur_token.value {
TokenValue::Template { raw, cooked } => (raw.clone(), cooked.clone()),
_ => unreachable!(),
};
// Create the template element
quasis.push(ast::TplElement {
span: self.cur_token.span,
tail: true,
cooked: Some(cooked),
raw,
});
self.next_token(); // Skip the template
} else {
// Template with substitutions
while !self.is_token_type(TokenType::EOF) {
// Extract the raw and cooked values
let (raw, cooked) = match &self.cur_token.value {
TokenValue::Template { raw, cooked } => (raw.clone(), cooked.clone()),
_ => unreachable!(),
};
// Is this the tail element?
let is_tail = !self.cur_token.template_has_substitutions();
// Create the template element
quasis.push(ast::TplElement {
span: self.cur_token.span,
tail: is_tail,
cooked: Some(cooked),
raw,
});
self.next_token(); // Skip the template part
// If it's the tail, we're done
if is_tail {
break;
}
// Parse the expression inside the template
let expr = self.parse_expression()?;
expressions.push(Box::new(expr));
// Expect the closing brace
if !self.is_token_type(TokenType::Template)
&& !self.is_token_type(TokenType::TemplateMiddle)
{
return Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("template continuation"),
got: format!("{}", self.cur_token.token_type),
}));
}
}
}
// Create the template literal
if let Some(tag) = tag {
// Tagged template literal
Ok(ast::Expr::TaggedTpl(ast::TaggedTpl {
span: start_span.merge_with(quasis.last().unwrap().span),
tag,
type_params: None,
tpl: ast::Tpl {
span: start_span.merge_with(quasis.last().unwrap().span),
exprs: expressions,
quasis,
},
}))
} else {
// Regular template literal
Ok(ast::Expr::Tpl(ast::Tpl {
span: start_span.merge_with(quasis.last().unwrap().span),
exprs: expressions,
quasis,
}))
}
}
}
// Additional methods that would be implemented elsewhere
impl<'a> Parser<'a> {
    // These methods will be implemented in other files; each stub panics
    // with `unimplemented!` if reached before its real implementation lands.
    /// Parse a `new` expression or `new.target`.
    pub(crate) fn parse_new_expression(&mut self) -> Result<ast::Expr> {
        // Will be implemented in call.rs
        unimplemented!()
    }
    /// Lookahead check: does the current `async` token begin an async
    /// function or async arrow function expression? (No tokens consumed.)
    fn is_async_function(&self) -> bool {
        // Helper method to check if it's an async function expression
        // Will be implemented in the parser module
        unimplemented!()
    }
    /// Parse a class expression.
    pub(crate) fn parse_class_expression(&mut self) -> Result<ast::Expr> {
        // Will be implemented in class.rs
        unimplemented!()
    }
    /// Parse a JSX fragment (`<>...</>`).
    pub(crate) fn parse_jsx_fragment(&mut self) -> Result<ast::Expr> {
        // Will be implemented in jsx.rs
        unimplemented!()
    }
    /// Parse a JSX element (`<tag ...>`).
    pub(crate) fn parse_jsx_element(&mut self) -> Result<ast::Expr> {
        // Will be implemented in jsx.rs
        unimplemented!()
    }
    /// Parse a `super` expression.
    pub(crate) fn parse_super_expression(&mut self) -> Result<ast::Expr> {
        // Will be implemented in call.rs or member.rs
        unimplemented!()
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/expr/unary.rs | Rust | //! Unary expression parser implementation
//!
//! This module provides the implementation for parsing unary expressions,
//! including prefix operators like !, -, +, typeof, void, delete,
//! and prefix/postfix increment and decrement operators (++, --).
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{ErrorKind, Result},
token::TokenType,
};
impl<'a> Parser<'a> {
/// Parse a unary expression: !expr, -expr, +expr, typeof expr, etc.
pub(crate) fn parse_unary_expression(&mut self) -> Result<ast::Expr> {
// Check for unary operators
match self.cur_token.token_type {
// Logical not: !expr
TokenType::Bang => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '!'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::Bang,
arg: Box::new(expr),
}))
}
// Unary minus: -expr
TokenType::Minus => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '-'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::Minus,
arg: Box::new(expr),
}))
}
// Unary plus: +expr
TokenType::Plus => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '+'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::Plus,
arg: Box::new(expr),
}))
}
// Bitwise not: ~expr
TokenType::Tilde => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '~'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::Tilde,
arg: Box::new(expr),
}))
}
// Typeof operator: typeof expr
TokenType::Typeof => {
let start_span = self.cur_token.span;
self.next_token(); // Skip 'typeof'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::TypeOf,
arg: Box::new(expr),
}))
}
// Void operator: void expr
TokenType::Void => {
let start_span = self.cur_token.span;
self.next_token(); // Skip 'void'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::Void,
arg: Box::new(expr),
}))
}
// Delete operator: delete expr
TokenType::Delete => {
let start_span = self.cur_token.span;
self.next_token(); // Skip 'delete'
// Delete operator is not allowed in strict mode for identifiers
if self.strict_mode && self.is_token_identifier() {
return Err(self.error(ErrorKind::General {
message: "Delete of an unqualified identifier in strict mode.".into(),
}));
}
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the unary expression
Ok(ast::Expr::Unary(ast::UnaryExpr {
span: start_span.merge_with(expr.span()),
op: ast::UnaryOp::Delete,
arg: Box::new(expr),
}))
}
// Update expressions: ++expr, --expr
TokenType::PlusPlus | TokenType::MinusMinus => self.parse_update_expression(),
// Await expression: await expr
TokenType::Await => self.parse_await_expression(),
// Not a unary expression
_ => {
// Try to parse as an update expression or a primary expression
self.parse_left_hand_side_expression()
}
}
}
/// Parse an update expression: ++expr, --expr, expr++, expr--
pub(crate) fn parse_update_expression(&mut self) -> Result<ast::Expr> {
// Check for prefix increment/decrement
match self.cur_token.token_type {
// Prefix increment: ++expr
TokenType::PlusPlus => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '++'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the update expression
Ok(ast::Expr::Update(ast::UpdateExpr {
span: start_span.merge_with(expr.span()),
op: ast::UpdateOp::PlusPlus,
prefix: true,
arg: Box::new(expr),
}))
}
// Prefix decrement: --expr
TokenType::MinusMinus => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '--'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the update expression
Ok(ast::Expr::Update(ast::UpdateExpr {
span: start_span.merge_with(expr.span()),
op: ast::UpdateOp::MinusMinus,
prefix: true,
arg: Box::new(expr),
}))
}
// Not a prefix update expression
_ => {
// Parse as a left-hand side expression
let expr = self.parse_left_hand_side_expression()?;
// Check for postfix increment/decrement
// No automatic semicolon insertion before ++ or --
if !self.had_line_break_before_current() {
match self.cur_token.token_type {
// Postfix increment: expr++
TokenType::PlusPlus => {
let end_span = self.cur_token.span;
self.next_token(); // Skip '++'
// Create the update expression
return Ok(ast::Expr::Update(ast::UpdateExpr {
span: expr.span().merge_with(end_span),
op: ast::UpdateOp::PlusPlus,
prefix: false,
arg: Box::new(expr),
}));
}
// Postfix decrement: expr--
TokenType::MinusMinus => {
let end_span = self.cur_token.span;
self.next_token(); // Skip '--'
// Create the update expression
return Ok(ast::Expr::Update(ast::UpdateExpr {
span: expr.span().merge_with(end_span),
op: ast::UpdateOp::MinusMinus,
prefix: false,
arg: Box::new(expr),
}));
}
// Not a postfix update expression
_ => {}
}
}
// Return the expression as is
Ok(expr)
}
}
}
/// Parse an await expression: await expr
pub(crate) fn parse_await_expression(&mut self) -> Result<ast::Expr> {
// Await is only allowed in async functions
if !self.in_async {
return Err(self.error(ErrorKind::General {
message: "'await' is only allowed within async functions and top level modules"
.into(),
}));
}
let start_span = self.cur_token.span;
self.expect(TokenType::Await)?; // Expect 'await'
// Parse the expression
let expr = self.parse_unary_expression()?;
// Create the await expression
Ok(ast::Expr::Await(ast::AwaitExpr {
span: start_span.merge_with(expr.span()),
arg: Box::new(expr),
}))
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/mod.rs | Rust | //! ECMAScript/TypeScript parser implementation
//!
//! This module provides the core parser implementation for ECMAScript and
//! TypeScript.
use std::collections::HashSet;
use swc_common::errors::Handler;
use swc_ecma_ast as ast;
use crate::{
error::{Error, ErrorKind, Result},
lexer::Lexer,
token::{Token, TokenType, TokenValue},
Syntax,
};
// Sub-modules
pub(crate) mod expr;
mod stmt;
/// Scope kind for keeping track of different kinds of scopes
///
/// Records which syntactic construct introduced a [`Scope`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ScopeKind {
    /// Global scope
    Global,
    /// Module scope
    Module,
    /// Script scope
    Script,
    /// Function scope
    Function,
    /// Class scope
    Class,
    /// Block scope
    Block,
    /// Catch clause scope
    Catch,
    /// Loop scope (`for`, `for-in` and `for-of` loops)
    For,
    /// Switch scope
    Switch,
}
/// Scope for tracking variables, labels, etc.
///
/// Scopes form a singly linked chain through `parent`; lookups such as
/// `has_label` walk the chain outward toward the root scope.
#[derive(Debug, Clone)]
pub(crate) struct Scope {
    /// Kind of scope (which construct introduced it)
    kind: ScopeKind,
    /// Set of labels declared directly in this scope
    labels: HashSet<String>,
    /// Enclosing scope, if any (`None` for the outermost scope)
    parent: Option<Box<Scope>>,
}
impl Scope {
/// Create a new scope
fn new(kind: ScopeKind, parent: Option<Box<Scope>>) -> Self {
Self {
kind,
labels: HashSet::new(),
parent,
}
}
/// Check if a label exists in this scope or any parent scope
fn has_label(&self, label: &str) -> bool {
if self.labels.contains(label) {
return true;
}
if let Some(ref parent) = self.parent {
return parent.has_label(label);
}
false
}
/// Add a label to this scope
fn add_label(&mut self, label: String) {
self.labels.insert(label);
}
}
/// ECMAScript/TypeScript parser
///
/// Owns the lexer and a one-token window (`cur_token`/`prev_token`), the
/// scope chain, and a set of boolean flags describing the current syntactic
/// context (e.g. `in_async` gates `await` expressions).
pub struct Parser<'a> {
    /// Lexer for tokenizing the input
    lexer: Lexer<'a>,
    /// Error handler (diagnostics are emitted here; parsing continues)
    handler: &'a Handler,
    /// Current token (the one being inspected)
    cur_token: Token,
    /// Previous token (the one most recently consumed)
    prev_token: Token,
    /// Syntax configuration (TypeScript/JSX feature switches)
    syntax: Syntax,
    /// Current (innermost) scope; parents are chained inside it
    scope: Scope,
    /// Strict mode flag
    pub(crate) strict_mode: bool,
    /// In module flag
    pub(crate) in_module: bool,
    /// In function flag
    pub(crate) in_function: bool,
    /// In async function flag
    pub(crate) in_async: bool,
    /// In generator function flag
    pub(crate) in_generator: bool,
    /// In constructor flag
    pub(crate) in_constructor: bool,
    /// In method flag
    pub(crate) in_method: bool,
    /// In loop flag
    pub(crate) in_loop: bool,
    /// In switch flag
    pub(crate) in_switch: bool,
}
impl<'a> Parser<'a> {
    /// Create a new parser
    ///
    /// Starts in a global, non-strict, non-module context with every
    /// syntactic-context flag cleared, then primes `cur_token` with the
    /// first token from the lexer (`prev_token` stays at its default).
    pub fn new(lexer: Lexer<'a>, handler: &'a Handler, syntax: Syntax) -> Self {
        let mut parser = Self {
            lexer,
            handler,
            cur_token: Token::default(),
            prev_token: Token::default(),
            syntax,
            scope: Scope::new(ScopeKind::Global, None),
            strict_mode: false,
            in_module: false,
            in_function: false,
            in_async: false,
            in_generator: false,
            in_constructor: false,
            in_method: false,
            in_loop: false,
            in_switch: false,
        };
        // Initialize the current token
        parser.next_token();
        parser
    }
/// Advance to the next token
pub fn next_token(&mut self) {
self.prev_token = std::mem::take(&mut self.cur_token);
self.cur_token = self.lexer.next_token().unwrap_or_else(|e| {
// Report the error but continue with a dummy token
self.report_error(e);
Token::default()
});
}
/// Look ahead to the next token without consuming it
pub fn peek_token(&self) -> Token {
self.lexer.peek_token().unwrap_or_default()
}
/// Look ahead n tokens without consuming them
pub fn peek_token_n(&self, n: usize) -> Option<Token> {
self.lexer.peek_token_n(n).ok()
}
    /// Create an error of `kind` located at the current token's span.
    pub fn error(&self, kind: ErrorKind) -> Error {
        Error::new(kind, self.cur_token.span)
    }
    /// Report an error through the diagnostic handler.
    ///
    /// Emits immediately and returns; parsing is expected to continue
    /// (error recovery), so this never aborts.
    pub fn report_error(&self, error: Error) {
        self.handler.struct_err(&error.to_string()).emit();
    }
/// Check if the current token has the given type
pub fn is_token_type(&self, token_type: TokenType) -> bool {
self.cur_token.token_type == token_type
}
/// Check if the current token is an identifier
pub fn is_token_identifier(&self) -> bool {
self.cur_token.token_type == TokenType::Ident
}
/// Check if the current token is an identifier with the given name
pub fn is_token_identifier_eq(&self, name: &str) -> bool {
if let TokenValue::Ident(ref ident) = self.cur_token.value {
ident == name
} else {
false
}
}
/// Expect the current token to have the given type and advance
pub fn expect(&mut self, token_type: TokenType) -> Result<()> {
if self.is_token_type(token_type) {
self.next_token();
Ok(())
} else {
Err(self.error(ErrorKind::UnexpectedToken {
expected: Some(format!("{}", token_type)),
got: format!("{}", self.cur_token.token_type),
}))
}
}
    /// Enter a new scope of `kind`.
    ///
    /// The current scope is moved out via `mem::replace` and becomes the
    /// parent of the fresh scope, building the linked chain that
    /// `exit_scope` later unwinds.
    pub fn enter_scope(&mut self, kind: ScopeKind) {
        let parent = Some(Box::new(std::mem::replace(
            &mut self.scope,
            Scope::new(kind, None),
        )))
;
        self.scope.parent = parent;
    }
    /// Exit the current scope, restoring its parent.
    pub fn exit_scope(&mut self) {
        if let Some(parent) = std::mem::take(&mut self.scope.parent) {
            self.scope = *parent;
        } else {
            // This should never happen if scopes are balanced; fall back to
            // a fresh global scope rather than panicking.
            self.scope = Scope::new(ScopeKind::Global, None);
        }
    }
    /// Add a label to the current (innermost) scope.
    pub fn add_label(&mut self, label: String) {
        self.scope.add_label(label);
    }
    /// Check if a label exists in the current scope chain (this scope or
    /// any ancestor).
    pub fn has_label(&self, label: &str) -> bool {
        self.scope.has_label(label)
    }
    /// Parse an identifier name from the current token.
    ///
    /// Errors if the current token is not an identifier; otherwise consumes
    /// the token and returns the `ast::Ident`.
    ///
    /// NOTE(review): the `Ident` built here has no `ctxt` field, while
    /// expr/object.rs constructs `ast::Ident` with `ctxt: Default::default()`
    /// — confirm which shape the AST crate in use expects.
    pub fn parse_identifier_name(&mut self) -> Result<ast::Ident> {
        if !self.is_token_identifier() {
            return Err(self.error(ErrorKind::UnexpectedToken {
                expected: Some("identifier"),
                got: format!("{}", self.cur_token.token_type),
            }));
        }
        let span = self.cur_token.span;
        let sym = match &self.cur_token.value {
            TokenValue::Ident(name) => name.clone().into(),
            _ => unreachable!("Token is not an identifier"),
        };
        self.next_token(); // Consume the identifier
        Ok(ast::Ident {
            span,
            sym,
            optional: false,
        })
    }
    /// Parse an identifier used in expression position.
    pub fn parse_identifier_reference(&mut self) -> Result<ast::Expr> {
        let ident = self.parse_identifier_name()?;
        Ok(ast::Expr::Ident(ident))
    }
    /// Parse a literal (string, number, boolean, null, etc.)
    ///
    /// String and number tokens are delegated to `parse_string_literal` /
    /// `parse_number_literal` (which re-check the token type); the other
    /// literal kinds are handled inline.
    ///
    /// NOTE(review): another inherent `parse_literal` is defined in
    /// expr/primary.rs (matching struct-style `TokenValue` variants, unlike
    /// the tuple-style variants matched here); Rust forbids duplicate
    /// inherent methods on one type, so only one can be kept — confirm
    /// which is canonical.
    pub fn parse_literal(&mut self) -> Result<ast::Expr> {
        let span = self.cur_token.span;
        match self.cur_token.token_type {
            TokenType::Str => {
                let str_lit = self.parse_string_literal()?;
                Ok(ast::Expr::Lit(ast::Lit::Str(str_lit)))
            }
            TokenType::Num => {
                let num_lit = self.parse_number_literal()?;
                Ok(ast::Expr::Lit(ast::Lit::Num(num_lit)))
            }
            TokenType::True => {
                self.next_token(); // Consume 'true'
                Ok(ast::Expr::Lit(ast::Lit::Bool(ast::Bool {
                    span,
                    value: true,
                })))
            }
            TokenType::False => {
                self.next_token(); // Consume 'false'
                Ok(ast::Expr::Lit(ast::Lit::Bool(ast::Bool {
                    span,
                    value: false,
                })))
            }
            TokenType::Null => {
                self.next_token(); // Consume 'null'
                Ok(ast::Expr::Lit(ast::Lit::Null(ast::Null { span })))
            }
            TokenType::BigInt => {
                match &self.cur_token.value {
                    TokenValue::BigInt(value) => {
                        let value = value.clone();
                        self.next_token(); // Consume BigInt
                        Ok(ast::Expr::Lit(ast::Lit::BigInt(ast::BigInt {
                            span,
                            value,
                        })))
                    }
                    // Token type said BigInt but the payload disagrees:
                    // surface it as a normal parse error.
                    _ => Err(self.error(ErrorKind::UnexpectedToken {
                        expected: Some("BigInt literal"),
                        got: format!("{}", self.cur_token.token_type),
                    })),
                }
            }
            TokenType::RegExp => {
                match &self.cur_token.value {
                    TokenValue::RegExp { pattern, flags } => {
                        let pattern = pattern.clone();
                        let flags = flags.clone();
                        self.next_token(); // Consume RegExp
                        Ok(ast::Expr::Lit(ast::Lit::Regex(ast::Regex {
                            span,
                            exp: pattern,
                            flags,
                        })))
                    }
                    _ => Err(self.error(ErrorKind::UnexpectedToken {
                        expected: Some("RegExp literal"),
                        got: format!("{}", self.cur_token.token_type),
                    })),
                }
            }
            // Not a literal token at all.
            _ => Err(self.error(ErrorKind::UnexpectedToken {
                expected: Some("literal"),
                got: format!("{}", self.cur_token.token_type),
            })),
        }
    }
/// Parse a string literal
pub fn parse_string_literal(&mut self) -> Result<ast::Str> {
if !self.is_token_type(TokenType::Str) {
return Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("string literal"),
got: format!("{}", self.cur_token.token_type),
}));
}
let span = self.cur_token.span;
let value = match &self.cur_token.value {
TokenValue::Str(s) => s.clone().into(),
_ => unreachable!("Token is not a string literal"),
};
self.next_token(); // Consume the string
Ok(ast::Str {
span,
value,
raw: None,
})
}
/// Parse a number literal
pub fn parse_number_literal(&mut self) -> Result<ast::Number> {
if !self.is_token_type(TokenType::Num) {
return Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("number literal"),
got: format!("{}", self.cur_token.token_type),
}));
}
let span = self.cur_token.span;
let value = match &self.cur_token.value {
TokenValue::Num(n) => *n,
_ => unreachable!("Token is not a number literal"),
};
self.next_token(); // Consume the number
Ok(ast::Number {
span,
value,
raw: None,
})
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/stmt/block.rs | Rust | //! Block statement parser implementation
//!
//! This module provides the implementation for parsing block statements,
//! which are enclosed by curly braces and can contain multiple statements.
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{error::Result, token::TokenType};
impl<'a> Parser<'a> {
    /// Parse a block statement: { stmt1; stmt2; ... }
    ///
    /// The current token must be `{`. Statements are parsed until the
    /// matching `}` — or EOF, so an unterminated block cannot loop forever.
    /// A statement that fails to parse is reported and skipped via
    /// `error_recovery`, letting the rest of the block still be parsed.
    pub(crate) fn parse_block_stmt(&mut self) -> Result<ast::BlockStmt> {
        let start_span = self.cur_token.span;
        self.expect(TokenType::LBrace)?; // Expect '{'
        let mut stmts = Vec::new();
        while !self.is_token_type(TokenType::RBrace) && !self.is_token_type(TokenType::EOF) {
            // Parse a statement
            match self.parse_statement() {
                Ok(stmt) => stmts.push(stmt),
                Err(err) => {
                    // Report the error but continue parsing
                    self.report_error(err);
                    self.error_recovery();
                }
            }
        }
        let end_span = self.cur_token.span;
        self.expect(TokenType::RBrace)?; // Expect '}'
        // Create the block statement; `..Default::default()` fills any
        // remaining fields with their defaults.
        Ok(ast::BlockStmt {
            span: start_span.merge_with(end_span),
            stmts,
            ..Default::default()
        })
    }
/// Parse a block statement inside its own lexical (block) scope.
///
/// The scope is exited regardless of whether parsing succeeded, so the
/// scope stack stays balanced on error paths.
pub(crate) fn parse_block_stmt_with_scope(&mut self) -> Result<ast::BlockStmt> {
    self.enter_scope(super::super::ScopeKind::Block);
    let block = self.parse_block_stmt();
    self.exit_scope();
    block
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
// crates/swc_ecma_fast_parser/src/parser/stmt/control.rs (Rust)
//! Control flow statement parser implementation
//!
//! This module provides the implementation for parsing control flow statements,
//! including if, while, do-while, for, switch, try-catch, and jump statements.
use swc_common::Span;
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::TokenType,
};
impl<'a> Parser<'a> {
/// Parse an if statement: `if (test) consequent [else alternate]`.
pub(crate) fn parse_if_statement(&mut self) -> Result<ast::IfStmt> {
    let if_span = self.cur_token.span;
    self.expect(TokenType::If)?;

    // Parenthesized test expression.
    self.expect(TokenType::LParen)?;
    let test = self.parse_expression()?;
    self.expect(TokenType::RParen)?;

    let cons = self.parse_statement()?;

    // Optional else branch.
    let mut alt = None;
    if self.is_token_type(TokenType::Else) {
        self.next_token();
        alt = Some(Box::new(self.parse_statement()?));
    }

    // The statement ends at the alternate when present, else at the consequent.
    let end = match &alt {
        Some(a) => a.span(),
        None => cons.span(),
    };

    Ok(ast::IfStmt {
        span: if_span.merge_with(end),
        test: Box::new(test),
        cons: Box::new(cons),
        alt,
    })
}
/// Parse a switch statement: switch (discriminant) { case1: ... case2: ...
/// }
pub(crate) fn parse_switch_statement(&mut self) -> Result<ast::SwitchStmt> {
let start_span = self.cur_token.span;
self.expect(TokenType::Switch)?; // Expect 'switch'
self.expect(TokenType::LParen)?; // Expect '('
let discriminant = self.parse_expression()?;
self.expect(TokenType::RParen)?; // Expect ')'
self.expect(TokenType::LBrace)?; // Expect '{'
// Parse the cases
let mut cases = Vec::new();
let mut default_case = None;
while !self.is_token_type(TokenType::RBrace) && !self.is_token_type(TokenType::EOF) {
// Parse a case
if self.is_token_type(TokenType::Case) {
let case_span = self.cur_token.span;
self.next_token(); // Skip 'case'
// Parse the test expression
let test = self.parse_expression()?;
self.expect(TokenType::Colon)?; // Expect ':'
// Parse the consequent statements
let mut consequent = Vec::new();
while !self.is_token_type(TokenType::Case)
&& !self.is_token_type(TokenType::Default)
&& !self.is_token_type(TokenType::RBrace)
&& !self.is_token_type(TokenType::EOF)
{
// Parse a statement
let stmt = self.parse_statement()?;
consequent.push(stmt);
}
// Create the case
cases.push(ast::SwitchCase {
span: case_span.merge_with(if let Some(last) = consequent.last() {
last.span()
} else {
self.prev_token.span
}),
test: Some(Box::new(test)),
cons: consequent,
});
}
// Parse a default case
else if self.is_token_type(TokenType::Default) {
let default_span = self.cur_token.span;
self.next_token(); // Skip 'default'
self.expect(TokenType::Colon)?; // Expect ':'
// Parse the consequent statements
let mut consequent = Vec::new();
while !self.is_token_type(TokenType::Case)
&& !self.is_token_type(TokenType::Default)
&& !self.is_token_type(TokenType::RBrace)
&& !self.is_token_type(TokenType::EOF)
{
// Parse a statement
let stmt = self.parse_statement()?;
consequent.push(stmt);
}
// Check if there's already a default case
if default_case.is_some() {
return Err(self.error(ErrorKind::General {
message: "Multiple default clauses in switch statement".into(),
}));
}
// Create the default case
default_case = Some(ast::SwitchCase {
span: default_span.merge_with(if let Some(last) = consequent.last() {
last.span()
} else {
self.prev_token.span
}),
test: None,
cons: consequent,
});
}
// Invalid case
else {
return Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("'case' or 'default'"),
got: format!("{}", self.cur_token.token_type),
}));
}
}
// If we have a default case, add it to the cases
if let Some(default) = default_case {
cases.push(default);
}
let end_span = self.cur_token.span;
self.expect(TokenType::RBrace)?; // Expect '}'
// Create the switch statement
Ok(ast::SwitchStmt {
span: start_span.merge_with(end_span),
discriminant: Box::new(discriminant),
cases,
})
}
/// Parse a for statement: `for ([init]; [test]; [update]) body`, including
/// the for-in / for-of / for-await-of forms, which are delegated to
/// `parse_for_in_of_statement` / `parse_for_in_of_statement_var` once the
/// `in`/`of` token is seen.
///
/// Fixes over the previous version:
/// - the block scope opened for the loop head is now also exited on the
///   for-in/for-of paths (the early returns used to leak it);
/// - `self.in_iteration` is set while the body is parsed so that `break`
///   and `continue` inside the loop validate correctly;
/// - the triplicated var/let/const initializer construction is factored
///   into `parse_for_head_var_decl`.
pub(crate) fn parse_for_statement(&mut self) -> Result<ast::Stmt> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::For)?; // Expect 'for'

    // 'for await (... of ...)' is only valid in async contexts.
    let await_token = if self.is_token_type(TokenType::Await) {
        if !self.in_async {
            return Err(self.error(ErrorKind::General {
                message: "'for await' is only allowed within async functions and modules"
                    .into(),
            }));
        }
        self.next_token(); // Skip 'await'
        true
    } else {
        false
    };

    self.expect(TokenType::LParen)?; // Expect '('

    // The loop head introduces a block scope (for let/const bindings).
    self.enter_scope(super::super::ScopeKind::Block);

    // Parse the initializer, or detect a for-in/for-of head.
    let init = if self.is_token_type(TokenType::Semi) {
        // No initializer
        None
    } else if self.is_token_type(TokenType::Var) {
        self.next_token(); // Skip 'var'
        Some(ast::VarDeclOrExpr::VarDecl(
            self.parse_for_head_var_decl(ast::VarDeclKind::Var)?,
        ))
    } else if self.is_token_type(TokenType::Let) {
        self.next_token(); // Skip 'let'
        Some(ast::VarDeclOrExpr::VarDecl(
            self.parse_for_head_var_decl(ast::VarDeclKind::Let)?,
        ))
    } else if self.is_token_type(TokenType::Const) {
        self.next_token(); // Skip 'const'
        Some(ast::VarDeclOrExpr::VarDecl(
            self.parse_for_head_var_decl(ast::VarDeclKind::Const)?,
        ))
    } else {
        // Expression initializer — may turn out to be a for-in/for-of head.
        let expr = self.parse_expression()?;
        if self.is_token_type(TokenType::In) || self.is_token_identifier_eq("of") {
            let stmt = self.parse_for_in_of_statement(start_span, expr, await_token);
            self.exit_scope();
            return stmt;
        }
        Some(ast::VarDeclOrExpr::Expr(Box::new(expr)))
    };

    // A single-declarator var/let/const head may also begin a for-in/for-of.
    if let Some(ast::VarDeclOrExpr::VarDecl(var_decl)) = &init {
        if var_decl.decls.len() == 1
            && (self.is_token_type(TokenType::In) || self.is_token_identifier_eq("of"))
        {
            let stmt =
                self.parse_for_in_of_statement_var(start_span, var_decl.clone(), await_token);
            self.exit_scope();
            return stmt;
        }
    }

    // Classic C-style for loop.
    self.expect(TokenType::Semi)?; // Expect ';'

    // Optional test expression.
    let test = if !self.is_token_type(TokenType::Semi) {
        Some(Box::new(self.parse_expression()?))
    } else {
        None
    };
    self.expect(TokenType::Semi)?; // Expect ';'

    // Optional update expression.
    let update = if !self.is_token_type(TokenType::RParen) {
        Some(Box::new(self.parse_expression()?))
    } else {
        None
    };
    self.expect(TokenType::RParen)?; // Expect ')'

    // The body is an iteration context (restore the flag even on error).
    let prev_in_iteration = self.in_iteration;
    self.in_iteration = true;
    let body = self.parse_statement();
    self.in_iteration = prev_in_iteration;
    let body = body?;

    self.exit_scope();

    Ok(ast::Stmt::For(ast::ForStmt {
        span: start_span.merge_with(body.span()),
        init,
        test,
        update,
        body: Box::new(body),
    }))
}

/// Build the `VarDecl` loop-head initializer shared by the var/let/const
/// branches of `parse_for_statement`.
fn parse_for_head_var_decl(&mut self, kind: ast::VarDeclKind) -> Result<ast::VarDecl> {
    let decls = self.parse_var_declarations()?;
    // Merge the declarator spans; an empty list falls back to a dummy span.
    let span = decls
        .iter()
        .fold(None, |acc, decl| match acc {
            Some(span) => Some(span.merge_with(decl.span())),
            None => Some(decl.span()),
        })
        .unwrap_or_else(|| Span::dummy());
    Ok(ast::VarDecl {
        span,
        kind,
        decls,
        declare: false,
    })
}
/// Parse a while statement: `while (test) body`.
///
/// Fix over the previous version: `self.in_iteration` is now set while the
/// body is parsed (and restored afterwards, even on error), so `break` and
/// `continue` inside the loop body pass the iteration-context checks in
/// `parse_break_statement` / `parse_continue_statement`.
pub(crate) fn parse_while_statement(&mut self) -> Result<ast::WhileStmt> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::While)?; // Expect 'while'
    self.expect(TokenType::LParen)?; // Expect '('
    let test = self.parse_expression()?;
    self.expect(TokenType::RParen)?; // Expect ')'

    // The body is an iteration context.
    let prev_in_iteration = self.in_iteration;
    self.in_iteration = true;
    let body = self.parse_statement();
    self.in_iteration = prev_in_iteration;
    let body = body?;

    Ok(ast::WhileStmt {
        span: start_span.merge_with(body.span()),
        test: Box::new(test),
        body: Box::new(body),
    })
}
/// Parse a do-while statement: `do body while (test);`.
///
/// Fix over the previous version: `self.in_iteration` is now set while the
/// body is parsed (and restored afterwards, even on error), so `break` and
/// `continue` inside the loop body validate correctly.
pub(crate) fn parse_do_while_statement(&mut self) -> Result<ast::DoWhileStmt> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::Do)?; // Expect 'do'

    // The body is an iteration context.
    let prev_in_iteration = self.in_iteration;
    self.in_iteration = true;
    let body = self.parse_statement();
    self.in_iteration = prev_in_iteration;
    let body = body?;

    self.expect(TokenType::While)?; // Expect 'while'
    self.expect(TokenType::LParen)?; // Expect '('
    let test = self.parse_expression()?;
    self.expect(TokenType::RParen)?; // Expect ')'
    self.consume_semicolon(); // Consume the trailing semicolon

    Ok(ast::DoWhileStmt {
        span: start_span.merge_with(self.prev_token.span),
        test: Box::new(test),
        body: Box::new(body),
    })
}
/// Parse a try statement: `try block [catch [(param)] block] [finally block]`.
///
/// At least one of the catch handler or the finally block must be present.
/// The catch parameter is optional (ES2019 optional catch binding); when it
/// is present, it is declared in a fresh block scope that also covers the
/// catch body.
pub(crate) fn parse_try_statement(&mut self) -> Result<ast::TryStmt> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::Try)?; // Expect 'try'
    // Parse the try block
    let block = self.parse_block_stmt()?;
    // Parse the handler (catch block) if present
    let handler = if self.is_token_type(TokenType::Catch) {
        let catch_start = self.cur_token.span;
        self.next_token(); // Skip 'catch'
        // Parse the catch parameter if present. The scope is entered only
        // on this branch, and `param` is Some exactly when the branch ran
        // to completion — that invariant is what the conditional
        // `exit_scope` below relies on.
        // NOTE(review): if `parse_binding_pattern` or the ')' expectation
        // fails, `?` propagates before the matching `exit_scope` runs and
        // the scope is leaked — confirm error recovery tolerates this.
        let param = if self.is_token_type(TokenType::LParen) {
            self.next_token(); // Skip '('
            // Create a new scope for the catch block
            self.enter_scope(super::super::ScopeKind::Block);
            // Parse the catch parameter
            let param = self.parse_binding_pattern()?;
            self.expect(TokenType::RParen)?; // Expect ')'
            Some(param)
        } else {
            None
        };
        // Parse the catch block (inside the catch scope when one exists)
        let body = self.parse_block_stmt()?;
        // Exit the catch scope if we created one (see invariant above)
        if param.is_some() {
            self.exit_scope();
        }
        // Create the catch clause
        Some(ast::CatchClause {
            span: catch_start.merge_with(body.span),
            param,
            body,
        })
    } else {
        None
    };
    // Parse the finalizer (finally block) if present
    let finalizer = if self.is_token_type(TokenType::Finally) {
        self.next_token(); // Skip 'finally'
        // Parse the finally block
        Some(self.parse_block_stmt()?)
    } else {
        None
    };
    // Either a catch block or a finally block must be present
    if handler.is_none() && finalizer.is_none() {
        return Err(self.error(ErrorKind::General {
            message: "Missing catch or finally after try".into(),
        }));
    }
    // The statement ends at the finally block when present, otherwise at
    // the catch clause (one of the two must exist — checked above).
    let end_span = match &finalizer {
        Some(finally) => finally.span,
        None => match &handler {
            Some(catch) => catch.span,
            None => unreachable!("Either catch or finally must be present"),
        },
    };
    Ok(ast::TryStmt {
        span: start_span.merge_with(end_span),
        block,
        handler,
        finalizer,
    })
}
/// Parse a with statement: `with (object) body`.
///
/// Rejected outright in strict mode, where `with` is a syntax error.
pub(crate) fn parse_with_statement(&mut self) -> Result<ast::WithStmt> {
    if self.strict_mode {
        return Err(self.error(ErrorKind::General {
            message: "'with' statements are not allowed in strict mode".into(),
        }));
    }

    let with_span = self.cur_token.span;
    self.expect(TokenType::With)?;

    // Parenthesized object expression.
    self.expect(TokenType::LParen)?;
    let obj = self.parse_expression()?;
    self.expect(TokenType::RParen)?;

    let body = self.parse_statement()?;
    let full_span = with_span.merge_with(body.span());

    Ok(ast::WithStmt {
        span: full_span,
        object: Box::new(obj),
        body: Box::new(body),
    })
}
/// Parse a break statement: `break [label];`.
///
/// Fix over the previous version: a *labeled* break is legal anywhere the
/// label is visible (e.g. `outer: { break outer; }` per ECMA-262), so the
/// "inside a loop or switch" check is now applied only to an unlabeled
/// break, after the optional label has been parsed.
pub(crate) fn parse_break_statement(&mut self) -> Result<ast::BreakStmt> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::Break)?; // Expect 'break'

    // Parse the label if present (no line break between 'break' and label).
    let label = if !self.can_insert_semicolon() && self.is_token_identifier() {
        let label = self.parse_identifier_name()?;

        // The label must reference an enclosing labeled statement.
        if !self.has_label(&label.sym) {
            return Err(self.error(ErrorKind::General {
                message: format!("Undefined label '{}'", label.sym),
            }));
        }

        Some(label)
    } else {
        None
    };

    // An unlabeled break is only allowed inside a loop or switch.
    if label.is_none() && !self.in_iteration && !self.in_switch {
        return Err(self.error(ErrorKind::General {
            message: "Illegal break statement outside of a loop or switch".into(),
        }));
    }

    self.consume_semicolon(); // Consume semicolon

    Ok(ast::BreakStmt {
        span: start_span.merge_with(self.prev_token.span),
        label,
    })
}
/// Parse a continue statement: `continue [label];`.
///
/// `continue` is only legal inside an iteration statement (labeled or not).
pub(crate) fn parse_continue_statement(&mut self) -> Result<ast::ContinueStmt> {
    if !self.in_iteration {
        return Err(self.error(ErrorKind::General {
            message: "Illegal continue statement outside of a loop".into(),
        }));
    }

    let continue_span = self.cur_token.span;
    self.expect(TokenType::Continue)?;

    // An optional label may follow on the same line (ASI ends the
    // statement at a line break).
    let mut label = None;
    if !self.can_insert_semicolon() && self.is_token_identifier() {
        let ident = self.parse_identifier_name()?;
        // The label must reference an enclosing labeled statement.
        if !self.has_label(&ident.sym) {
            return Err(self.error(ErrorKind::General {
                message: format!("Undefined label '{}'", ident.sym),
            }));
        }
        label = Some(ident);
    }

    self.consume_semicolon();

    Ok(ast::ContinueStmt {
        span: continue_span.merge_with(self.prev_token.span),
        label,
    })
}
/// Parse a return statement: `return [expr];`.
///
/// Only legal inside a function body.
pub(crate) fn parse_return_statement(&mut self) -> Result<ast::ReturnStmt> {
    if !self.in_function {
        return Err(self.error(ErrorKind::General {
            message: "Illegal return statement outside of a function".into(),
        }));
    }

    let return_span = self.cur_token.span;
    self.expect(TokenType::Return)?;

    // A return value is only parsed when nothing ends the statement here:
    // a line break (ASI), a closing '}', or an explicit ';'.
    let has_arg = !self.can_insert_semicolon()
        && !self.is_token_type(TokenType::RBrace)
        && !self.is_token_type(TokenType::Semi);
    let arg = if has_arg {
        Some(Box::new(self.parse_expression()?))
    } else {
        None
    };

    self.consume_semicolon();

    Ok(ast::ReturnStmt {
        span: return_span.merge_with(self.prev_token.span),
        arg,
    })
}
/// Parse a throw statement: `throw expr;`.
///
/// A line break directly after `throw` is a syntax error: ASI must not
/// separate the keyword from its argument.
pub(crate) fn parse_throw_statement(&mut self) -> Result<ast::ThrowStmt> {
    let throw_span = self.cur_token.span;
    self.expect(TokenType::Throw)?;

    if self.cur_token.had_line_break {
        return Err(self.error(ErrorKind::General {
            message: "Illegal newline after throw".into(),
        }));
    }

    let argument = self.parse_expression()?;
    self.consume_semicolon();

    Ok(ast::ThrowStmt {
        span: throw_span.merge_with(self.prev_token.span),
        arg: Box::new(argument),
    })
}
}
impl<'a> Parser<'a> {
/// Parse the tail of a for-in or for-of loop whose left-hand side was
/// parsed as an expression (the caller stops at the `in`/`of` token).
///
/// Fixes over the previous version:
/// - `for await (... in ...)` is now rejected ('await' only pairs with
///   for-of; the flag was previously dropped silently for for-in);
/// - `self.in_iteration` is set while the body is parsed so that `break`
///   and `continue` inside the loop validate correctly.
pub(crate) fn parse_for_in_of_statement(
    &mut self,
    start_span: Span,
    left: ast::Expr,
    is_await: bool,
) -> Result<ast::Stmt> {
    let is_for_in = self.is_token_type(TokenType::In);

    // 'for await' is only valid with for-of loops.
    if is_for_in && is_await {
        return Err(self.error(ErrorKind::General {
            message: "'for await' is only valid with for-of loops".into(),
        }));
    }

    // The left expression must be re-interpretable as an assignment target.
    let left = match left.as_pat() {
        Ok(pat) => pat,
        Err(_) => {
            return Err(self.error(ErrorKind::General {
                message: "Invalid left-hand side in for-in/for-of loop".into(),
            }));
        }
    };

    self.next_token(); // Skip 'in' or 'of'

    // Parse the right expression and close the loop head.
    let right = self.parse_expression()?;
    self.expect(TokenType::RParen)?; // Expect ')'

    // The body is an iteration context (restore the flag even on error).
    let prev_in_iteration = self.in_iteration;
    self.in_iteration = true;
    let body = self.parse_statement();
    self.in_iteration = prev_in_iteration;
    let body = body?;

    if is_for_in {
        Ok(ast::Stmt::ForIn(ast::ForInStmt {
            span: start_span.merge_with(body.span()),
            left: ast::VarDeclOrPat::Pat(left),
            right: Box::new(right),
            body: Box::new(body),
        }))
    } else {
        Ok(ast::Stmt::ForOf(ast::ForOfStmt {
            span: start_span.merge_with(body.span()),
            is_await,
            left: ast::VarDeclOrPat::Pat(left),
            right: Box::new(right),
            body: Box::new(body),
        }))
    }
}
/// Parse a for-in or for-of statement with a variable declaration
pub(crate) fn parse_for_in_of_statement_var(
&mut self,
start_span: Span,
left: ast::VarDecl,
is_await: bool,
) -> Result<ast::Stmt> {
// Check the type of loop
let is_for_in = self.is_token_type(TokenType::In);
self.next_token(); // Skip 'in' or 'of'
// Parse the right expression
let right = self.parse_expression()?;
self.expect(TokenType::RParen)?; // Expect ')'
// Parse the body
let body = self.parse_statement()?;
// Create the appropriate loop statement
if is_for_in {
// For-in loop
Ok(ast::Stmt::ForIn(ast::ForInStmt {
span: start_span.merge_with(body.span()),
left: ast::VarDeclOrPat::VarDecl(left),
right: Box::new(right),
body: Box::new(body),
}))
} else {
// For-of loop
Ok(ast::Stmt::ForOf(ast::ForOfStmt {
span: start_span.merge_with(body.span()),
is_await,
left: ast::VarDeclOrPat::VarDecl(left),
right: Box::new(right),
body: Box::new(body),
}))
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
// crates/swc_ecma_fast_parser/src/parser/stmt/decl.rs (Rust)
//! Declaration parser implementation
//!
//! This module provides the implementation for parsing declarations,
//! including variable declarations, function declarations, and class
//! declarations.
use swc_common::Span;
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
impl<'a> Parser<'a> {
/// Parse a `var` declaration statement: `var id = init, ...;`.
pub(crate) fn parse_var_declaration(&mut self) -> Result<ast::VarDecl> {
    let var_span = self.cur_token.span;
    self.expect(TokenType::Var)?;

    // One or more comma-separated declarators, then an (optional) semicolon.
    let declarators = self.parse_var_declarations()?;
    self.consume_semicolon();

    Ok(ast::VarDecl {
        span: var_span.merge_with(self.prev_token.span),
        kind: ast::VarDeclKind::Var,
        decls: declarators,
        declare: false,
    })
}
/// Parse a `let` declaration statement: `let id = init, ...;`.
pub(crate) fn parse_let_declaration(&mut self) -> Result<ast::VarDecl> {
    let let_span = self.cur_token.span;
    self.expect(TokenType::Let)?;

    // One or more comma-separated declarators, then an (optional) semicolon.
    let declarators = self.parse_var_declarations()?;
    self.consume_semicolon();

    Ok(ast::VarDecl {
        span: let_span.merge_with(self.prev_token.span),
        kind: ast::VarDeclKind::Let,
        decls: declarators,
        declare: false,
        ..Default::default()
    })
}
/// Parse a `const` declaration statement: `const id = init, ...;`.
///
/// The mandatory-initializer rule is enforced by `parse_var_declarator`,
/// which keys off the `const` keyword being the previous token.
pub(crate) fn parse_const_declaration(&mut self) -> Result<ast::VarDecl> {
    let const_span = self.cur_token.span;
    self.expect(TokenType::Const)?;

    // One or more comma-separated declarators, then an (optional) semicolon.
    let declarators = self.parse_var_declarations()?;
    self.consume_semicolon();

    Ok(ast::VarDecl {
        span: const_span.merge_with(self.prev_token.span),
        kind: ast::VarDeclKind::Const,
        decls: declarators,
        declare: false,
    })
}
/// Parse a comma-separated list of variable declarators:
/// `id = init, id2 = init2, ...`.
///
/// NOTE(review): whether declarators require an initializer (const rule)
/// is derived from `self.prev_token` being the `const` keyword — this
/// assumes every caller invokes this method immediately after consuming
/// the `var`/`let`/`const` keyword, with no intervening token reads.
/// Confirm all call sites uphold that protocol.
pub(crate) fn parse_var_declarations(&mut self) -> Result<Vec<ast::VarDeclarator>> {
    let mut decls = Vec::new();
    // Parse the first declarator
    let is_const = self.prev_token.token_type == TokenType::Const;
    let decl = self.parse_var_declarator(is_const)?;
    decls.push(decl);
    // Parse additional declarators if present
    while self.is_token_type(TokenType::Comma) {
        self.next_token(); // Skip ','
        let decl = self.parse_var_declarator(is_const)?;
        decls.push(decl);
    }
    Ok(decls)
}
/// Parse a single variable declarator: `pattern [= init]`.
///
/// `is_const` enforces the mandatory initializer of const declarations.
pub(crate) fn parse_var_declarator(&mut self, is_const: bool) -> Result<ast::VarDeclarator> {
    let pat = self.parse_binding_pattern()?;
    let pat_span = pat.span();

    // Optional initializer: '=' followed by an assignment expression.
    let init = if self.is_token_type(TokenType::Assign) {
        self.next_token();
        Some(Box::new(self.parse_assignment_expression()?))
    } else if is_const {
        // `const` without an initializer is a syntax error.
        return Err(self.error(ErrorKind::General {
            message: "Missing initializer in const declaration".into(),
        }));
    } else {
        None
    };

    // The declarator span extends to the end of the initializer if present.
    let full_span = match &init {
        Some(expr) => pat_span.merge_with(expr.span()),
        None => pat_span,
    };

    Ok(ast::VarDeclarator {
        span: full_span,
        name: pat,
        init,
        definite: false,
    })
}
/// Parse a function declaration: `[async] function [*] id(params) { body }`.
///
/// Fix over the previous version: the function scope and the saved
/// `in_function` / `in_generator` / `in_async` flags are now restored even
/// when parsing the parameters or body fails — previously the `?` bailed
/// out first and error recovery continued with stale function context.
pub(crate) fn parse_function_declaration(
    &mut self,
    is_async: bool,
    is_generator: bool,
) -> Result<ast::FnDecl> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::Function)?; // Expect 'function'

    // A '*' after 'function' marks a generator.
    let mut is_generator = is_generator;
    if self.is_token_type(TokenType::Mul) {
        self.next_token(); // Skip '*'
        is_generator = true;
    }

    // Parse the function identifier.
    let id = self.parse_binding_identifier()?;

    // Parameters and body live in a fresh function scope, with the
    // function/generator/async context flags swapped in for the duration.
    self.enter_scope(super::super::ScopeKind::Function);
    let prev_in_function = self.in_function;
    let prev_in_generator = self.in_generator;
    let prev_in_async = self.in_async;
    self.in_function = true;
    self.in_generator = is_generator;
    self.in_async = is_async;

    let parsed = self.parse_function_params_and_body();

    // Restore the surrounding context before propagating any error.
    self.in_function = prev_in_function;
    self.in_generator = prev_in_generator;
    self.in_async = prev_in_async;
    self.exit_scope();

    let (params, body) = parsed?;

    Ok(ast::FnDecl {
        ident: id.id,
        declare: false,
        function: ast::Function {
            params,
            decorators: Vec::new(),
            span: start_span.merge_with(body.span),
            body: Some(body),
            is_generator,
            is_async,
            type_params: None,
            return_type: None,
        },
    })
}
/// Parse a class declaration: `class id [extends expr] { ... }`.
///
/// Fix over the previous version: the class scope is now exited even when
/// the class body fails to parse, so error recovery does not continue
/// inside a stale scope.
pub(crate) fn parse_class_declaration(&mut self) -> Result<ast::ClassDecl> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::Class)?; // Expect 'class'

    // Parse the class identifier.
    let id = self.parse_binding_identifier()?;

    // Optional heritage clause: `extends <LeftHandSideExpression>`.
    let super_class = if self.is_token_type(TokenType::Extends) {
        self.next_token(); // Skip 'extends'
        Some(Box::new(self.parse_left_hand_side_expression()?))
    } else {
        None
    };

    // The class body gets its own scope; exit it on every path.
    self.enter_scope(super::super::ScopeKind::Class);
    let body_result = self.parse_class_body();
    self.exit_scope();
    let class_body = body_result?;

    Ok(ast::ClassDecl {
        ident: id.id,
        declare: false,
        class: ast::Class {
            span: start_span.merge_with(class_body.span),
            decorators: Vec::new(),
            body: class_body.body,
            super_class,
            is_abstract: false,
            type_params: None,
            super_type_params: None,
            implements: Vec::new(),
        },
    })
}
/// Parse a binding pattern
pub(crate) fn parse_binding_pattern(&mut self) -> Result<ast::Pat> {
match self.cur_token.token_type {
// Identifier pattern
TokenType::Ident => {
let id = self.parse_binding_identifier()?;
Ok(ast::Pat::Ident(id))
}
// Array pattern
TokenType::LBracket => self.parse_array_pattern(),
// Object pattern
TokenType::LBrace => self.parse_object_pattern(),
// Invalid pattern
_ => Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("identifier, array pattern, or object pattern"),
got: format!("{}", self.cur_token.token_type),
})),
}
}
/// Parse an identifier in binding position and register it in the current
/// scope.
///
/// Fix over the previous version: `self.add_binding(id.sym)` moved the
/// symbol out of `id`, partially moving a value that is still used to
/// build the returned `BindingIdent` (E0382). The symbol is now cloned.
pub(crate) fn parse_binding_identifier(&mut self) -> Result<ast::BindingIdent> {
    let id = self.parse_identifier_name()?;

    // In strict mode, 'eval' and 'arguments' cannot be binding names.
    if self.strict_mode && (id.sym == "eval" || id.sym == "arguments") {
        return Err(self.error(ErrorKind::General {
            message: format!("Cannot use '{}' as a binding name in strict mode", id.sym),
        }));
    }

    // Register the binding in the current scope (clone keeps `id` intact).
    self.add_binding(id.sym.clone());

    Ok(ast::BindingIdent { id, type_ann: None })
}
}
impl<'a> Parser<'a> {
/// Parse a parenthesized parameter list followed by a `{ ... }` body.
///
/// Supports a trailing comma and a final rest parameter (`...pat`), which
/// must come last. Returns the parameters together with the parsed body.
///
/// NOTE(review): the `RestPat` span covers only the bound pattern, not the
/// `...` token itself — confirm downstream consumers do not expect the
/// ellipsis to be included in the span.
pub(crate) fn parse_function_params_and_body(
    &mut self,
) -> Result<(Vec<ast::Param>, ast::BlockStmt)> {
    self.expect(TokenType::LParen)?; // Expect '('
    // Parse the parameters
    let mut params = Vec::new();
    if !self.is_token_type(TokenType::RParen) {
        loop {
            // Check for rest parameter
            let is_rest = if self.is_token_type(TokenType::Ellipsis) {
                self.next_token(); // Skip '...'
                true
            } else {
                false
            };
            // Parse the parameter pattern
            let pat = self.parse_binding_pattern()?;
            // Create the parameter (rest parameters wrap the pattern in
            // a RestPat node)
            let param = if is_rest {
                ast::Param {
                    span: pat.span(),
                    decorators: Vec::new(),
                    pat: ast::Pat::Rest(ast::RestPat {
                        span: pat.span(),
                        arg: Box::new(pat),
                        type_ann: None,
                    }),
                }
            } else {
                ast::Param {
                    span: pat.span(),
                    decorators: Vec::new(),
                    pat,
                }
            };
            params.push(param);
            // Rest parameter must be the last parameter
            if is_rest {
                if !self.is_token_type(TokenType::RParen) {
                    return Err(self.error(ErrorKind::General {
                        message: "Rest parameter must be the last parameter".into(),
                    }));
                }
                break;
            }
            // Check for comma or end of parameters
            if self.is_token_type(TokenType::Comma) {
                self.next_token(); // Skip ','
                // Handle trailing comma
                if self.is_token_type(TokenType::RParen) {
                    break;
                }
            } else {
                break;
            }
        }
    }
    self.expect(TokenType::RParen)?; // Expect ')'
    // Parse the function body
    let body = self.parse_block_stmt()?;
    Ok((params, body))
}
/// Parse an array binding pattern: `[a, , b, ...rest]`.
///
/// Elisions (holes) become `None` elements; a rest element must be the
/// last element.
///
/// NOTE(review): destructuring defaults (`[a = 1]`) are not handled here —
/// an `=` after an element will surface as an error further up. Confirm
/// whether this parser is expected to support them.
pub(crate) fn parse_array_pattern(&mut self) -> Result<ast::Pat> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::LBracket)?; // Expect '['
    let mut elements = Vec::new();
    // Parse the elements
    while !self.is_token_type(TokenType::RBracket) {
        // Handle elision (hole): a bare ',' contributes a None element
        if self.is_token_type(TokenType::Comma) {
            elements.push(None);
            self.next_token(); // Skip ','
            continue;
        }
        // Check for rest element
        let is_rest = if self.is_token_type(TokenType::Ellipsis) {
            self.next_token(); // Skip '...'
            true
        } else {
            false
        };
        // Parse the element pattern
        let pat = self.parse_binding_pattern()?;
        // Create the element (rest elements wrap the pattern in RestPat)
        let element = if is_rest {
            Some(ast::Pat::Rest(ast::RestPat {
                span: pat.span(),
                arg: Box::new(pat),
                type_ann: None,
            }))
        } else {
            Some(pat)
        };
        elements.push(element);
        // Rest element must be the last element
        if is_rest {
            if !self.is_token_type(TokenType::RBracket) {
                return Err(self.error(ErrorKind::General {
                    message: "Rest element must be the last element".into(),
                }));
            }
            break;
        }
        // Check for comma or end of elements
        if self.is_token_type(TokenType::Comma) {
            self.next_token(); // Skip ','
            // Handle trailing comma
            if self.is_token_type(TokenType::RBracket) {
                break;
            }
        } else {
            break;
        }
    }
    let end_span = self.cur_token.span;
    self.expect(TokenType::RBracket)?; // Expect ']'
    // Create the array pattern
    Ok(ast::Pat::Array(ast::ArrayPat {
        span: start_span.merge_with(end_span),
        elems: elements,
        optional: false,
        type_ann: None,
    }))
}
/// Parse an object binding pattern: `{ a, b: c, ...rest }`.
///
/// Individual properties are delegated to `parse_object_pattern_property`;
/// a `...rest` element must be the last property.
pub(crate) fn parse_object_pattern(&mut self) -> Result<ast::Pat> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::LBrace)?; // Expect '{'
    let mut properties = Vec::new();
    // Parse the properties
    while !self.is_token_type(TokenType::RBrace) {
        // Check for rest element
        if self.is_token_type(TokenType::Ellipsis) {
            self.next_token(); // Skip '...'
            // Parse the rest element pattern
            let pat = self.parse_binding_pattern()?;
            // Create the rest element
            properties.push(ast::ObjectPatProp::Rest(ast::RestPat {
                span: pat.span(),
                arg: Box::new(pat),
                type_ann: None,
            }));
            // Rest element must be the last property
            if !self.is_token_type(TokenType::RBrace) {
                return Err(self.error(ErrorKind::General {
                    message: "Rest element must be the last property".into(),
                }));
            }
            break;
        }
        // Parse the property
        let prop = self.parse_object_pattern_property()?;
        properties.push(prop);
        // Check for comma or end of properties
        if self.is_token_type(TokenType::Comma) {
            self.next_token(); // Skip ','
            // Handle trailing comma
            if self.is_token_type(TokenType::RBrace) {
                break;
            }
        } else {
            break;
        }
    }
    let end_span = self.cur_token.span;
    self.expect(TokenType::RBrace)?; // Expect '}'
    // Create the object pattern
    Ok(ast::Pat::Object(ast::ObjectPat {
        span: start_span.merge_with(end_span),
        props: properties,
        optional: false,
        type_ann: None,
    }))
}
/// Parse an object pattern property: key, key: value, or [computed]: value
pub(crate) fn parse_object_pattern_property(&mut self) -> Result<ast::ObjectPatProp> {
match self.cur_token.token_type {
// Identifier property
TokenType::Ident => {
let id = self.parse_identifier_name()?;
// Check for key-value pair: key: value
if self.is_token_type(TokenType::Colon) {
self.next_token(); // Skip ':'
// Parse the value pattern
let value = self.parse_binding_pattern()?;
// Create the key-value property
Ok(ast::ObjectPatProp::KeyValue(ast::KeyValuePatProp {
key: ast::PropName::Ident(id),
value: Box::new(value),
}))
} else {
// Create the shorthand property
let binding_id = ast::BindingIdent { id, type_ann: None };
Ok(ast::ObjectPatProp::Assign(ast::AssignPatProp {
span: binding_id.id.span,
key: binding_id.id,
value: None,
}))
}
}
// String property
TokenType::Str => {
let str_lit = match &self.cur_token.value {
TokenValue::String(s) => ast::Str {
span: self.cur_token.span,
value: s.clone().into(),
raw: None,
},
_ => unreachable!("Expected string literal"),
};
self.next_token(); // Skip string
self.expect(TokenType::Colon)?; // Expect ':'
// Parse the value pattern
let value = self.parse_binding_pattern()?;
// Create the key-value property
Ok(ast::ObjectPatProp::KeyValue(ast::KeyValuePatProp {
key: ast::PropName::Str(str_lit),
value: Box::new(value),
}))
}
// Numeric property
TokenType::Num => {
let num_lit = match &self.cur_token.value {
TokenValue::Number(n) => ast::Number {
span: self.cur_token.span,
value: *n,
raw: None,
},
_ => unreachable!("Expected number literal"),
};
self.next_token(); // Skip number
self.expect(TokenType::Colon)?; // Expect ':'
// Parse the value pattern
let value = self.parse_binding_pattern()?;
// Create the key-value property
Ok(ast::ObjectPatProp::KeyValue(ast::KeyValuePatProp {
key: ast::PropName::Num(num_lit),
value: Box::new(value),
}))
}
// Computed property: [expr]: value
TokenType::LBracket => {
let start_span = self.cur_token.span;
self.next_token(); // Skip '['
// Parse the computed key expression
let key = self.parse_assignment_expression()?;
self.expect(TokenType::RBracket)?; // Expect ']'
self.expect(TokenType::Colon)?; // Expect ':'
// Parse the value pattern
let value = self.parse_binding_pattern()?;
// Create the key-value property
Ok(ast::ObjectPatProp::KeyValue(ast::KeyValuePatProp {
key: ast::PropName::Computed(ast::ComputedPropName {
span: start_span.merge_with(self.prev_token.span),
expr: Box::new(key),
}),
value: Box::new(value),
}))
}
// Invalid property
_ => Err(self.error(ErrorKind::UnexpectedToken {
expected: Some("identifier, string, number, or computed property"),
got: format!("{}", self.cur_token.token_type),
})),
}
}
/// Parse a class body: `{ member1 member2 ... }`.
///
/// Skips stray semicolons, recognizes a leading `static` keyword and
/// (under TypeScript syntax) `public`/`private`/`protected` modifiers,
/// then delegates each member to `parse_class_element`, recovering from
/// errors so one bad member does not abort the whole class.
pub(crate) fn parse_class_body(&mut self) -> Result<ast::ClassBody> {
    let start_span = self.cur_token.span;
    self.expect(TokenType::LBrace)?; // Expect '{'
    let mut body = Vec::new();
    // Parse class elements
    while !self.is_token_type(TokenType::RBrace) && !self.is_token_type(TokenType::EOF) {
        // Skip empty elements (semicolons)
        if self.is_token_type(TokenType::Semi) {
            self.next_token(); // Skip ';'
            continue;
        }
        // Check for static keyword
        let is_static = if self.is_token_identifier_eq("static") {
            self.next_token(); // Skip 'static'
            true
        } else {
            false
        };
        // Check for access modifiers (TypeScript)
        // NOTE(review): the modifier text is read out of
        // `TokenValue::String` — confirm identifier tokens carry a String
        // value here; if they use a different TokenValue variant,
        // `accessibility` silently becomes None even though the
        // `is_token_identifier_eq` guard matched.
        let accessibility = if self.syntax.typescript
            && (self.is_token_identifier_eq("public")
                || self.is_token_identifier_eq("private")
                || self.is_token_identifier_eq("protected"))
        {
            let modifier = match self.cur_token.value {
                TokenValue::String(ref s) if s == "public" => Some(ast::Accessibility::Public),
                TokenValue::String(ref s) if s == "private" => {
                    Some(ast::Accessibility::Private)
                }
                TokenValue::String(ref s) if s == "protected" => {
                    Some(ast::Accessibility::Protected)
                }
                _ => None,
            };
            self.next_token(); // Skip modifier
            modifier
        } else {
            None
        };
        // Parse the class element
        match self.parse_class_element(is_static, accessibility) {
            Ok(element) => body.push(element),
            Err(err) => {
                // Report the error but continue parsing
                self.report_error(err);
                self.error_recovery();
            }
        }
    }
    let end_span = self.cur_token.span;
    self.expect(TokenType::RBrace)?; // Expect '}'
    // Create the class body
    Ok(ast::ClassBody {
        span: start_span.merge_with(end_span),
        body,
    })
}
/// Parse a class element: method, getter, setter, or field
pub(crate) fn parse_class_element(
&mut self,
is_static: bool,
accessibility: Option<ast::Accessibility>,
) -> Result<ast::ClassMember> {
// Check for constructor
let is_constructor = if !is_static && self.is_token_identifier_eq("constructor") {
self.next_token(); // Skip 'constructor'
true
} else {
false
};
// Check for async method
let is_async = if self.is_token_type(TokenType::Async) && !self.peek_token().had_line_break
{
self.next_token(); // Skip 'async'
true
} else {
false
};
// Check for generator method
let is_generator = if self.is_token_type(TokenType::Mul) {
self.next_token(); // Skip '*'
true
} else {
false
};
// Check for getter or setter
let kind = if self.is_token_identifier_eq("get")
&& !self.peek_token().had_line_break
&& !is_async
&& !is_generator
{
self.next_token(); // Skip 'get'
ast::MethodKind::Getter
} else if self.is_token_identifier_eq("set")
&& !self.peek_token().had_line_break
&& !is_async
&& !is_generator
{
self.next_token(); // Skip 'set'
ast::MethodKind::Setter
} else if is_constructor {
ast::MethodKind::Constructor
} else {
ast::MethodKind::Method
};
// Parse the key
let key = self.parse_property_name()?;
// Check for computed field
if self.is_token_type(TokenType::Assign) {
// Class field
self.next_token(); // Skip '='
// Parse the initializer
let value = Some(Box::new(self.parse_assignment_expression()?));
self.consume_semicolon(); // Consume semicolon
// Create the class property
Ok(ast::ClassMember::ClassProp(ast::ClassProp {
span: key.span().merge_with(self.prev_token.span),
key: match key {
ast::PropName::Ident(id) => Box::new(ast::Expr::Ident(id)),
ast::PropName::Str(s) => Box::new(ast::Expr::Lit(ast::Lit::Str(s))),
ast::PropName::Num(n) => Box::new(ast::Expr::Lit(ast::Lit::Num(n))),
ast::PropName::Computed(c) => Box::new(ast::Expr::Paren(ast::ParenExpr {
span: c.span,
expr: c.expr,
})),
_ => unreachable!("Invalid property name"),
},
value,
type_ann: None,
is_static,
decorators: Vec::new(),
accessibility,
is_abstract: false,
is_optional: false,
is_override: false,
readonly: false,
declare: false,
definite: false,
}))
} else {
// Method definition
self.expect(TokenType::LParen)?; // Expect '('
// Remember we're in a function
let prev_in_function = self.in_function;
let prev_in_generator = self.in_generator;
let prev_in_async = self.in_async;
self.in_function = true;
self.in_generator = is_generator;
self.in_async = is_async;
// Create a new scope for the method
self.enter_scope(super::super::ScopeKind::Function);
// Parse parameters and body
let (params, body) = self.parse_function_params_and_body()?;
// Exit the method scope
self.exit_scope();
// Restore previous function state
self.in_function = prev_in_function;
self.in_generator = prev_in_generator;
self.in_async = prev_in_async;
// Create the class method
Ok(ast::ClassMember::Method(ast::ClassMethod {
span: key.span().merge_with(body.span),
key,
function: ast::Function {
params,
decorators: Vec::new(),
span: key.span().merge_with(body.span),
body: Some(body),
is_generator,
is_async,
type_params: None,
return_type: None,
ctxt: Default::default(),
},
kind,
is_static,
accessibility,
is_abstract: false,
is_optional: false,
is_override: false,
}))
}
}
    /// Parse a property name: identifier, string, number, or computed property
    ///
    /// Consumes the key token(s) and returns the corresponding
    /// `ast::PropName`. Used for class member names.
    ///
    /// # Errors
    /// Returns an error when the current token cannot start a property name,
    /// or when a computed key's expression / closing `]` is malformed.
    pub(crate) fn parse_property_name(&mut self) -> Result<ast::PropName> {
        match self.cur_token.token_type {
            // Identifier property: `foo`
            TokenType::Ident => {
                let id = self.parse_identifier_name()?;
                Ok(ast::PropName::Ident(id))
            }
            // String property: `"foo"`
            TokenType::Str => {
                // NOTE(review): this expects a `TokenValue::String` variant;
                // the `TokenValue` enum in token.rs defines `Str { value, raw }`
                // instead — confirm which token enum shape this parser targets.
                let str_lit = match &self.cur_token.value {
                    TokenValue::String(s) => ast::Str {
                        span: self.cur_token.span,
                        value: s.clone().into(),
                        raw: None, // raw source text is not preserved here
                    },
                    _ => unreachable!("Expected string literal"),
                };
                self.next_token(); // Skip string
                Ok(ast::PropName::Str(str_lit))
            }
            // Numeric property: `123`
            TokenType::Num => {
                // NOTE(review): expects `TokenValue::Number`; token.rs defines
                // `Num { value, raw }` — confirm (see note above).
                let num_lit = match &self.cur_token.value {
                    TokenValue::Number(n) => ast::Number {
                        span: self.cur_token.span,
                        value: *n,
                        raw: None, // raw source text is not preserved here
                    },
                    _ => unreachable!("Expected number literal"),
                };
                self.next_token(); // Skip number
                Ok(ast::PropName::Num(num_lit))
            }
            // Computed property: `[expr]`
            TokenType::LBracket => {
                let start_span = self.cur_token.span;
                self.next_token(); // Skip '['
                // Parse the computed key expression
                let expr = self.parse_assignment_expression()?;
                // Capture ']' span before consuming it, so the computed key's
                // span covers '[' .. ']'
                let end_span = self.cur_token.span;
                self.expect(TokenType::RBracket)?; // Expect ']'
                Ok(ast::PropName::Computed(ast::ComputedPropName {
                    span: start_span.merge_with(end_span),
                    expr: Box::new(expr),
                }))
            }
            // Anything else cannot start a property name
            _ => Err(self.error(ErrorKind::UnexpectedToken {
                expected: Some("identifier, string, number, or computed property name"),
                got: format!("{}", self.cur_token.token_type),
            })),
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/stmt/expr.rs | Rust | //! Expression statement parser implementation
//!
//! This module provides the implementation for parsing expression statements,
//! which are statements consisting of a single expression followed by a
//! semicolon.
use swc_ecma_ast as ast;
use super::super::Parser;
use crate::{error::Result, token::TokenType};
impl<'a> Parser<'a> {
    /// Parse an expression statement: `expr;`
    ///
    /// Also recognizes a directive prologue candidate — a string literal
    /// statement — and enables strict mode when it is `"use strict"`.
    ///
    /// # Errors
    /// Propagates any error from parsing the expression itself.
    pub(crate) fn parse_expression_statement(&mut self) -> Result<ast::ExprStmt> {
        // A directive candidate is a string literal immediately terminated by
        // an explicit ';' or a line break (ASI). The old code spelled this as
        // `if cond { true } else { false }` (clippy::needless_bool).
        let is_directive = self.is_token_type(TokenType::Str)
            && (self.peek_token().token_type == TokenType::Semi
                || self.peek_token().had_line_break);
        // Parse the expression
        let expr = self.parse_expression()?;
        // `"use strict"` switches the parser into strict mode
        if is_directive {
            if let ast::Expr::Lit(ast::Lit::Str(ref str_lit)) = expr {
                if str_lit.value == "use strict" {
                    // Enable strict mode
                    self.strict_mode = true;
                }
            }
        }
        self.consume_semicolon(); // Explicit ';' or automatic semicolon insertion
        // Create the expression statement spanning expr .. terminator
        Ok(ast::ExprStmt {
            span: expr.span().merge_with(self.prev_token.span),
            expr: Box::new(expr),
        })
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/parser/stmt/mod.rs | Rust | //! Statement parser module
//!
//! This module contains implementations for parsing JavaScript statements.
use swc_common::Span;
use swc_ecma_ast as ast;
use super::Parser;
use crate::{
error::{Error, ErrorKind, Result},
token::{Token, TokenType, TokenValue},
};
// Sub-modules
mod block;
mod control;
mod decl;
mod expr;
impl<'a> Parser<'a> {
/// Parse a statement
pub(crate) fn parse_statement(&mut self) -> Result<ast::Stmt> {
match self.cur_token.token_type {
// Block statement: { ... }
TokenType::LBrace => {
let block = self.parse_block_stmt()?;
Ok(ast::Stmt::Block(block))
}
// Empty statement: ;
TokenType::Semi => {
let empty = self.parse_empty_statement()?;
Ok(ast::Stmt::Empty(empty))
}
// Variable declarations
TokenType::Var => {
let decl = self.parse_var_declaration()?;
Ok(ast::Stmt::Decl(ast::Decl::Var(decl)))
}
TokenType::Let => {
let decl = self.parse_let_declaration()?;
Ok(ast::Stmt::Decl(ast::Decl::Var(decl)))
}
TokenType::Const => {
let decl = self.parse_const_declaration()?;
Ok(ast::Stmt::Decl(ast::Decl::Var(decl)))
}
// Function declaration
TokenType::Function => {
let decl = self.parse_function_declaration(false, false)?;
Ok(ast::Stmt::Decl(ast::Decl::Fn(decl)))
}
TokenType::Async => {
// Check if it's an async function declaration
if self.peek_token().token_type == TokenType::Function {
self.next_token(); // Skip 'async'
let decl = self.parse_function_declaration(true, false)?;
return Ok(ast::Stmt::Decl(ast::Decl::Fn(decl)));
}
// Otherwise, it's an expression statement
let expr = self.parse_expression_statement()?;
Ok(ast::Stmt::Expr(expr))
}
// Class declaration
TokenType::Class => {
let decl = self.parse_class_declaration()?;
Ok(ast::Stmt::Decl(ast::Decl::Class(decl)))
}
// Control flow statements
TokenType::If => {
let stmt = self.parse_if_statement()?;
Ok(ast::Stmt::If(stmt))
}
TokenType::Switch => {
let stmt = self.parse_switch_statement()?;
Ok(ast::Stmt::Switch(stmt))
}
TokenType::For => {
let stmt = self.parse_for_statement()?;
Ok(stmt)
}
TokenType::While => {
let stmt = self.parse_while_statement()?;
Ok(ast::Stmt::While(stmt))
}
TokenType::Do => {
let stmt = self.parse_do_while_statement()?;
Ok(ast::Stmt::DoWhile(stmt))
}
TokenType::Try => {
let stmt = self.parse_try_statement()?;
Ok(ast::Stmt::Try(stmt))
}
TokenType::With => {
let stmt = self.parse_with_statement()?;
Ok(ast::Stmt::With(stmt))
}
TokenType::Break => {
let stmt = self.parse_break_statement()?;
Ok(ast::Stmt::Break(stmt))
}
TokenType::Continue => {
let stmt = self.parse_continue_statement()?;
Ok(ast::Stmt::Continue(stmt))
}
TokenType::Return => {
let stmt = self.parse_return_statement()?;
Ok(ast::Stmt::Return(stmt))
}
TokenType::Throw => {
let stmt = self.parse_throw_statement()?;
Ok(ast::Stmt::Throw(stmt))
}
// Debugger statement
TokenType::Debugger => {
let stmt = self.parse_debugger_statement()?;
Ok(ast::Stmt::Debugger(stmt))
}
// Labeled statement
TokenType::Ident => {
// Check if the next token is a colon
if self.peek_token().token_type == TokenType::Colon {
let stmt = self.parse_labeled_statement()?;
return Ok(ast::Stmt::Labeled(stmt));
}
// Otherwise, it's an expression statement
let expr = self.parse_expression_statement()?;
Ok(ast::Stmt::Expr(expr))
}
// Export statement (only in modules)
TokenType::Export => {
if !self.in_module {
return Err(self.error(ErrorKind::General {
message: "'export' is only allowed in modules".into(),
}));
}
// Export declarations are not implemented in this simplified version
return Err(self.error(ErrorKind::General {
message: "Export declarations are not fully implemented".into(),
}));
}
// Import statement (only in modules)
TokenType::Import => {
if !self.in_module {
return Err(self.error(ErrorKind::General {
message: "'import' is only allowed in modules".into(),
}));
}
// Import declarations are not implemented in this simplified version
return Err(self.error(ErrorKind::General {
message: "Import declarations are not fully implemented".into(),
}));
}
// Default: expression statement
_ => {
let expr = self.parse_expression_statement()?;
Ok(ast::Stmt::Expr(expr))
}
}
}
/// Parse a module
pub(crate) fn parse_module(&mut self) -> Result<ast::Program> {
// Set module mode
self.in_module = true;
// In ES6, modules are always in strict mode
self.strict_mode = true;
// Create a module scope
self.enter_scope(super::ScopeKind::Module);
// Parse the module body
let body = self.parse_module_items()?;
// Exit the module scope
self.exit_scope();
// Create the module program
Ok(ast::Program::Module(ast::Module {
span: body
.iter()
.fold(None, |acc, item| {
let item_span = match item {
ast::ModuleItem::ModuleDecl(decl) => match decl {
ast::ModuleDecl::Import(import) => import.span,
ast::ModuleDecl::ExportDecl(export) => export.span,
ast::ModuleDecl::ExportNamed(export) => export.span,
ast::ModuleDecl::ExportDefaultDecl(export) => export.span,
ast::ModuleDecl::ExportDefaultExpr(export) => export.span,
ast::ModuleDecl::ExportAll(export) => export.span,
ast::ModuleDecl::TsImportEquals(_) => unreachable!("Not implemented"),
ast::ModuleDecl::TsExportAssignment(_) => {
unreachable!("Not implemented")
}
ast::ModuleDecl::TsNamespaceExport(_) => {
unreachable!("Not implemented")
}
},
ast::ModuleItem::Stmt(stmt) => stmt.span(),
};
match acc {
Some(acc) => Some(acc.merge_with(item_span)),
None => Some(item_span),
}
})
.unwrap_or_else(|| Span::dummy()),
body,
shebang: None,
}))
}
/// Parse a script
pub(crate) fn parse_script(&mut self) -> Result<ast::Program> {
// Set script mode
self.in_module = false;
// Create a script scope
self.enter_scope(super::ScopeKind::Script);
// Parse the script body
let mut body = Vec::new();
while !self.is_token_type(TokenType::EOF) {
// Parse a statement
match self.parse_statement() {
Ok(stmt) => body.push(stmt),
Err(err) => {
// Report the error but continue parsing
self.report_error(err);
self.error_recovery();
}
}
}
// Exit the script scope
self.exit_scope();
// Create the script program
Ok(ast::Program::Script(ast::Script {
span: body
.iter()
.fold(None, |acc, stmt| {
let stmt_span = stmt.span();
match acc {
Some(acc) => Some(acc.merge_with(stmt_span)),
None => Some(stmt_span),
}
})
.unwrap_or_else(|| Span::dummy()),
body,
shebang: None,
}))
}
/// Parse an empty statement (;)
pub(crate) fn parse_empty_statement(&mut self) -> Result<ast::EmptyStmt> {
let span = self.cur_token.span;
self.expect(TokenType::Semi)?; // Expect ';'
Ok(ast::EmptyStmt { span })
}
/// Parse a debugger statement
pub(crate) fn parse_debugger_statement(&mut self) -> Result<ast::DebuggerStmt> {
let span = self.cur_token.span;
self.expect(TokenType::Debugger)?; // Expect 'debugger'
self.consume_semicolon(); // Consume semicolon
Ok(ast::DebuggerStmt {
span: span.merge_with(self.prev_token.span),
})
}
/// Parse a labeled statement: label: stmt
pub(crate) fn parse_labeled_statement(&mut self) -> Result<ast::LabeledStmt> {
let label = self.parse_identifier_name()?;
self.expect(TokenType::Colon)?; // Expect ':'
// Check for duplicate label
if self.has_label(&label.sym) {
return Err(self.error(ErrorKind::General {
message: format!("Label '{}' has already been declared", label.sym),
}));
}
// Add the label to the current scope
self.add_label(label.sym);
// Parse the labeled statement
let body = self.parse_statement()?;
// Create the labeled statement
Ok(ast::LabeledStmt {
span: label.span.merge_with(body.span()),
label,
body: Box::new(body),
})
}
/// Consume a semicolon (either explicit or automatic semicolon insertion)
fn consume_semicolon(&mut self) -> bool {
if self.is_token_type(TokenType::Semi) {
self.next_token(); // Skip explicit semicolon
return true;
}
// Automatic Semicolon Insertion (ASI) rules
if self.can_insert_semicolon() {
return true;
}
// If the next token is } or EOF, we can insert a semicolon
if self.is_token_type(TokenType::RBrace) || self.is_token_type(TokenType::EOF) {
return true;
}
// Otherwise, we need an explicit semicolon
self.report_error(self.error(ErrorKind::UnexpectedToken {
expected: Some(";"),
got: format!("{}", self.cur_token.token_type),
}));
false
}
/// Check if a semicolon can be automatically inserted
fn can_insert_semicolon(&self) -> bool {
// ASI applies if:
// 1. There's a line break before the next token
// 2. The next token is } (end of block)
// 3. The next token is EOF (end of input)
self.cur_token.had_line_break
|| self.is_token_type(TokenType::RBrace)
|| self.is_token_type(TokenType::EOF)
}
/// Error recovery - skip to the next statement
fn error_recovery(&mut self) {
// Skip tokens until we find a good synchronization point
while !self.is_token_type(TokenType::EOF) {
// Good synchronization points: semicolon, block start/end, some statements
if self.is_token_type(TokenType::Semi)
|| self.is_token_type(TokenType::RBrace)
|| self.is_token_type(TokenType::LBrace)
|| self.is_token_type(TokenType::Function)
|| self.is_token_type(TokenType::Class)
|| self.is_token_type(TokenType::If)
|| self.is_token_type(TokenType::For)
|| self.is_token_type(TokenType::While)
|| self.is_token_type(TokenType::Do)
|| self.is_token_type(TokenType::Try)
|| self.is_token_type(TokenType::Switch)
|| self.is_token_type(TokenType::Var)
|| self.is_token_type(TokenType::Let)
|| self.is_token_type(TokenType::Const)
{
// Found a synchronization point
if self.is_token_type(TokenType::Semi) {
self.next_token(); // Skip the semicolon
}
break;
}
// Skip the token and continue
self.next_token();
}
}
}
impl<'a> Parser<'a> {
/// Parse module items
pub(crate) fn parse_module_items(&mut self) -> Result<Vec<ast::ModuleItem>> {
let mut body = Vec::new();
while !self.is_token_type(TokenType::EOF) {
// Parse a module item
match self.parse_module_item() {
Ok(item) => body.push(item),
Err(err) => {
// Report the error but continue parsing
self.report_error(err);
self.error_recovery();
}
}
}
Ok(body)
}
/// Parse a module item (statement or module-specific declaration)
pub(crate) fn parse_module_item(&mut self) -> Result<ast::ModuleItem> {
// Check for import or export declarations
match self.cur_token.token_type {
TokenType::Import => {
// Import declarations are not implemented in this simplified version
return Err(self.error(ErrorKind::General {
message: "Import declarations are not fully implemented".into(),
}));
}
TokenType::Export => {
// Export declarations are not implemented in this simplified version
return Err(self.error(ErrorKind::General {
message: "Export declarations are not fully implemented".into(),
}));
}
_ => {
// Regular statement
let stmt = self.parse_statement()?;
Ok(ast::ModuleItem::Stmt(stmt))
}
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/token.rs | Rust | //! High-performance token implementation
//!
//! This module provides token types and related functionality for
//! ECMAScript/TypeScript parser. The implementation is optimized for both
//! memory efficiency and processing speed.
use std::fmt;
use num_bigint::BigInt as BigIntValue;
use phf::phf_map;
use swc_atoms::Atom;
use swc_common::Span;
/// Performance-optimized token type
/// Represented as u8 to minimize memory usage.
///
/// Discriminants are grouped in bands: 0–32 single characters and short
/// compound operators, 33–57 remaining operators, 58–66 JSX tokens,
/// literals and identifiers, 100–137 JavaScript keywords, 150–188
/// TypeScript-related keywords, 190+ special tokens. `is_keyword` relies
/// on the 100..190 band.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum TokenType {
    // Single character tokens (first 33 types)
    LParen = 0,        // (
    RParen = 1,        // )
    LBrace = 2,        // {
    RBrace = 3,        // }
    LBracket = 4,      // [
    RBracket = 5,      // ]
    Semi = 6,          // ;
    Comma = 7,         // ,
    Dot = 8,           // .
    Colon = 9,         // :
    QuestionMark = 10, // ?
    Bang = 11,         // !
    Tilde = 12,        // ~
    Plus = 13,         // +
    Minus = 14,        // -
    Asterisk = 15,     // *
    Slash = 16,        // /
    Percent = 17,      // %
    Lt = 18,           // <
    Gt = 19,           // >
    Pipe = 20,         // |
    Caret = 21,        // ^
    Ampersand = 22,    // &
    Eq = 23,           // =
    At = 24,           // @
    Hash = 25,         // #
    BackQuote = 26,    // `
    Arrow = 27,        // =>
    DotDotDot = 28,    // ...
    // Compound operators
    PlusPlus = 29,   // ++
    MinusMinus = 30, // --
    PlusEq = 31,     // +=
    MinusEq = 32,    // -=
    // More compound operators and keywords (starting from 33)
    MulEq = 33,             // *=
    DivEq = 34,             // /=
    ModEq = 35,             // %=
    BitOrEq = 36,           // |=
    BitXorEq = 37,          // ^=
    BitAndEq = 38,          // &=
    ExpEq = 39,             // **=
    LogicalOrEq = 40,       // ||=
    LogicalAndEq = 41,      // &&=
    NullishEq = 42,         // ??=
    OptionalChain = 43,     // ?.
    EqEq = 44,              // ==
    NotEq = 45,             // !=
    EqEqEq = 46,            // ===
    NotEqEq = 47,           // !==
    LtEq = 48,              // <=
    GtEq = 49,              // >=
    LShift = 50,            // <<
    RShift = 51,            // >>
    ZeroFillRShift = 52,    // >>>
    Exp = 53,               // **
    LogicalOr = 54,         // ||
    LogicalAnd = 55,        // &&
    NullishCoalescing = 56, // ??
    DollarLBrace = 57,      // ${
    // JSX-related tokens
    JSXTagStart = 58,
    JSXTagEnd = 59,
    // Literals
    Str = 60,      // String literal
    Num = 61,      // Number literal
    BigInt = 62,   // BigInt literal
    Regex = 63,    // RegExp literal
    Template = 64, // Template literal
    JSXText = 65,  // JSX text
    // Identifiers and keywords
    Ident = 66, // Identifier
    // Reserved keyword tokens (starting from 100)
    Await = 100,
    Break = 101,
    Case = 102,
    Catch = 103,
    Class = 104,
    Const = 105,
    Continue = 106,
    Debugger = 107,
    Default = 108,
    Delete = 109,
    Do = 110,
    Else = 111,
    Export = 112,
    Extends = 113,
    False = 114,
    Finally = 115,
    For = 116,
    Function = 117,
    If = 118,
    Import = 119,
    In = 120,
    InstanceOf = 121,
    Let = 122,
    New = 123,
    Null = 124,
    Return = 125,
    Super = 126,
    Switch = 127,
    This = 128,
    Throw = 129,
    True = 130,
    Try = 131,
    TypeOf = 132,
    Var = 133,
    Void = 134,
    While = 135,
    With = 136,
    Yield = 137,
    // TypeScript-related keywords (starting from 150; 138-149 are reserved)
    Abstract = 150,
    Any = 151,
    As = 152,
    Asserts = 153,
    Assert = 154,
    Async = 155,
    Bigint = 156,
    Boolean = 157,
    Constructor = 158,
    Declare = 159,
    Enum = 160,
    From = 161,
    Get = 162,
    Global = 163,
    Implements = 164,
    Interface = 165,
    Intrinsic = 166,
    Is = 167,
    Keyof = 168,
    Namespace = 169,
    Never = 170,
    Number = 171,
    Object = 172,
    Of = 173,
    Package = 174,
    Private = 175,
    Protected = 176,
    Public = 177,
    Readonly = 178,
    Require = 179,
    Set = 180,
    Static = 181,
    String = 182,
    Symbol = 183,
    Type = 184,
    Undefined = 185,
    Unique = 186,
    Unknown = 187,
    Using = 188,
    // Special tokens
    Shebang = 190,
    EOF = 191,
    Invalid = 192,
}
impl TokenType {
    /// Checks if this token can precede an expression (i.e. an expression may
    /// legally start right after it).
    ///
    /// `const` so the check can be evaluated at compile time where possible.
    #[inline(always)]
    pub const fn before_expr(self) -> bool {
        // Check if the token is one that is typically followed by an expression
        matches!(
            self,
            TokenType::Semi
                | TokenType::Comma
                | TokenType::LParen
                | TokenType::LBracket
                | TokenType::LBrace
                | TokenType::Colon
                | TokenType::QuestionMark
                | TokenType::Arrow
                | TokenType::DollarLBrace
                | TokenType::Template
                | TokenType::Plus
                | TokenType::Minus
                | TokenType::Bang
                | TokenType::Tilde
                | TokenType::PlusPlus
                | TokenType::MinusMinus
                | TokenType::PlusEq
                | TokenType::MinusEq
                | TokenType::MulEq
                | TokenType::DivEq
                | TokenType::ModEq
                | TokenType::ExpEq
                | TokenType::BitOrEq
                | TokenType::BitXorEq
                | TokenType::BitAndEq
                | TokenType::LogicalOrEq
                | TokenType::LogicalAndEq
                | TokenType::NullishEq
                | TokenType::OptionalChain
                | TokenType::Eq
                | TokenType::EqEq
                | TokenType::EqEqEq
                | TokenType::NotEq
                | TokenType::NotEqEq
                | TokenType::Lt
                | TokenType::Gt
                | TokenType::LtEq
                | TokenType::GtEq
                | TokenType::LogicalOr
                | TokenType::LogicalAnd
                | TokenType::NullishCoalescing
                | TokenType::Exp
                | TokenType::Slash
                | TokenType::Percent
                | TokenType::Asterisk
                | TokenType::LShift
                | TokenType::RShift
                | TokenType::ZeroFillRShift
                | TokenType::Ampersand
                | TokenType::Pipe
                | TokenType::Caret
                | TokenType::Return
                | TokenType::Case
                | TokenType::Delete
                | TokenType::Throw
                | TokenType::In
                | TokenType::TypeOf
                | TokenType::InstanceOf
                | TokenType::Void
                | TokenType::Do
                | TokenType::New
                | TokenType::Yield
                | TokenType::Await
                | TokenType::Extends
                | TokenType::Of
                | TokenType::As
                | TokenType::Is
                | TokenType::Asserts
                | TokenType::Assert
                | TokenType::Using
        )
    }
    /// Checks if this token can start an expression.
    ///
    /// `const` so the check can be evaluated at compile time where possible.
    #[inline(always)]
    pub const fn starts_expr(self) -> bool {
        matches!(
            self,
            TokenType::LParen
                | TokenType::LBrace
                | TokenType::LBracket
                | TokenType::Plus
                | TokenType::Minus
                | TokenType::Bang
                | TokenType::Tilde
                | TokenType::PlusPlus
                | TokenType::MinusMinus
                | TokenType::BackQuote
                | TokenType::DollarLBrace
                | TokenType::Str
                | TokenType::Num
                | TokenType::BigInt
                | TokenType::Regex
                | TokenType::JSXTagStart
                | TokenType::Ident
                | TokenType::Await
                | TokenType::Class
                | TokenType::Function
                | TokenType::Import
                | TokenType::New
                | TokenType::Super
                | TokenType::This
                | TokenType::Throw
                | TokenType::True
                | TokenType::False
                | TokenType::Null
                | TokenType::TypeOf
                | TokenType::Void
                | TokenType::Delete
                | TokenType::Yield
        )
    }
/// Check if the token is a keyword
#[inline]
pub fn is_keyword(self) -> bool {
(self as u8) >= 100 && (self as u8) < 190
}
    /// Convert token type to its canonical string representation.
    ///
    /// Operators map to their source spelling, keywords to their keyword
    /// text, and literal/special kinds to a human-readable label. Used by the
    /// `Display` impl (and therefore by error messages).
    #[inline]
    pub fn as_str(self) -> &'static str {
        match self {
            TokenType::LParen => "(",
            TokenType::RParen => ")",
            TokenType::LBrace => "{",
            TokenType::RBrace => "}",
            TokenType::LBracket => "[",
            TokenType::RBracket => "]",
            TokenType::Semi => ";",
            TokenType::Comma => ",",
            TokenType::Dot => ".",
            TokenType::Colon => ":",
            TokenType::QuestionMark => "?",
            TokenType::Bang => "!",
            TokenType::Tilde => "~",
            TokenType::Plus => "+",
            TokenType::Minus => "-",
            TokenType::Asterisk => "*",
            TokenType::Slash => "/",
            TokenType::Percent => "%",
            TokenType::Lt => "<",
            TokenType::Gt => ">",
            TokenType::Pipe => "|",
            TokenType::Caret => "^",
            TokenType::Ampersand => "&",
            TokenType::Eq => "=",
            TokenType::At => "@",
            TokenType::Hash => "#",
            TokenType::BackQuote => "`",
            TokenType::Arrow => "=>",
            TokenType::DotDotDot => "...",
            TokenType::PlusPlus => "++",
            TokenType::MinusMinus => "--",
            TokenType::PlusEq => "+=",
            TokenType::MinusEq => "-=",
            TokenType::MulEq => "*=",
            TokenType::DivEq => "/=",
            TokenType::ModEq => "%=",
            TokenType::BitOrEq => "|=",
            TokenType::BitXorEq => "^=",
            TokenType::BitAndEq => "&=",
            TokenType::ExpEq => "**=",
            TokenType::LogicalOrEq => "||=",
            TokenType::LogicalAndEq => "&&=",
            TokenType::NullishEq => "??=",
            TokenType::OptionalChain => "?.",
            TokenType::EqEq => "==",
            TokenType::NotEq => "!=",
            TokenType::EqEqEq => "===",
            TokenType::NotEqEq => "!==",
            TokenType::LtEq => "<=",
            TokenType::GtEq => ">=",
            TokenType::LShift => "<<",
            TokenType::RShift => ">>",
            TokenType::ZeroFillRShift => ">>>",
            TokenType::Exp => "**",
            TokenType::LogicalOr => "||",
            TokenType::LogicalAnd => "&&",
            TokenType::NullishCoalescing => "??",
            TokenType::DollarLBrace => "${",
            TokenType::JSXTagStart => "<",
            TokenType::JSXTagEnd => "/>",
            // Literal/special kinds use descriptive labels rather than source text
            TokenType::Str => "string",
            TokenType::Num => "number",
            TokenType::BigInt => "BigInt",
            TokenType::Regex => "RegExp",
            TokenType::Template => "template",
            TokenType::JSXText => "JSX text",
            TokenType::Ident => "identifier",
            TokenType::Await => "await",
            TokenType::Break => "break",
            TokenType::Case => "case",
            TokenType::Catch => "catch",
            TokenType::Class => "class",
            TokenType::Const => "const",
            TokenType::Continue => "continue",
            TokenType::Debugger => "debugger",
            TokenType::Default => "default",
            TokenType::Delete => "delete",
            TokenType::Do => "do",
            TokenType::Else => "else",
            TokenType::Export => "export",
            TokenType::Extends => "extends",
            TokenType::False => "false",
            TokenType::Finally => "finally",
            TokenType::For => "for",
            TokenType::Function => "function",
            TokenType::If => "if",
            TokenType::Import => "import",
            TokenType::In => "in",
            TokenType::InstanceOf => "instanceof",
            TokenType::Let => "let",
            TokenType::New => "new",
            TokenType::Null => "null",
            TokenType::Return => "return",
            TokenType::Super => "super",
            TokenType::Switch => "switch",
            TokenType::This => "this",
            TokenType::Throw => "throw",
            TokenType::True => "true",
            TokenType::Try => "try",
            TokenType::TypeOf => "typeof",
            TokenType::Var => "var",
            TokenType::Void => "void",
            TokenType::While => "while",
            TokenType::With => "with",
            TokenType::Yield => "yield",
            TokenType::Abstract => "abstract",
            TokenType::Any => "any",
            TokenType::As => "as",
            TokenType::Asserts => "asserts",
            TokenType::Assert => "assert",
            TokenType::Async => "async",
            TokenType::Bigint => "bigint",
            TokenType::Boolean => "boolean",
            TokenType::Constructor => "constructor",
            TokenType::Declare => "declare",
            TokenType::Enum => "enum",
            TokenType::From => "from",
            TokenType::Get => "get",
            TokenType::Global => "global",
            TokenType::Implements => "implements",
            TokenType::Interface => "interface",
            TokenType::Intrinsic => "intrinsic",
            TokenType::Is => "is",
            TokenType::Keyof => "keyof",
            TokenType::Namespace => "namespace",
            TokenType::Never => "never",
            TokenType::Number => "number",
            TokenType::Object => "object",
            TokenType::Of => "of",
            TokenType::Package => "package",
            TokenType::Private => "private",
            TokenType::Protected => "protected",
            TokenType::Public => "public",
            TokenType::Readonly => "readonly",
            TokenType::Require => "require",
            TokenType::Set => "set",
            TokenType::Static => "static",
            TokenType::String => "string",
            TokenType::Symbol => "symbol",
            TokenType::Type => "type",
            TokenType::Undefined => "undefined",
            TokenType::Unique => "unique",
            TokenType::Unknown => "unknown",
            TokenType::Using => "using",
            TokenType::Shebang => "#!",
            TokenType::EOF => "EOF",
            TokenType::Invalid => "invalid token",
        }
    }
}
impl fmt::Display for TokenType {
    /// Delegates to [`TokenType::as_str`].
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
/// Token value enum optimized for efficient representation.
///
/// Most tokens carry no payload (`None`); literal tokens carry both the
/// decoded value and, where stored, the `raw` source text atom.
#[derive(Clone, Default)]
pub enum TokenValue {
    /// No value (for most tokens)
    #[default]
    None,
    /// Identifier or keyword (managed as atoms to minimize duplicate strings)
    Word(Atom),
    /// String literal
    Str { value: Atom, raw: Atom },
    /// Number literal
    Num { value: f64, raw: Atom },
    /// BigInt literal (boxed: the big integer is large and rare)
    BigInt { value: Box<BigIntValue>, raw: Atom },
    /// Regular expression literal (pattern and flags stored separately)
    Regex { exp: Atom, flags: Atom },
    /// Template literal; `cooked` is `None` when the escape sequences are
    /// invalid (per the `Debug` impl below, which prints "invalid")
    Template { raw: Atom, cooked: Option<Atom> },
    /// JSX text
    JSXText { value: Atom, raw: Atom },
    /// Shebang comment
    Shebang(Atom),
}
impl fmt::Debug for TokenValue {
    /// Compact debug formatting showing each variant's payload.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            TokenValue::None => write!(f, "None"),
            TokenValue::Word(word) => write!(f, "Word({})", word),
            TokenValue::Str { value, raw } => write!(f, "Str({}, raw: {})", value, raw),
            TokenValue::Num { value, raw } => write!(f, "Num({}, raw: {})", value, raw),
            TokenValue::BigInt { value, raw } => write!(f, "BigInt({}, raw: {})", value, raw),
            // Fixed: the format string was missing its closing `)` after the
            // flags, producing unbalanced output like `Regex(/a/g`.
            TokenValue::Regex { exp, flags } => write!(f, "Regex(/{}/{})", exp, flags),
            TokenValue::Template { raw, cooked } => {
                if let Some(cooked) = cooked {
                    write!(f, "Template({}, cooked: {})", raw, cooked)
                } else {
                    write!(f, "Template({}, invalid)", raw)
                }
            }
            TokenValue::JSXText { value, .. } => write!(f, "JSXText({})", value),
            TokenValue::Shebang(content) => write!(f, "Shebang({})", content),
        }
    }
}
/// Performance-optimized token structure
/// Optimized for memory layout and data access patterns.
#[derive(Clone)]
pub struct Token {
    /// Token type (1 byte)
    pub token_type: TokenType,
    /// Whether this token was preceded by a line break (1 byte);
    /// consulted by the parser for Automatic Semicolon Insertion
    pub had_line_break: bool,
    /// Token span (8 bytes): source location of the token
    pub span: Span,
    /// Token value (containing actual values for strings, numbers, etc.;
    /// `TokenValue::None` for tokens with no payload)
    pub value: TokenValue,
}
impl Token {
/// Create a new token
pub fn new(token_type: TokenType, span: Span, had_line_break: bool, value: TokenValue) -> Self {
Self {
token_type,
had_line_break,
span,
value,
}
}
/// Check if this token can precede an expression
#[inline]
pub fn before_expr(&self) -> bool {
self.token_type.before_expr()
}
/// Check if this token can start an expression
#[inline]
pub fn starts_expr(&self) -> bool {
self.token_type.starts_expr()
}
/// Return the value if this is an identifier token
pub fn ident_value(&self) -> Option<&Atom> {
if let (TokenType::Ident, TokenValue::Word(word)) = (&self.token_type, &self.value) {
Some(word)
} else {
None
}
}
/// Check if this is a keyword token
#[inline]
pub fn is_keyword(&self) -> bool {
self.token_type.is_keyword()
}
}
impl fmt::Debug for Token {
    /// Debug formatting: payload-carrying tokens print their value; tokens
    /// without a payload print just the token type.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.value {
            TokenValue::None => write!(f, "{:?}", self.token_type),
            TokenValue::Word(word) => write!(f, "{:?}({})", self.token_type, word),
            TokenValue::Str { value, raw } => write!(f, "Str({}, raw: {})", value, raw),
            TokenValue::Num { value, raw } => write!(f, "Num({}, raw: {})", value, raw),
            TokenValue::BigInt { value, raw } => write!(f, "BigInt({}, raw: {})", value, raw),
            // Fixed: the format string was missing its closing `)` after the
            // flags (same defect as the `Debug` impl for `TokenValue`).
            TokenValue::Regex { exp, flags } => write!(f, "Regex(/{}/{})", exp, flags),
            TokenValue::Template { raw, .. } => write!(f, "Template({})", raw),
            TokenValue::JSXText { value, .. } => write!(f, "JSXText({})", value),
            TokenValue::Shebang(content) => write!(f, "Shebang({})", content),
        }
    }
}
// Compile-time keyword to token type mapping using PHF
//
// Built entirely at compile time by `phf_map!`, so each lookup is a single
// perfect-hash probe with no runtime initialization. Keys are the exact,
// case-sensitive keyword spellings.
static KEYWORDS: phf::Map<&'static str, TokenType> = phf_map! {
    // JavaScript keywords
    "await" => TokenType::Await,
    "break" => TokenType::Break,
    "case" => TokenType::Case,
    "catch" => TokenType::Catch,
    "class" => TokenType::Class,
    "const" => TokenType::Const,
    "continue" => TokenType::Continue,
    "debugger" => TokenType::Debugger,
    "default" => TokenType::Default,
    "delete" => TokenType::Delete,
    "do" => TokenType::Do,
    "else" => TokenType::Else,
    "export" => TokenType::Export,
    "extends" => TokenType::Extends,
    "false" => TokenType::False,
    "finally" => TokenType::Finally,
    "for" => TokenType::For,
    "function" => TokenType::Function,
    "if" => TokenType::If,
    "import" => TokenType::Import,
    "in" => TokenType::In,
    "instanceof" => TokenType::InstanceOf,
    "let" => TokenType::Let,
    "new" => TokenType::New,
    "null" => TokenType::Null,
    "return" => TokenType::Return,
    "super" => TokenType::Super,
    "switch" => TokenType::Switch,
    "this" => TokenType::This,
    "throw" => TokenType::Throw,
    "true" => TokenType::True,
    "try" => TokenType::Try,
    "typeof" => TokenType::TypeOf,
    "var" => TokenType::Var,
    "void" => TokenType::Void,
    "while" => TokenType::While,
    "with" => TokenType::With,
    "yield" => TokenType::Yield,
    // TypeScript-related keywords
    // NOTE(review): many of these are contextual (valid identifiers in other
    // positions) — confirm how the parser disambiguates them.
    "abstract" => TokenType::Abstract,
    "any" => TokenType::Any,
    "as" => TokenType::As,
    "asserts" => TokenType::Asserts,
    "assert" => TokenType::Assert,
    "async" => TokenType::Async,
    "bigint" => TokenType::Bigint,
    "boolean" => TokenType::Boolean,
    "constructor" => TokenType::Constructor,
    "declare" => TokenType::Declare,
    "enum" => TokenType::Enum,
    "from" => TokenType::From,
    "get" => TokenType::Get,
    "global" => TokenType::Global,
    "implements" => TokenType::Implements,
    "interface" => TokenType::Interface,
    "intrinsic" => TokenType::Intrinsic,
    "is" => TokenType::Is,
    "keyof" => TokenType::Keyof,
    "namespace" => TokenType::Namespace,
    "never" => TokenType::Never,
    "number" => TokenType::Number,
    "object" => TokenType::Object,
    "of" => TokenType::Of,
    "package" => TokenType::Package,
    "private" => TokenType::Private,
    "protected" => TokenType::Protected,
    "public" => TokenType::Public,
    "readonly" => TokenType::Readonly,
    "require" => TokenType::Require,
    "set" => TokenType::Set,
    "static" => TokenType::Static,
    "string" => TokenType::String,
    "symbol" => TokenType::Symbol,
    "type" => TokenType::Type,
    "undefined" => TokenType::Undefined,
    "unique" => TokenType::Unique,
    "unknown" => TokenType::Unknown,
    "using" => TokenType::Using,
};
/// Look `word` up in the compile-time keyword table.
///
/// Returns the matching [`TokenType`] for JavaScript/TypeScript keywords, or
/// `None` for any other identifier. Lookup is O(1) via the PHF map.
#[inline(always)]
pub fn keyword_to_token_type(word: &str) -> Option<TokenType> {
    let token_type = KEYWORDS.get(word)?;
    Some(*token_type)
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The keyword table must resolve known keywords and reject
    /// non-keyword / case-mismatched identifiers.
    #[test]
    fn test_keyword_to_token_type() {
        assert_eq!(keyword_to_token_type("const"), Some(TokenType::Const));
        assert_eq!(keyword_to_token_type("function"), Some(TokenType::Function));
        assert_eq!(keyword_to_token_type("class"), Some(TokenType::Class));
        // TypeScript-only keywords live in the same table.
        assert_eq!(keyword_to_token_type("readonly"), Some(TokenType::Readonly));
        // Previously untested: the miss path. Lookups are case-sensitive and
        // must return `None` for ordinary identifiers.
        assert_eq!(keyword_to_token_type("notakeyword"), None);
        assert_eq!(keyword_to_token_type("Const"), None);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_fast_parser/src/util.rs | Rust | //! Utility functions for the parser. This module is copied from hashbrown
// FIXME: Branch prediction hint. This is currently only available on nightly
// but it consistently improves performance by 10-15%.
//
// On stable builds `likely`/`unlikely` degrade to `std::convert::identity`:
// call sites keep the annotation, but the compiler receives no hint.
#[cfg(not(feature = "nightly"))]
pub(crate) use std::convert::{identity as likely, identity as unlikely};
// On nightly the real intrinsics are re-exported under the same names.
#[cfg(feature = "nightly")]
pub(crate) use std::intrinsics::{likely, unlikely};
crates/swc_ecma_lints/benches/all.rs | Rust | extern crate swc_malloc;
use std::fs::read_to_string;
use codspeed_criterion_compat::{black_box, criterion_group, criterion_main, Criterion};
use swc_common::{
errors::{Handler, HANDLER},
sync::Lrc,
FileName, Globals, Mark, SourceMap, SyntaxContext, GLOBALS,
};
use swc_ecma_ast::{EsVersion, Program};
use swc_ecma_lints::{
config::LintConfig,
rules::{lint_pass, LintParams},
};
use swc_ecma_parser::parse_file_as_module;
use swc_ecma_transforms_base::resolver;
/// Benchmark the full lint-rule set against several real-world libraries.
///
/// Each fixture is read, parsed, and name-resolved once outside the timing
/// loop; only `run` (rule construction + linting) is measured per iteration.
pub fn bench_files(c: &mut Criterion) {
    let mut group = c.benchmark_group("es/lints/libs");
    group.sample_size(10);
    let mut bench_file = |name: &str| {
        group.bench_function(format!("es/lints/libs/{}", name), |b| {
            // Sources are shared with the minifier's benchmark fixtures.
            let src =
                read_to_string(format!("../swc_ecma_minifier/benches/full/{}.js", name)).unwrap();
            let globals = Globals::default();
            GLOBALS.set(&globals, || {
                let cm = Lrc::new(SourceMap::default());
                let handler = Handler::with_tty_emitter(
                    swc_common::errors::ColorConfig::Always,
                    true,
                    false,
                    Some(cm.clone()),
                );
                let fm = cm.new_source_file(FileName::Anon.into(), src);
                let unresolved_mark = Mark::new();
                let top_level_mark = Mark::new();
                // Parse + resolve once; parse errors are emitted and abort
                // the benchmark via `unwrap`.
                let program = parse_file_as_module(
                    &fm,
                    Default::default(),
                    Default::default(),
                    None,
                    &mut Vec::new(),
                )
                .map_err(|err| {
                    err.into_diagnostic(&handler).emit();
                })
                .map(Program::Module)
                .map(|module| module.apply(resolver(unresolved_mark, top_level_mark, false)))
                .unwrap();
                b.iter(|| {
                    // GLOBALS/HANDLER are scoped thread-locals, so they are
                    // re-installed for every timed iteration.
                    GLOBALS.set(&globals, || {
                        HANDLER.set(&handler, || {
                            // Clone per iteration so each run lints an
                            // identical, fresh AST.
                            run(
                                cm.clone(),
                                &mut program.clone(),
                                unresolved_mark,
                                top_level_mark,
                            )
                        });
                    });
                });
            });
        });
    };
    bench_file("antd");
    bench_file("d3");
    bench_file("echarts");
    bench_file("jquery");
    bench_file("lodash");
    bench_file("moment");
    bench_file("react");
    bench_file("terser");
    bench_file("three");
    bench_file("typescript");
    bench_file("victory");
    bench_file("vue");
}
// Criterion entry points: a single group containing `bench_files`.
criterion_group!(files, bench_files);
criterion_main!(files);
/// Build the full rule set from the default lint config and apply it to
/// `program` once — the unit of work measured by the benchmark.
fn run(cm: Lrc<SourceMap>, program: &mut Program, unresolved_mark: Mark, top_level_mark: Mark) {
    let rules = swc_ecma_lints::rules::all(LintParams {
        program,
        lint_config: &LintConfig::default(),
        unresolved_ctxt: SyntaxContext::empty().apply_mark(unresolved_mark),
        top_level_ctxt: SyntaxContext::empty().apply_mark(top_level_mark),
        es_version: EsVersion::EsNext,
        // `cm` is owned and not used again below, so move it instead of
        // cloning (the `.clone()` was a redundant refcount bump).
        source_map: cm,
    });
    // `black_box` keeps the optimizer from eliding pass construction.
    let pass = black_box(lint_pass(rules));
    program.mutate(pass)
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/examples/all.js | JavaScript | const a = 1;
// Deliberate reassignment of a `const` binding: running the `all` example
// over this file should produce a const-assign diagnostic here.
a = 2;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/examples/all.rs | Rust | use swc_common::{errors::HANDLER, Mark, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_lints::{rule::Rule, rules::LintParams};
use swc_ecma_parser::Syntax;
use swc_ecma_transforms_base::resolver;
/// Parse `examples/all.js`, run every lint rule over it, and print the
/// collected diagnostics. The fixture intentionally violates a rule, so the
/// example always ends by printing the captured report.
fn main() {
    // testing::run_test creates Lrc<SourceMap> and `&Handler` for you
    // You can refer to source code of it or try_with_handler of the swc crate.
    let msg = testing::run_test(false, |cm, handler| -> Result<(), _> {
        // Rules report through the scoped HANDLER thread-local.
        HANDLER.set(handler, || {
            let fm = cm.load_file("examples/all.js".as_ref()).unwrap();
            let module = swc_ecma_parser::parse_file_as_module(
                &fm,
                Syntax::default(),
                EsVersion::latest(),
                None,
                &mut Vec::new(),
            );
            let mut program = match module {
                Ok(v) => Program::Module(v),
                Err(err) => {
                    err.into_diagnostic(handler).emit();
                    return Err(());
                }
            };
            let unresolved_mark = Mark::new();
            let top_level_mark = Mark::new();
            let unresolved_ctxt = SyntaxContext::empty().apply_mark(unresolved_mark);
            let top_level_ctxt = SyntaxContext::empty().apply_mark(top_level_mark);
            // Name resolution must run before linting so rules can tell
            // bindings apart by syntax context.
            program.mutate(resolver(unresolved_mark, top_level_mark, false));
            let mut rules = swc_ecma_lints::rules::all(LintParams {
                program: &program,
                lint_config: &Default::default(),
                unresolved_ctxt,
                top_level_ctxt,
                es_version: EsVersion::latest(),
                source_map: cm.clone(),
            });
            let module = program.expect_module();
            rules.lint_module(&module);
            // Returning Err makes run_test hand back the captured output.
            Err(())
        })
    })
    .unwrap_err();
    println!("{}", msg);
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/config.rs | Rust | use std::fmt::Debug;
use serde::{Deserialize, Serialize};
use swc_config::merge::Merge;
#[cfg(feature = "non_critical_lints")]
use crate::rules::non_critical_lints::{
dot_notation::DotNotationConfig, eqeqeq::EqeqeqConfig, no_bitwise::NoBitwiseConfig,
no_console::NoConsoleConfig, no_empty_function::NoEmptyFunctionConfig,
no_param_reassign::NoParamReassignConfig, no_restricted_syntax::NoRestrictedSyntaxConfig,
no_use_before_define::NoUseBeforeDefineConfig, prefer_const::PreferConstConfig,
prefer_regex_literals::PreferRegexLiteralsConfig, quotes::QuotesConfig, radix::RadixConfig,
symbol_description::SymbolDescriptionConfig, use_is_nan::UseIsNanConfig,
valid_typeof::ValidTypeofConfig, yoda::YodaConfig,
};
/// How a lint rule reacts when triggered: disabled, a warning, or an error.
///
/// Serialized in lowercase (`"off"` / `"warning"` / `"error"`).
// Uses `#[derive(Default)]` with a `#[default]` variant instead of a manual
// `impl Default`, consistent with other enums in this crate
// (e.g. `SuperClass` in the constructor-super rule).
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LintRuleReaction {
    /// The rule is disabled (the default).
    #[default]
    Off,
    Warning,
    Error,
}
/// Raw rule level as written in user config: either a string reaction
/// (`"off"` / `"warning"` / `"error"`) or an ESLint-style numeric level.
// Uses `#[derive(Default)]` with a `#[default]` variant instead of a manual
// `impl Default`, consistent with other enums in this crate.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
#[serde(untagged)]
enum LintRuleLevel {
    Str(LintRuleReaction),
    Number(u8),
    /// No level present in the config; converts to `Off`.
    #[default]
    Unspecified,
}
impl From<LintRuleLevel> for LintRuleReaction {
    fn from(level: LintRuleLevel) -> Self {
        match level {
            LintRuleLevel::Str(level) => level,
            // ESLint numeric levels: 1 = warn, 2 = error, anything else = off.
            LintRuleLevel::Number(level) => match level {
                1 => LintRuleReaction::Warning,
                2 => LintRuleReaction::Error,
                _ => LintRuleReaction::Off,
            },
            LintRuleLevel::Unspecified => LintRuleReaction::Off,
        }
    }
}
/// A single rule's configuration: a severity level plus rule-specific
/// settings, deserialized from an ESLint-style `[level, options]` pair.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct RuleConfig<T: Debug + Clone + Serialize + Default>(
    // Severity level; `Unspecified` when absent from the config.
    #[serde(default)] LintRuleLevel,
    // Rule-specific options; `T::default()` when absent.
    #[serde(default)] T,
);
impl<T: Debug + Clone + Serialize + Default> RuleConfig<T> {
    /// The effective reaction (numeric / string levels normalized).
    pub(crate) fn get_rule_reaction(&self) -> LintRuleReaction {
        self.0.into()
    }
    /// Borrow the rule-specific options.
    pub(crate) fn get_rule_config(&self) -> &T {
        &self.1
    }
}
impl<T> Merge for RuleConfig<T>
where
T: Debug + Clone + Serialize + Default,
{
fn merge(&mut self, other: Self) {
if let LintRuleLevel::Unspecified = self.0 {
self.0 = other.0;
self.1 = other.1;
}
}
}
/// Top-level lint configuration: one `RuleConfig` entry per supported rule.
///
/// Serialized field names are kebab-case, with camelCase accepted through
/// per-field aliases. Most rules are only compiled in when the
/// `non_critical_lints` feature is enabled.
#[derive(Debug, Clone, Default, Serialize, Deserialize, Merge)]
#[non_exhaustive]
#[serde(rename_all = "kebab-case")]
pub struct LintConfig {
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noConsole")]
    pub no_console: RuleConfig<NoConsoleConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "preferRegexLiterals")]
    pub prefer_regex_literals: RuleConfig<PreferRegexLiteralsConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noAlert")]
    pub no_alert: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noDebugger")]
    pub no_debugger: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noUseBeforeDefine")]
    pub no_use_before_define: RuleConfig<NoUseBeforeDefineConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "dotNotation")]
    pub dot_notation: RuleConfig<DotNotationConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default)]
    pub quotes: RuleConfig<QuotesConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noEmptyFunction")]
    pub no_empty_function: RuleConfig<NoEmptyFunctionConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noEmptyPattern")]
    pub no_empty_pattern: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default)]
    pub eqeqeq: RuleConfig<EqeqeqConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noLoopFunc")]
    pub no_loop_func: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noNew")]
    pub no_new: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noRestrictedSyntax")]
    pub no_restricted_syntax: RuleConfig<NoRestrictedSyntaxConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default)]
    pub radix: RuleConfig<RadixConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noBitwise")]
    pub no_bitwise: RuleConfig<NoBitwiseConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "defaultParamLast")]
    pub default_param_last: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default)]
    pub yoda: RuleConfig<YodaConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noNewSymbol")]
    pub no_new_symbol: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "useIsNan")]
    pub use_isnan: RuleConfig<UseIsNanConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "validTypeof")]
    pub valid_typeof: RuleConfig<ValidTypeofConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noParamReassign")]
    pub no_param_reassign: RuleConfig<NoParamReassignConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "symbolDescription")]
    pub symbol_description: RuleConfig<SymbolDescriptionConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noObjCalls")]
    pub no_obj_calls: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noVar")]
    pub no_var: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noThrowLiteral")]
    pub no_throw_literal: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "preferConst")]
    pub prefer_const: RuleConfig<PreferConstConfig>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noCompareNegZero")]
    pub no_compare_neg_zero: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "constructorSuper")]
    pub constructor_super: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noSparseArrays")]
    pub no_sparse_arrays: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "defaultCaseLast")]
    pub default_case_last: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noAwaitInLoop")]
    pub no_await_in_loop: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noCondAssign")]
    pub no_cond_assign: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "noPrototypeBuiltins")]
    pub no_prototype_builtins: RuleConfig<()>,
    // NOTE(review): unlike every sibling rule, this field is not gated
    // behind the `non_critical_lints` feature — confirm whether intentional.
    #[serde(default, alias = "noNewObject")]
    pub no_new_object: RuleConfig<()>,
    #[cfg(feature = "non_critical_lints")]
    #[serde(default, alias = "preferObjectSpread")]
    pub prefer_object_spread: RuleConfig<()>,
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/lib.rs | Rust | #![cfg_attr(feature = "non_critical_lints", deny(unused))]
// Strict lints are only enforced when the full rule set is compiled in;
// with default features much of the crate is unused and would trip them.
#![cfg_attr(feature = "non_critical_lints", deny(clippy::all))]
#![allow(rustc::untranslatable_diagnostic_trivial)]
#![allow(dead_code)]
// User-facing configuration types (`LintConfig`, `RuleConfig`).
pub mod config;
// The `Rule` trait and the machinery that runs rules over a program.
pub mod rule;
// The individual lint rule implementations.
pub mod rules;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rule.rs | Rust | use std::{fmt::Debug, mem::take, sync::Arc};
use auto_impl::auto_impl;
use parking_lot::Mutex;
use swc_common::{
errors::{Diagnostic, DiagnosticBuilder, Emitter, Handler, HANDLER},
GLOBALS,
};
use swc_ecma_ast::{Module, Script};
use swc_ecma_visit::{Visit, VisitWith};
use swc_parallel::join;
/// A lint rule.
///
/// # Implementation notes
///
/// Must report error to [swc_common::HANDLER]
#[auto_impl(Box, &mut)]
pub trait Rule: Debug + Send + Sync {
    /// Lint an ES module.
    fn lint_module(&mut self, program: &Module);
    /// Lint a classic script.
    fn lint_script(&mut self, program: &Script);
}
/// Internal adapter letting the driver treat `Module` and `Script`
/// uniformly: each knows which `Rule` method to dispatch to.
trait LintNode<R: Rule>: Send + Sync {
    /// Run `rule` over `self`.
    fn lint(&self, rule: &mut R);
}
impl<R: Rule> LintNode<R> for Module {
    #[inline]
    fn lint(&self, rule: &mut R) {
        rule.lint_module(self);
    }
}
impl<R: Rule> LintNode<R> for Script {
    #[inline]
    fn lint(&self, rule: &mut R) {
        rule.lint_script(self);
    }
}
/// Recursively split `rules` in half and lint `program` with both halves via
/// `join` (binary fan-out, potentially on different worker threads).
///
/// `GLOBALS` and `HANDLER` are scoped thread-locals, so they must be
/// re-installed explicitly on each side of the `join`.
fn join_lint_rules<N: LintNode<R>, R: Rule>(rules: &mut [R], program: &N) {
    let len = rules.len();
    if len == 0 {
        return;
    }
    if len == 1 {
        // Base case: run the single rule directly.
        program.lint(&mut rules[0]);
        return;
    }
    let (ra, rb) = rules.split_at_mut(len / 2);
    GLOBALS.with(|globals| {
        HANDLER.with(|handler| {
            join(
                || {
                    GLOBALS.set(globals, || {
                        HANDLER.set(handler, || join_lint_rules(ra, program))
                    })
                },
                || {
                    GLOBALS.set(globals, || {
                        HANDLER.set(handler, || join_lint_rules(rb, program))
                    })
                },
            )
        })
    });
}
/// Run all `rules` over `program`.
///
/// On wasm32 the rules run sequentially against the ambient handler.
/// Elsewhere they run in parallel with diagnostics captured into a buffer,
/// sorted by primary span, then re-emitted to the real handler — keeping
/// output order deterministic despite parallel execution.
fn lint_rules<N: LintNode<R>, R: Rule>(rules: &mut Vec<R>, program: &N) {
    if rules.is_empty() {
        return;
    }
    if cfg!(target_arch = "wasm32") {
        for rule in rules {
            program.lint(rule);
        }
    } else {
        let capturing = Capturing::default();
        {
            // Temporarily swap in a buffering handler while rules run.
            HANDLER.set(
                &Handler::with_emitter(true, false, Box::new(capturing.clone())),
                || {
                    join_lint_rules(rules, program);
                },
            );
            let mut errors = take(&mut *capturing.errors.lock());
            errors.sort_by_key(|error| error.span.primary_span());
            // Replay the sorted diagnostics into the ambient handler.
            HANDLER.with(|handler| {
                for error in errors {
                    DiagnosticBuilder::new_diagnostic(handler, error).emit();
                }
            });
        }
    }
}
/// Emitter that buffers diagnostics instead of printing them.
#[derive(Default, Clone)]
struct Capturing {
    // Shared buffer: cloned handles append to the same Vec.
    errors: Arc<Mutex<Vec<Diagnostic>>>,
}
impl Emitter for Capturing {
    fn emit(&mut self, db: &DiagnosticBuilder<'_>) {
        self.errors.lock().push((**db).clone());
    }
}
/// This preserves the order of errors.
impl<R> Rule for Vec<R>
where
    R: Rule,
{
    fn lint_module(&mut self, program: &Module) {
        lint_rules(self, program)
    }
    fn lint_script(&mut self, program: &Script) {
        lint_rules(self, program)
    }
}
/// Wrap a `Visit` implementation as a boxed `Rule` that walks the AST.
pub(crate) fn visitor_rule<V>(v: V) -> Box<dyn Rule>
where
    V: 'static + Send + Sync + Visit + Default + Debug,
{
    Box::new(VisitorRule(v))
}
/// Newtype adapter: a visitor used as a lint rule.
#[derive(Debug)]
struct VisitorRule<V>(V)
where
    V: Send + Sync + Visit;
impl<V> Rule for VisitorRule<V>
where
    V: Send + Sync + Visit + Debug,
{
    fn lint_module(&mut self, program: &Module) {
        program.visit_with(&mut self.0);
    }
    fn lint_script(&mut self, program: &Script) {
        program.visit_with(&mut self.0);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/const_assign.rs | Rust | use rustc_hash::FxHashMap;
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_utils::parallel::{cpu_count, Parallel, ParallelExt};
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::rule::Rule;
/// Create the const-reassignment rule (always on; takes no configuration).
pub fn const_assign() -> Box<dyn Rule> {
    Box::new(ConstAssignRule)
}
#[derive(Debug)]
struct ConstAssignRule;
impl Rule for ConstAssignRule {
    /// Two passes: `Collector` records every `const` binding and import
    /// binding, then `ConstAssign` reports writes to any of them.
    fn lint_module(&mut self, program: &Module) {
        let mut const_vars = FxHashMap::default();
        let mut import_binding = FxHashMap::default();
        program.visit_children_with(&mut Collector {
            const_vars: &mut const_vars,
            import_binding: &mut import_binding,
            var_decl_kind: None,
        });
        program.visit_children_with(&mut ConstAssign {
            const_vars: &const_vars,
            import_binding: &import_binding,
            is_pat_decl: false,
        });
    }
    /// Same two-pass scheme for scripts.
    fn lint_script(&mut self, program: &Script) {
        let mut const_vars = FxHashMap::default();
        let mut import_binding = FxHashMap::default();
        program.visit_children_with(&mut Collector {
            const_vars: &mut const_vars,
            // I don't believe that import stmt exists in Script
            // But it's ok. Let's pass it in.
            import_binding: &mut import_binding,
            var_decl_kind: None,
        });
        program.visit_children_with(&mut ConstAssign {
            const_vars: &const_vars,
            import_binding: &import_binding,
            is_pat_decl: false,
        });
    }
}
/// Second pass: reports reassignments of collected bindings.
#[derive(Clone, Copy)]
struct ConstAssign<'a> {
    // `const` binding id -> its declaration span (used for the labeled note).
    const_vars: &'a FxHashMap<Id, Span>,
    // Imported binding id -> the import specifier's span.
    import_binding: &'a FxHashMap<Id, Span>,
    // True while visiting a declarator's *name* pattern, where identifiers
    // declare bindings rather than assign to them and must not be reported.
    is_pat_decl: bool,
}
impl Parallel for ConstAssign<'_> {
    // The visitor is `Copy` and holds only shared references, so worker
    // copies are trivial and there is no per-worker state to merge back.
    fn create(&self) -> Self {
        *self
    }
    fn merge(&mut self, _: Self) {}
}
impl ConstAssign<'_> {
    /// Report `id` if it resolves to a `const` variable or an import binding.
    fn check(&mut self, id: &Ident) {
        if self.is_pat_decl {
            return;
        }
        if let Some(&decl_span) = self.const_vars.get(&id.to_id()) {
            HANDLER.with(|handler| {
                handler
                    .struct_span_err(
                        id.span,
                        "cannot reassign to a variable declared with `const`",
                    )
                    .span_label(decl_span, "const variable was declared here")
                    .span_suggestion(
                        decl_span,
                        "consider making this variable mutable",
                        format!("let {}", id.sym),
                    )
                    .span_label(id.span, "cannot reassign")
                    .emit();
            });
        }
        if let Some(&binding_span) = self.import_binding.get(&id.to_id()) {
            HANDLER.with(|handler| {
                handler
                    .struct_span_err(id.span, "cannot reassign to an imported binding")
                    .span_label(binding_span, "imported binding")
                    .emit();
            });
        }
    }
}
impl Visit for ConstAssign<'_> {
    noop_visit_type!();
    // A `BindingIdent` appears in pattern position — an assignment target
    // unless we are inside a declarator's name (`is_pat_decl`).
    fn visit_binding_ident(&mut self, n: &BindingIdent) {
        self.check(&Ident::from(n));
    }
    // The list-visiting overrides below fan the work out over worker
    // threads via `maybe_par` when the list is large enough.
    fn visit_class_members(&mut self, members: &[ClassMember]) {
        self.maybe_par(cpu_count(), members, |v, member| {
            member.visit_with(v);
        });
    }
    fn visit_expr_or_spreads(&mut self, n: &[ExprOrSpread]) {
        self.maybe_par(cpu_count(), n, |v, n| {
            n.visit_with(v);
        });
    }
    fn visit_exprs(&mut self, exprs: &[Box<Expr>]) {
        self.maybe_par(cpu_count(), exprs, |v, expr| {
            expr.visit_with(v);
        });
    }
    fn visit_module_items(&mut self, items: &[ModuleItem]) {
        self.maybe_par(cpu_count(), items, |v, item| {
            item.visit_with(v);
        });
    }
    fn visit_opt_vec_expr_or_spreads(&mut self, n: &[Option<ExprOrSpread>]) {
        self.maybe_par(cpu_count(), n, |v, n| {
            n.visit_with(v);
        });
    }
    fn visit_prop_or_spreads(&mut self, n: &[PropOrSpread]) {
        self.maybe_par(cpu_count(), n, |v, n| {
            n.visit_with(v);
        });
    }
    fn visit_stmts(&mut self, stmts: &[Stmt]) {
        self.maybe_par(cpu_count(), stmts, |v, stmt| {
            stmt.visit_with(v);
        });
    }
    // `++x` / `x--` mutate their operand, so an identifier operand counts
    // as a write.
    fn visit_update_expr(&mut self, n: &UpdateExpr) {
        n.visit_children_with(self);
        if let Expr::Ident(ident) = &*n.arg {
            self.check(ident);
        }
    }
    // Visit the declarator name with `is_pat_decl` set (declaration, not a
    // write), then the initializer with the previous flag restored.
    fn visit_var_declarator(&mut self, var_declarator: &VarDeclarator) {
        let old_is_pat_decl = self.is_pat_decl;
        self.is_pat_decl = true;
        var_declarator.name.visit_with(self);
        self.is_pat_decl = old_is_pat_decl;
        var_declarator.init.visit_with(self);
    }
}
/// First pass: records `const` bindings and import bindings.
struct Collector<'a> {
    // Filled with `const` binding id -> declaration span.
    const_vars: &'a mut FxHashMap<Id, Span>,
    // Filled with imported binding id -> import specifier span.
    import_binding: &'a mut FxHashMap<Id, Span>,
    // Kind of the `var`/`let`/`const` declaration currently being visited,
    // if any; only `Const` causes bindings to be recorded.
    var_decl_kind: Option<VarDeclKind>,
}
impl Visit for Collector<'_> {
    noop_visit_type!();
    // All three import forms (named, default, namespace) bind a local name.
    fn visit_import_specifier(&mut self, n: &ImportSpecifier) {
        match n {
            ImportSpecifier::Named(ImportNamedSpecifier { local, .. })
            | ImportSpecifier::Default(ImportDefaultSpecifier { local, .. })
            | ImportSpecifier::Namespace(ImportStarAsSpecifier { local, .. }) => {
                self.import_binding.insert(local.to_id(), local.span);
            }
        }
    }
    // `{ key = default }` in an object pattern binds `key`.
    fn visit_assign_pat_prop(&mut self, p: &AssignPatProp) {
        p.visit_children_with(self);
        if let Some(VarDeclKind::Const) = self.var_decl_kind {
            *self.const_vars.entry(p.key.to_id()).or_default() = p.span;
        }
    }
    // Expressions (e.g. initializers) are outside the declaration's pattern,
    // so the current decl kind must not leak into them.
    fn visit_expr(&mut self, e: &Expr) {
        let old_var_decl_kind = self.var_decl_kind;
        self.var_decl_kind = None;
        e.visit_children_with(self);
        self.var_decl_kind = old_var_decl_kind;
    }
    // Plain identifier patterns inside a `const` declaration are recorded.
    fn visit_pat(&mut self, p: &Pat) {
        p.visit_children_with(self);
        if let Some(VarDeclKind::Const) = self.var_decl_kind {
            if let Pat::Ident(i) = p {
                *self.const_vars.entry(i.to_id()).or_default() = i.span;
            }
        }
    }
    // Save/restore the decl kind around nested declarations.
    fn visit_var_decl(&mut self, var_decl: &VarDecl) {
        let old_var_decl_kind = self.var_decl_kind;
        self.var_decl_kind = Some(var_decl.kind);
        var_decl.visit_children_with(self);
        self.var_decl_kind = old_var_decl_kind;
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/constructor_super.rs | Rust | use std::mem;
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
// Diagnostic messages for the constructor-super rule.
const BAD_SUPER_MESSAGE: &str = "Unexpected 'super()' because 'super' is not a constructor";
// NOTE(review): identifier misspells "Expected"; renaming touches all use
// sites, so it is left as-is here.
const CALL_SUPER_EXCPECTED_MESSAGE: &str = "Expected to call 'super()'";
const UNEXPECTED_DUPLICATE_SUPER_CALL_MESSAGE: &str = "Unexpected duplicate 'super()'";
const LACKED_CALL_SUPER_MESSAGE: &str = "Lacked a call of 'super()' in some code path";
const MORE_THAN_ONE_CALL_POSSIBLE_MESSAGE: &str = "More than one call 'super()' possible into loop";
/// Create the constructor-super rule, or `None` when it is switched off.
pub fn constructor_super(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let rule_reaction = config.get_rule_reaction();
    match rule_reaction {
        LintRuleReaction::Off => None,
        _ => Some(visitor_rule(ConstructorSuper::new(rule_reaction))),
    }
}
/// Classification of a class's `extends` clause.
// NOTE(review): "NotSetted" is non-idiomatic English for "not set"; renaming
// would touch every use site, so it is kept.
#[derive(Debug, Clone, Copy, Default)]
enum SuperClass {
    // `extends` of an identifier or class expression.
    Valid,
    // `extends` of any other expression (not known to be a constructor).
    Invalid,
    // No `extends` clause at all.
    #[default]
    NotSetted,
}
/// Per-class analysis state, reset when entering a nested class.
#[derive(Debug, Default)]
struct ClassMeta {
    super_class: SuperClass,
    // Scope depth of the constructor body; `super()` only counts there.
    constructor_scope: usize,
    // State of the code path currently being analyzed.
    code_path: CodePath,
    // Span of the innermost enclosing loop, if any.
    loop_span: Option<Span>,
}
/// Facts about one code path through a constructor.
#[derive(Debug, Default, Clone)]
struct CodePath {
    // Maximum number of `super()` calls observed along the path.
    super_calls_count: usize,
    // A `return` may have ended the path before this point.
    possibly_returned: bool,
    // Some merged branch reached the end without calling `super()`.
    super_call_missed: bool,
    // only for switch-case
    break_exists: bool,
}
/// Visitor implementing the constructor-super rule.
#[derive(Debug, Default)]
struct ConstructorSuper {
    expected_reaction: LintRuleReaction,
    class_meta: ClassMeta,
    // Current function/arrow/constructor nesting depth.
    scope: usize,
}
impl ConstructorSuper {
    fn new(expected_reaction: LintRuleReaction) -> Self {
        Self {
            expected_reaction,
            scope: 0,
            class_meta: Default::default(),
        }
    }
    /// Emit `message` at `span` with the configured severity.
    fn emit_report(&self, span: Span, message: &str) {
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, message).emit();
            }
            _ => {}
        });
    }
    /// Classify the class's `extends` clause into `SuperClass`.
    fn collect_class(&mut self, class: &Class) {
        self.class_meta.super_class = match &class.super_class {
            Some(super_class) => match super_class.unwrap_seqs_and_parens() {
                // Only identifiers and class expressions are assumed to be
                // constructors.
                Expr::Ident(_) | Expr::Class(_) => SuperClass::Valid,
                _ => SuperClass::Invalid,
            },
            None => SuperClass::NotSetted,
        };
    }
    /// Diagnostics triggered at a `super(...)` call site.
    fn check_on_super_call(&mut self, span: Span) {
        match self.class_meta.super_class {
            SuperClass::Invalid => {
                self.emit_report(span, BAD_SUPER_MESSAGE);
            }
            SuperClass::NotSetted => {
                if let Some(span) = self.class_meta.loop_span {
                    self.emit_report(span, MORE_THAN_ONE_CALL_POSSIBLE_MESSAGE);
                } else if self.class_meta.code_path.super_calls_count > 1 {
                    self.emit_report(span, UNEXPECTED_DUPLICATE_SUPER_CALL_MESSAGE);
                } else {
                    // NOTE(review): a single super() in a class *without* an
                    // extends clause reports "Expected to call 'super()'" —
                    // confirm this message choice is intentional.
                    self.emit_report(span, CALL_SUPER_EXCPECTED_MESSAGE);
                }
            }
            SuperClass::Valid => {
                // With a valid superclass, only loops and duplicates are
                // suspicious at the call site itself.
                if let Some(span) = self.class_meta.loop_span {
                    self.emit_report(span, MORE_THAN_ONE_CALL_POSSIBLE_MESSAGE);
                } else if self.class_meta.code_path.super_calls_count > 1 {
                    self.emit_report(span, UNEXPECTED_DUPLICATE_SUPER_CALL_MESSAGE);
                }
            }
        }
    }
    /// Diagnostics triggered after the whole constructor has been visited.
    fn check_on_constructor(&self, span: Span) {
        match self.class_meta.super_class {
            SuperClass::Valid => {
                // Some, but not all, paths called super().
                if self.class_meta.code_path.super_call_missed {
                    self.emit_report(span, LACKED_CALL_SUPER_MESSAGE);
                }
                // No path called super() at all.
                if self.class_meta.code_path.super_calls_count == 0 {
                    self.emit_report(span, CALL_SUPER_EXCPECTED_MESSAGE);
                }
            }
            SuperClass::NotSetted => {}
            SuperClass::Invalid => {
                if self.class_meta.code_path.super_calls_count == 0 {
                    self.emit_report(span, CALL_SUPER_EXCPECTED_MESSAGE);
                }
            }
        }
    }
    /// Merge the facts of several alternative branch paths back into the
    /// current path: max call count, OR of returned/missed flags, and
    /// "missed" also set when any branch made zero super() calls.
    fn update_current_code_path(&mut self, ordered_pathes: &[CodePath]) {
        let current_code_path = &mut self.class_meta.code_path;
        for code_path in ordered_pathes.iter() {
            current_code_path.possibly_returned =
                current_code_path.possibly_returned || code_path.possibly_returned;
            current_code_path.super_calls_count = std::cmp::max(
                current_code_path.super_calls_count,
                code_path.super_calls_count,
            );
            current_code_path.super_call_missed = current_code_path.super_call_missed
                || code_path.super_call_missed
                || code_path.super_calls_count == 0;
        }
    }
}
impl Visit for ConstructorSuper {
    // Each class gets a fresh ClassMeta; the outer class's state is
    // restored afterwards so nested classes don't leak into each other.
    fn visit_class(&mut self, class: &Class) {
        let prev_class_markers = mem::take(&mut self.class_meta);
        self.collect_class(class);
        class.visit_children_with(self);
        self.class_meta = prev_class_markers;
    }
    // Remember the constructor's scope depth, walk the body, then run the
    // end-of-constructor checks.
    fn visit_constructor(&mut self, constructor: &Constructor) {
        self.scope += 1;
        self.class_meta.constructor_scope = self.scope;
        constructor.visit_children_with(self);
        self.check_on_constructor(constructor.span);
        self.scope -= 1;
    }
    // Count `super()` calls made directly in the constructor body (not in
    // nested functions, and not after a possible `return`), then run the
    // call-site checks.
    fn visit_call_expr(&mut self, call_expr: &CallExpr) {
        if let Callee::Super(super_call) = &call_expr.callee {
            if !self.class_meta.code_path.possibly_returned
                && self.class_meta.constructor_scope == self.scope
            {
                self.class_meta.code_path.super_calls_count += 1;
                self.class_meta.code_path.super_call_missed = false;
            }
            self.check_on_super_call(super_call.span);
        }
        call_expr.visit_children_with(self);
    }
    // Branching: analyze each arm starting from a snapshot of the parent
    // path, then merge both arms' facts back.
    fn visit_if_stmt(&mut self, if_stmt: &IfStmt) {
        if_stmt.test.visit_children_with(self);
        let parent_code_path = self.class_meta.code_path.clone();
        if_stmt.cons.visit_children_with(self);
        let cons_code_path = mem::replace(&mut self.class_meta.code_path, parent_code_path.clone());
        if_stmt.alt.visit_children_with(self);
        let alt_code_path = mem::replace(&mut self.class_meta.code_path, parent_code_path);
        self.update_current_code_path(&[cons_code_path, alt_code_path]);
    }
    // A `return` in the constructor's own scope may end the path early;
    // later super() calls on this path are not counted.
    fn visit_return_stmt(&mut self, n: &ReturnStmt) {
        if self.scope == self.class_meta.constructor_scope {
            self.class_meta.code_path.possibly_returned = true;
        }
        n.visit_children_with(self);
    }
    // Same snapshot/merge scheme as `if` for `cond ? a : b`.
    fn visit_cond_expr(&mut self, cond_expr: &CondExpr) {
        cond_expr.test.visit_children_with(self);
        let parent_code_path = self.class_meta.code_path.clone();
        cond_expr.cons.visit_children_with(self);
        let cons_code_path = mem::replace(&mut self.class_meta.code_path, parent_code_path.clone());
        cond_expr.alt.visit_children_with(self);
        let alt_code_path = mem::replace(&mut self.class_meta.code_path, parent_code_path);
        self.update_current_code_path(&[cons_code_path, alt_code_path]);
    }
    // Switch: only cases that `break` end their own path; fall-through
    // cases keep accumulating into the next case's path.
    fn visit_switch_stmt(&mut self, switch_stmt: &SwitchStmt) {
        switch_stmt.discriminant.visit_children_with(self);
        let parent_code_path = self.class_meta.code_path.clone();
        let mut cases: Vec<CodePath> = Vec::with_capacity(switch_stmt.cases.len());
        for switch_case in switch_stmt.cases.iter() {
            switch_case.visit_children_with(self);
            if self.class_meta.code_path.break_exists {
                cases.push(mem::replace(
                    &mut self.class_meta.code_path,
                    parent_code_path.clone(),
                ));
            }
        }
        if cases.is_empty() {
            cases.push(mem::replace(
                &mut self.class_meta.code_path,
                parent_code_path,
            ));
        }
        self.update_current_code_path(cases.as_slice());
    }
    // try/catch: the try block and (if present) the catch handler are
    // alternative paths; the finalizer runs on the merged path.
    fn visit_try_stmt(&mut self, try_stmt: &TryStmt) {
        let parent_code_path = self.class_meta.code_path.clone();
        try_stmt.block.visit_children_with(self);
        let block_code_path =
            mem::replace(&mut self.class_meta.code_path, parent_code_path.clone());
        if try_stmt.handler.is_some() {
            try_stmt.handler.visit_children_with(self);
            let handler_code_path = mem::replace(&mut self.class_meta.code_path, parent_code_path);
            self.update_current_code_path(&[block_code_path, handler_code_path]);
        } else {
            self.update_current_code_path(&[block_code_path]);
        }
        try_stmt.finalizer.visit_children_with(self);
    }
    fn visit_break_stmt(&mut self, break_stmt: &BreakStmt) {
        self.class_meta.code_path.break_exists = true;
        break_stmt.visit_children_with(self);
    }
    // Nested functions get their own scope depth, so super() calls inside
    // them are not attributed to the constructor.
    fn visit_function(&mut self, function: &Function) {
        self.scope += 1;
        function.visit_children_with(self);
        self.scope -= 1;
    }
    // Loops: remember the innermost loop span so a super() inside a loop is
    // reported as possibly executing more than once.
    fn visit_for_in_stmt(&mut self, for_in_stmt: &ForInStmt) {
        let prev_loop_span = mem::replace(&mut self.class_meta.loop_span, Some(for_in_stmt.span));
        for_in_stmt.visit_children_with(self);
        self.class_meta.loop_span = prev_loop_span;
    }
    fn visit_for_of_stmt(&mut self, for_of_stmt: &ForOfStmt) {
        let prev_loop_span = mem::replace(&mut self.class_meta.loop_span, Some(for_of_stmt.span));
        for_of_stmt.visit_children_with(self);
        self.class_meta.loop_span = prev_loop_span;
    }
    fn visit_for_stmt(&mut self, for_stmt: &ForStmt) {
        let prev_loop_span = mem::replace(&mut self.class_meta.loop_span, Some(for_stmt.span));
        for_stmt.visit_children_with(self);
        self.class_meta.loop_span = prev_loop_span;
    }
    // Arrows are a new scope too (even though they share `this`/`super`
    // bindings syntactically, super() counting is scope-based here).
    fn visit_arrow_expr(&mut self, arrow_expr: &ArrowExpr) {
        self.scope += 1;
        arrow_expr.visit_children_with(self);
        self.scope -= 1;
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/default_case_last.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const MESSAGE: &str = "Default clause should be the last clause";
/// Create the default-case-last rule, or `None` when it is switched off.
pub fn default_case_last(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let rule_reaction = config.get_rule_reaction();
    match rule_reaction {
        LintRuleReaction::Off => None,
        _ => Some(visitor_rule(DefaultCaseLast::new(rule_reaction))),
    }
}
/// Visitor state: just the configured severity.
#[derive(Debug, Default)]
struct DefaultCaseLast {
    expected_reaction: LintRuleReaction,
}
impl DefaultCaseLast {
fn new(expected_reaction: LintRuleReaction) -> Self {
Self { expected_reaction }
}
fn emit_report(&self, span: Span) {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
fn check_case(&self, cases_count: usize, pos: usize, case: &SwitchCase) {
if case.test.is_none() && pos != cases_count {
self.emit_report(case.span);
}
}
}
impl Visit for DefaultCaseLast {
fn visit_switch_stmt(&mut self, switch_stmt: &SwitchStmt) {
let cases_count = switch_stmt.cases.len();
switch_stmt.discriminant.visit_children_with(self);
switch_stmt
.cases
.iter()
.enumerate()
.for_each(|(idx, switch_case)| {
self.check_case(cases_count, idx + 1, switch_case);
switch_case.visit_children_with(self);
});
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/default_param_last.rs | Rust | use swc_common::{errors::HANDLER, Span, Spanned};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
static MESSAGE: &str = "Default parameters should be last.";
/// Builds the `default-param-last` lint rule, or `None` when disabled.
pub fn default_param_last(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    if let LintRuleReaction::Off = config.get_rule_reaction() {
        None
    } else {
        Some(visitor_rule(DefaultParamLast::new(config)))
    }
}
/// Visitor for the `default-param-last` rule: parameters with default values
/// (and rest parameters) must come after all plain parameters.
#[derive(Default, Debug)]
struct DefaultParamLast {
    // Severity to use when reporting.
    expected_reaction: LintRuleReaction,
}
impl DefaultParamLast {
    pub fn new(config: &RuleConfig<()>) -> Self {
        Self {
            expected_reaction: config.get_rule_reaction(),
        }
    }
    /// Scans a parameter list left to right. Once a default (`Assign`) or
    /// rest parameter has been seen, every subsequent plain parameter is
    /// reported at its own span.
    fn check<'a, I>(&self, patterns: I)
    where
        I: IntoIterator<Item = (&'a Pat, Span)>,
    {
        // The fold accumulator tracks whether a default/rest parameter has
        // been seen so far.
        patterns.into_iter().fold(false, |seen, (pat, span)| {
            if matches!(pat, Pat::Assign(..) | Pat::Rest(..)) {
                true
            } else {
                if seen {
                    self.emit_report(span);
                }
                seen
            }
        });
    }
    fn emit_report(&self, span: Span) {
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => handler.struct_span_err(span, MESSAGE).emit(),
            LintRuleReaction::Warning => handler.struct_span_warn(span, MESSAGE).emit(),
            _ => {}
        })
    }
}
impl Visit for DefaultParamLast {
    noop_visit_type!();
    fn visit_function(&mut self, function: &Function) {
        self.check(function.params.iter().map(|param| (&param.pat, param.span)));
        function.visit_children_with(self);
    }
    fn visit_arrow_expr(&mut self, arrow_expr: &ArrowExpr) {
        // Arrow parameters are bare patterns, so the pattern's own span is
        // used for reporting.
        self.check(arrow_expr.params.iter().map(|pat| (pat, pat.span())));
        arrow_expr.visit_children_with(self);
    }
    fn visit_constructor(&mut self, constructor: &Constructor) {
        // Constructors cannot reuse `check` directly: TypeScript parameter
        // properties (`constructor(private x = 1)`) are a distinct AST node.
        // The same "seen a default, report later plain params" fold is
        // applied inline for both parameter shapes.
        constructor
            .params
            .iter()
            .fold(false, |seen, param| match param {
                ParamOrTsParamProp::Param(Param { pat, span, .. }) => match pat {
                    Pat::Assign(..) | Pat::Rest(..) => true,
                    _ => {
                        if seen {
                            self.emit_report(*span);
                        }
                        seen
                    }
                },
                ParamOrTsParamProp::TsParamProp(TsParamProp { param, span, .. }) => match param {
                    TsParamPropParam::Assign(..) => true,
                    _ => {
                        if seen {
                            self.emit_report(*span);
                        }
                        seen
                    }
                },
            });
        constructor.visit_children_with(self);
    }
    fn visit_class_method(&mut self, class_method: &ClassMethod) {
        self.check(
            class_method
                .function
                .params
                .iter()
                .map(|param| (&param.pat, param.span)),
        );
        class_method.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/dot_notation.rs | Rust | use dashmap::DashMap;
use regex::Regex;
use rustc_hash::FxBuildHasher;
use serde::{Deserialize, Serialize};
use swc_common::{errors::HANDLER, sync::Lazy, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
rules::utils::{resolve_string_quote_type, QuotesType},
};
const INVALID_REGEX_MESSAGE: &str = "dotNotation: invalid regex pattern in allowPattern. Check syntax documentation https://docs.rs/regex/latest/regex/#syntax";
/// User-facing configuration for the `dot-notation` rule.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DotNotationConfig {
    // When `true` (the default applied in `DotNotation::new`), reserved
    // words may stay in bracket notation.
    allow_keywords: Option<bool>,
    // Regex source; property names matching it are exempt from the rule.
    allow_pattern: Option<String>,
}
pub fn dot_notation(
    program: &Program,
    config: &RuleConfig<DotNotationConfig>,
) -> Option<Box<dyn Rule>> {
    match config.get_rule_reaction() {
        LintRuleReaction::Off => None,
        _ => Some(visitor_rule(DotNotation::new(program.is_module(), config))),
    }
}
/// Visitor state for the `dot-notation` rule.
#[derive(Default, Debug)]
struct DotNotation {
    expected_reaction: LintRuleReaction,
    allow_keywords: bool,
    // Optional allow-list pattern copied from the config.
    pattern: Option<String>,
    // Whether the program is an ES module; affects which names count as
    // strict-mode reserved words.
    is_module: bool,
}
impl DotNotation {
    /// Builds the visitor from the user configuration.
    ///
    /// `allow_keywords` defaults to `true`; `is_module` decides which
    /// identifiers count as reserved words in strict mode.
    fn new(is_module: bool, config: &RuleConfig<DotNotationConfig>) -> Self {
        let dot_notation_config = config.get_rule_config();

        Self {
            expected_reaction: config.get_rule_reaction(),
            allow_keywords: dot_notation_config.allow_keywords.unwrap_or(true),
            is_module,
            pattern: dot_notation_config.allow_pattern.clone(),
        }
    }

    /// Emits the diagnostic with the configured severity, quoting the
    /// property with the same quote style the source used.
    fn emit_report(&self, span: Span, quote_type: QuotesType, prop: &str) {
        let message = format!(
            "[{quote}{prop}{quote}] is better written in dot notation",
            prop = prop,
            quote = quote_type.get_char()
        );

        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, &message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, &message).emit();
            }
            _ => {}
        });
    }

    /// Decides whether a computed string property access should be reported.
    fn check(&self, span: Span, quote_type: QuotesType, prop_name: &str) {
        // `obj["while"]` is fine when keywords are allowed to remain in
        // bracket notation.
        if self.allow_keywords
            && (prop_name.is_reserved() || prop_name.is_reserved_in_strict_mode(self.is_module))
        {
            return;
        }

        if let Some(pattern) = &self.pattern {
            // Compiled patterns are cached globally. The `entry` API performs
            // the lookup and the (potential) compile+insert in one atomic
            // step; the previous `contains_key` + `insert` + `get` sequence
            // did three lookups and could compile the same regex twice when
            // two threads raced on the same pattern.
            static REGEX_CACHE: Lazy<DashMap<String, Regex, FxBuildHasher>> =
                Lazy::new(Default::default);

            // Still panics (with a descriptive message) on an invalid
            // user-supplied pattern, matching the previous behavior.
            let is_allowed = REGEX_CACHE
                .entry(pattern.clone())
                .or_insert_with(|| Regex::new(pattern).expect(INVALID_REGEX_MESSAGE))
                .is_match(prop_name);

            if is_allowed {
                return;
            }
        }

        self.emit_report(span, quote_type, prop_name);
    }
}
impl Visit for DotNotation {
    noop_visit_type!();
    fn visit_member_prop(&mut self, member: &MemberProp) {
        // Only computed accesses (`obj["prop"]`) can violate the rule.
        if let MemberProp::Computed(prop) = member {
            match &*prop.expr {
                Expr::Lit(Lit::Str(lit_str)) => {
                    // NOTE(review): assumes `resolve_string_quote_type`
                    // always returns `Some` for a parsed string literal; the
                    // `unwrap` would panic otherwise — confirm in `utils`.
                    let quote_type = resolve_string_quote_type(lit_str).unwrap();
                    self.check(prop.span, quote_type, &lit_str.value);
                }
                Expr::Member(member) => {
                    // Recurse into chained accesses like `a[b.c]`.
                    member.visit_children_with(self);
                }
                _ => {
                    prop.visit_with(self);
                }
            }
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/duplicate_bindings.rs | Rust | use std::collections::hash_map::Entry;
use rustc_hash::{FxHashMap, FxHashSet};
use swc_atoms::Atom;
use swc_common::{errors::HANDLER, Span, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::rule::{visitor_rule, Rule};
pub fn duplicate_bindings() -> Box<dyn Rule> {
    visitor_rule(DuplicateBindings::default())
}
/// Everything recorded about one binding occurrence.
#[derive(Debug, Default, Clone, Copy)]
struct BindingInfo {
    span: Span,
    ctxt: SyntaxContext,
    // `true` for bindings that must be unique (`let`/`const`, class,
    // imports); `false` for `var`-like bindings that may legally repeat.
    unique: bool,
    // Function declarations may redeclare each other in some positions, so
    // they are tracked separately.
    is_function: bool,
}
/// Visitor detecting names bound more than once in the same syntax context.
#[derive(Debug, Default)]
struct DuplicateBindings {
    // All bindings seen so far, keyed by (name, syntax context).
    bindings: FxHashMap<Id, BindingInfo>,
    // Identifiers that appear in TypeScript type positions; type-only
    // imports never clash with value bindings.
    type_bindings: FxHashSet<Id>,
    // `Some(kind)` while inside a declaration context that binds patterns.
    var_decl_kind: Option<VarDeclKind>,
    // `true` while the patterns being visited declare bindings (as opposed
    // to plain expressions).
    is_pat_decl: bool,
    /// At the top level of a script or a function body, `function` behaves
    /// like `var`; in other scopes it behaves like `let`.
    lexical_function: bool,
}
impl DuplicateBindings {
    /// Add a binding.
    ///
    /// Reports an error when the name is already bound in the same syntax
    /// context, unless both occurrences are function declarations (which may
    /// redeclare each other) or neither binding is lexical.
    fn add(&mut self, id: Atom, info: BindingInfo) {
        match self.bindings.entry((id.clone(), info.ctxt)) {
            Entry::Occupied(mut prev) => {
                if !(info.is_function && prev.get().is_function)
                    && (info.unique || prev.get().unique)
                {
                    emit_error(&id, info.span, prev.get().span);
                }
                // Next span.
                // Keep the stricter (unique) record when both exist so later
                // duplicates are judged against it.
                if info.unique || !prev.get().unique {
                    *prev.get_mut() = info
                }
            }
            Entry::Vacant(e) => {
                e.insert(info);
            }
        }
    }
    /// `const` or `let`
    fn is_unique_var_kind(&self) -> bool {
        matches!(
            self.var_decl_kind,
            Some(VarDeclKind::Const) | Some(VarDeclKind::Let)
        )
    }
    /// Visits `e` with `var_decl_kind` set to `kind` and pattern-declaration
    /// mode enabled, restoring both flags afterwards.
    fn visit_with_kind<V: VisitWith<Self>>(&mut self, e: &V, kind: Option<VarDeclKind>) {
        let old_var_decl_kind = self.var_decl_kind.take();
        let old_is_pat_decl = self.is_pat_decl;
        self.var_decl_kind = kind;
        self.is_pat_decl = true;
        e.visit_children_with(self);
        self.is_pat_decl = old_is_pat_decl;
        self.var_decl_kind = old_var_decl_kind;
    }
    // This handles the odd non-strict-mode case: a function declaration in a
    // non-top-level, non-function scope is hoisted, yet still errors when it
    // collides with a same-level lexical binding of the same name. Duplicate
    // function names at the same statement level are detected here, before
    // the regular per-statement visit.
    fn visit_with_stmt_like<T: VisitWith<Self>, F: Fn(&T) -> Option<Ident>>(
        &mut self,
        s: &[T],
        get_fn_ident: F,
    ) {
        let mut fn_name = FxHashMap::default();
        for s in s {
            if let Some(ident) = get_fn_ident(s) {
                if let Some(prev) = fn_name.get(&ident.sym) {
                    emit_error(&ident.sym, ident.span, *prev)
                } else {
                    fn_name.insert(ident.sym.clone(), ident.span);
                }
            }
            s.visit_with(self);
        }
    }
    /// Visits a statement list with `lexical_function` set to indicate
    /// whether `function` declarations behave lexically in this scope.
    fn visit_with_stmts(&mut self, s: &[Stmt], lexical_function: bool) {
        let old = self.lexical_function;
        self.lexical_function = lexical_function;
        if lexical_function {
            self.visit_with_stmt_like(s, |s| match s {
                Stmt::Decl(Decl::Fn(FnDecl {
                    ident, function: f, ..
                })) if f.body.is_some() => Some(ident.clone()),
                _ => None,
            });
        } else {
            s.visit_children_with(self);
        }
        self.lexical_function = old;
    }
}
impl Visit for DuplicateBindings {
    noop_visit_type!();
    // `{ key = default }` destructuring shorthand binds `key`.
    fn visit_assign_pat_prop(&mut self, p: &AssignPatProp) {
        p.visit_children_with(self);
        if self.is_pat_decl {
            self.add(
                p.key.sym.clone(),
                BindingInfo {
                    span: p.key.span,
                    ctxt: p.key.ctxt,
                    unique: self.is_unique_var_kind(),
                    is_function: false,
                },
            );
        }
    }
    fn visit_function(&mut self, f: &Function) {
        // Destructured explicitly so a compile error surfaces in case any
        // new parts are added to `Function`.
        let Function {
            body,
            params,
            decorators,
            ..
        } = f;
        params.visit_with(self);
        decorators.visit_with(self);
        // A function body is a scope where `function` hoists like `var`.
        if let Some(body) = body {
            self.visit_with_stmts(&body.stmts, false)
        }
    }
    fn visit_arrow_expr(&mut self, a: &ArrowExpr) {
        let ArrowExpr { params, body, .. } = a;
        params.visit_with(self);
        // Only block bodies introduce a statement scope; expression bodies
        // are handled by the generic expression visit.
        if let BlockStmtOrExpr::BlockStmt(b) = &**body {
            self.visit_with_stmts(&b.stmts, false)
        }
    }
    fn visit_static_block(&mut self, c: &StaticBlock) {
        self.visit_with_stmts(&c.body.stmts, false)
    }
    // block stmt and case block: nested blocks make `function` lexical.
    fn visit_stmts(&mut self, b: &[Stmt]) {
        self.visit_with_stmts(b, true)
    }
    // The catch parameter binds like `var` for redeclaration purposes.
    fn visit_catch_clause(&mut self, c: &CatchClause) {
        self.visit_with_kind(c, Some(VarDeclKind::Var))
    }
    fn visit_class_decl(&mut self, d: &ClassDecl) {
        // `declare class` is ambient and introduces no runtime binding.
        if d.declare {
            return;
        }
        self.add(
            d.ident.sym.clone(),
            BindingInfo {
                span: d.ident.span,
                ctxt: d.ident.ctxt,
                unique: true,
                is_function: false,
            },
        );
        d.visit_children_with(self);
    }
    fn visit_expr(&mut self, e: &Expr) {
        // Patterns inside expressions (e.g. assignments) do not declare
        // bindings, so both flags are cleared for the duration.
        let old_var_decl_kind = self.var_decl_kind.take();
        let old_is_pat_decl = self.is_pat_decl;
        self.var_decl_kind = None;
        self.is_pat_decl = false;
        e.visit_children_with(self);
        self.is_pat_decl = old_is_pat_decl;
        self.var_decl_kind = old_var_decl_kind;
    }
    fn visit_fn_decl(&mut self, d: &FnDecl) {
        // Overload signatures (no body) and ambient declarations introduce
        // no runtime binding.
        if d.function.body.is_none() || d.declare {
            return;
        }
        self.add(
            d.ident.sym.clone(),
            BindingInfo {
                span: d.ident.span,
                ctxt: d.ident.ctxt,
                // Whether redeclaration is an error depends on whether
                // `function` is lexical in the current scope.
                unique: self.lexical_function,
                is_function: true,
            },
        );
        d.visit_children_with(self);
    }
    fn visit_import_decl(&mut self, s: &ImportDecl) {
        // `import type ...` binds nothing at runtime.
        if s.type_only {
            return;
        }
        s.visit_children_with(self);
    }
    fn visit_export_default_decl(&mut self, e: &ExportDefaultDecl) {
        // export default function foo() {} should be treated as hoisted
        match &e.decl {
            DefaultDecl::Class(ClassExpr {
                ident: Some(ident), ..
            }) => self.add(
                ident.sym.clone(),
                BindingInfo {
                    span: ident.span,
                    ctxt: ident.ctxt,
                    unique: true,
                    is_function: false,
                },
            ),
            DefaultDecl::Fn(FnExpr {
                ident: Some(ident),
                function: f,
                ..
            }) if f.body.is_some() => self.add(
                ident.sym.clone(),
                BindingInfo {
                    span: ident.span,
                    ctxt: ident.ctxt,
                    unique: self.lexical_function,
                    is_function: true,
                },
            ),
            _ => {}
        }
        e.visit_children_with(self);
    }
    fn visit_import_default_specifier(&mut self, s: &ImportDefaultSpecifier) {
        s.visit_children_with(self);
        // Skip names that only ever appear in type positions.
        if !self.type_bindings.contains(&s.local.to_id()) {
            self.add(
                s.local.sym.clone(),
                BindingInfo {
                    span: s.local.span,
                    ctxt: s.local.ctxt,
                    unique: true,
                    is_function: false,
                },
            );
        }
    }
    fn visit_import_named_specifier(&mut self, s: &ImportNamedSpecifier) {
        s.visit_children_with(self);
        if !s.is_type_only && !self.type_bindings.contains(&s.local.to_id()) {
            self.add(
                s.local.sym.clone(),
                BindingInfo {
                    span: s.local.span,
                    ctxt: s.local.ctxt,
                    unique: true,
                    is_function: false,
                },
            );
        }
    }
    fn visit_import_star_as_specifier(&mut self, s: &ImportStarAsSpecifier) {
        s.visit_children_with(self);
        if !self.type_bindings.contains(&s.local.to_id()) {
            self.add(
                s.local.sym.clone(),
                BindingInfo {
                    span: s.local.span,
                    ctxt: s.local.ctxt,
                    unique: true,
                    is_function: false,
                },
            );
        }
    }
    fn visit_ts_import_equals_decl(&mut self, s: &TsImportEqualsDecl) {
        s.visit_children_with(self);
        if !s.is_type_only && !self.type_bindings.contains(&s.id.to_id()) {
            self.add(
                s.id.sym.clone(),
                BindingInfo {
                    span: s.id.span,
                    ctxt: s.id.ctxt,
                    unique: true,
                    is_function: false,
                },
            );
        }
    }
    fn visit_module(&mut self, m: &Module) {
        // First pass: collect type-position identifiers so type-only imports
        // are exempted from duplicate checks.
        m.visit_with(&mut TypeCollector {
            type_bindings: &mut self.type_bindings,
        });
        // Module top level is strict mode: `function` is lexical.
        self.lexical_function = true;
        self.visit_with_stmt_like(&m.body, |s| match s {
            ModuleItem::Stmt(Stmt::Decl(Decl::Fn(FnDecl {
                ident, function: f, ..
            })))
            | ModuleItem::ModuleDecl(
                ModuleDecl::ExportDecl(ExportDecl {
                    decl:
                        Decl::Fn(FnDecl {
                            ident, function: f, ..
                        }),
                    ..
                })
                | ModuleDecl::ExportDefaultDecl(ExportDefaultDecl {
                    decl:
                        DefaultDecl::Fn(FnExpr {
                            ident: Some(ident),
                            function: f,
                        }),
                    ..
                }),
            ) if f.body.is_some() => Some(ident.clone()),
            _ => None,
        });
    }
    fn visit_pat(&mut self, p: &Pat) {
        p.visit_children_with(self);
        // Only identifier patterns inside declarations introduce bindings.
        if let Pat::Ident(p) = p {
            if self.is_pat_decl {
                self.add(
                    p.sym.clone(),
                    BindingInfo {
                        span: p.span,
                        ctxt: p.ctxt,
                        unique: self.is_unique_var_kind(),
                        is_function: false,
                    },
                );
            }
        }
    }
    fn visit_script(&mut self, s: &Script) {
        s.visit_with(&mut TypeCollector {
            type_bindings: &mut self.type_bindings,
        });
        // Script top level: `lexical_function` stays `false`, so top-level
        // functions behave like `var`.
        s.body.visit_children_with(self);
    }
    fn visit_var_decl(&mut self, d: &VarDecl) {
        // Ambient declarations bind nothing at runtime.
        if d.declare {
            return;
        }
        self.visit_with_kind(d, Some(d.kind))
    }
    // Function parameters bind like `var`.
    fn visit_param(&mut self, p: &Param) {
        self.visit_with_kind(p, Some(VarDeclKind::Var))
    }
}
/// Collects identifiers that appear in TypeScript type positions so that
/// type-only usages are not treated as duplicate value bindings.
struct TypeCollector<'a> {
    type_bindings: &'a mut FxHashSet<Id>,
}
impl Visit for TypeCollector<'_> {
    fn visit_ts_entity_name(&mut self, n: &TsEntityName) {
        n.visit_children_with(self);
        if let TsEntityName::Ident(ident) = n {
            self.type_bindings.insert(ident.to_id());
        }
    }
}
/// Reports a duplicate-binding error, labeling both the previous definition
/// and the redefinition. Marked `#[cold]` because duplicates are the
/// exceptional path.
#[cold]
fn emit_error(name: &str, span: Span, prev_span: Span) {
    let header = format!("the name `{}` is defined multiple times", name);
    let prev_label = format!("previous definition of `{}` here", name);
    let redef_label = format!("`{}` redefined here", name);

    HANDLER.with(|handler| {
        let mut diagnostic = handler.struct_span_err(span, &header);
        diagnostic.span_label(prev_span, prev_label);
        diagnostic.span_label(span, redef_label);
        diagnostic.emit();
    });
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/duplicate_exports.rs | Rust | use std::{collections::hash_map::Entry, mem};
use rustc_hash::FxHashMap;
use swc_atoms::Atom;
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::rule::{visitor_rule, Rule};
pub fn duplicate_exports() -> Box<dyn Rule> {
    visitor_rule(DuplicateExports::default())
}
/// Visitor detecting names exported more than once, plus TypeScript
/// `export =` assignments that conflict with each other or with any other
/// export.
#[derive(Debug, Default)]
struct DuplicateExports {
    // Exported name -> span of its most recent export.
    exports: FxHashMap<Atom, Span>,
    // Span of an `export =` assignment, if one has been seen.
    export_assign: Option<Span>,
}
impl DuplicateExports {
    /// Add an export.
    ///
    /// Emits an error if the name was already exported, then records the new
    /// span so later duplicates point at the closest previous export.
    fn add(&mut self, id: &Ident) {
        match self.exports.entry(id.sym.clone()) {
            Entry::Occupied(mut prev) => {
                let name = &id.sym;
                HANDLER.with(|handler| {
                    handler
                        .struct_span_err(
                            id.span,
                            &format!("the name `{}` is exported multiple times", name),
                        )
                        .span_label(*prev.get(), "previous exported here")
                        .span_label(id.span, "exported more than once")
                        .note("Exported identifiers must be unique")
                        .emit();
                });
                // Next span.
                *prev.get_mut() = id.span;
            }
            Entry::Vacant(e) => {
                e.insert(id.span);
            }
        }
        self.check_no_coexist();
    }
    /// Records an `export =`; a second one is an error (TS2300).
    fn add_export_assign(&mut self, span: Span) {
        if let Some(prev_span) = self.export_assign {
            // TS2300
            HANDLER.with(|handler| {
                handler
                    .struct_span_err(span, "multiple `export =` found")
                    .span_label(prev_span, "previous `export =` declared here")
                    .emit()
            });
        }
        self.export_assign = Some(span);
        self.check_no_coexist();
    }
    /// `export =` cannot coexist with other exported elements (TS2309).
    fn check_no_coexist(&self) {
        if let Some(span) = self.export_assign {
            if !self.exports.is_empty() {
                // TS2309
                HANDLER.with(|handler| {
                    handler
                        .struct_span_err(span, r#"An export assignment cannot be used in a module with other exported elements."#)
                        .emit()
                });
            }
        }
    }
}
impl Visit for DuplicateExports {
    noop_visit_type!();

    fn visit_export_default_decl(&mut self, d: &ExportDefaultDecl) {
        // Overload signatures (`export default function f();` with no body)
        // and interfaces are type-level and may legally repeat.
        if match &d.decl {
            DefaultDecl::Fn(FnExpr { function: f, .. }) if f.body.is_none() => true,
            DefaultDecl::TsInterfaceDecl(..) => true,
            _ => false,
        } {
            return;
        }

        d.visit_children_with(self);
        self.add(&Ident::new_no_ctxt("default".into(), d.span));
    }

    fn visit_export_default_expr(&mut self, d: &ExportDefaultExpr) {
        d.visit_children_with(self);
        // Same exemption for bodiless function expressions.
        match &*d.expr {
            Expr::Fn(FnExpr { function: f, .. }) if f.body.is_none() => return,
            _ => {}
        }
        self.add(&Ident::new_no_ctxt("default".into(), d.span));
    }

    fn visit_export_default_specifier(&mut self, s: &ExportDefaultSpecifier) {
        self.add(&s.exported);
    }

    fn visit_export_named_specifier(&mut self, s: &ExportNamedSpecifier) {
        // String export names (`export { a as "b-c" }`) are skipped entirely.
        let exported = match &s.exported {
            Some(ModuleExportName::Ident(ident)) => Some(ident),
            Some(ModuleExportName::Str(..)) => return,
            _ => None,
        };
        let orig = match &s.orig {
            ModuleExportName::Ident(ident) => ident,
            ModuleExportName::Str(..) => return,
        };
        // `exported` is already an `Option<&Ident>`; fall back to the local
        // name when there is no `as` alias. (Previously spelled
        // `exported.as_ref().unwrap_or(&orig)`, which built a redundant
        // `&&Ident` only to auto-deref it again.)
        self.add(exported.unwrap_or(orig));
    }

    fn visit_export_namespace_specifier(&mut self, s: &ExportNamespaceSpecifier) {
        match &s.name {
            ModuleExportName::Ident(name) => self.add(name),
            ModuleExportName::Str(..) => {}
        };
    }

    /// Noop. Just to ensure that the visitor do not recurse into stmt.
    fn visit_stmt(&mut self, _: &Stmt) {}

    fn visit_ts_export_assignment(&mut self, n: &TsExportAssignment) {
        self.add_export_assign(n.span);
    }

    fn visit_ts_import_equals_decl(&mut self, n: &TsImportEqualsDecl) {
        // `export import A = B;` exports a value binding unless type-only.
        if n.is_export && !n.is_type_only {
            self.add(&n.id)
        }
    }

    fn visit_ts_module_decl(&mut self, d: &TsModuleDecl) {
        // A namespace has its own export scope: recurse with fresh state and
        // restore the outer scope's bookkeeping afterwards. Ambient
        // (`declare`) modules are skipped.
        if !d.declare {
            let old = mem::take(self);
            d.visit_children_with(self);
            *self = old;
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/eqeqeq.rs | Rust | use serde::{Deserialize, Serialize};
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// Which comparison style the `eqeqeq` rule enforces.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
enum EqEqEqMode {
    // Require strict `===` / `!==` everywhere.
    Always,
    // Require loose `==` / `!=` everywhere.
    Never,
}
impl Default for EqEqEqMode {
    // Strict equality is the default.
    fn default() -> Self {
        Self::Always
    }
}
/// User-facing configuration for the `eqeqeq` rule.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct EqeqeqConfig {
    #[serde(default)]
    mode: EqEqEqMode,
}
pub fn eqeqeq(config: &RuleConfig<EqeqeqConfig>) -> Option<Box<dyn Rule>> {
    match config.get_rule_reaction() {
        LintRuleReaction::Off => None,
        _ => Some(visitor_rule(Eqeqeq::new(config))),
    }
}
/// Visitor state for the `eqeqeq` rule.
#[derive(Debug, Default)]
struct Eqeqeq {
    expected_reaction: LintRuleReaction,
    mode: EqEqEqMode,
}
impl Eqeqeq {
    fn new(config: &RuleConfig<EqeqeqConfig>) -> Self {
        Self {
            expected_reaction: config.get_rule_reaction(),
            mode: config.get_rule_config().mode,
        }
    }

    /// Emits "Use 'expected' instead of 'actual'" at the given span with the
    /// configured severity.
    fn emit_report(&self, span: Span, actual: &str, expected: &str) {
        let message = format!("Use '{}' instead of '{}'", expected, actual);

        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, &message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, &message).emit();
            }
            _ => {}
        });
    }

    /// Reports an equality operator that disagrees with the configured mode.
    fn check(&self, span: Span, bin_op: &BinaryOp) {
        // Pair the configured mode with the operator; only four combinations
        // ever produce a diagnostic.
        let suggestion = match (self.mode, bin_op) {
            (EqEqEqMode::Always, op!("==")) => Some(("==", "===")),
            (EqEqEqMode::Always, op!("!=")) => Some(("!=", "!==")),
            (EqEqEqMode::Never, op!("===")) => Some(("===", "==")),
            (EqEqEqMode::Never, op!("!==")) => Some(("!==", "!=")),
            _ => None,
        };

        if let Some((actual, expected)) = suggestion {
            self.emit_report(span, actual, expected);
        }
    }
}
impl Visit for Eqeqeq {
    noop_visit_type!();
    fn visit_bin_expr(&mut self, bin_expr: &BinExpr) {
        // Check this operator, then keep walking for nested comparisons.
        self.check(bin_expr.span, &bin_expr.op);
        bin_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/mod.rs | Rust | use std::sync::Arc;
use swc_common::{SourceMap, SyntaxContext};
use swc_ecma_ast::*;
use crate::{config::LintConfig, rule::Rule};
mod const_assign;
mod duplicate_bindings;
mod duplicate_exports;
mod no_dupe_args;
mod utils;
#[cfg(feature = "non_critical_lints")]
#[path = ""]
pub(crate) mod non_critical_lints {
pub mod constructor_super;
pub mod default_case_last;
pub mod default_param_last;
pub mod dot_notation;
pub mod eqeqeq;
pub mod no_alert;
pub mod no_await_in_loop;
pub mod no_bitwise;
pub mod no_compare_neg_zero;
pub mod no_cond_assign;
pub mod no_console;
pub mod no_debugger;
pub mod no_empty_function;
pub mod no_empty_pattern;
pub mod no_loop_func;
pub mod no_new;
pub mod no_new_object;
pub mod no_new_symbol;
pub mod no_obj_calls;
pub mod no_param_reassign;
pub mod no_prototype_builtins;
pub mod no_restricted_syntax;
pub mod no_sparse_arrays;
pub mod no_throw_literal;
pub mod no_use_before_define;
pub mod no_var;
pub mod prefer_const;
pub mod prefer_object_spread;
pub mod prefer_regex_literals;
pub mod quotes;
pub mod radix;
pub mod symbol_description;
pub mod use_is_nan;
pub mod valid_typeof;
pub mod yoda;
}
#[cfg(feature = "non_critical_lints")]
use non_critical_lints::*;
/// Everything the rule constructors may need about the program being linted.
pub struct LintParams<'a> {
    pub program: &'a Program,
    pub lint_config: &'a LintConfig,
    // Syntax context assigned to unresolved (global) identifiers.
    pub unresolved_ctxt: SyntaxContext,
    pub top_level_ctxt: SyntaxContext,
    pub es_version: EsVersion,
    pub source_map: Arc<SourceMap>,
}
/// Builds the full set of lint rules for the given program.
///
/// The critical rules (const reassignment, duplicate bindings/exports,
/// duplicate function args) are always enabled; everything else is gated
/// behind the `non_critical_lints` feature and its per-rule configuration.
pub fn all(lint_params: LintParams) -> Vec<Box<dyn Rule>> {
    let mut rules = vec![
        const_assign::const_assign(),
        duplicate_bindings::duplicate_bindings(),
        duplicate_exports::duplicate_exports(),
        no_dupe_args::no_dupe_args(),
    ];
    #[cfg(feature = "non_critical_lints")]
    {
        let LintParams {
            program,
            lint_config,
            unresolved_ctxt,
            top_level_ctxt: _,
            es_version,
            source_map,
        } = lint_params;
        // Consistency fix: use the destructured `lint_config` like every
        // other rule below, instead of reaching back through `lint_params`
        // (which only compiled because `lint_config` is a Copy reference
        // field of the partially-moved struct).
        rules.extend(no_use_before_define::no_use_before_define(
            &lint_config.no_use_before_define,
        ));
        rules.extend(no_console::no_console(
            &lint_config.no_console,
            unresolved_ctxt,
        ));
        rules.extend(no_alert::no_alert(
            &lint_config.no_alert,
            unresolved_ctxt,
            es_version,
        ));
        rules.extend(no_debugger::no_debugger(&lint_config.no_debugger));
        rules.extend(quotes::quotes(&lint_config.quotes));
        rules.extend(prefer_regex_literals::prefer_regex_literals(
            &lint_config.prefer_regex_literals,
            unresolved_ctxt,
            es_version,
        ));
        rules.extend(dot_notation::dot_notation(
            program,
            &lint_config.dot_notation,
        ));
        rules.extend(no_empty_function::no_empty_function(
            &source_map,
            &lint_config.no_empty_function,
        ));
        rules.extend(no_empty_pattern::no_empty_pattern(
            &lint_config.no_empty_pattern,
        ));
        rules.extend(eqeqeq::eqeqeq(&lint_config.eqeqeq));
        rules.extend(no_loop_func::no_loop_func(&lint_config.no_loop_func));
        rules.extend(no_new::no_new(&lint_config.no_new));
        rules.extend(no_restricted_syntax::no_restricted_syntax(
            &lint_config.no_restricted_syntax,
        ));
        rules.extend(radix::radix(unresolved_ctxt, &lint_config.radix));
        rules.extend(no_bitwise::no_bitwise(&lint_config.no_bitwise));
        rules.extend(default_param_last::default_param_last(
            &lint_config.default_param_last,
        ));
        rules.extend(yoda::yoda(&lint_config.yoda));
        rules.extend(no_new_symbol::no_new_symbol(
            unresolved_ctxt,
            &lint_config.no_new_symbol,
        ));
        rules.extend(use_is_nan::use_is_nan(
            unresolved_ctxt,
            &lint_config.use_isnan,
        ));
        rules.extend(valid_typeof::valid_typeof(&lint_config.valid_typeof));
        rules.extend(no_param_reassign::no_param_reassign(
            &lint_config.no_param_reassign,
        ));
        rules.extend(symbol_description::symbol_description(
            unresolved_ctxt,
            &lint_config.symbol_description,
        ));
        rules.extend(no_obj_calls::no_obj_calls(
            unresolved_ctxt,
            &lint_config.no_obj_calls,
        ));
        rules.extend(no_throw_literal::no_throw_literal(
            &lint_config.no_throw_literal,
        ));
        rules.extend(no_var::no_var(&lint_config.no_var));
        rules.extend(prefer_const::prefer_const(&lint_config.prefer_const));
        rules.extend(no_compare_neg_zero::no_compare_neg_zero(
            &lint_config.no_compare_neg_zero,
        ));
        rules.extend(constructor_super::constructor_super(
            &lint_config.constructor_super,
        ));
        rules.extend(no_sparse_arrays::no_sparse_arrays(
            &lint_config.no_sparse_arrays,
        ));
        rules.extend(default_case_last::default_case_last(
            &lint_config.default_case_last,
        ));
        rules.extend(no_await_in_loop::no_await_in_loop(
            &lint_config.no_await_in_loop,
        ));
        rules.extend(no_cond_assign::no_cond_assign(&lint_config.no_cond_assign));
        rules.extend(no_prototype_builtins::no_prototype_builtins(
            &lint_config.no_prototype_builtins,
        ));
        rules.extend(no_new_object::no_new_object(
            unresolved_ctxt,
            &lint_config.no_new_object,
        ));
        rules.extend(prefer_object_spread::prefer_object_spread(
            &lint_config.prefer_object_spread,
            unresolved_ctxt,
            es_version,
        ));
    }
    rules
}
/// Wraps a single `Rule` so it can run as a compiler `Pass`.
pub fn lint_pass<R>(r: R) -> impl Pass
where
    R: Rule,
{
    LintPass(r)
}
// Newtype adapter that dispatches to `lint_module` / `lint_script` depending
// on the program kind.
struct LintPass<R>(R)
where
    R: Rule;
impl<R> Pass for LintPass<R>
where
    R: Rule,
{
    fn process(&mut self, program: &mut Program) {
        match program {
            Program::Module(m) => self.0.lint_module(m),
            Program::Script(s) => self.0.lint_script(s),
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_alert.rs | Rust | use swc_atoms::Atom;
use swc_common::{errors::HANDLER, Span, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
// Global functions this rule reports.
const FN_NAMES: &[&str] = &["alert", "confirm", "prompt"];
const GLOBAL_THIS_PROP: &str = "globalThis";
// Objects through which the globals may be reached (`window.alert(...)`).
const OBJ_NAMES: &[&str] = &["window", GLOBAL_THIS_PROP];
pub fn no_alert(
    config: &RuleConfig<()>,
    unresolved_ctxt: SyntaxContext,
    es_version: EsVersion,
) -> Option<Box<dyn Rule>> {
    let rule_reaction = config.get_rule_reaction();
    match rule_reaction {
        LintRuleReaction::Off => None,
        _ => Some(visitor_rule(NoAlert::new(
            rule_reaction,
            unresolved_ctxt,
            es_version,
        ))),
    }
}
/// Visitor state for the `no-alert` rule.
#[derive(Debug, Default)]
struct NoAlert {
    expected_reaction: LintRuleReaction,
    // Syntax context of unresolved (global) identifiers.
    unresolved_ctxt: SyntaxContext,
    // `globalThis` only exists from ES2020 on; for older targets calls
    // through it are ignored.
    pass_call_on_global_this: bool,
    // `true` while visiting the callee sub-expression of a call.
    inside_callee: bool,
    // Nesting depths used to decide what `this` refers to.
    classes_depth: usize,
    objects_depth: usize,
    arrow_fns_depth: usize,
    // Object/property of the call currently being analyzed.
    obj: Option<Atom>,
    prop: Option<Atom>,
}
impl NoAlert {
    fn new(
        expected_reaction: LintRuleReaction,
        unresolved_ctxt: SyntaxContext,
        es_version: EsVersion,
    ) -> Self {
        Self {
            expected_reaction,
            unresolved_ctxt,
            // `globalThis` was introduced in ES2020; for earlier targets a
            // `globalThis.alert(...)` call cannot refer to the global object.
            pass_call_on_global_this: es_version < EsVersion::Es2020,
            inside_callee: false,
            classes_depth: 0,
            objects_depth: 0,
            arrow_fns_depth: 0,
            obj: None,
            prop: None,
        }
    }

    /// Emits "Unexpected <fn_name>" at `span` with the configured severity.
    fn emit_report(&self, span: Span, fn_name: &str) {
        let message = format!("Unexpected {}", fn_name);

        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, &message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, &message).emit();
            }
            _ => {}
        });
    }

    fn is_inside_class(&self) -> bool {
        self.classes_depth > 0
    }

    fn is_inside_object(&self) -> bool {
        self.objects_depth > 0
    }

    fn is_inside_arrow_fn(&self) -> bool {
        self.arrow_fns_depth > 0
    }

    /// Reports the call when it resolves to `alert`/`confirm`/`prompt`,
    /// either bare or reached through `window`/`globalThis`.
    fn check(&self, call_span: Span, obj: &Option<Atom>, prop: &Atom) {
        if let Some(obj) = obj {
            let obj_name: &str = obj;

            if self.pass_call_on_global_this && obj_name == GLOBAL_THIS_PROP {
                return;
            }

            if !OBJ_NAMES.contains(&obj_name) {
                return;
            }
        }

        let fn_name: &str = prop;

        if FN_NAMES.contains(&fn_name) {
            self.emit_report(call_span, fn_name);
        }
    }

    /// `true` when the identifier resolves to the global (unresolved) scope,
    /// i.e. it is not shadowed by a local binding.
    ///
    /// Renamed from `is_satisfying_indent` — the old name was a typo for
    /// "ident"; the method is private and only called within this impl.
    fn is_satisfying_ident(&self, ident: &Ident) -> bool {
        ident.ctxt == self.unresolved_ctxt
    }

    /// Records the property name of a member access, handling both
    /// `obj.prop` and `obj["prop"]`.
    fn handle_member_prop(&mut self, prop: &MemberProp) {
        match prop {
            MemberProp::Ident(IdentName { sym, .. }) => {
                self.prop = Some(sym.clone());
            }
            MemberProp::Computed(comp) => {
                if let Expr::Lit(Lit::Str(Str { value, .. })) = comp.expr.as_ref() {
                    self.prop = Some(value.clone());
                }
            }
            _ => {}
        }
    }

    fn handle_member_expr(&mut self, member_expr: &MemberExpr) {
        let MemberExpr { obj, prop, .. } = member_expr;

        match obj.as_ref() {
            Expr::Ident(obj) => {
                // Skip `window.alert` when `window` is shadowed locally.
                if !self.is_satisfying_ident(obj) {
                    return;
                }

                self.obj = Some(obj.sym.clone());
                self.handle_member_prop(prop);
            }
            Expr::This(_) => {
                // `this.alert(...)` only refers to the global object when
                // `this` is not rebound by an enclosing class/object method
                // (arrow functions inherit `this` lexically).
                let inside_arrow_fn = self.is_inside_arrow_fn();
                let inside_class = self.is_inside_class();

                if inside_arrow_fn && inside_class {
                    return;
                }

                if !inside_arrow_fn && (inside_class || self.is_inside_object()) {
                    return;
                }

                self.handle_member_prop(prop);
            }
            _ => {}
        }
    }

    /// Resolves a callee expression to an (object, property) pair when
    /// possible, looking through optional chains and parentheses.
    fn handle_callee(&mut self, expr: &Expr) {
        match expr {
            Expr::Ident(ident) => {
                if self.is_satisfying_ident(ident) {
                    self.prop = Some(ident.sym.clone());
                }
            }
            Expr::Member(member_expr) => self.handle_member_expr(member_expr),
            Expr::OptChain(OptChainExpr { base, .. }) if base.is_member() => {
                let member_expr = base.as_member().unwrap();

                self.handle_member_expr(member_expr);
            }
            Expr::OptChain(opt_chain) => {
                opt_chain.visit_children_with(self);
            }
            Expr::Paren(paren) => {
                paren.visit_children_with(self);
            }
            _ => {}
        }
    }

    /// Analyzes one call expression: resolves the callee, checks it, then
    /// clears the per-call state.
    fn handle_call(&mut self, call_expr: &CallExpr) {
        if let Some(callee) = call_expr.callee.as_expr() {
            self.inside_callee = true;

            callee.visit_with(self);

            self.inside_callee = false;
        }

        if let Some(prop) = &self.prop {
            self.check(call_expr.span, &self.obj, prop);

            self.obj = None;
            self.prop = None;
        }
    }
}
impl Visit for NoAlert {
    noop_visit_type!();
    fn visit_expr(&mut self, expr: &Expr) {
        if self.inside_callee {
            // We are inside the callee of a call: try to resolve it to
            // `alert`/`confirm`/`prompt` (possibly via `window`,
            // `globalThis`, or `this`).
            self.handle_callee(expr);
        } else {
            if let Expr::Call(call_expr) = expr {
                self.handle_call(call_expr);
            }
            expr.visit_children_with(self);
        }
    }
    // The depth counters below let the callee analysis decide what `this`
    // refers to.
    fn visit_class(&mut self, class: &Class) {
        self.classes_depth += 1;
        class.visit_children_with(self);
        self.classes_depth -= 1;
    }
    fn visit_object_lit(&mut self, lit_obj: &ObjectLit) {
        self.objects_depth += 1;
        lit_obj.visit_children_with(self);
        self.objects_depth -= 1;
    }
    fn visit_arrow_expr(&mut self, arrow_fn: &ArrowExpr) {
        self.arrow_fns_depth += 1;
        arrow_fn.visit_children_with(self);
        self.arrow_fns_depth -= 1;
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_await_in_loop.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
// Diagnostic text for every report produced by this rule.
const MESSAGE: &str = "Unexpected `await` inside a loop";
pub fn no_await_in_loop(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let rule_reaction = config.get_rule_reaction();
    match rule_reaction {
        LintRuleReaction::Off => None,
        _ => Some(visitor_rule(NoAwaitInLoop::new(rule_reaction))),
    }
}
/// Visitor for the `no-await-in-loop` rule.
#[derive(Debug, Default)]
struct NoAwaitInLoop {
    expected_reaction: LintRuleReaction,
    // `true` while visiting code that re-executes on every loop iteration
    // (where an `await` should be reported).
    await_restricted: bool,
}
impl NoAwaitInLoop {
    fn new(expected_reaction: LintRuleReaction) -> Self {
        Self {
            expected_reaction,
            // Not inside a loop initially.
            await_restricted: false,
        }
    }
    /// Emits `MESSAGE` at `span` with the configured severity.
    fn emit_report(&self, span: Span) {
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, MESSAGE).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, MESSAGE).emit();
            }
            _ => {}
        });
    }
}
impl Visit for NoAwaitInLoop {
    /// In a classic `for`, only the test/update/body re-execute each
    /// iteration; the init runs once and keeps the surrounding restriction.
    fn visit_for_stmt(&mut self, for_stmt: &ForStmt) {
        let prev_await_restriction = self.await_restricted;

        for_stmt.init.visit_children_with(self);

        self.await_restricted = true;
        for_stmt.test.visit_children_with(self);
        for_stmt.update.visit_children_with(self);
        for_stmt.body.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }

    fn visit_do_while_stmt(&mut self, do_while_stmt: &DoWhileStmt) {
        let prev_await_restriction = self.await_restricted;

        self.await_restricted = true;
        do_while_stmt.body.visit_children_with(self);
        do_while_stmt.test.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }

    /// The iterated object (`right`) is evaluated once, so it keeps the
    /// surrounding restriction; the body is restricted.
    fn visit_for_in_stmt(&mut self, for_in_stmt: &ForInStmt) {
        let prev_await_restriction = self.await_restricted;

        for_in_stmt.left.visit_children_with(self);
        for_in_stmt.right.visit_children_with(self);

        self.await_restricted = true;
        for_in_stmt.body.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }

    fn visit_for_of_stmt(&mut self, for_of_stmt: &ForOfStmt) {
        let prev_await_restriction = self.await_restricted;

        // Bug fix: only a `for await ... of` is itself an `await` and should
        // be reported when nested inside another loop. Previously every
        // `for..of` inside a loop was flagged even without `await`.
        if for_of_stmt.is_await && self.await_restricted {
            self.emit_report(for_of_stmt.span);
        }

        for_of_stmt.left.visit_children_with(self);
        for_of_stmt.right.visit_children_with(self);

        // A `for await` body iterates sequentially by design, so further
        // awaits inside it are not restricted (mirrors ESLint, which treats
        // `for await ... of` as a boundary); a plain `for..of` body is
        // restricted like any other loop body.
        self.await_restricted = !for_of_stmt.is_await;
        for_of_stmt.body.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }

    fn visit_while_stmt(&mut self, while_stmt: &WhileStmt) {
        let prev_await_restriction = self.await_restricted;

        self.await_restricted = true;
        while_stmt.test.visit_children_with(self);
        while_stmt.body.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }

    fn visit_await_expr(&mut self, await_expr: &AwaitExpr) {
        if self.await_restricted {
            self.emit_report(await_expr.span);
        }

        await_expr.visit_children_with(self);
    }

    // Function bodies start a fresh async context: awaits inside them are
    // not "in" any enclosing loop.
    fn visit_function(&mut self, function: &Function) {
        let prev_await_restriction = self.await_restricted;

        self.await_restricted = false;
        function.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }

    fn visit_arrow_expr(&mut self, arrow_expr: &ArrowExpr) {
        let prev_await_restriction = self.await_restricted;

        self.await_restricted = false;
        arrow_expr.visit_children_with(self);

        self.await_restricted = prev_await_restriction;
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_bitwise.rs | Rust | use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize};
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// User-facing configuration for the `no-bitwise` rule.
///
/// Serialized with camelCase keys; the field names are part of the config
/// format and must not change.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NoBitwiseConfig {
    // Operator strings (e.g. "~", "&", "|=") that should not be reported.
    allow: Option<Vec<String>>,
    // When true, `x | 0` is tolerated as an int32-coercion hint.
    int_32_hint: Option<bool>,
}

/// Builds the `no-bitwise` rule, or `None` when it is turned off.
pub fn no_bitwise(config: &RuleConfig<NoBitwiseConfig>) -> Option<Box<dyn Rule>> {
    if matches!(config.get_rule_reaction(), LintRuleReaction::Off) {
        None
    } else {
        Some(visitor_rule(NoBitwise::new(config)))
    }
}
#[derive(Debug, Default)]
struct NoBitwise {
expected_reaction: LintRuleReaction,
allow_binary_ops: Option<FxHashSet<BinaryOp>>,
allow_assign_ops: Option<FxHashSet<AssignOp>>,
allow_bitwise_not: bool,
allow_int_32_hint: bool,
}
impl NoBitwise {
fn new(config: &RuleConfig<NoBitwiseConfig>) -> Self {
let rule_config = config.get_rule_config();
let mut allow_binary_ops: Option<FxHashSet<BinaryOp>> = None;
let mut allow_assign_ops: Option<FxHashSet<AssignOp>> = None;
let mut allow_bitwise_not: bool = false;
if let Some(allow) = &rule_config.allow {
allow.iter().for_each(|op| {
let op = op.as_str();
match op {
"~" => {
allow_bitwise_not = true;
}
"&" | "^" | "<<" | ">>" | ">>>" => {
if allow_binary_ops.is_none() {
allow_binary_ops = Some(Default::default());
}
let allow_binary_ops = allow_binary_ops.as_mut().unwrap();
match op {
"&" => allow_binary_ops.insert(op!("&")),
"^" => allow_binary_ops.insert(op!("^")),
"<<" => allow_binary_ops.insert(op!("<<")),
">>" => allow_binary_ops.insert(op!(">>")),
">>>" => allow_binary_ops.insert(op!(">>>")),
_ => false,
};
}
"|=" | "&=" | "<<=" | ">>=" | ">>>=" | "^=" => {
if allow_assign_ops.is_none() {
allow_assign_ops = Some(Default::default());
}
let allow_assign_ops = allow_assign_ops.as_mut().unwrap();
match op {
"|=" => allow_assign_ops.insert(op!("|=")),
"&=" => allow_assign_ops.insert(op!("&=")),
"<<=" => allow_assign_ops.insert(op!("<<=")),
">>=" => allow_assign_ops.insert(op!(">>=")),
">>>=" => allow_assign_ops.insert(op!(">>>=")),
"^=" => allow_assign_ops.insert(op!("^=")),
_ => false,
};
}
_ => {}
};
});
}
Self {
expected_reaction: config.get_rule_reaction(),
allow_binary_ops,
allow_assign_ops,
allow_bitwise_not,
allow_int_32_hint: rule_config.int_32_hint.unwrap_or(false),
}
}
fn emit_report(&self, span: Span, op: &str) {
let message = format!("Unexpected use of '{}'", op);
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, &message).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, &message).emit();
}
_ => {}
});
}
}
impl Visit for NoBitwise {
    noop_visit_type!();

    /// Checks binary bitwise operators (`&`, `|`, `^`, `<<`, `>>`, `>>>`).
    fn visit_bin_expr(&mut self, bin_expr: &BinExpr) {
        let allowed = self
            .allow_binary_ops
            .as_ref()
            .map_or(false, |allow| allow.contains(&bin_expr.op));

        if !allowed {
            match bin_expr.op {
                // Fix: the original arm listed `op!(">>>")` twice; the
                // duplicate pattern was dead and has been removed.
                op!("&") | op!("^") | op!("<<") | op!(">>") | op!(">>>") => {
                    self.emit_report(bin_expr.span, bin_expr.op.as_str());
                }
                op!("|") => {
                    // `x | 0` may be tolerated as an int32-coercion hint.
                    let is_int32_hint = self.allow_int_32_hint
                        && matches!(
                            bin_expr.right.as_ref(),
                            Expr::Lit(Lit::Num(Number { value, .. })) if *value == 0f64
                        );
                    if !is_int32_hint {
                        self.emit_report(bin_expr.span, bin_expr.op.as_str());
                    }
                }
                _ => {}
            }
        }

        // Fix: always descend so bitwise operators nested inside an allowed
        // or tolerated expression (e.g. `(a ^ b) << c` with `<<` allowed)
        // are still checked. The original early-returned without visiting
        // children in the allowed and int32-hint cases.
        bin_expr.visit_children_with(self);
    }

    /// Checks the bitwise-NOT operator (`~`).
    fn visit_unary_expr(&mut self, unary_expr: &UnaryExpr) {
        if let op!("~") = unary_expr.op {
            if !self.allow_bitwise_not {
                self.emit_report(unary_expr.span, unary_expr.op.as_str());
            }
        }
        // Always descend (see note in visit_bin_expr).
        unary_expr.visit_children_with(self);
    }

    /// Checks bitwise assignment operators (`|=`, `&=`, `<<=`, `>>=`,
    /// `>>>=`, `^=`).
    fn visit_assign_expr(&mut self, assign_expr: &AssignExpr) {
        let allowed = self
            .allow_assign_ops
            .as_ref()
            .map_or(false, |allow| allow.contains(&assign_expr.op));

        if !allowed
            && matches!(
                assign_expr.op,
                op!("|=") | op!("&=") | op!("<<=") | op!(">>=") | op!(">>>=") | op!("^=")
            )
        {
            self.emit_report(assign_expr.span, assign_expr.op.as_str());
        }

        // Fix: the original never descended into assignments at all, so
        // bitwise operators on the right-hand side (e.g. `x = a & b`) were
        // silently missed.
        assign_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_compare_neg_zero.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
pub fn no_compare_neg_zero(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
let rule_reaction = config.get_rule_reaction();
match rule_reaction {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoCompareNegZero::new(rule_reaction))),
}
}
#[derive(Debug, Default)]
struct NoCompareNegZero {
expected_reaction: LintRuleReaction,
}
impl NoCompareNegZero {
fn new(expected_reaction: LintRuleReaction) -> Self {
Self { expected_reaction }
}
fn emit_report(&self, op: BinaryOp, span: Span) {
let message = format!("Do not use the '{}' to compare against -0", op);
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, &message).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, &message).emit();
}
_ => {}
});
}
fn is_neg_zero(&self, expr: &Expr) -> bool {
if let Expr::Unary(UnaryExpr {
op: op!(unary, "-"),
arg,
..
}) = expr.unwrap_seqs_and_parens()
{
if let Expr::Lit(Lit::Num(Number { value, .. })) = arg.unwrap_seqs_and_parens() {
return *value == 0f64;
}
}
false
}
fn check(&self, bin_expr: &BinExpr) {
if let op!("===")
| op!("==")
| op!("!==")
| op!("!=")
| op!("<")
| op!("<=")
| op!(">")
| op!(">=") = bin_expr.op
{
if self.is_neg_zero(bin_expr.left.as_ref()) || self.is_neg_zero(bin_expr.right.as_ref())
{
self.emit_report(bin_expr.op, bin_expr.span);
}
}
}
}
impl Visit for NoCompareNegZero {
fn visit_bin_expr(&mut self, bin_expr: &BinExpr) {
self.check(bin_expr);
bin_expr.visit_children_with(self);
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_cond_assign.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const MESSAGE: &str = "Expected a conditional expression and instead saw an assignment";
pub fn no_cond_assign(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
match config.get_rule_reaction() {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoCondAssign::new(config))),
}
}
#[derive(Debug, Default)]
struct NoCondAssign {
expected_reaction: LintRuleReaction,
inside_test_clause: bool,
}
impl NoCondAssign {
fn new(config: &RuleConfig<()>) -> Self {
Self {
expected_reaction: config.get_rule_reaction(),
inside_test_clause: false,
}
}
fn emit_report(&self, span: Span) {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
fn check(&self, span: Span) {
if self.inside_test_clause {
self.emit_report(span);
}
}
}
/// The traversal sets `inside_test_clause` while visiting the condition of
/// an `if`/`?:`/`for`/`while`/`do-while`, restores the previous value for
/// every other child, and clears it across function boundaries. Any
/// assignment encountered while the flag is set is reported.
impl Visit for NoCondAssign {
    fn visit_if_stmt(&mut self, if_stmt: &IfStmt) {
        // Only the condition is restricted; branches see the outer state.
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = true;
        if_stmt.test.visit_with(self);
        self.inside_test_clause = prev_inside_test_clause;
        if_stmt.cons.visit_with(self);
        if_stmt.alt.visit_with(self);
    }
    fn visit_cond_expr(&mut self, cond_expr: &CondExpr) {
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = true;
        cond_expr.test.visit_with(self);
        self.inside_test_clause = prev_inside_test_clause;
        cond_expr.cons.visit_with(self);
        cond_expr.alt.visit_with(self);
    }
    fn visit_for_stmt(&mut self, for_stmt: &ForStmt) {
        // The init clause may legitimately assign (`for (i = 0; …)`), so it
        // is visited with the outer state; only the test is restricted.
        for_stmt.init.visit_with(self);
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = true;
        for_stmt.test.visit_with(self);
        self.inside_test_clause = prev_inside_test_clause;
        for_stmt.update.visit_with(self);
        for_stmt.body.visit_with(self);
    }
    fn visit_while_stmt(&mut self, while_stmt: &WhileStmt) {
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = true;
        while_stmt.test.visit_with(self);
        self.inside_test_clause = prev_inside_test_clause;
        while_stmt.body.visit_with(self);
    }
    fn visit_do_while_stmt(&mut self, do_while_stmt: &DoWhileStmt) {
        // Body first (source order), then the restricted test clause.
        do_while_stmt.body.visit_with(self);
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = true;
        do_while_stmt.test.visit_with(self);
        self.inside_test_clause = prev_inside_test_clause;
    }
    // An assignment inside a function that merely appears lexically within a
    // test clause (e.g. `if (arr.some(x => (y = x)))`) is not restricted, so
    // the flag is cleared across function boundaries.
    fn visit_arrow_expr(&mut self, arrow_expr: &ArrowExpr) {
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = false;
        arrow_expr.visit_children_with(self);
        self.inside_test_clause = prev_inside_test_clause;
    }
    fn visit_function(&mut self, function: &Function) {
        let prev_inside_test_clause = self.inside_test_clause;
        self.inside_test_clause = false;
        function.visit_children_with(self);
        self.inside_test_clause = prev_inside_test_clause;
    }
    fn visit_assign_expr(&mut self, assign_expr: &AssignExpr) {
        self.check(assign_expr.span);
        assign_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_console.rs | Rust | use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize};
use swc_atoms::Atom;
use swc_common::{errors::HANDLER, Span, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const MESSAGE: &str = "Unexpected console statement";
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct NoConsoleConfig {
allow: Option<FxHashSet<String>>,
}
pub fn no_console(
config: &RuleConfig<NoConsoleConfig>,
unresolved_ctxt: SyntaxContext,
) -> Option<Box<dyn Rule>> {
match config.get_rule_reaction() {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoConsole::new(config, unresolved_ctxt))),
}
}
#[derive(Debug, Default)]
struct NoConsole {
expected_reaction: LintRuleReaction,
unresolved_ctxt: SyntaxContext,
allow: Option<FxHashSet<Atom>>,
}
impl NoConsole {
fn new(config: &RuleConfig<NoConsoleConfig>, unresolved_ctxt: SyntaxContext) -> Self {
Self {
expected_reaction: config.get_rule_reaction(),
allow: config.get_rule_config().allow.as_ref().map(|method_names| {
method_names
.iter()
.map(|method_name| Atom::from(method_name.as_str()))
.collect()
}),
unresolved_ctxt,
}
}
fn check(&self, span: Span, ident: &Ident, method: &Atom) {
if &*ident.sym == "console" && ident.ctxt == self.unresolved_ctxt {
if let Some(allow) = &self.allow {
if allow.contains(method) {
return;
}
}
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
}
}
impl Visit for NoConsole {
    noop_visit_type!();

    /// Looks for `console.foo` / `console["foo"]` accesses. Only a bare
    /// `console` identifier object is checked; everything else keeps being
    /// traversed so nested usages are not missed.
    fn visit_member_expr(&mut self, member: &MemberExpr) {
        if let Expr::Ident(ident) = member.obj.as_ref() {
            match &member.prop {
                // `console.log(...)`
                MemberProp::Ident(IdentName { sym, .. }) => {
                    self.check(member.span, ident, sym);
                }
                // `console["log"](...)` — a non-literal key is traversed
                // instead, since it may itself contain a console call.
                MemberProp::Computed(ComputedPropName { expr, .. }) => {
                    if let Expr::Lit(Lit::Str(Str { value, .. })) = expr.as_ref() {
                        self.check(member.span, ident, value);
                    } else {
                        expr.visit_with(self);
                    }
                }
                _ => {}
            }
        } else {
            // Fix: keep traversing when the object is not a plain
            // identifier. The original stopped here entirely, so console
            // usages nested in longer member chains or computed keys
            // (e.g. `a[console.log(1)].b`) were never inspected.
            member.visit_children_with(self);
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_debugger.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const MESSAGE: &str = "Unexpected 'debugger' statement";
pub fn no_debugger(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
let rule_reaction = config.get_rule_reaction();
match rule_reaction {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoDebugger::new(rule_reaction))),
}
}
#[derive(Debug, Default)]
struct NoDebugger {
expected_reaction: LintRuleReaction,
}
impl NoDebugger {
fn new(expected_reaction: LintRuleReaction) -> Self {
Self { expected_reaction }
}
fn check(&self, span: Span) {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
}
impl Visit for NoDebugger {
noop_visit_type!();
fn visit_debugger_stmt(&mut self, debugger_stmt: &DebuggerStmt) {
self.check(debugger_stmt.span);
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_dupe_args.rs | Rust | use std::collections::hash_map::Entry;
use rustc_hash::FxHashMap;
use swc_common::errors::HANDLER;
use swc_ecma_ast::*;
use swc_ecma_utils::{
for_each_binding_ident,
parallel::{cpu_count, Parallel, ParallelExt},
};
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::rule::{visitor_rule, Rule};
/// Builds the `no-dupe-args` rule. This rule has no configuration and is
/// always enabled.
pub fn no_dupe_args() -> Box<dyn Rule> {
    visitor_rule(NoDupeArgs)
}

#[derive(Debug, Default)]
struct NoDupeArgs;

impl Parallel for NoDupeArgs {
    // The visitor carries no state, so forking and merging are trivial.
    fn create(&self) -> Self {
        Self
    }

    fn merge(&mut self, _other: Self) {}
}

/// Emits the diagnostic for a parameter bound twice: `first` is the earlier
/// binding, `second` the repeated one.
#[cold]
fn error(first: &BindingIdent, second: &BindingIdent) {
    let message = format!(
        "the name `{}` is bound more than once in this parameter list",
        first.sym
    );
    HANDLER.with(|handler| {
        handler
            .struct_span_err(second.span, &message)
            .span_label(first.span, "previous definition here".to_string())
            .span_label(second.span, "used as parameter more than once".to_string())
            .emit();
    });
}
/// Reports duplicate binding identifiers in a parameter list.
///
/// This has time complexity of O(n^2), but it's fine as the number of
/// parameters is usually small. For pathologically long lists (100+
/// identifiers) it falls back to a linear hash-map scan.
macro_rules! check {
    ($node:expr) => {{
        // This vector allocates only if there are duplicate parameters.
        // This is used to handle the case where the same parameter is used 3 or more
        // times.
        let mut done = Vec::new();
        // Set once a long parameter list is detected; the quadratic scan is
        // then abandoned in favor of the hash-map pass below.
        let mut hash_mode = false;
        let mut i1 = 0;
        for_each_binding_ident($node, |id1| {
            i1 += 1;
            if !hash_mode {
                let mut i2 = 0;
                for_each_binding_ident($node, |id2| {
                    i2 += 1;
                    if hash_mode {
                        return;
                    } else if i2 >= 100 {
                        // While iterating for the first `id1`, we detect that there are more than
                        // 100 identifiers. We switch to hash mode.
                        hash_mode = true;
                    }
                    // Only ordered pairs (i1 < i2) are compared, and each
                    // first occurrence is reported at most once via `done`.
                    if i1 >= i2 || done.contains(&i1) {
                        return;
                    }
                    if id1.ctxt == id2.ctxt && id1.sym == id2.sym {
                        done.push(i1);
                        error(id1, id2);
                    }
                });
            }
        });
        if hash_mode {
            // Linear pass: the first binding of each (name, context) key
            // wins; any later binding with the same key is a duplicate.
            let mut map = FxHashMap::default();
            for_each_binding_ident($node, |id| {
                // Key on both symbol and syntax context, mirroring the
                // quadratic comparison above.
                match map.entry((id.sym.clone(), id.ctxt)) {
                    Entry::Occupied(v) => {
                        error(v.get(), id);
                    }
                    Entry::Vacant(v) => {
                        v.insert(id.clone());
                    }
                }
            });
        }
    }};
}
/// Each function-like node checks its own parameter list via `check!`; the
/// list-typed visit methods fan the traversal out across worker threads
/// (when the lists are large enough) since the visitor is stateless.
impl Visit for NoDupeArgs {
    noop_visit_type!();

    fn visit_arrow_expr(&mut self, f: &ArrowExpr) {
        check!(&f.params);
        f.visit_children_with(self);
    }

    fn visit_class_members(&mut self, members: &[ClassMember]) {
        self.maybe_par(cpu_count(), members, |v, member| {
            member.visit_with(v);
        });
    }

    fn visit_constructor(&mut self, f: &Constructor) {
        check!(&f.params);
        f.visit_children_with(self);
    }

    fn visit_expr_or_spreads(&mut self, n: &[ExprOrSpread]) {
        self.maybe_par(cpu_count(), n, |v, n| {
            n.visit_with(v);
        });
    }

    fn visit_exprs(&mut self, exprs: &[Box<Expr>]) {
        self.maybe_par(cpu_count(), exprs, |v, expr| {
            expr.visit_with(v);
        });
    }

    fn visit_function(&mut self, f: &Function) {
        check!(&f.params);
        f.visit_children_with(self);
    }

    fn visit_module_items(&mut self, items: &[ModuleItem]) {
        self.maybe_par(cpu_count(), items, |v, item| {
            item.visit_with(v);
        });
    }

    fn visit_opt_vec_expr_or_spreads(&mut self, n: &[Option<ExprOrSpread>]) {
        self.maybe_par(cpu_count(), n, |v, n| {
            n.visit_with(v);
        });
    }

    fn visit_prop_or_spreads(&mut self, n: &[PropOrSpread]) {
        self.maybe_par(cpu_count(), n, |v, n| {
            n.visit_with(v);
        });
    }

    fn visit_stmts(&mut self, stmts: &[Stmt]) {
        self.maybe_par(cpu_count(), stmts, |v, stmt| {
            stmt.visit_with(v);
        });
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_empty_function.rs | Rust | use std::{
fmt::{self, Debug},
sync::Arc,
};
use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize};
use swc_common::{errors::HANDLER, SourceMap, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// Modifier categories a user can reference in the rule configuration to
/// exempt particular kinds of empty functions.
///
/// The variant names are serialized in camelCase (serde), so they must not
/// be renamed without a config-format migration.
#[derive(Debug, Serialize, Clone, Copy, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "camelCase")]
enum FunctionModifiers {
    All,
    Simple,
    Generator,
    Getter,
    Setter,
    Async,
    Private,
    Protected,
    Static,
    Public,
}
impl FunctionModifiers {
    /// Human-readable modifier name used when building a diagnostic.
    ///
    /// `Simple` and `All` are bookkeeping values: they describe *absence* of
    /// modifiers (resp. a wildcard in the config) and never reach a
    /// diagnostic message, hence `unreachable!`.
    fn get_human_readable(&self) -> &'static str {
        match self {
            FunctionModifiers::Generator => "generator",
            FunctionModifiers::Getter => "getter",
            FunctionModifiers::Setter => "setter",
            FunctionModifiers::Async => "async",
            FunctionModifiers::Private => "private",
            FunctionModifiers::Protected => "protected",
            FunctionModifiers::Static => "static",
            FunctionModifiers::Public => "public",
            FunctionModifiers::Simple | FunctionModifiers::All => {
                unreachable!();
            }
        }
    }
}
/// User-facing configuration for the `no-empty-function` rule. Serialized
/// with camelCase keys; field names are part of the config format.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NoEmptyFunctionConfig {
    // Treat `{ /* comment */ }` bodies as intentionally empty (default: true).
    consider_comments: Option<bool>,
    // Per-kind modifier sets that exempt matching empty functions.
    functions: Option<FxHashSet<FunctionModifiers>>,
    arrow_functions: Option<FxHashSet<FunctionModifiers>>,
    methods: Option<FxHashSet<FunctionModifiers>>,
    constructors: Option<FxHashSet<FunctionModifiers>>,
}
/// Builds the `no-empty-function` rule, or `None` when it is turned off.
///
/// The source map is required so the rule can inspect the raw text of a
/// function body and treat `{ /* comment */ }` as intentionally empty.
pub fn no_empty_function(
    source_map: &Arc<SourceMap>,
    config: &RuleConfig<NoEmptyFunctionConfig>,
) -> Option<Box<dyn Rule>> {
    if matches!(config.get_rule_reaction(), LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoEmptyFunction::new(
        source_map.clone(),
        config,
    )))
}
/// Visitor state for the `no-empty-function` rule.
///
/// `Debug` is implemented manually elsewhere in this file — presumably
/// because `SourceMap` has no useful `Debug` representation (TODO confirm).
#[derive(Default)]
struct NoEmptyFunction {
    // Used to read the raw body text when scanning for comments.
    source_map: Arc<SourceMap>,
    // How a violation is reported (error vs. warning).
    expected_reaction: LintRuleReaction,
    // When true, an empty body that contains a comment is not reported.
    consider_comments: bool,
    // Per-kind modifier sets that exempt matching functions, from the config.
    functions: Option<FxHashSet<FunctionModifiers>>,
    arrow_functions: Option<FxHashSet<FunctionModifiers>>,
    methods: Option<FxHashSet<FunctionModifiers>>,
    constructors: Option<FxHashSet<FunctionModifiers>>,
}
impl Debug for NoEmptyFunction {
    /// Manual `Debug` impl; `source_map` is intentionally omitted from the
    /// output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("NoEmptyFunction")
            .field("expected_reaction", &self.expected_reaction)
            .field("consider_comments", &self.consider_comments)
            .field("functions", &self.functions)
            .field("arrow_functions", &self.arrow_functions)
            .field("methods", &self.methods)
            // Fix: the label previously read "check_constructor", which does
            // not match the actual field name `constructors`.
            .field("constructors", &self.constructors)
            .finish()
    }
}
/// Boolean flags describing one function-like node; `get_modifiers` turns
/// them into an ordered list of `FunctionModifiers`.
#[derive(Default)]
struct FunctionMarkers {
    is_async: bool,
    is_generator: bool,
    is_getter: bool,
    is_setter: bool,
    is_private: bool,
    is_static: bool,
    is_protected: bool,
    is_public: bool,
}

/// Collects the modifiers set in `markers` into a stack-allocated array,
/// preserving a fixed order (async, generator, private, static, protected,
/// public, getter, setter). Returns the number of used slots together with
/// the array; unused slots remain `FunctionModifiers::Simple`.
///
/// `MAX_ATTRS` must be at least the number of flags a caller can set at
/// once, otherwise the indexed write below would panic.
fn get_modifiers<const MAX_ATTRS: usize>(
    markers: FunctionMarkers,
) -> (usize, [FunctionModifiers; MAX_ATTRS]) {
    let mut modifiers = [FunctionModifiers::Simple; MAX_ATTRS];
    let mut len: usize = 0;

    let flags = [
        (markers.is_async, FunctionModifiers::Async),
        (markers.is_generator, FunctionModifiers::Generator),
        (markers.is_private, FunctionModifiers::Private),
        (markers.is_static, FunctionModifiers::Static),
        (markers.is_protected, FunctionModifiers::Protected),
        (markers.is_public, FunctionModifiers::Public),
        (markers.is_getter, FunctionModifiers::Getter),
        (markers.is_setter, FunctionModifiers::Setter),
    ];
    for (enabled, modifier) in flags {
        if enabled {
            modifiers[len] = modifier;
            len += 1;
        }
    }

    (len, modifiers)
}
impl NoEmptyFunction {
    /// Snapshots the user configuration into the visitor.
    fn new(source_map: Arc<SourceMap>, config: &RuleConfig<NoEmptyFunctionConfig>) -> Self {
        let no_empty_function_config = config.get_rule_config();
        Self {
            source_map,
            expected_reaction: config.get_rule_reaction(),
            // Comments inside an empty body mark it as intentional by default.
            consider_comments: no_empty_function_config.consider_comments.unwrap_or(true),
            functions: no_empty_function_config.functions.clone(),
            arrow_functions: no_empty_function_config.arrow_functions.clone(),
            methods: no_empty_function_config.methods.clone(),
            constructors: no_empty_function_config.constructors.clone(),
        }
    }
    /// Emits the diagnostic. `target_type` names the construct kind
    /// ("function", "arrow function", "method", "constructor");
    /// `function_type`, when present, names the most specific modifier.
    fn emit_report(
        &self,
        span: Span,
        target_type: &str,
        function_type: Option<&FunctionModifiers>,
    ) {
        let message = if let Some(fn_modifier) = function_type {
            format!(
                "Unexpected empty {} {} pattern",
                fn_modifier.get_human_readable(),
                target_type
            )
        } else {
            format!("Unexpected empty {} pattern", target_type)
        };
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, &message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, &message).emit();
            }
            _ => {}
        });
    }
    /// Reports unless the configured `allowed` set exempts this node:
    /// `All` exempts everything, `Simple` exempts modifier-free nodes, and
    /// any overlap between `modifiers` and `allowed` also exempts it.
    fn check(
        &self,
        span: Span,
        target_type: &str,
        allowed: Option<&FxHashSet<FunctionModifiers>>,
        modifiers: &[FunctionModifiers],
    ) {
        if let Some(allowed) = allowed {
            if allowed.contains(&FunctionModifiers::All) {
                return;
            }
            if modifiers.is_empty() && allowed.contains(&FunctionModifiers::Simple) {
                return;
            }
            if modifiers.iter().any(|marker| allowed.contains(marker)) {
                return;
            }
        }
        self.emit_report(span, target_type, modifiers.last());
    }
    /// Returns true when the raw source text of `span` contains the start of
    /// a `//` or `/* … */` comment.
    ///
    /// NOTE(review): this is a plain character scan over the snippet, so any
    /// adjacent `/` `/` or `/` `*` pair matches — for the empty bodies this
    /// rule inspects the text is essentially braces, whitespace and comments,
    /// but callers passing wider spans (e.g. a whole accessor property)
    /// could in principle match pairs that are not comments. TODO confirm.
    fn has_comment_in_body(&self, span: &Span) -> bool {
        let fn_lo = self.source_map.lookup_byte_offset(span.lo);
        let fn_hi = self.source_map.lookup_byte_offset(span.hi);
        // Byte offsets are relative to the containing source file; slice its
        // text directly between the two positions.
        let lo_idx = fn_lo.pos.0 as usize;
        let hi_idx = fn_hi.pos.0 as usize;
        let body = &fn_lo.sf.src.as_str()[lo_idx..hi_idx];
        let mut prev_char: char = '\0';
        for ch in body.chars() {
            match (prev_char, ch) {
                ('/', '/') => {
                    return true;
                }
                ('/', '*') => {
                    return true;
                }
                _ => {}
            }
            prev_char = ch;
        }
        false
    }
}
/// Reports empty bodies of functions, arrows, methods, constructors and
/// accessor properties, honoring the per-kind modifier exemptions.
///
/// Fix applied throughout: the comment guard used to run *before* the
/// emptiness check and `return`ed, so any non-empty body that happened to
/// contain a comment aborted the traversal entirely and empty functions
/// nested inside it were never reported. Comments are now only consulted
/// for bodies that actually are empty.
impl Visit for NoEmptyFunction {
    noop_visit_type!();

    fn visit_function(&mut self, function: &Function) {
        if let Some(BlockStmt { stmts, span, .. }) = &function.body {
            if stmts.is_empty() {
                // An empty body carrying a comment is treated as intentional.
                if !(self.consider_comments && self.has_comment_in_body(span)) {
                    let (count, modifiers) = get_modifiers::<2>(FunctionMarkers {
                        is_async: function.is_async,
                        is_generator: function.is_generator,
                        ..Default::default()
                    });
                    self.check(
                        function.span,
                        "function",
                        self.functions.as_ref(),
                        &modifiers[0..count],
                    );
                }
                return;
            }
            function.visit_children_with(self);
        }
    }

    fn visit_arrow_expr(&mut self, function: &ArrowExpr) {
        if let BlockStmtOrExpr::BlockStmt(BlockStmt { stmts, span, .. }) = &*function.body {
            if stmts.is_empty() {
                if !(self.consider_comments && self.has_comment_in_body(span)) {
                    let (count, modifiers) = get_modifiers::<2>(FunctionMarkers {
                        is_async: function.is_async,
                        is_generator: function.is_generator,
                        ..Default::default()
                    });
                    self.check(
                        function.span,
                        "arrow function",
                        self.arrow_functions.as_ref(),
                        &modifiers[0..count],
                    );
                }
                return;
            }
        }
        // Expression bodies and non-empty blocks: keep traversing.
        function.visit_children_with(self);
    }

    fn visit_constructor(&mut self, constructor: &Constructor) {
        if let Some(BlockStmt { span, stmts, .. }) = &constructor.body {
            if stmts.is_empty() && !(self.consider_comments && self.has_comment_in_body(span)) {
                // A constructor carries at most one modifier: accessibility.
                let (count, modifiers) = get_modifiers::<1>(FunctionMarkers {
                    is_private: constructor.accessibility.eq(&Some(Accessibility::Private)),
                    is_public: constructor.accessibility.eq(&Some(Accessibility::Public)),
                    is_protected: constructor
                        .accessibility
                        .eq(&Some(Accessibility::Protected)),
                    ..Default::default()
                });
                self.check(
                    constructor.span,
                    "constructor",
                    self.constructors.as_ref(),
                    &modifiers[0..count],
                );
            }
        }
        // Parameter properties and decorators are still worth visiting even
        // when the body is empty, matching the original behavior.
        constructor.visit_children_with(self);
    }

    fn visit_class_method(&mut self, class_method: &ClassMethod) {
        let method = &class_method.function;
        if let Some(BlockStmt { span, stmts, .. }) = &method.body {
            if stmts.is_empty() {
                if !(self.consider_comments && self.has_comment_in_body(span)) {
                    let (count, modifiers) = get_modifiers::<3>(FunctionMarkers {
                        is_async: method.is_async,
                        is_generator: method.is_generator,
                        is_getter: class_method.kind.eq(&MethodKind::Getter),
                        is_setter: class_method.kind.eq(&MethodKind::Setter),
                        is_private: class_method.accessibility.eq(&Some(Accessibility::Private)),
                        is_public: class_method.accessibility.eq(&Some(Accessibility::Public)),
                        is_protected: class_method
                            .accessibility
                            .eq(&Some(Accessibility::Protected)),
                        ..Default::default()
                    });
                    self.check(
                        class_method.span,
                        "method",
                        self.methods.as_ref(),
                        &modifiers[0..count],
                    );
                }
                return;
            }
            class_method.visit_children_with(self);
        }
    }

    fn visit_getter_prop(&mut self, getter_prop: &GetterProp) {
        if let Some(BlockStmt { stmts, .. }) = &getter_prop.body {
            if stmts.is_empty()
                // NOTE: the span scanned here covers the whole property
                // (key included), as in the original.
                && !(self.consider_comments && self.has_comment_in_body(&getter_prop.span))
            {
                self.check(
                    getter_prop.span,
                    "method",
                    self.methods.as_ref(),
                    &[FunctionModifiers::Getter],
                );
            }
        }
        getter_prop.visit_children_with(self);
    }

    fn visit_setter_prop(&mut self, setter_prop: &SetterProp) {
        if let Some(BlockStmt { stmts, .. }) = &setter_prop.body {
            if stmts.is_empty()
                && !(self.consider_comments && self.has_comment_in_body(&setter_prop.span))
            {
                self.check(
                    setter_prop.span,
                    "method",
                    self.methods.as_ref(),
                    &[FunctionModifiers::Setter],
                );
            }
        }
        setter_prop.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_empty_pattern.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
pub fn no_empty_pattern(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
let rule_reaction = config.get_rule_reaction();
match rule_reaction {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoEmptyPattern::new(rule_reaction))),
}
}
#[derive(Debug, Default)]
struct NoEmptyPattern {
expected_reaction: LintRuleReaction,
}
impl NoEmptyPattern {
fn new(expected_reaction: LintRuleReaction) -> Self {
Self { expected_reaction }
}
fn emit_report(&self, span: Span, format_type: &str) {
let message = format!("Unexpected empty {} pattern", format_type);
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, &message).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, &message).emit();
}
_ => {}
});
}
}
impl Visit for NoEmptyPattern {
fn visit_array_pat(&mut self, array_pat: &ArrayPat) {
if array_pat.elems.is_empty() {
self.emit_report(array_pat.span, "array");
}
array_pat.visit_children_with(self);
}
fn visit_object_pat(&mut self, object_pat: &ObjectPat) {
if object_pat.props.is_empty() {
self.emit_report(object_pat.span, "object");
}
object_pat.visit_children_with(self);
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_loop_func.rs | Rust | use rustc_hash::{FxHashMap, FxHashSet};
use swc_atoms::Atom;
use swc_common::{errors::HANDLER, Span, DUMMY_SP};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// Builds the `no-loop-func` rule, or `None` when it is turned off.
pub fn no_loop_func(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let reaction = config.get_rule_reaction();
    if matches!(reaction, LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoLoopFunc::new(reaction)))
}
/// Visitor state for the `no-loop-func` rule.
#[derive(Debug, Default)]
struct NoLoopFunc {
    // How a violation is reported (error vs. warning).
    expected_reaction: LintRuleReaction,
    // Kind (`var`/`let`/`const`) of the `VarDecl` currently being visited.
    current_var_kind: Option<VarDeclKind>,
    // Number of loops enclosing the current node.
    loop_depth: usize,
    // Number of functions enclosing the current node.
    function_depth: usize,
    // True while visiting a loop's head clauses (init/test/update or the
    // left/right of for-in/for-of), as opposed to its body.
    inside_loop_decl: bool,
    // Stack of lexical scopes, keyed by the span of their block; a dummy
    // span stands in for the root scope.
    scopes: Vec<Span>,
    // Per-scope sets of bindings considered unsafe to capture in a function
    // declared inside a loop.
    scoped_unsafe_vars: FxHashMap<Span, FxHashSet<Id>>,
    // Names of unsafe variables referenced by the function currently being
    // visited — presumably populated by the identifier-visiting code outside
    // this chunk; TODO confirm against the full visitor.
    current_fn_unsafe_vars: FxHashSet<Atom>,
}
impl NoLoopFunc {
    fn new(expected_reaction: LintRuleReaction) -> Self {
        // The root scope is keyed by a dummy span and exists for the whole
        // traversal, so lookups in `is_unsafe_variable` always have at least
        // one scope to consult.
        let root_scope = DUMMY_SP;
        let mut scoped_vars: FxHashMap<Span, FxHashSet<Id>> = Default::default();
        scoped_vars.insert(root_scope, Default::default());
        Self {
            expected_reaction,
            current_var_kind: None,
            loop_depth: 0,
            function_depth: 0,
            inside_loop_decl: false,
            scopes: vec![root_scope],
            scoped_unsafe_vars: scoped_vars,
            current_fn_unsafe_vars: Default::default(),
        }
    }
    /// Builds and emits the diagnostic listing every unsafe variable the
    /// current function captures.
    fn emit_report(&self, span: Span) {
        let mut names = self
            .current_fn_unsafe_vars
            .iter()
            .map(|atom| atom as &str)
            .collect::<Vec<&str>>();
        let message = if names.len() == 1 {
            // Single name: no need to sort a one-element list.
            format!(
                "Function declared in a loop contains unsafe references to variable {}",
                names.join(", ")
            )
        } else {
            // Sort for deterministic diagnostic output (the set iteration
            // order is not stable).
            names.sort_unstable();
            format!(
                "Function declared in a loop contains unsafe references to variable(s) {}",
                names.join(", ")
            )
        };
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, &message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, &message).emit();
            }
            _ => {}
        });
    }
    /// True when `id` was recorded as unsafe in any scope on the stack.
    ///
    /// The `unwrap` relies on the invariant that every span pushed onto
    /// `scopes` has a matching entry in `scoped_unsafe_vars`.
    fn is_unsafe_variable(&self, id: &Id) -> bool {
        self.scopes
            .iter()
            .rev()
            .any(|scope| self.scoped_unsafe_vars.get(scope).unwrap().contains(id))
    }
    /// Recursively records every binding introduced by `pat` as unsafe in
    /// the innermost scope (handles nested array/object destructuring, rest
    /// elements and defaults).
    fn extract_vars(&mut self, pat: &Pat) {
        match pat {
            Pat::Ident(ident) => {
                self.scoped_unsafe_vars
                    .get_mut(self.scopes.last().unwrap())
                    .unwrap()
                    .insert(ident.to_id());
            }
            Pat::Array(ArrayPat { elems, .. }) => {
                // Elements may be holes (`[, x]`), hence the Option.
                elems.iter().for_each(|elem| {
                    if let Some(elem) = elem {
                        self.extract_vars(elem);
                    }
                });
            }
            Pat::Object(ObjectPat { props, .. }) => {
                props.iter().for_each(|prop| match prop {
                    ObjectPatProp::Assign(AssignPatProp { key, .. }) => {
                        // `{ x = default }` binds `x` directly.
                        self.scoped_unsafe_vars
                            .get_mut(self.scopes.last().unwrap())
                            .unwrap()
                            .insert(key.to_id());
                    }
                    ObjectPatProp::KeyValue(KeyValuePatProp { value, .. }) => {
                        // `{ key: pattern }` — recurse into the value side.
                        self.extract_vars(value.as_ref());
                    }
                    ObjectPatProp::Rest(RestPat { arg, .. }) => {
                        self.extract_vars(arg.as_ref());
                    }
                });
            }
            Pat::Rest(RestPat { arg, .. }) => {
                self.extract_vars(arg.as_ref());
            }
            Pat::Assign(AssignPat { left, .. }) => {
                // `pattern = default` — only the left side binds names.
                self.extract_vars(left.as_ref());
            }
            Pat::Invalid(_) => {}
            // Expression "patterns" (assignment targets) bind nothing.
            Pat::Expr(_) => {}
        }
    }
}
impl Visit for NoLoopFunc {
    noop_visit_type!();

    /// Opens a fresh variable scope for the block and discards it (together
    /// with its recorded variables) once the block has been traversed.
    fn visit_block_stmt(&mut self, block: &BlockStmt) {
        self.scopes.push(block.span);
        self.scoped_unsafe_vars
            .insert(block.span, Default::default());
        block.visit_children_with(self);
        self.scopes.pop();
        self.scoped_unsafe_vars.remove(&block.span);
    }

    /// Visits a `for` loop. The head clauses are traversed with
    /// `inside_loop_decl` set so `visit_var_declarator` treats `let`
    /// declarations in the loop head as safe.
    fn visit_for_stmt(&mut self, for_stmt: &ForStmt) {
        self.loop_depth += 1;
        self.inside_loop_decl = true;
        if let Some(clause) = &for_stmt.init {
            clause.visit_children_with(self);
        }
        if let Some(clause) = &for_stmt.test {
            clause.visit_children_with(self);
        }
        if let Some(clause) = &for_stmt.update {
            clause.visit_children_with(self);
        }
        self.inside_loop_decl = false;
        for_stmt.body.visit_children_with(self);
        self.loop_depth -= 1;
    }

    /// Same head/body split for `for..of` loops.
    fn visit_for_of_stmt(&mut self, for_of_stmt: &ForOfStmt) {
        self.loop_depth += 1;
        self.inside_loop_decl = true;
        for_of_stmt.left.visit_children_with(self);
        for_of_stmt.right.visit_children_with(self);
        self.inside_loop_decl = false;
        for_of_stmt.body.visit_children_with(self);
        self.loop_depth -= 1;
    }

    /// Same head/body split for `for..in` loops.
    fn visit_for_in_stmt(&mut self, for_in_stmt: &ForInStmt) {
        self.loop_depth += 1;
        self.inside_loop_decl = true;
        for_in_stmt.left.visit_children_with(self);
        for_in_stmt.right.visit_children_with(self);
        self.inside_loop_decl = false;
        for_in_stmt.body.visit_children_with(self);
        self.loop_depth -= 1;
    }

    /// `while` — only the test is visited under `inside_loop_decl`.
    fn visit_while_stmt(&mut self, while_stmt: &WhileStmt) {
        self.loop_depth += 1;
        self.inside_loop_decl = true;
        while_stmt.test.visit_children_with(self);
        self.inside_loop_decl = false;
        while_stmt.body.visit_children_with(self);
        self.loop_depth -= 1;
    }

    /// `do..while` — same handling as `while`.
    fn visit_do_while_stmt(&mut self, do_while_stmt: &DoWhileStmt) {
        self.loop_depth += 1;
        self.inside_loop_decl = true;
        do_while_stmt.test.visit_children_with(self);
        self.inside_loop_decl = false;
        do_while_stmt.body.visit_children_with(self);
        self.loop_depth -= 1;
    }

    /// Remembers the `var`/`let`/`const` kind for `visit_var_declarator`.
    fn visit_var_decl(&mut self, n: &VarDecl) {
        self.current_var_kind = Some(n.kind);
        n.visit_children_with(self);
        self.current_var_kind = None;
    }

    /// Decides whether the declared names are "unsafe" to capture in a
    /// function defined inside a loop. `const` is always safe; `let` is safe
    /// in a loop head or inside a loop body; everything else (notably `var`,
    /// and `let` outside any loop) is recorded via `extract_vars`.
    fn visit_var_declarator(&mut self, var_declarator: &VarDeclarator) {
        match self.current_var_kind {
            Some(VarDeclKind::Const) => {
                // const always safe
                return;
            }
            Some(VarDeclKind::Let) => {
                if self.inside_loop_decl {
                    // case when var declared into loop head
                    // for (let i = 0; ...) {}
                    return;
                }
                // case when var declared into loop
                // while (cond) { let x = v; ... }
                if self.loop_depth > 0 {
                    return;
                }
            }
            _ => {}
        };
        self.extract_vars(&var_declarator.name);
    }

    /// Tracks function nesting; after traversing the body, reports the
    /// function if it referenced any unsafe loop-local variable. The unsafe
    /// set is swapped out per function, so each function is reported
    /// independently of its nested functions.
    fn visit_function(&mut self, function: &Function) {
        let prev_fn_vars = std::mem::take(&mut self.current_fn_unsafe_vars);
        self.function_depth += 1;
        function.visit_children_with(self);
        if !self.current_fn_unsafe_vars.is_empty() {
            self.emit_report(function.span);
        }
        self.current_fn_unsafe_vars = prev_fn_vars;
        self.function_depth -= 1;
    }

    /// Arrow functions are handled the same way as ordinary functions.
    fn visit_arrow_expr(&mut self, arrow_function: &ArrowExpr) {
        let prev_fn_vars = std::mem::take(&mut self.current_fn_unsafe_vars);
        self.function_depth += 1;
        arrow_function.visit_children_with(self);
        if !self.current_fn_unsafe_vars.is_empty() {
            self.emit_report(arrow_function.span);
        }
        self.current_fn_unsafe_vars = prev_fn_vars;
        self.function_depth -= 1;
    }

    /// Records a reference to an unsafe variable, but only when it occurs
    /// inside a function that is itself nested inside a loop.
    fn visit_ident(&mut self, ident: &Ident) {
        let inside_function_defined_in_loop = self.loop_depth > 0 && self.function_depth > 0;
        if inside_function_defined_in_loop && self.is_unsafe_variable(&ident.to_id()) {
            self.current_fn_unsafe_vars.insert(ident.sym.clone());
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_new.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// Diagnostic text for the `no-new` rule (a `new` expression evaluated
/// purely for its side effects).
const MESSAGE: &str = "Do not use 'new' for side effects";

/// Builds the `no-new` lint rule, or `None` when the rule is turned off.
pub fn no_new(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let reaction = config.get_rule_reaction();
    if matches!(reaction, LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoNew::new(reaction)))
}
/// Visitor for `no-new`: reports `new` expressions whose result is
/// discarded. Positions where the value *is* used (initializers, arguments,
/// assignment right-hand sides, ...) are routed through
/// `check_and_pass_new_expr` so a directly-nested `new` is not reported.
#[derive(Debug, Default)]
struct NoNew {
    expected_reaction: LintRuleReaction,
}
impl NoNew {
    fn new(expected_reaction: LintRuleReaction) -> Self {
        Self { expected_reaction }
    }

    /// Emits `MESSAGE` at `span` as an error or a warning depending on the
    /// configured reaction; does nothing when the rule is off.
    fn emit_report(&self, span: Span) {
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, MESSAGE).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, MESSAGE).emit();
            }
            _ => {}
        });
    }

    /// Visits `expr` in a value-consuming position: a top-level `new` here
    /// is allowed (only its arguments are scanned further), and sequence /
    /// paren wrappers are unwrapped so `(0, new A())` counts as allowed too.
    fn check_and_pass_new_expr(&mut self, expr: &Expr) {
        match expr {
            Expr::New(NewExpr { args, .. }) => {
                // prevent report
                // new is expected
                // also check args is required
                if let Some(args) = args {
                    args.visit_children_with(self);
                }
            }
            Expr::Seq(SeqExpr { exprs, .. }) => {
                let last_idx = exprs.len() - 1;
                exprs.iter().enumerate().for_each(|(idx, expr)| {
                    // check case when new on end of seq
                    // var x = (0, new A());
                    if idx == last_idx {
                        self.check_and_pass_new_expr(expr.as_ref());
                    } else {
                        expr.visit_children_with(self);
                    }
                });
            }
            Expr::Paren(ParenExpr { expr, .. }) => {
                // dive into parens
                // (0, new A(), 0)
                self.check_and_pass_new_expr(expr.as_ref());
            }
            _ => {
                expr.visit_children_with(self);
            }
        }
    }

    /// Walks a binding pattern so default values inside destructuring
    /// (e.g. `var { x = new A() } = obj`) are treated as value-consuming.
    fn check_var_name(&mut self, pat: &Pat) {
        match &pat {
            Pat::Assign(AssignPat { right, .. }) => {
                self.check_and_pass_new_expr(right.as_ref());
            }
            Pat::Array(ArrayPat { elems, .. }) => elems.iter().for_each(|elem| {
                if let Some(elem) = elem {
                    // cases
                    // var [ x = new A() ] = arr;
                    // var [ a, [ y = new A() ] ] = arr;
                    self.check_var_name(elem);
                }
            }),
            Pat::Object(ObjectPat { props, .. }) => {
                props.iter().for_each(|prop| match prop {
                    ObjectPatProp::KeyValue(KeyValuePatProp { value, .. }) => {
                        self.check_var_name(value.as_ref());
                    }
                    ObjectPatProp::Assign(AssignPatProp { value, .. }) => {
                        if let Some(expr) = value.as_ref() {
                            // cases
                            // var { x = new A() } = obj
                            // var { a: { x = new A() } } = obj
                            self.check_and_pass_new_expr(expr.as_ref());
                        }
                    }
                    _ => {}
                });
            }
            _ => {}
        }
    }
}
impl Visit for NoNew {
    noop_visit_type!();

    /// Initializers and destructuring defaults use the value of a `new`.
    fn visit_var_declarator(&mut self, var_decl: &VarDeclarator) {
        self.check_var_name(&var_decl.name);
        if let Some(init) = &var_decl.init {
            self.check_and_pass_new_expr(init.as_ref())
        }
    }

    /// Object-literal property values use the value of a `new`.
    fn visit_key_value_prop(&mut self, key_value_prop: &KeyValueProp) {
        self.check_and_pass_new_expr(key_value_prop.value.as_ref());
    }

    /// Call arguments use the value of a `new`; the callee itself is
    /// traversed normally.
    fn visit_call_expr(&mut self, call: &CallExpr) {
        call.callee.visit_children_with(self);
        call.args.iter().for_each(|arg| {
            self.check_and_pass_new_expr(arg.expr.as_ref());
        });
    }

    /// The right-hand side of an assignment uses the value of a `new`.
    fn visit_assign_expr(&mut self, assign_expr: &AssignExpr) {
        assign_expr.left.visit_children_with(self);
        self.check_and_pass_new_expr(assign_expr.right.as_ref());
    }

    /// Class field initializers use the value of a `new`.
    fn visit_class_prop(&mut self, class_prop: &ClassProp) {
        class_prop.decorators.visit_children_with(self);
        class_prop.key.visit_children_with(self);
        if let Some(value) = &class_prop.value {
            self.check_and_pass_new_expr(value.as_ref());
        }
    }

    /// Any `new` reached through normal traversal (i.e. not rerouted by
    /// `check_and_pass_new_expr`) is in a discarded-value position: report
    /// it, then scan its arguments.
    fn visit_new_expr(&mut self, new_expr: &NewExpr) {
        self.emit_report(new_expr.span);
        if let Some(args) = &new_expr.args {
            args.iter().for_each(|arg| {
                self.check_and_pass_new_expr(arg.expr.as_ref());
            });
        }
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_new_object.rs | Rust | use swc_common::{errors::HANDLER, Span, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const MESSAGE: &str = "The object literal notation {} is preferable";
pub fn no_new_object(
unresolved_ctxt: SyntaxContext,
config: &RuleConfig<()>,
) -> Option<Box<dyn Rule>> {
let rule_reaction = config.get_rule_reaction();
match rule_reaction {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoNewObject::new(
rule_reaction,
unresolved_ctxt,
))),
}
}
#[derive(Debug, Default)]
struct NoNewObject {
expected_reaction: LintRuleReaction,
unresolved_ctxt: SyntaxContext,
}
impl NoNewObject {
fn new(expected_reaction: LintRuleReaction, unresolved_ctxt: SyntaxContext) -> Self {
Self {
expected_reaction,
unresolved_ctxt,
}
}
fn emit_report(&self, span: Span) {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
}
impl Visit for NoNewObject {
    noop_visit_type!();

    fn visit_new_expr(&mut self, new_expr: &NewExpr) {
        if let Expr::Ident(callee) = new_expr.callee.as_ref() {
            // Only the *global* `Object` is flagged; a local binding named
            // `Object` lives in a different syntax context.
            let is_global_object =
                callee.sym == "Object" && callee.ctxt == self.unresolved_ctxt;
            if is_global_object {
                self.emit_report(new_expr.span);
            }
        }
        new_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_new_symbol.rs | Rust | use swc_common::{errors::HANDLER, Span, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const MESSAGE: &str = "`Symbol` cannot be called as a constructor";
pub fn no_new_symbol(
unresolved_ctxt: SyntaxContext,
config: &RuleConfig<()>,
) -> Option<Box<dyn Rule>> {
let expected_reaction = config.get_rule_reaction();
match expected_reaction {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoNewSymbol::new(
unresolved_ctxt,
expected_reaction,
))),
}
}
#[derive(Debug, Default)]
struct NoNewSymbol {
expected_reaction: LintRuleReaction,
unresolved_ctxt: SyntaxContext,
}
impl NoNewSymbol {
fn new(unresolved_ctxt: SyntaxContext, expected_reaction: LintRuleReaction) -> Self {
Self {
expected_reaction,
unresolved_ctxt,
}
}
fn check(&self, span: Span, ident: &Ident) {
if ident.ctxt != self.unresolved_ctxt {
return;
}
if &*ident.sym == "Symbol" {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
}
}
impl Visit for NoNewSymbol {
    noop_visit_type!();

    fn visit_new_expr(&mut self, new_expr: &NewExpr) {
        match new_expr.callee.as_ref() {
            Expr::Ident(ident) => self.check(new_expr.span, ident),
            _ => {}
        }
        new_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_obj_calls.rs | Rust | use swc_common::{errors::HANDLER, Span, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
const OBJECTS_NAMES: &[&str] = &["Math", "JSON", "Reflect", "Atomics"];
pub fn no_obj_calls(
unresolved_ctxt: SyntaxContext,
config: &RuleConfig<()>,
) -> Option<Box<dyn Rule>> {
let expected_reaction = config.get_rule_reaction();
match expected_reaction {
LintRuleReaction::Off => None,
_ => Some(visitor_rule(NoObjCalls::new(
unresolved_ctxt,
expected_reaction,
))),
}
}
#[derive(Debug, Default)]
struct NoObjCalls {
expected_reaction: LintRuleReaction,
unresolved_ctxt: SyntaxContext,
}
impl NoObjCalls {
fn new(unresolved_ctxt: SyntaxContext, expected_reaction: LintRuleReaction) -> Self {
Self {
expected_reaction,
unresolved_ctxt,
}
}
fn emit_report(&self, span: Span, name: &str) {
let message = format!("'{}' is not a function", name);
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, &message).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, &message).emit();
}
_ => {}
});
}
fn check(&self, span: Span, ident: &Ident) {
if ident.ctxt != self.unresolved_ctxt {
return;
}
let name: &str = &ident.sym;
if OBJECTS_NAMES.contains(&name) {
self.emit_report(span, name);
}
}
}
// NOTE(review): unlike the sibling rules, this impl does not call
// `noop_visit_type!()`, so TS type subtrees are still traversed — confirm
// whether that is intentional.
impl Visit for NoObjCalls {
    fn visit_new_expr(&mut self, new_expr: &NewExpr) {
        if let Expr::Ident(callee) = new_expr.callee.as_ref() {
            self.check(new_expr.span, callee);
        }
        new_expr.visit_children_with(self);
    }

    fn visit_call_expr(&mut self, call_expr: &CallExpr) {
        if let Callee::Expr(callee) = &call_expr.callee {
            if let Expr::Ident(ident) = &**callee {
                self.check(call_expr.span, ident);
            }
        }
        call_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_param_reassign.rs | Rust | use dashmap::DashMap;
use regex::Regex;
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use swc_common::{errors::HANDLER, sync::Lazy, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// Error text used when an `ignorePropertyModificationsForRegex` entry fails
/// to compile as a Rust regex.
const INVALID_REGEX_MESSAGE: &str = "no-param-reassign: invalid regex pattern in allowPattern. Check syntax documentation https://docs.rs/regex/latest/regex/#syntax";

/// User configuration for the `no-param-reassign` rule.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NoParamReassignConfig {
    // When `true` (the default), mutating a parameter's *properties* is
    // also reported, not just reassigning the parameter itself.
    props: Option<bool>,
    // Parameter names that are never reported.
    ignore_property_modifications_for: Option<FxHashSet<String>>,
    // Regex patterns; a parameter whose name matches any of them is never
    // reported.
    ignore_property_modifications_for_regex: Option<Vec<String>>,
}
/// Builds the `no-param-reassign` rule from `config`, or `None` when it is
/// turned off.
pub fn no_param_reassign(config: &RuleConfig<NoParamReassignConfig>) -> Option<Box<dyn Rule>> {
    if matches!(config.get_rule_reaction(), LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoParamReassign::new(config)))
}
/// Visitor state for `no-param-reassign`.
#[derive(Debug, Default)]
struct NoParamReassign {
    expected_reaction: LintRuleReaction,
    // Parameter bindings of each function currently being visited, keyed by
    // the function's span.
    scoped_params: FxHashMap<Span, FxHashSet<Id>>,
    // Stack of spans of the enclosing functions (innermost last).
    scopes: Vec<Span>,
    // Whether mutations of parameter *properties* are checked too.
    check_props: bool,
    // Exempt parameter names (from `ignorePropertyModificationsFor`).
    ignore_names: Option<FxHashSet<String>>,
    // Exempt name patterns (from `ignorePropertyModificationsForRegex`).
    ignore_names_patterns: Option<Vec<String>>,
}
impl NoParamReassign {
fn new(config: &RuleConfig<NoParamReassignConfig>) -> Self {
let rule_config = config.get_rule_config();
Self {
expected_reaction: config.get_rule_reaction(),
scoped_params: Default::default(),
scopes: Vec::new(),
check_props: rule_config.props.unwrap_or(true),
ignore_names: rule_config.ignore_property_modifications_for.clone(),
ignore_names_patterns: rule_config.ignore_property_modifications_for_regex.clone(),
}
}
fn emit_report(&self, span: Span, name: &str) {
let message = format!("Assignment to function parameter '{}'", name);
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, &message).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, &message).emit();
}
_ => {}
});
}
fn collect_function_params(&mut self, pat: &Pat) {
match pat {
Pat::Ident(id) => {
self.scoped_params
.get_mut(self.scopes.last().unwrap())
.unwrap()
.insert(id.to_id());
}
Pat::Object(ObjectPat { props, .. }) => props.iter().for_each(|prop| {
match prop {
ObjectPatProp::Assign(AssignPatProp { key, .. }) => {
self.scoped_params
.get_mut(self.scopes.last().unwrap())
.unwrap()
.insert(key.to_id());
}
ObjectPatProp::KeyValue(KeyValuePatProp { value, .. }) => {
self.collect_function_params(value.as_ref());
}
_ => {}
};
}),
Pat::Array(ArrayPat { elems, .. }) => elems.iter().for_each(|elem| {
if let Some(elem) = elem {
self.collect_function_params(elem);
}
}),
_ => {}
}
}
fn is_satisfying_function_param(&self, ident: &Ident) -> bool {
if let Some(ignore_names) = &self.ignore_names {
if ignore_names.contains(&*ident.sym) {
return false;
}
}
let is_function_param = self.scopes.iter().rev().any(|scope| {
self.scoped_params
.get(scope)
.unwrap()
.contains(&ident.to_id())
});
if !is_function_param {
return false;
}
if let Some(ignore_names_patterns) = &self.ignore_names_patterns {
static REGEX_CACHE: Lazy<DashMap<String, Regex, FxBuildHasher>> =
Lazy::new(Default::default);
let sym = &*ident.sym;
let ignored_by_pattern = ignore_names_patterns.iter().any(|pattern| {
if !REGEX_CACHE.contains_key(pattern) {
REGEX_CACHE.insert(
pattern.clone(),
Regex::new(pattern).expect(INVALID_REGEX_MESSAGE),
);
}
return REGEX_CACHE.get(pattern).unwrap().is_match(sym);
});
if ignored_by_pattern {
return false;
}
}
true
}
fn check_obj_member(&self, member_expr: &MemberExpr) {
if !self.check_props {
return;
}
match member_expr.obj.unwrap_seqs_and_parens() {
Expr::Ident(ident) => {
if self.is_satisfying_function_param(ident) {
self.emit_report(ident.span, &ident.sym);
}
}
Expr::Member(member_expr) => {
self.check_obj_member(member_expr);
}
_ => {}
}
}
fn check_pat_or_expr(&self, pat_or_expr: &AssignTarget) {
match pat_or_expr {
AssignTarget::Pat(pat) => match pat {
AssignTargetPat::Array(array_pat) => {
self.check_array_pat(array_pat);
}
AssignTargetPat::Object(object_pat) => {
self.check_object_pat(object_pat);
}
AssignTargetPat::Invalid(..) => {}
},
AssignTarget::Simple(expr) => match expr {
SimpleAssignTarget::Ident(ident) => {
if self.is_satisfying_function_param(&Ident::from(ident)) {
self.emit_report(ident.span, &ident.sym);
}
}
SimpleAssignTarget::Member(member_expr) => {
self.check_obj_member(member_expr);
}
_ => {}
},
}
}
fn check_expr(&self, expr: &Expr) {
match expr.unwrap_seqs_and_parens() {
Expr::Ident(ident) => {
if self.is_satisfying_function_param(ident) {
self.emit_report(ident.span, &ident.sym);
}
}
Expr::Member(member_expr) => {
self.check_obj_member(member_expr);
}
_ => {}
}
}
fn check_array_pat(&self, ArrayPat { elems, .. }: &ArrayPat) {
elems.iter().for_each(|elem| {
if let Some(elem) = elem {
self.check_pat(elem);
}
});
}
fn check_object_pat(&self, ObjectPat { props, .. }: &ObjectPat) {
props.iter().for_each(|prop| match prop {
ObjectPatProp::Assign(AssignPatProp { key, .. }) => {
if self.is_satisfying_function_param(&Ident::from(key)) {
self.emit_report(key.span, &key.sym);
}
}
ObjectPatProp::KeyValue(KeyValuePatProp { value, .. }) => {
self.check_pat(value.as_ref());
}
_ => {}
});
}
fn check_pat(&self, pat: &Pat) {
match pat {
Pat::Ident(id) => {
if self.is_satisfying_function_param(&Ident::from(id)) {
self.emit_report(id.span, &id.sym);
}
}
Pat::Expr(expr) => {
if let Expr::Member(member_expr) = expr.as_ref() {
self.check_obj_member(member_expr);
}
}
Pat::Object(p) => {
self.check_object_pat(p);
}
Pat::Array(p) => {
self.check_array_pat(p);
}
_ => {}
}
}
}
impl Visit for NoParamReassign {
    noop_visit_type!();

    // NOTE(review): only `Function` nodes open a parameter scope here —
    // arrow-function parameters are never collected (no `visit_arrow_expr`);
    // confirm whether that is intentional.

    /// Opens a parameter scope keyed by the function's span, records the
    /// function's parameter bindings, traverses the body, then discards the
    /// scope.
    fn visit_function(&mut self, function: &Function) {
        self.scopes.push(function.span);
        self.scoped_params.insert(function.span, Default::default());
        function.params.iter().for_each(|param| {
            self.collect_function_params(&param.pat);
        });
        function.visit_children_with(self);
        self.scopes.pop();
        self.scoped_params.remove(&function.span);
    }

    /// `param = ...` and destructuring assignment targets.
    fn visit_assign_expr(&mut self, assign_expr: &AssignExpr) {
        self.check_pat_or_expr(&assign_expr.left);
        assign_expr.visit_children_with(self);
    }

    /// `param++` / `--param`.
    fn visit_update_expr(&mut self, update_expr: &UpdateExpr) {
        self.check_expr(update_expr.arg.as_ref());
        update_expr.visit_children_with(self);
    }

    /// `for (param of ...)` reassigns the loop head target.
    fn visit_for_of_stmt(&mut self, for_of_stmt: &ForOfStmt) {
        if let ForHead::Pat(pat) = &for_of_stmt.left {
            self.check_pat(pat);
        }
        for_of_stmt.visit_children_with(self);
    }

    /// `for (param in ...)` reassigns the loop head target.
    fn visit_for_in_stmt(&mut self, for_in_stmt: &ForInStmt) {
        if let ForHead::Pat(pat) = &for_in_stmt.left {
            self.check_pat(pat);
        }
        for_in_stmt.visit_children_with(self);
    }

    /// `delete param.prop` counts as a mutation of the parameter.
    fn visit_unary_expr(&mut self, unary_expr: &UnaryExpr) {
        if let op!("delete") = unary_expr.op {
            self.check_expr(unary_expr.arg.as_ref());
        }
        unary_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_prototype_builtins.rs | Rust | use swc_atoms::Atom;
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_utils::ExprExt;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// `Object.prototype` methods that should not be invoked directly on an
/// arbitrary object (the target may shadow them or have a `null` prototype).
const METHODS: [&str; 3] = ["hasOwnProperty", "isPrototypeOf", "propertyIsEnumerable"];

/// Builds the `no-prototype-builtins` rule, or `None` when it is off.
pub fn no_prototype_builtins(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let reaction = config.get_rule_reaction();
    if matches!(reaction, LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoPrototypeBuiltins::new(reaction)))
}
/// Scratch state describing the member chain of the callee currently being
/// inspected.
#[derive(Debug, Default)]
struct CallInfo {
    // Names of the callee chain, outermost property first
    // (`a.b.hasOwnProperty` yields ["hasOwnProperty", "b", "a"]).
    chain: Vec<Atom>,
    // Span of the first name pushed, i.e. the candidate method name.
    method_span: Option<Span>,
}

/// Visitor for `no-prototype-builtins`.
#[derive(Debug, Default)]
struct NoPrototypeBuiltins {
    expected_reaction: LintRuleReaction,
    call_info: CallInfo,
}
impl NoPrototypeBuiltins {
    fn new(expected_reaction: LintRuleReaction) -> Self {
        Self {
            expected_reaction,
            call_info: Default::default(),
        }
    }

    /// Emits the diagnostic for a direct `Object.prototype` method access.
    fn emit_error(&self, span: Span, method: &str) {
        let message = format!(
            "Do not access Object.prototype method '{}' from target object",
            method
        );
        HANDLER.with(|handler| match self.expected_reaction {
            LintRuleReaction::Error => {
                handler.struct_span_err(span, &message).emit();
            }
            LintRuleReaction::Warning => {
                handler.struct_span_warn(span, &message).emit();
            }
            _ => {}
        });
    }

    /// Appends `atom` to the callee chain. The very first name pushed is the
    /// outermost property (the candidate method name), so its span is
    /// remembered for the diagnostic.
    fn extend_chain(&mut self, span: Span, atom: Atom) {
        if self.call_info.method_span.is_none() {
            self.call_info.method_span = Some(span);
        }
        self.call_info.chain.push(atom);
    }

    /// Flattens a callee expression into `call_info.chain`, outermost
    /// property first. Handles plain and optional member access,
    /// string-literal and single-quasi template computed keys, parenthesized
    /// expressions, and the last element of sequence expressions.
    fn extract_path(&mut self, expr: &Expr) {
        match expr {
            Expr::Member(member) => {
                match &member.prop {
                    MemberProp::Ident(ident) => {
                        self.extend_chain(ident.span, ident.sym.clone());
                    }
                    MemberProp::Computed(computed_prop_name) => {
                        match computed_prop_name.expr.as_ref() {
                            Expr::Lit(_) | Expr::Tpl(_) | Expr::Paren(_) | Expr::Seq(_) => {
                                self.extract_path(&computed_prop_name.expr);
                            }
                            _ => {}
                        }
                    }
                    _ => {}
                }
                self.extract_path(member.obj.as_ref());
            }
            Expr::OptChain(OptChainExpr { base, .. }) => {
                if let Some(member_expr) = base.as_member() {
                    if let Some(ident) = member_expr.prop.as_ident() {
                        self.extend_chain(ident.span, ident.sym.clone());
                    }
                    self.extract_path(member_expr.obj.as_ref());
                }
            }
            Expr::Paren(ParenExpr { expr, .. }) => {
                self.extract_path(expr.as_ref());
            }
            Expr::Seq(SeqExpr { exprs, .. }) => {
                // Only the last expression of a sequence is the callee value.
                self.extract_path(exprs.last().unwrap().as_ref());
            }
            Expr::Lit(Lit::Str(lit_str)) => {
                self.extend_chain(lit_str.span, lit_str.value.clone());
            }
            Expr::Tpl(tpl) => {
                // Only templates without substitutions have a known value.
                if tpl.exprs.is_empty() && tpl.quasis.len() == 1 {
                    self.extend_chain(tpl.span, tpl.quasis[0].raw.clone());
                }
            }
            Expr::Ident(ident) => {
                self.extend_chain(ident.span, ident.sym.clone());
            }
            _ => {}
        }
    }

    /// Checks one callee: records its member chain and reports when the
    /// outermost property is an `Object.prototype` builtin accessed on some
    /// object (chain length > 1 rules out a bare `hasOwnProperty(...)`).
    /// The previous chain is saved/restored so nested checks do not clobber
    /// each other.
    fn check(&mut self, expr: &Expr) {
        let prev_call_info = std::mem::take(&mut self.call_info);
        self.extract_path(expr);
        if self.call_info.chain.len() > 1 {
            let method_name = self.call_info.chain[0].as_str();
            if METHODS.contains(&method_name) {
                self.emit_error(self.call_info.method_span.unwrap(), method_name);
            }
        }
        self.call_info = prev_call_info;
    }
}
impl Visit for NoPrototypeBuiltins {
    noop_visit_type!();

    /// Checks callees of optional calls (`obj?.hasOwnProperty(...)`).
    fn visit_opt_chain_base(&mut self, opt_chain_base: &OptChainBase) {
        if let OptChainBase::Call(opt_call) = opt_chain_base {
            self.check(opt_call.callee.as_expr());
        }
        opt_chain_base.visit_children_with(self);
    }

    /// Checks callees of ordinary calls.
    fn visit_call_expr(&mut self, call_expr: &CallExpr) {
        if let Some(expr) = call_expr.callee.as_expr() {
            self.check(expr.as_ref());
        }
        call_expr.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_restricted_syntax.rs | Rust | use serde::{Deserialize, Serialize};
use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_visit_type, Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// One restricted binary operator together with its report message.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct BinaryExpression {
    message: String,
    operator: BinaryOp,
}

/// Message reported for every `for..in` statement.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForInExpression {
    message: String,
}

/// Message reported for every `for..of` statement.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForOfExpression {
    message: String,
}

/// Message reported for every `with` statement.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct WithStatement {
    message: String,
}

/// Message reported for every labeled statement.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct LabelStatement {
    message: String,
}

/// User configuration for `no-restricted-syntax`. Keys are serialized in
/// `PascalCase` (e.g. `BinaryExpression`).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NoRestrictedSyntaxConfig {
    binary_expression: Option<Vec<BinaryExpression>>,
    for_in_expression: Option<ForInExpression>,
    for_of_expression: Option<ForOfExpression>,
    with_statement: Option<Vec<WithStatement>>,
    label_statement: Option<Vec<LabelStatement>>,
}
/// Builds the `no-restricted-syntax` rule, or `None` when it is turned off.
pub fn no_restricted_syntax(
    config: &RuleConfig<NoRestrictedSyntaxConfig>,
) -> Option<Box<dyn Rule>> {
    if matches!(config.get_rule_reaction(), LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoRestrictedSyntax::new(config)))
}
#[derive(Debug, Default)]
struct NoRestrictedSyntax {
expected_reaction: LintRuleReaction,
binary_expr: Option<Vec<BinaryExpression>>,
for_in_expression: Option<ForInExpression>,
for_of_expression: Option<ForOfExpression>,
with_statement: Option<Vec<WithStatement>>,
label_statement: Option<Vec<LabelStatement>>,
}
impl NoRestrictedSyntax {
fn new(config: &RuleConfig<NoRestrictedSyntaxConfig>) -> Self {
let rule_config = config.get_rule_config();
Self {
expected_reaction: config.get_rule_reaction(),
binary_expr: rule_config.binary_expression.clone(),
for_in_expression: rule_config.for_in_expression.clone(),
for_of_expression: rule_config.for_of_expression.clone(),
with_statement: rule_config.with_statement.clone(),
label_statement: rule_config.label_statement.clone(),
}
}
fn emit_report(&self, span: Span, message: &str) {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, message).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, message).emit();
}
_ => {}
});
}
}
impl Visit for NoRestrictedSyntax {
    noop_visit_type!();

    /// Reports a binary expression when its operator matches the *first*
    /// configured `BinaryExpression` entry with that operator.
    fn visit_expr(&mut self, expr: &Expr) {
        if let Expr::Bin(BinExpr { span, op, .. }) = expr {
            let op = *op;
            if let Some(binary_expr) = &self.binary_expr {
                let rule = binary_expr.iter().find(|rule| rule.operator == op);
                if let Some(BinaryExpression { message, .. }) = rule {
                    self.emit_report(*span, message);
                }
            }
        }
        expr.visit_children_with(self);
    }

    /// Reports every `for..in` when configured.
    fn visit_for_in_stmt(&mut self, for_in: &ForInStmt) {
        if let Some(ForInExpression { message }) = &self.for_in_expression {
            self.emit_report(for_in.span, message);
        }
        for_in.visit_children_with(self);
    }

    /// Reports every `for..of` when configured.
    fn visit_for_of_stmt(&mut self, for_of: &ForOfStmt) {
        if let Some(ForOfExpression { message }) = &self.for_of_expression {
            self.emit_report(for_of.span, message);
        }
        for_of.visit_children_with(self);
    }

    /// Reports a `with` statement once per configured entry.
    fn visit_with_stmt(&mut self, with_stmt: &WithStmt) {
        if let Some(rules) = &self.with_statement {
            rules.iter().for_each(|rule| {
                self.emit_report(with_stmt.span, &rule.message);
            });
        }
        with_stmt.visit_children_with(self);
    }

    /// Reports a labeled statement once per configured entry.
    fn visit_labeled_stmt(&mut self, labeled_stmt: &LabeledStmt) {
        if let Some(rules) = &self.label_statement {
            rules.iter().for_each(|rule| {
                self.emit_report(labeled_stmt.span, &rule.message);
            });
        }
        labeled_stmt.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_ecma_lints/src/rules/no_sparse_arrays.rs | Rust | use swc_common::{errors::HANDLER, Span};
use swc_ecma_ast::*;
use swc_ecma_visit::{Visit, VisitWith};
use crate::{
config::{LintRuleReaction, RuleConfig},
rule::{visitor_rule, Rule},
};
/// Diagnostic text for the `no-sparse-arrays` rule.
const MESSAGE: &str = "Unexpected comma in middle of array";

/// Builds the `no-sparse-arrays` rule, or `None` when it is turned off.
pub fn no_sparse_arrays(config: &RuleConfig<()>) -> Option<Box<dyn Rule>> {
    let reaction = config.get_rule_reaction();
    if matches!(reaction, LintRuleReaction::Off) {
        return None;
    }
    Some(visitor_rule(NoSparseArrays::new(reaction)))
}
#[derive(Debug, Default)]
struct NoSparseArrays {
expected_reaction: LintRuleReaction,
}
impl NoSparseArrays {
fn new(expected_reaction: LintRuleReaction) -> Self {
Self { expected_reaction }
}
fn emit_report(&self, span: Span) {
HANDLER.with(|handler| match self.expected_reaction {
LintRuleReaction::Error => {
handler.struct_span_err(span, MESSAGE).emit();
}
LintRuleReaction::Warning => {
handler.struct_span_warn(span, MESSAGE).emit();
}
_ => {}
});
}
fn check(&self, span: Span, elems: &[Option<ExprOrSpread>]) {
let len = elems.len();
// case
// []
if len == 0 {
return;
}
// case
// [,]
if len == 1 && elems[0].is_none() {
self.emit_report(span);
return;
}
let last_idx = len - 1;
let is_sparse_array = elems
.iter()
.enumerate()
.any(|(idx, x)| idx != last_idx && x.is_none());
// cases like
// [1,,2]
if is_sparse_array {
self.emit_report(span);
}
}
}
// NOTE(review): unlike most sibling rules, this impl does not call
// `noop_visit_type!()`; confirm whether traversing TS type subtrees here is
// intentional.
impl Visit for NoSparseArrays {
    fn visit_array_lit(&mut self, array: &ArrayLit) {
        self.check(array.span, &array.elems);
        array.visit_children_with(self);
    }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.