repo stringlengths 6 47 | file_url stringlengths 77 269 | file_path stringlengths 5 186 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-07 08:35:43 2026-01-07 08:55:24 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/transaction.go | callbacks/transaction.go | package callbacks
import (
"gorm.io/gorm"
)
// BeginTransaction starts a default transaction for the current statement
// unless SkipDefaultTransaction is enabled or an error already occurred.
// On success the statement's ConnPool is swapped to the transaction's and a
// marker is stored so CommitOrRollbackTransaction knows to finalize it.
func BeginTransaction(db *gorm.DB) {
	if db.Config.SkipDefaultTransaction || db.Error != nil {
		return
	}
	tx := db.Begin()
	switch {
	case tx.Error == nil:
		db.Statement.ConnPool = tx.Statement.ConnPool
		db.InstanceSet("gorm:started_transaction", true)
	case tx.Error == gorm.ErrInvalidTransaction:
		// transactions unsupported for this connection: treat as a no-op
		tx.Error = nil
	default:
		db.Error = tx.Error
	}
}
// CommitOrRollbackTransaction finalizes the transaction opened by
// BeginTransaction: rollback when the statement carries an error, commit
// otherwise. The statement's ConnPool is then restored to the base pool.
func CommitOrRollbackTransaction(db *gorm.DB) {
	if db.Config.SkipDefaultTransaction {
		return
	}
	if _, started := db.InstanceGet("gorm:started_transaction"); !started {
		return
	}
	if db.Error == nil {
		db.Commit()
	} else {
		db.Rollback()
	}
	db.Statement.ConnPool = db.ConnPool
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/interfaces.go | callbacks/interfaces.go | package callbacks
import "gorm.io/gorm"
// BeforeCreateInterface is implemented by models that declare a
// BeforeCreate hook.
type BeforeCreateInterface interface {
	BeforeCreate(*gorm.DB) error
}

// AfterCreateInterface is implemented by models that declare an
// AfterCreate hook.
type AfterCreateInterface interface {
	AfterCreate(*gorm.DB) error
}

// BeforeUpdateInterface is implemented by models that declare a
// BeforeUpdate hook.
type BeforeUpdateInterface interface {
	BeforeUpdate(*gorm.DB) error
}

// AfterUpdateInterface is implemented by models that declare an
// AfterUpdate hook.
type AfterUpdateInterface interface {
	AfterUpdate(*gorm.DB) error
}

// BeforeSaveInterface is implemented by models that declare a
// BeforeSave hook.
type BeforeSaveInterface interface {
	BeforeSave(*gorm.DB) error
}

// AfterSaveInterface is implemented by models that declare an
// AfterSave hook.
type AfterSaveInterface interface {
	AfterSave(*gorm.DB) error
}

// BeforeDeleteInterface is implemented by models that declare a
// BeforeDelete hook (invoked by the BeforeDelete callback).
type BeforeDeleteInterface interface {
	BeforeDelete(*gorm.DB) error
}

// AfterDeleteInterface is implemented by models that declare an
// AfterDelete hook (invoked by the AfterDelete callback).
type AfterDeleteInterface interface {
	AfterDelete(*gorm.DB) error
}

// AfterFindInterface is implemented by models that declare an
// AfterFind hook.
type AfterFindInterface interface {
	AfterFind(*gorm.DB) error
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/delete.go | callbacks/delete.go | package callbacks
import (
"reflect"
"strings"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils"
)
// BeforeDelete invokes the model's BeforeDelete hook (when the schema
// declares one and hooks aren't skipped) prior to executing the delete.
func BeforeDelete(db *gorm.DB) {
	if db.Error != nil || db.Statement.Schema == nil || db.Statement.SkipHooks || !db.Statement.Schema.BeforeDelete {
		return
	}
	callMethod(db, func(value interface{}, tx *gorm.DB) bool {
		i, ok := value.(BeforeDeleteInterface)
		if !ok {
			return false
		}
		db.AddError(i.BeforeDelete(tx))
		return true
	})
}
// DeleteBeforeAssociations deletes dependent association rows before the
// owner record itself is deleted, but only for associations the caller
// explicitly selected (the restricted select-columns path).
func DeleteBeforeAssociations(db *gorm.DB) {
	if db.Error == nil && db.Statement.Schema != nil {
		selectColumns, restricted := db.Statement.SelectAndOmitColumns(true, false)
		if !restricted {
			// no association columns selected: nothing to cascade
			return
		}

		for column, v := range selectColumns {
			if !v {
				continue
			}

			rel, ok := db.Statement.Schema.Relationships.Relations[column]
			if !ok {
				continue
			}

			switch rel.Type {
			case schema.HasOne, schema.HasMany:
				// delete associated rows matching the owner's key values
				queryConds := rel.ToQueryConditions(db.Statement.Context, db.Statement.ReflectValue)
				modelValue := reflect.New(rel.FieldSchema.ModelType).Interface()
				tx := db.Session(&gorm.Session{NewDB: true}).Model(modelValue)
				withoutConditions := false
				if db.Statement.Unscoped {
					tx = tx.Unscoped()
				}

				if len(db.Statement.Selects) > 0 {
					selects := make([]string, 0, len(db.Statement.Selects))
					for _, s := range db.Statement.Selects {
						if s == clause.Associations {
							selects = append(selects, s)
						} else if columnPrefix := column + "."; strings.HasPrefix(s, columnPrefix) {
							// forward nested selects ("Rel.Child" -> "Child")
							selects = append(selects, strings.TrimPrefix(s, columnPrefix))
						}
					}

					if len(selects) > 0 {
						tx = tx.Select(selects)
					}
				}

				for _, cond := range queryConds {
					if c, ok := cond.(clause.IN); ok && len(c.Values) == 0 {
						// empty IN list: skip rather than run an
						// unconditioned DELETE
						withoutConditions = true
						break
					}
				}

				if !withoutConditions && db.AddError(tx.Clauses(clause.Where{Exprs: queryConds}).Delete(modelValue).Error) != nil {
					return
				}
			case schema.Many2Many:
				// for many2many, only the join-table rows are removed
				var (
					queryConds     = make([]clause.Expression, 0, len(rel.References))
					foreignFields  = make([]*schema.Field, 0, len(rel.References))
					relForeignKeys = make([]string, 0, len(rel.References))
					modelValue     = reflect.New(rel.JoinTable.ModelType).Interface()
					table          = rel.JoinTable.Table
					tx             = db.Session(&gorm.Session{NewDB: true}).Model(modelValue).Table(table)
				)

				for _, ref := range rel.References {
					if ref.OwnPrimaryKey {
						foreignFields = append(foreignFields, ref.PrimaryKey)
						relForeignKeys = append(relForeignKeys, ref.ForeignKey.DBName)
					} else if ref.PrimaryValue != "" {
						// fixed-value reference becomes an equality condition
						queryConds = append(queryConds, clause.Eq{
							Column: clause.Column{Table: rel.JoinTable.Table, Name: ref.ForeignKey.DBName},
							Value:  ref.PrimaryValue,
						})
					}
				}

				_, foreignValues := schema.GetIdentityFieldValuesMap(db.Statement.Context, db.Statement.ReflectValue, foreignFields)
				column, values := schema.ToQueryValues(table, relForeignKeys, foreignValues)
				queryConds = append(queryConds, clause.IN{Column: column, Values: values})

				if db.AddError(tx.Clauses(clause.Where{Exprs: queryConds}).Delete(modelValue).Error) != nil {
					return
				}
			}
		}
	}
}
// Delete builds the delete callback. Whether the dialect supports
// RETURNING is resolved once from config.DeleteClauses, outside the
// returned closure.
func Delete(config *Config) func(db *gorm.DB) {
	supportReturning := utils.Contains(config.DeleteClauses, "RETURNING")

	return func(db *gorm.DB) {
		if db.Error != nil {
			return
		}

		// apply clauses declared on the schema (if any)
		if db.Statement.Schema != nil {
			for _, c := range db.Statement.Schema.DeleteClauses {
				db.Statement.AddClause(c)
			}
		}

		if db.Statement.SQL.Len() == 0 {
			db.Statement.SQL.Grow(100)
			db.Statement.AddClauseIfNotExists(clause.Delete{})

			if db.Statement.Schema != nil {
				// add WHERE conditions from the dest value's primary keys
				_, queryValues := schema.GetIdentityFieldValuesMap(db.Statement.Context, db.Statement.ReflectValue, db.Statement.Schema.PrimaryFields)
				column, values := schema.ToQueryValues(db.Statement.Table, db.Statement.Schema.PrimaryFieldDBNames, queryValues)

				if len(values) > 0 {
					db.Statement.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}})
				}

				// also honor primary keys carried by a Model value distinct
				// from Dest
				if db.Statement.ReflectValue.CanAddr() && db.Statement.Dest != db.Statement.Model && db.Statement.Model != nil {
					_, queryValues = schema.GetIdentityFieldValuesMap(db.Statement.Context, reflect.ValueOf(db.Statement.Model), db.Statement.Schema.PrimaryFields)
					column, values = schema.ToQueryValues(db.Statement.Table, db.Statement.Schema.PrimaryFieldDBNames, queryValues)

					if len(values) > 0 {
						db.Statement.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}})
					}
				}
			}

			db.Statement.AddClauseIfNotExists(clause.From{})

			db.Statement.Build(db.Statement.BuildClauses...)
		}

		// validate the statement has conditions (see checkMissingWhereConditions)
		checkMissingWhereConditions(db)

		if !db.DryRun && db.Error == nil {
			ok, mode := hasReturning(db, supportReturning)
			if !ok {
				// plain DELETE: exec and record the affected row count
				result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...)
				if db.AddError(err) == nil {
					db.RowsAffected, _ = result.RowsAffected()

					if db.Statement.Result != nil {
						db.Statement.Result.Result = result
						db.Statement.Result.RowsAffected = db.RowsAffected
					}
				}

				return
			}

			// DELETE ... RETURNING: scan returned rows back via gorm.Scan
			if rows, err := db.Statement.ConnPool.QueryContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...); db.AddError(err) == nil {
				gorm.Scan(rows, db, mode)

				if db.Statement.Result != nil {
					db.Statement.Result.RowsAffected = db.RowsAffected
				}
				db.AddError(rows.Close())
			}
		}
	}
}
// AfterDelete invokes the model's AfterDelete hook (when the schema
// declares one and hooks aren't skipped) once the delete has run.
func AfterDelete(db *gorm.DB) {
	if db.Error != nil || db.Statement.Schema == nil || db.Statement.SkipHooks || !db.Statement.Schema.AfterDelete {
		return
	}
	callMethod(db, func(value interface{}, tx *gorm.DB) bool {
		i, ok := value.(AfterDeleteInterface)
		if !ok {
			return false
		}
		db.AddError(i.AfterDelete(tx))
		return true
	})
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/create_test.go | callbacks/create_test.go | package callbacks
import (
"reflect"
"sync"
"testing"
"time"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"gorm.io/gorm/schema"
)
// schemaCache is the shared cache handed to schema.Parse by tests in this
// package.
var schemaCache = &sync.Map{}
// TestConvertToCreateValues_DestType_Slice checks that a slice destination
// yields one values row per element, with columns in a stable order:
// plain columns first, then set default-value columns, then the id.
func TestConvertToCreateValues_DestType_Slice(t *testing.T) {
	type user struct {
		ID    int `gorm:"primaryKey"`
		Name  string
		Email string `gorm:"default:(-)"`
		Age   int    `gorm:"default:(-)"`
	}

	s, err := schema.Parse(&user{}, schemaCache, schema.NamingStrategy{})
	if err != nil {
		t.Errorf("parse schema error: %v, is not expected", err)
		return
	}
	dest := []*user{
		{
			ID:    1,
			Name:  "alice",
			Email: "email",
			Age:   18,
		},
		{
			ID:    2,
			Name:  "bob",
			Email: "email",
			Age:   19,
		},
	}
	stmt := &gorm.Statement{
		DB: &gorm.DB{
			Config: &gorm.Config{
				// freeze time so auto-timestamp behavior is deterministic
				NowFunc: func() time.Time { return time.Time{} },
			},
			Statement: &gorm.Statement{
				Settings: sync.Map{},
				Schema:   s,
			},
		},
		ReflectValue: reflect.ValueOf(dest),
		Dest:         dest,
	}
	stmt.Schema = s

	values := ConvertToCreateValues(stmt)
	expected := clause.Values{
		// column has value + defaultValue column has value (which should have a stable order)
		Columns: []clause.Column{{Name: "name"}, {Name: "email"}, {Name: "age"}, {Name: "id"}},
		Values: [][]interface{}{
			{"alice", "email", 18, 1},
			{"bob", "email", 19, 2},
		},
	}
	if !reflect.DeepEqual(expected, values) {
		t.Errorf("expected: %v got %v", expected, values)
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/helper_test.go | callbacks/helper_test.go | package callbacks
import (
"reflect"
"testing"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
// TestLoadOrStoreVisitMap verifies visit-map bookkeeping: the first store
// of a value reports not-loaded, a repeat reports loaded, and a slice is
// considered loaded only when every element was seen before.
func TestLoadOrStoreVisitMap(t *testing.T) {
	type testM struct {
		Name string
	}

	var (
		t1 = testM{Name: "t1"}
		t2 = testM{Name: "t2"}
		t3 = testM{Name: "t3"}
		vm = make(visitMap)
	)

	if loadOrStoreVisitMap(&vm, reflect.ValueOf(&t1)) {
		t.Fatalf("loaded should be false")
	}
	if !loadOrStoreVisitMap(&vm, reflect.ValueOf(&t1)) {
		t.Fatalf("loaded should be true")
	}
	// t1 already exist but t2 not
	if loadOrStoreVisitMap(&vm, reflect.ValueOf([]*testM{&t1, &t2, &t3})) {
		t.Fatalf("loaded should be false")
	}
	if !loadOrStoreVisitMap(&vm, reflect.ValueOf([]*testM{&t2, &t3})) {
		t.Fatalf("loaded should be true")
	}
}
// TestConvertMapToValuesForCreate checks single-column map inputs of each
// scalar kind convert into a one-column, one-row clause.Values.
func TestConvertMapToValuesForCreate(t *testing.T) {
	cases := []struct {
		name   string
		column string
		value  interface{}
	}{
		{"Test convert string value", "name", "my name"},
		{"Test convert int value", "age", 18},
		{"Test convert float value", "score", 99.5},
		{"Test convert bool value", "active", true},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			actual := ConvertMapToValuesForCreate(&gorm.Statement{}, map[string]interface{}{c.column: c.value})
			expect := clause.Values{
				Columns: []clause.Column{{Name: c.column}},
				Values:  [][]interface{}{{c.value}},
			}
			if !reflect.DeepEqual(actual, expect) {
				t.Errorf("expect %v got %v", expect, actual)
			}
		})
	}
}
// TestConvertSliceOfMapToValuesForCreate checks single-element map slices
// of each scalar kind convert into a one-column, one-row clause.Values.
func TestConvertSliceOfMapToValuesForCreate(t *testing.T) {
	cases := []struct {
		name   string
		column string
		value  interface{}
	}{
		{"Test convert slice of string value", "name", "my name"},
		{"Test convert slice of int value", "age", 18},
		{"Test convert slice of float value", "score", 99.5},
		{"Test convert slice of bool value", "active", true},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			input := []map[string]interface{}{
				{c.column: c.value},
			}
			actual := ConvertSliceOfMapToValuesForCreate(&gorm.Statement{}, input)
			expect := clause.Values{
				Columns: []clause.Column{{Name: c.column}},
				Values:  [][]interface{}{{c.value}},
			}
			if !reflect.DeepEqual(actual, expect) {
				t.Errorf("expected %v but got %v", expect, actual)
			}
		})
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/associations.go | callbacks/associations.go | package callbacks
import (
"reflect"
"strings"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils"
)
// SaveBeforeAssociations returns a callback that saves BelongsTo
// associations before the owning record is written, then copies each saved
// association's primary key into the owner's foreign key fields.
func SaveBeforeAssociations(create bool) func(db *gorm.DB) {
	return func(db *gorm.DB) {
		if db.Error == nil && db.Statement.Schema != nil {
			selectColumns, restricted := db.Statement.SelectAndOmitColumns(create, !create)

			// Save Belongs To associations
			for _, rel := range db.Statement.Schema.Relationships.BelongsTo {
				if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) {
					continue
				}

				// setupReferences copies the association's primary key into
				// the owner's foreign key, mirroring both into a map dest.
				setupReferences := func(obj reflect.Value, elem reflect.Value) {
					for _, ref := range rel.References {
						if !ref.OwnPrimaryKey {
							pv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, elem)
							db.AddError(ref.ForeignKey.Set(db.Statement.Context, obj, pv))

							if dest, ok := db.Statement.Dest.(map[string]interface{}); ok {
								dest[ref.ForeignKey.DBName] = pv
								if _, ok := dest[rel.Name]; ok {
									dest[rel.Name] = elem.Interface()
								}
							}
						}
					}
				}

				switch db.Statement.ReflectValue.Kind() {
				case reflect.Slice, reflect.Array:
					var (
						rValLen   = db.Statement.ReflectValue.Len()
						objs      = make([]reflect.Value, 0, rValLen)
						fieldType = rel.Field.FieldType
						isPtr     = fieldType.Kind() == reflect.Ptr
					)

					if !isPtr {
						fieldType = reflect.PointerTo(fieldType)
					}

					elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10)
					distinctElems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10)
					identityMap := map[string]bool{}
					for i := 0; i < rValLen; i++ {
						obj := db.Statement.ReflectValue.Index(i)
						if reflect.Indirect(obj).Kind() != reflect.Struct {
							break
						}

						if _, zero := rel.Field.ValueOf(db.Statement.Context, obj); !zero { // check belongs to relation value
							rv := rel.Field.ReflectValueOf(db.Statement.Context, obj) // relation reflect value
							if !isPtr {
								rv = rv.Addr()
							}
							objs = append(objs, obj)
							elems = reflect.Append(elems, rv)

							relPrimaryValues := make([]interface{}, 0, len(rel.FieldSchema.PrimaryFields))
							for _, pf := range rel.FieldSchema.PrimaryFields {
								if pfv, ok := pf.ValueOf(db.Statement.Context, rv); !ok {
									relPrimaryValues = append(relPrimaryValues, pfv)
								}
							}

							// de-duplicate by primary-key identity so each
							// distinct association is saved only once
							cacheKey := utils.ToStringKey(relPrimaryValues...)
							if len(relPrimaryValues) != len(rel.FieldSchema.PrimaryFields) || !identityMap[cacheKey] {
								if cacheKey != "" { // has primary fields
									identityMap[cacheKey] = true
								}

								distinctElems = reflect.Append(distinctElems, rv)
							}
						}
					}

					if elems.Len() > 0 {
						if saveAssociations(db, rel, distinctElems, selectColumns, restricted, nil) == nil {
							// save succeeded: wire foreign keys back to owners
							for i := 0; i < elems.Len(); i++ {
								setupReferences(objs[i], elems.Index(i))
							}
						}
					}
				case reflect.Struct:
					if _, zero := rel.Field.ValueOf(db.Statement.Context, db.Statement.ReflectValue); !zero {
						rv := rel.Field.ReflectValueOf(db.Statement.Context, db.Statement.ReflectValue) // relation reflect value
						if rv.Kind() != reflect.Ptr {
							rv = rv.Addr()
						}

						if saveAssociations(db, rel, rv, selectColumns, restricted, nil) == nil {
							setupReferences(db.Statement.ReflectValue, rv)
						}
					}
				}
			}
		}
	}
}
// SaveAfterAssociations returns a callback that saves HasOne, HasMany and
// Many2Many associations after the owning record has been written, copying
// the owner's key values into each association's foreign keys and, for
// many2many relations, creating the join-table rows.
func SaveAfterAssociations(create bool) func(db *gorm.DB) {
	return func(db *gorm.DB) {
		if db.Error == nil && db.Statement.Schema != nil {
			selectColumns, restricted := db.Statement.SelectAndOmitColumns(create, !create)

			// Save Has One associations
			for _, rel := range db.Statement.Schema.Relationships.HasOne {
				if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) {
					continue
				}

				switch db.Statement.ReflectValue.Kind() {
				case reflect.Slice, reflect.Array:
					var (
						fieldType = rel.Field.FieldType
						isPtr     = fieldType.Kind() == reflect.Ptr
					)

					if !isPtr {
						fieldType = reflect.PointerTo(fieldType)
					}

					elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10)

					for i := 0; i < db.Statement.ReflectValue.Len(); i++ {
						obj := db.Statement.ReflectValue.Index(i)

						if reflect.Indirect(obj).Kind() == reflect.Struct {
							if _, zero := rel.Field.ValueOf(db.Statement.Context, obj); !zero {
								rv := rel.Field.ReflectValueOf(db.Statement.Context, obj)
								if rv.Kind() != reflect.Ptr {
									rv = rv.Addr()
								}

								// stamp the owner's key (or fixed value) into
								// the association's foreign key fields
								for _, ref := range rel.References {
									if ref.OwnPrimaryKey {
										fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, obj)
										db.AddError(ref.ForeignKey.Set(db.Statement.Context, rv, fv))
									} else if ref.PrimaryValue != "" {
										db.AddError(ref.ForeignKey.Set(db.Statement.Context, rv, ref.PrimaryValue))
									}
								}

								elems = reflect.Append(elems, rv)
							}
						}
					}

					if elems.Len() > 0 {
						assignmentColumns := make([]string, 0, len(rel.References))
						for _, ref := range rel.References {
							assignmentColumns = append(assignmentColumns, ref.ForeignKey.DBName)
						}

						saveAssociations(db, rel, elems, selectColumns, restricted, assignmentColumns)
					}
				case reflect.Struct:
					if _, zero := rel.Field.ValueOf(db.Statement.Context, db.Statement.ReflectValue); !zero {
						f := rel.Field.ReflectValueOf(db.Statement.Context, db.Statement.ReflectValue)
						if f.Kind() != reflect.Ptr {
							f = f.Addr()
						}

						assignmentColumns := make([]string, 0, len(rel.References))
						for _, ref := range rel.References {
							if ref.OwnPrimaryKey {
								fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, db.Statement.ReflectValue)
								db.AddError(ref.ForeignKey.Set(db.Statement.Context, f, fv))
							} else if ref.PrimaryValue != "" {
								db.AddError(ref.ForeignKey.Set(db.Statement.Context, f, ref.PrimaryValue))
							}
							assignmentColumns = append(assignmentColumns, ref.ForeignKey.DBName)
						}

						saveAssociations(db, rel, f, selectColumns, restricted, assignmentColumns)
					}
				}
			}

			// Save Has Many associations
			for _, rel := range db.Statement.Schema.Relationships.HasMany {
				if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) {
					continue
				}

				fieldType := rel.Field.IndirectFieldType.Elem()
				isPtr := fieldType.Kind() == reflect.Ptr
				if !isPtr {
					fieldType = reflect.PointerTo(fieldType)
				}
				elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10)
				identityMap := map[string]bool{}
				// appendToElems collects each element of v's relation slice,
				// stamping the owner's keys into its foreign keys and
				// de-duplicating by primary-key identity.
				appendToElems := func(v reflect.Value) {
					if _, zero := rel.Field.ValueOf(db.Statement.Context, v); !zero {
						f := reflect.Indirect(rel.Field.ReflectValueOf(db.Statement.Context, v))

						for i := 0; i < f.Len(); i++ {
							elem := f.Index(i)
							for _, ref := range rel.References {
								if ref.OwnPrimaryKey {
									pv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, v)
									db.AddError(ref.ForeignKey.Set(db.Statement.Context, elem, pv))
								} else if ref.PrimaryValue != "" {
									db.AddError(ref.ForeignKey.Set(db.Statement.Context, elem, ref.PrimaryValue))
								}
							}

							relPrimaryValues := make([]interface{}, 0, len(rel.FieldSchema.PrimaryFields))
							for _, pf := range rel.FieldSchema.PrimaryFields {
								if pfv, ok := pf.ValueOf(db.Statement.Context, elem); !ok {
									relPrimaryValues = append(relPrimaryValues, pfv)
								}
							}

							cacheKey := utils.ToStringKey(relPrimaryValues...)
							if len(relPrimaryValues) != len(rel.FieldSchema.PrimaryFields) || !identityMap[cacheKey] {
								if cacheKey != "" { // has primary fields
									identityMap[cacheKey] = true
								}

								if isPtr {
									elems = reflect.Append(elems, elem)
								} else {
									elems = reflect.Append(elems, elem.Addr())
								}
							}
						}
					}
				}

				switch db.Statement.ReflectValue.Kind() {
				case reflect.Slice, reflect.Array:
					for i := 0; i < db.Statement.ReflectValue.Len(); i++ {
						obj := db.Statement.ReflectValue.Index(i)
						if reflect.Indirect(obj).Kind() == reflect.Struct {
							appendToElems(obj)
						}
					}
				case reflect.Struct:
					appendToElems(db.Statement.ReflectValue)
				}

				if elems.Len() > 0 {
					assignmentColumns := make([]string, 0, len(rel.References))
					for _, ref := range rel.References {
						assignmentColumns = append(assignmentColumns, ref.ForeignKey.DBName)
					}

					saveAssociations(db, rel, elems, selectColumns, restricted, assignmentColumns)
				}
			}

			// Save Many2Many associations
			for _, rel := range db.Statement.Schema.Relationships.Many2Many {
				if v, ok := selectColumns[rel.Name]; (ok && !v) || (!ok && restricted) {
					continue
				}

				fieldType := rel.Field.IndirectFieldType.Elem()
				isPtr := fieldType.Kind() == reflect.Ptr
				if !isPtr {
					fieldType = reflect.PointerTo(fieldType)
				}
				elems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10)
				distinctElems := reflect.MakeSlice(reflect.SliceOf(fieldType), 0, 10)
				joins := reflect.MakeSlice(reflect.SliceOf(reflect.PointerTo(rel.JoinTable.ModelType)), 0, 10)
				objs := []reflect.Value{}

				// appendToJoins builds one join-table row linking obj and elem.
				appendToJoins := func(obj reflect.Value, elem reflect.Value) {
					joinValue := reflect.New(rel.JoinTable.ModelType)
					for _, ref := range rel.References {
						if ref.OwnPrimaryKey {
							fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, obj)
							db.AddError(ref.ForeignKey.Set(db.Statement.Context, joinValue, fv))
						} else if ref.PrimaryValue != "" {
							db.AddError(ref.ForeignKey.Set(db.Statement.Context, joinValue, ref.PrimaryValue))
						} else {
							fv, _ := ref.PrimaryKey.ValueOf(db.Statement.Context, elem)
							db.AddError(ref.ForeignKey.Set(db.Statement.Context, joinValue, fv))
						}
					}
					joins = reflect.Append(joins, joinValue)
				}

				identityMap := map[string]bool{}
				// appendToElems gathers relation elements from v; distinctElems
				// receives only the first occurrence of each primary key.
				appendToElems := func(v reflect.Value) {
					if _, zero := rel.Field.ValueOf(db.Statement.Context, v); !zero {
						f := reflect.Indirect(rel.Field.ReflectValueOf(db.Statement.Context, v))

						for i := 0; i < f.Len(); i++ {
							elem := f.Index(i)
							if !isPtr {
								elem = elem.Addr()
							}
							objs = append(objs, v)
							elems = reflect.Append(elems, elem)

							relPrimaryValues := make([]interface{}, 0, len(rel.FieldSchema.PrimaryFields))
							for _, pf := range rel.FieldSchema.PrimaryFields {
								if pfv, ok := pf.ValueOf(db.Statement.Context, elem); !ok {
									relPrimaryValues = append(relPrimaryValues, pfv)
								}
							}

							cacheKey := utils.ToStringKey(relPrimaryValues...)
							if len(relPrimaryValues) != len(rel.FieldSchema.PrimaryFields) || !identityMap[cacheKey] {
								if cacheKey != "" { // has primary fields
									identityMap[cacheKey] = true
								}

								distinctElems = reflect.Append(distinctElems, elem)
							}
						}
					}
				}

				switch db.Statement.ReflectValue.Kind() {
				case reflect.Slice, reflect.Array:
					for i := 0; i < db.Statement.ReflectValue.Len(); i++ {
						obj := db.Statement.ReflectValue.Index(i)
						if reflect.Indirect(obj).Kind() == reflect.Struct {
							appendToElems(obj)
						}
					}
				case reflect.Struct:
					appendToElems(db.Statement.ReflectValue)
				}

				// optimize elems of reflect value length
				if elemLen := elems.Len(); elemLen > 0 {
					if v, ok := selectColumns[rel.Name+".*"]; !ok || v {
						saveAssociations(db, rel, distinctElems, selectColumns, restricted, nil)
					}

					for i := 0; i < elemLen; i++ {
						appendToJoins(objs[i], elems.Index(i))
					}
				}

				if joins.Len() > 0 {
					// insert join rows, silently skipping ones that conflict
					db.AddError(db.Session(&gorm.Session{NewDB: true}).Clauses(clause.OnConflict{DoNothing: true}).Session(&gorm.Session{
						SkipHooks:                db.Statement.SkipHooks,
						DisableNestedTransaction: true,
					}).Create(joins.Interface()).Error)
				}
			}
		}
	}
}
// onConflictOption builds the ON CONFLICT clause used when saving
// associations: conflicting rows keyed on the schema's primary keys are
// updated (all columns under FullSaveAssociations, otherwise the given
// columns); with neither, conflicts are silently ignored.
func onConflictOption(stmt *gorm.Statement, s *schema.Schema, defaultUpdatingColumns []string) (onConflict clause.OnConflict) {
	updateAll := stmt.DB.FullSaveAssociations
	if len(defaultUpdatingColumns) == 0 && !updateAll {
		onConflict.DoNothing = true
		return
	}

	columns := make([]clause.Column, 0, len(s.PrimaryFieldDBNames))
	for _, dbName := range s.PrimaryFieldDBNames {
		columns = append(columns, clause.Column{Name: dbName})
	}
	onConflict.Columns = columns
	onConflict.UpdateAll = updateAll
	if !updateAll {
		onConflict.DoUpdates = clause.AssignmentColumns(defaultUpdatingColumns)
	}
	return
}
// saveAssociations creates the given association values on a fresh session,
// applying relation-scoped Select/Omit columns and the ON CONFLICT behavior
// from onConflictOption. Returns the error (if any) recorded on db.
func saveAssociations(db *gorm.DB, rel *schema.Relationship, rValues reflect.Value, selectColumns map[string]bool, restricted bool, defaultUpdatingColumns []string) error {
	// stop save association loop
	if checkAssociationsSaved(db, rValues) {
		return nil
	}

	var (
		selects, omits []string
		onConflict     = onConflictOption(db.Statement, rel.FieldSchema, defaultUpdatingColumns)
		refName        = rel.Name + "."
		values         = rValues.Interface()
	)

	// translate "Relation.Column" select/omit entries into plain column
	// names for the association's own statement
	for name, ok := range selectColumns {
		columnName := ""
		if strings.HasPrefix(name, refName) {
			columnName = strings.TrimPrefix(name, refName)
		}

		if columnName != "" {
			if ok {
				selects = append(selects, columnName)
			} else {
				omits = append(omits, columnName)
			}
		}
	}

	tx := db.Session(&gorm.Session{NewDB: true}).Clauses(onConflict).Session(&gorm.Session{
		FullSaveAssociations:     db.FullSaveAssociations,
		SkipHooks:                db.Statement.SkipHooks,
		DisableNestedTransaction: true,
	})

	// carry the parent statement's settings over to the new session
	db.Statement.Settings.Range(func(k, v interface{}) bool {
		tx.Statement.Settings.Store(k, v)
		return true
	})

	if tx.Statement.FullSaveAssociations {
		tx = tx.Set("gorm:update_track_time", true)
	}

	if len(selects) > 0 {
		tx = tx.Select(selects)
	} else if restricted && len(omits) == 0 {
		// columns were restricted but none name this relation: don't cascade
		tx = tx.Omit(clause.Associations)
	}

	if len(omits) > 0 {
		tx = tx.Omit(omits...)
	}

	return db.AddError(tx.Create(values).Error)
}
// check association values has been saved
// if values kind is Struct, check it has been saved
// if values kind is Slice/Array, check all items have been saved
// visitMapStoreKey is the statement-settings key under which the visit map
// of already-saved association values is stored for the current query.
var visitMapStoreKey = "gorm:saved_association_map"
// checkAssociationsSaved reports whether the given association values were
// already saved during this query, recording them in a visit map stored on
// the statement settings so recursive association saves terminate.
// For a struct value it checks that single value; for a slice/array it
// reports true only when every element has been visited before.
func checkAssociationsSaved(db *gorm.DB, values reflect.Value) bool {
	if visit, ok := db.Get(visitMapStoreKey); ok {
		if v, ok := visit.(*visitMap); ok {
			if loadOrStoreVisitMap(v, values) {
				return true
			}
		}
	} else {
		// first association save in this query: create and register the map
		// (fixes the previously misspelled local "vistMap")
		visited := make(visitMap)
		loadOrStoreVisitMap(&visited, values)
		db.Set(visitMapStoreKey, &visited)
	}

	return false
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/preload.go | callbacks/preload.go | package callbacks
import (
"fmt"
"reflect"
"sort"
"strings"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils"
)
// parsePreloadMap extracts nested preloads. e.g.
//
// // schema has a "k0" relation and a "k7.k8" embedded relation
// parsePreloadMap(schema, map[string][]interface{}{
// clause.Associations: {"arg1"},
// "k1": {"arg2"},
// "k2.k3": {"arg3"},
// "k4.k5.k6": {"arg4"},
// })
// // preloadMap is
// map[string]map[string][]interface{}{
// "k0": {},
// "k7": {
// "k8": {},
// },
// "k1": {},
// "k2": {
// "k3": {"arg3"},
// },
// "k4": {
// "k5.k6": {"arg4"},
// },
// }
// parsePreloadMap splits dotted preload names into a two-level map keyed by
// the first relation segment, with remaining path -> args underneath.
// clause.Associations expands to every relation declared directly on the
// schema plus each embedded relation.
func parsePreloadMap(s *schema.Schema, preloads map[string][]interface{}) map[string]map[string][]interface{} {
	result := map[string]map[string][]interface{}{}
	add := func(name, nested string, args []interface{}) {
		if result[name] == nil {
			result[name] = map[string][]interface{}{}
		}
		if nested != "" {
			result[name][nested] = args
		}
	}

	for name, args := range preloads {
		head, nested, _ := strings.Cut(name, ".")
		if head != clause.Associations {
			add(head, nested, args)
			continue
		}
		for _, relation := range s.Relationships.Relations {
			if relation.Schema == s {
				add(relation.Name, nested, args)
			}
		}
		for embedded, embeddedRelations := range s.Relationships.EmbeddedRelations {
			for _, nestedName := range embeddedValues(embeddedRelations) {
				add(embedded, nestedName, args)
			}
		}
	}
	return result
}
// embeddedValues flattens an embedded relationship tree into preload names,
// dropping the leading embedded-struct segment from each bind-name path.
func embeddedValues(embeddedRelations *schema.Relationships) []string {
	if embeddedRelations == nil {
		return nil
	}
	capacity := len(embeddedRelations.Relations) + len(embeddedRelations.EmbeddedRelations)
	out := make([]string, 0, capacity)
	for _, rel := range embeddedRelations.Relations {
		// skip first struct name
		path := rel.Field.EmbeddedBindNames[1:]
		out = append(out, strings.Join(path, "."))
	}
	for _, child := range embeddedRelations.EmbeddedRelations {
		out = append(out, embeddedValues(child)...)
	}
	return out
}
// preloadEntryPoint enters layer by layer. It will call real preload if it finds the right entry point.
// If the current relationship is embedded or joined, current query will be ignored.
//
//nolint:cyclop
func preloadEntryPoint(db *gorm.DB, joins []string, relationships *schema.Relationships, preloads map[string][]interface{}, associationsConds []interface{}) error {
	preloadMap := parsePreloadMap(db.Statement.Schema, preloads)

	// avoid random traversal of the map
	preloadNames := make([]string, 0, len(preloadMap))
	for key := range preloadMap {
		preloadNames = append(preloadNames, key)
	}
	sort.Strings(preloadNames)

	// isJoined reports whether name was already loaded via Joins, collecting
	// the nested join paths beneath it (e.g. "B" from join "A.B" for name "A").
	isJoined := func(name string) (joined bool, nestedJoins []string) {
		for _, join := range joins {
			if _, ok := relationships.Relations[join]; ok && name == join {
				joined = true
				continue
			}
			join0, join1, cut := strings.Cut(join, ".")
			if cut {
				if _, ok := relationships.Relations[join0]; ok && name == join0 {
					joined = true
					nestedJoins = append(nestedJoins, join1)
				}
			}
		}
		return joined, nestedJoins
	}

	for _, name := range preloadNames {
		if relations := relationships.EmbeddedRelations[name]; relations != nil {
			// embedded relation: descend without issuing a query at this level
			if err := preloadEntryPoint(db, joins, relations, preloadMap[name], associationsConds); err != nil {
				return err
			}
		} else if rel := relationships.Relations[name]; rel != nil {
			if joined, nestedJoins := isJoined(name); joined {
				// relation already joined: recurse into the loaded field
				// values instead of querying it again
				switch rv := db.Statement.ReflectValue; rv.Kind() {
				case reflect.Slice, reflect.Array:
					if rv.Len() > 0 {
						reflectValue := rel.FieldSchema.MakeSlice().Elem()
						for i := 0; i < rv.Len(); i++ {
							frv := rel.Field.ReflectValueOf(db.Statement.Context, rv.Index(i))
							if frv.Kind() != reflect.Ptr {
								reflectValue = reflect.Append(reflectValue, frv.Addr())
							} else {
								// skip nil pointers so the nested preload only
								// sees populated values
								if frv.IsNil() {
									continue
								}
								reflectValue = reflect.Append(reflectValue, frv)
							}
						}

						tx := preloadDB(db, reflectValue, reflectValue.Interface())
						if err := preloadEntryPoint(tx, nestedJoins, &tx.Statement.Schema.Relationships, preloadMap[name], associationsConds); err != nil {
							return err
						}
					}
				case reflect.Struct, reflect.Pointer:
					reflectValue := rel.Field.ReflectValueOf(db.Statement.Context, rv)
					tx := preloadDB(db, reflectValue, reflectValue.Interface())
					if err := preloadEntryPoint(tx, nestedJoins, &tx.Statement.Schema.Relationships, preloadMap[name], associationsConds); err != nil {
						return err
					}
				default:
					return gorm.ErrInvalidData
				}
			} else {
				// plain preload: run a separate query for this relation
				tx := db.Table("").Session(&gorm.Session{Context: db.Statement.Context, SkipHooks: db.Statement.SkipHooks})
				tx.Statement.ReflectValue = db.Statement.ReflectValue
				tx.Statement.Unscoped = db.Statement.Unscoped
				if err := preload(tx, rel, append(preloads[name], associationsConds...), preloadMap[name]); err != nil {
					return err
				}
			}
		} else {
			return fmt.Errorf("%s: %w for schema %s", name, gorm.ErrUnsupportedRelation, db.Statement.Schema.Name)
		}
	}
	return nil
}
// preloadDB derives a fresh, initialized session for preloading: it copies
// the source statement's settings, parses dest to resolve its schema, and
// carries over the reflect value and unscoped flag.
func preloadDB(db *gorm.DB, reflectValue reflect.Value, dest interface{}) *gorm.DB {
	session := &gorm.Session{
		Context:     db.Statement.Context,
		NewDB:       true,
		SkipHooks:   db.Statement.SkipHooks,
		Initialized: true,
	}
	tx := db.Session(session)

	// propagate per-statement settings to the new session
	db.Statement.Settings.Range(func(k, v interface{}) bool {
		tx.Statement.Settings.Store(k, v)
		return true
	})

	if err := tx.Statement.Parse(dest); err != nil {
		tx.AddError(err)
		return tx
	}
	tx.Statement.ReflectValue = reflectValue
	tx.Statement.Unscoped = db.Statement.Unscoped
	return tx
}
// preload loads the association described by rel for every owner row held in
// tx.Statement.ReflectValue and assigns the fetched records back onto the
// owners. conds carries inline conditions / scope funcs for the association
// query; preloads holds nested preloads to apply to the fetched records.
// It returns the first error encountered (query failure, unmatched foreign
// key, or accumulated tx errors).
func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preloads map[string][]interface{}) error {
	var (
		reflectValue     = tx.Statement.ReflectValue
		relForeignKeys   []string
		relForeignFields []*schema.Field
		foreignFields    []*schema.Field
		foreignValues    [][]interface{}
		// identityMap maps a stringified foreign-key tuple to the owner values
		// that should receive records carrying that key.
		identityMap = map[string][]reflect.Value{}
		inlineConds []interface{}
	)
	if rel.JoinTable != nil {
		// many2many: first query the join table to translate owner keys into
		// the related table's keys.
		var (
			joinForeignFields    = make([]*schema.Field, 0, len(rel.References))
			joinRelForeignFields = make([]*schema.Field, 0, len(rel.References))
			joinForeignKeys      = make([]string, 0, len(rel.References))
		)
		for _, ref := range rel.References {
			if ref.OwnPrimaryKey {
				joinForeignKeys = append(joinForeignKeys, ref.ForeignKey.DBName)
				joinForeignFields = append(joinForeignFields, ref.ForeignKey)
				foreignFields = append(foreignFields, ref.PrimaryKey)
			} else if ref.PrimaryValue != "" {
				// polymorphic-style fixed value becomes a static filter.
				tx = tx.Where(clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue})
			} else {
				joinRelForeignFields = append(joinRelForeignFields, ref.ForeignKey)
				relForeignKeys = append(relForeignKeys, ref.PrimaryKey.DBName)
				relForeignFields = append(relForeignFields, ref.PrimaryKey)
			}
		}
		joinIdentityMap, joinForeignValues := schema.GetIdentityFieldValuesMap(tx.Statement.Context, reflectValue, foreignFields)
		if len(joinForeignValues) == 0 {
			// no owner has a usable key; nothing to preload.
			return nil
		}
		joinResults := rel.JoinTable.MakeSlice().Elem()
		column, values := schema.ToQueryValues(clause.CurrentTable, joinForeignKeys, joinForeignValues)
		if err := tx.Where(clause.IN{Column: column, Values: values}).Find(joinResults.Addr().Interface()).Error; err != nil {
			return err
		}
		// convert join identity map to relation identity map
		fieldValues := make([]interface{}, len(joinForeignFields))
		joinFieldValues := make([]interface{}, len(joinRelForeignFields))
		for i := 0; i < joinResults.Len(); i++ {
			joinIndexValue := joinResults.Index(i)
			for idx, field := range joinForeignFields {
				fieldValues[idx], _ = field.ValueOf(tx.Statement.Context, joinIndexValue)
			}
			for idx, field := range joinRelForeignFields {
				joinFieldValues[idx], _ = field.ValueOf(tx.Statement.Context, joinIndexValue)
			}
			if results, ok := joinIdentityMap[utils.ToStringKey(fieldValues...)]; ok {
				joinKey := utils.ToStringKey(joinFieldValues...)
				identityMap[joinKey] = append(identityMap[joinKey], results...)
			}
		}
		_, foreignValues = schema.GetIdentityFieldValuesMap(tx.Statement.Context, joinResults, joinRelForeignFields)
	} else {
		// has-one / has-many / belongs-to: owner keys map directly onto the
		// related table's foreign keys.
		for _, ref := range rel.References {
			if ref.OwnPrimaryKey {
				relForeignKeys = append(relForeignKeys, ref.ForeignKey.DBName)
				relForeignFields = append(relForeignFields, ref.ForeignKey)
				foreignFields = append(foreignFields, ref.PrimaryKey)
			} else if ref.PrimaryValue != "" {
				tx = tx.Where(clause.Eq{Column: ref.ForeignKey.DBName, Value: ref.PrimaryValue})
			} else {
				relForeignKeys = append(relForeignKeys, ref.PrimaryKey.DBName)
				relForeignFields = append(relForeignFields, ref.PrimaryKey)
				foreignFields = append(foreignFields, ref.ForeignKey)
			}
		}
		identityMap, foreignValues = schema.GetIdentityFieldValuesMap(tx.Statement.Context, reflectValue, foreignFields)
		if len(foreignValues) == 0 {
			return nil
		}
	}
	// nested preload
	for p, pvs := range preloads {
		tx = tx.Preload(p, pvs...)
	}
	reflectResults := rel.FieldSchema.MakeSlice().Elem()
	column, values := schema.ToQueryValues(clause.CurrentTable, relForeignKeys, foreignValues)
	if len(values) != 0 {
		tx = tx.Model(reflectResults.Addr().Interface()).Where(clause.IN{Column: column, Values: values})
		// scope funcs are applied to tx; everything else becomes an inline Where.
		for _, cond := range conds {
			if fc, ok := cond.(func(*gorm.DB) *gorm.DB); ok {
				tx = fc(tx)
			} else {
				inlineConds = append(inlineConds, cond)
			}
		}
		if len(inlineConds) > 0 {
			tx = tx.Where(inlineConds[0], inlineConds[1:]...)
		}
		if err := tx.Find(reflectResults.Addr().Interface()).Error; err != nil {
			return err
		}
	}
	fieldValues := make([]interface{}, len(relForeignFields))
	// clean up old values before preloading
	switch reflectValue.Kind() {
	case reflect.Struct:
		switch rel.Type {
		case schema.HasMany, schema.Many2Many:
			tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue, reflect.MakeSlice(rel.Field.IndirectFieldType, 0, 10).Interface()))
		default:
			tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue, reflect.New(rel.Field.FieldType).Interface()))
		}
	case reflect.Slice, reflect.Array:
		for i := 0; i < reflectValue.Len(); i++ {
			switch rel.Type {
			case schema.HasMany, schema.Many2Many:
				tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue.Index(i), reflect.MakeSlice(rel.Field.IndirectFieldType, 0, 10).Interface()))
			default:
				tx.AddError(rel.Field.Set(tx.Statement.Context, reflectValue.Index(i), reflect.New(rel.Field.FieldType).Interface()))
			}
		}
	}
	// assign each fetched record to the owner(s) keyed by its foreign values.
	for i := 0; i < reflectResults.Len(); i++ {
		elem := reflectResults.Index(i)
		for idx, field := range relForeignFields {
			fieldValues[idx], _ = field.ValueOf(tx.Statement.Context, elem)
		}
		datas, ok := identityMap[utils.ToStringKey(fieldValues...)]
		if !ok {
			return fmt.Errorf("failed to assign association %#v, make sure foreign fields exists", elem.Interface())
		}
		for _, data := range datas {
			reflectFieldValue := rel.Field.ReflectValueOf(tx.Statement.Context, data)
			if reflectFieldValue.Kind() == reflect.Ptr && reflectFieldValue.IsNil() {
				reflectFieldValue.Set(reflect.New(rel.Field.FieldType.Elem()))
			}
			reflectFieldValue = reflect.Indirect(reflectFieldValue)
			switch reflectFieldValue.Kind() {
			case reflect.Struct:
				tx.AddError(rel.Field.Set(tx.Statement.Context, data, elem.Interface()))
			case reflect.Slice, reflect.Array:
				if reflectFieldValue.Type().Elem().Kind() == reflect.Ptr {
					tx.AddError(rel.Field.Set(tx.Statement.Context, data, reflect.Append(reflectFieldValue, elem).Interface()))
				} else {
					tx.AddError(rel.Field.Set(tx.Statement.Context, data, reflect.Append(reflectFieldValue, elem.Elem()).Interface()))
				}
			}
		}
	}
	return tx.Error
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/query.go | callbacks/query.go | package callbacks
import (
"fmt"
"reflect"
"strings"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils"
)
// Query builds the SELECT statement, executes it (unless DryRun), and scans
// the resulting rows into the statement destination.
func Query(db *gorm.DB) {
	if db.Error != nil {
		return
	}
	BuildQuerySQL(db)
	if db.DryRun || db.Error != nil {
		return
	}
	stmt := db.Statement
	rows, err := stmt.ConnPool.QueryContext(stmt.Context, stmt.SQL.String(), stmt.Vars...)
	if err != nil {
		db.AddError(err)
		return
	}
	defer func() {
		db.AddError(rows.Close())
	}()
	gorm.Scan(rows, db, 0)
	if stmt.Result != nil {
		stmt.Result.RowsAffected = db.RowsAffected
	}
}
// BuildQuerySQL assembles the SELECT statement for db.Statement: it applies
// schema query clauses, derives the select column list, expands relation
// joins (including nested "A.B" joins) into FROM-clause joins, and finally
// renders the SQL via the statement's build clauses. It is a no-op on the
// SQL buffer when the statement already has SQL (e.g. raw queries).
func BuildQuerySQL(db *gorm.DB) {
	if db.Statement.Schema != nil {
		for _, c := range db.Statement.Schema.QueryClauses {
			db.Statement.AddClause(c)
		}
	}
	if db.Statement.SQL.Len() == 0 {
		db.Statement.SQL.Grow(100)
		clauseSelect := clause.Select{Distinct: db.Statement.Distinct}
		// When the destination is a model struct, non-zero primary keys become
		// implicit WHERE conditions (e.g. First(&user) with user.ID set).
		if db.Statement.ReflectValue.Kind() == reflect.Struct && db.Statement.ReflectValue.Type() == db.Statement.Schema.ModelType {
			var conds []clause.Expression
			for _, primaryField := range db.Statement.Schema.PrimaryFields {
				if v, isZero := primaryField.ValueOf(db.Statement.Context, db.Statement.ReflectValue); !isZero {
					conds = append(conds, clause.Eq{Column: clause.Column{Table: db.Statement.Table, Name: primaryField.DBName}, Value: v})
				}
			}
			if len(conds) > 0 {
				db.Statement.AddClause(clause.Where{Exprs: conds})
			}
		}
		if len(db.Statement.Selects) > 0 {
			// explicit Select(): map names through the schema where possible,
			// otherwise pass them through raw.
			clauseSelect.Columns = make([]clause.Column, len(db.Statement.Selects))
			for idx, name := range db.Statement.Selects {
				if db.Statement.Schema == nil {
					clauseSelect.Columns[idx] = clause.Column{Name: name, Raw: true}
				} else if f := db.Statement.Schema.LookUpField(name); f != nil {
					clauseSelect.Columns[idx] = clause.Column{Name: f.DBName}
				} else {
					clauseSelect.Columns[idx] = clause.Column{Name: name, Raw: true}
				}
			}
		} else if db.Statement.Schema != nil && len(db.Statement.Omits) > 0 {
			// Omit(): keep every schema column not filtered out.
			selectColumns, _ := db.Statement.SelectAndOmitColumns(false, false)
			clauseSelect.Columns = make([]clause.Column, 0, len(db.Statement.Schema.DBNames))
			for _, dbName := range db.Statement.Schema.DBNames {
				if v, ok := selectColumns[dbName]; (ok && v) || !ok {
					clauseSelect.Columns = append(clauseSelect.Columns, clause.Column{Table: db.Statement.Table, Name: dbName})
				}
			}
		} else if db.Statement.Schema != nil && db.Statement.ReflectValue.IsValid() {
			// QueryFields mode, or a destination of a different type than the
			// model: select only the destination struct's columns.
			queryFields := db.QueryFields
			if !queryFields {
				switch db.Statement.ReflectValue.Kind() {
				case reflect.Struct:
					queryFields = db.Statement.ReflectValue.Type() != db.Statement.Schema.ModelType
				case reflect.Slice:
					queryFields = db.Statement.ReflectValue.Type().Elem() != db.Statement.Schema.ModelType
				}
			}
			if queryFields {
				stmt := gorm.Statement{DB: db}
				// smaller struct
				if err := stmt.Parse(db.Statement.Dest); err == nil && (db.QueryFields || stmt.Schema.ModelType != db.Statement.Schema.ModelType) {
					clauseSelect.Columns = make([]clause.Column, len(stmt.Schema.DBNames))
					for idx, dbName := range stmt.Schema.DBNames {
						clauseSelect.Columns[idx] = clause.Column{Table: db.Statement.Table, Name: dbName}
					}
				}
			}
		}
		// inline joins
		fromClause := clause.From{}
		if v, ok := db.Statement.Clauses["FROM"].Expression.(clause.From); ok {
			fromClause = v
		}
		if len(db.Statement.Joins) != 0 || len(fromClause.Joins) != 0 {
			// Joins force a fully-qualified column list so joined columns can
			// be aliased alongside the primary table's columns.
			if len(db.Statement.Selects) == 0 && len(db.Statement.Omits) == 0 && db.Statement.Schema != nil {
				clauseSelect.Columns = make([]clause.Column, len(db.Statement.Schema.DBNames))
				for idx, dbName := range db.Statement.Schema.DBNames {
					clauseSelect.Columns[idx] = clause.Column{Table: db.Statement.Table, Name: dbName}
				}
			}
			// tracks relation aliases already joined so each is joined once.
			specifiedRelationsName := map[string]string{clause.CurrentTable: clause.CurrentTable}
			for _, join := range db.Statement.Joins {
				if db.Statement.Schema != nil {
					var isRelations bool // is relations or raw sql
					var relations []*schema.Relationship
					relation, ok := db.Statement.Schema.Relationships.Relations[join.Name]
					if ok {
						isRelations = true
						relations = append(relations, relation)
					} else {
						// handle nested join like "Manager.Company"
						nestedJoinNames := strings.Split(join.Name, ".")
						if len(nestedJoinNames) > 1 {
							isNestedJoin := true
							guessNestedRelations := make([]*schema.Relationship, 0, len(nestedJoinNames))
							currentRelations := db.Statement.Schema.Relationships.Relations
							for _, relname := range nestedJoinNames {
								// incomplete match, only treated as raw sql
								if relation, ok = currentRelations[relname]; ok {
									guessNestedRelations = append(guessNestedRelations, relation)
									currentRelations = relation.FieldSchema.Relationships.Relations
								} else {
									isNestedJoin = false
									break
								}
							}
							if isNestedJoin {
								isRelations = true
								relations = guessNestedRelations
							}
						}
					}
					if isRelations {
						// genJoinClause builds one JOIN for a relation: selects its
						// columns (aliased), builds the ON conditions from the
						// references, and folds relation query clauses / join.On
						// into the ON expression.
						genJoinClause := func(joinType clause.JoinType, tableAliasName string, parentTableName string, relation *schema.Relationship) clause.Join {
							columnStmt := gorm.Statement{
								Table: tableAliasName, DB: db, Schema: relation.FieldSchema,
								Selects: join.Selects, Omits: join.Omits,
							}
							selectColumns, restricted := columnStmt.SelectAndOmitColumns(false, false)
							for _, s := range relation.FieldSchema.DBNames {
								if v, ok := selectColumns[s]; (ok && v) || (!ok && !restricted) {
									clauseSelect.Columns = append(clauseSelect.Columns, clause.Column{
										Table: tableAliasName,
										Name:  s,
										Alias: utils.NestedRelationName(tableAliasName, s),
									})
								}
							}
							// a caller-supplied join expression overrides everything else.
							if join.Expression != nil {
								return clause.Join{
									Type:       join.JoinType,
									Expression: join.Expression,
								}
							}
							exprs := make([]clause.Expression, len(relation.References))
							for idx, ref := range relation.References {
								if ref.OwnPrimaryKey {
									exprs[idx] = clause.Eq{
										Column: clause.Column{Table: parentTableName, Name: ref.PrimaryKey.DBName},
										Value:  clause.Column{Table: tableAliasName, Name: ref.ForeignKey.DBName},
									}
								} else {
									if ref.PrimaryValue == "" {
										exprs[idx] = clause.Eq{
											Column: clause.Column{Table: parentTableName, Name: ref.ForeignKey.DBName},
											Value:  clause.Column{Table: tableAliasName, Name: ref.PrimaryKey.DBName},
										}
									} else {
										exprs[idx] = clause.Eq{
											Column: clause.Column{Table: tableAliasName, Name: ref.ForeignKey.DBName},
											Value:  ref.PrimaryValue,
										}
									}
								}
							}
							{
								// Render relation-level WHERE clauses (e.g. soft delete)
								// plus join.On into SQL, replacing dialect bind vars with
								// "?" so they can be re-bound as part of the join.
								onStmt := gorm.Statement{Table: tableAliasName, DB: db, Clauses: map[string]clause.Clause{}}
								for _, c := range relation.FieldSchema.QueryClauses {
									onStmt.AddClause(c)
								}
								if join.On != nil {
									onStmt.AddClause(join.On)
								}
								if cs, ok := onStmt.Clauses["WHERE"]; ok {
									if where, ok := cs.Expression.(clause.Where); ok {
										where.Build(&onStmt)
										if onSQL := onStmt.SQL.String(); onSQL != "" {
											vars := onStmt.Vars
											for idx, v := range vars {
												bindvar := strings.Builder{}
												onStmt.Vars = vars[0 : idx+1]
												db.Dialector.BindVarTo(&bindvar, &onStmt, v)
												onSQL = strings.Replace(onSQL, bindvar.String(), "?", 1)
											}
											exprs = append(exprs, clause.Expr{SQL: onSQL, Vars: vars})
										}
									}
								}
							}
							return clause.Join{
								Type:  joinType,
								Table: clause.Table{Name: relation.FieldSchema.Table, Alias: tableAliasName},
								ON:    clause.Where{Exprs: exprs},
							}
						}
						parentTableName := clause.CurrentTable
						for idx, rel := range relations {
							// joins table alias like "Manager, Company, Manager__Company"
							curAliasName := rel.Name
							if parentTableName != clause.CurrentTable {
								curAliasName = utils.NestedRelationName(parentTableName, curAliasName)
							}
							if _, ok := specifiedRelationsName[curAliasName]; !ok {
								aliasName := curAliasName
								if idx == len(relations)-1 && join.Alias != "" {
									aliasName = join.Alias
								}
								fromClause.Joins = append(fromClause.Joins, genJoinClause(join.JoinType, aliasName, specifiedRelationsName[parentTableName], rel))
								specifiedRelationsName[curAliasName] = aliasName
							}
							parentTableName = curAliasName
						}
					} else {
						// not a known relation: treat the join name as raw SQL.
						fromClause.Joins = append(fromClause.Joins, clause.Join{
							Expression: clause.NamedExpr{SQL: join.Name, Vars: join.Conds},
						})
					}
				} else {
					fromClause.Joins = append(fromClause.Joins, clause.Join{
						Expression: clause.NamedExpr{SQL: join.Name, Vars: join.Conds},
					})
				}
			}
			db.Statement.AddClause(fromClause)
		} else {
			db.Statement.AddClauseIfNotExists(clause.From{})
		}
		db.Statement.AddClauseIfNotExists(clauseSelect)
		db.Statement.Build(db.Statement.BuildClauses...)
	}
}
// Preload runs registered preloads after the main query, dispatching them to
// preloadEntryPoint with the statement's join names so join-loaded relations
// can be handled there as well.
func Preload(db *gorm.DB) {
	if db.Error != nil || len(db.Statement.Preloads) == 0 {
		return
	}
	if db.Statement.Schema == nil {
		db.AddError(fmt.Errorf("%w when using preload", gorm.ErrModelValueRequired))
		return
	}
	joinNames := make([]string, 0, len(db.Statement.Joins))
	for _, j := range db.Statement.Joins {
		joinNames = append(joinNames, j.Name)
	}
	tx := preloadDB(db, db.Statement.ReflectValue, db.Statement.Dest)
	if tx.Error != nil {
		return
	}
	db.AddError(preloadEntryPoint(tx, joinNames, &tx.Statement.Schema.Relationships, db.Statement.Preloads, db.Statement.Preloads[clause.Associations]))
}
// AfterQuery restores the FROM clause's original join list (trimming joins
// added for this query, which preload still needs to see via Statement.Joins)
// and then runs AfterFind hooks when any rows were returned.
func AfterQuery(db *gorm.DB) {
	stmt := db.Statement
	if from, ok := stmt.Clauses["FROM"].Expression.(clause.From); ok {
		c := stmt.Clauses["FROM"]
		// keep the original From Joins
		c.Expression = clause.From{Tables: from.Tables, Joins: utils.RTrimSlice(from.Joins, len(stmt.Joins))}
		stmt.Clauses["FROM"] = c
	}
	shouldRunHooks := db.Error == nil && stmt.Schema != nil &&
		!stmt.SkipHooks && stmt.Schema.AfterFind && db.RowsAffected > 0
	if !shouldRunHooks {
		return
	}
	callMethod(db, func(value interface{}, tx *gorm.DB) bool {
		hook, ok := value.(AfterFindInterface)
		if !ok {
			return false
		}
		db.AddError(hook.AfterFind(tx))
		return true
	})
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/raw.go | callbacks/raw.go | package callbacks
import (
"gorm.io/gorm"
)
// RawExec executes the statement's raw SQL via ExecContext (skipped entirely
// in DryRun mode) and records rows-affected on the DB and, when present, on
// the statement result.
func RawExec(db *gorm.DB) {
	if db.Error != nil || db.DryRun {
		return
	}
	stmt := db.Statement
	result, err := stmt.ConnPool.ExecContext(stmt.Context, stmt.SQL.String(), stmt.Vars...)
	if err != nil {
		db.AddError(err)
		return
	}
	db.RowsAffected, _ = result.RowsAffected()
	if stmt.Result != nil {
		stmt.Result.Result = result
		stmt.Result.RowsAffected = db.RowsAffected
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/callmethod.go | callbacks/callmethod.go | package callbacks
import (
"reflect"
"gorm.io/gorm"
)
// callMethod invokes fc on the statement's destination value inside a fresh
// session. If fc reports it was not handled at the top level (e.g. the dest
// is a slice of models rather than a single hook implementer), it is retried
// element-by-element for slices/arrays, or on the addressable struct itself.
func callMethod(db *gorm.DB, fc func(value interface{}, tx *gorm.DB) bool) {
	tx := db.Session(&gorm.Session{NewDB: true})
	if fc(db.Statement.ReflectValue.Interface(), tx) {
		return
	}
	switch db.Statement.ReflectValue.Kind() {
	case reflect.Slice, reflect.Array:
		db.Statement.CurDestIndex = 0
		for i := 0; i < db.Statement.ReflectValue.Len(); i++ {
			elem := reflect.Indirect(db.Statement.ReflectValue.Index(i))
			if !elem.CanAddr() {
				db.AddError(gorm.ErrInvalidValue)
				return
			}
			fc(elem.Addr().Interface(), tx)
			// CurDestIndex lets hooks know which element is being processed.
			db.Statement.CurDestIndex++
		}
	case reflect.Struct:
		if !db.Statement.ReflectValue.CanAddr() {
			db.AddError(gorm.ErrInvalidValue)
			return
		}
		fc(db.Statement.ReflectValue.Addr().Interface(), tx)
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/callbacks.go | callbacks/callbacks.go | package callbacks
import (
"gorm.io/gorm"
)
// Default clause orderings for each statement kind; RegisterDefaultCallbacks
// falls back to these when the corresponding Config list is empty. The order
// determines the order clauses are rendered into SQL.
var (
	createClauses = []string{"INSERT", "VALUES", "ON CONFLICT"}
	queryClauses  = []string{"SELECT", "FROM", "WHERE", "GROUP BY", "ORDER BY", "LIMIT", "FOR"}
	updateClauses = []string{"UPDATE", "SET", "WHERE"}
	deleteClauses = []string{"DELETE", "FROM", "WHERE"}
)

// Config customizes the default callbacks registered by RegisterDefaultCallbacks.
type Config struct {
	// LastInsertIDReversed is passed to the Create callback; presumably it
	// indicates the dialect reports last-insert-id for the last row of a
	// batch insert, so IDs are assigned in reverse — confirm in create.go.
	LastInsertIDReversed bool
	// Per-operation clause orderings; empty slices fall back to the package
	// defaults declared above.
	CreateClauses []string
	QueryClauses  []string
	UpdateClauses []string
	DeleteClauses []string
}
// RegisterDefaultCallbacks wires gorm's built-in processors for create,
// query, update, delete, row and raw operations into db's callback chains,
// in their required execution order. Transaction begin/commit callbacks are
// only matched when default transactions are enabled on the session.
func RegisterDefaultCallbacks(db *gorm.DB, config *Config) {
	enableTransaction := func(db *gorm.DB) bool {
		return !db.SkipDefaultTransaction
	}
	// fall back to package defaults for any clause ordering left empty.
	if len(config.CreateClauses) == 0 {
		config.CreateClauses = createClauses
	}
	if len(config.QueryClauses) == 0 {
		config.QueryClauses = queryClauses
	}
	if len(config.DeleteClauses) == 0 {
		config.DeleteClauses = deleteClauses
	}
	if len(config.UpdateClauses) == 0 {
		config.UpdateClauses = updateClauses
	}
	// create: tx begin -> hooks -> associations -> insert -> associations -> hooks -> tx end
	createCallback := db.Callback().Create()
	createCallback.Match(enableTransaction).Register("gorm:begin_transaction", BeginTransaction)
	createCallback.Register("gorm:before_create", BeforeCreate)
	createCallback.Register("gorm:save_before_associations", SaveBeforeAssociations(true))
	createCallback.Register("gorm:create", Create(config))
	createCallback.Register("gorm:save_after_associations", SaveAfterAssociations(true))
	createCallback.Register("gorm:after_create", AfterCreate)
	createCallback.Match(enableTransaction).Register("gorm:commit_or_rollback_transaction", CommitOrRollbackTransaction)
	createCallback.Clauses = config.CreateClauses
	// query: select -> preload -> after-find hooks (no transaction needed)
	queryCallback := db.Callback().Query()
	queryCallback.Register("gorm:query", Query)
	queryCallback.Register("gorm:preload", Preload)
	queryCallback.Register("gorm:after_query", AfterQuery)
	queryCallback.Clauses = config.QueryClauses
	// delete: tx begin -> hooks -> association cleanup -> delete -> hooks -> tx end
	deleteCallback := db.Callback().Delete()
	deleteCallback.Match(enableTransaction).Register("gorm:begin_transaction", BeginTransaction)
	deleteCallback.Register("gorm:before_delete", BeforeDelete)
	deleteCallback.Register("gorm:delete_before_associations", DeleteBeforeAssociations)
	deleteCallback.Register("gorm:delete", Delete(config))
	deleteCallback.Register("gorm:after_delete", AfterDelete)
	deleteCallback.Match(enableTransaction).Register("gorm:commit_or_rollback_transaction", CommitOrRollbackTransaction)
	deleteCallback.Clauses = config.DeleteClauses
	// update: tx begin -> reflect setup -> hooks -> associations -> update -> associations -> hooks -> tx end
	updateCallback := db.Callback().Update()
	updateCallback.Match(enableTransaction).Register("gorm:begin_transaction", BeginTransaction)
	updateCallback.Register("gorm:setup_reflect_value", SetupUpdateReflectValue)
	updateCallback.Register("gorm:before_update", BeforeUpdate)
	updateCallback.Register("gorm:save_before_associations", SaveBeforeAssociations(false))
	updateCallback.Register("gorm:update", Update(config))
	updateCallback.Register("gorm:save_after_associations", SaveAfterAssociations(false))
	updateCallback.Register("gorm:after_update", AfterUpdate)
	updateCallback.Match(enableTransaction).Register("gorm:commit_or_rollback_transaction", CommitOrRollbackTransaction)
	updateCallback.Clauses = config.UpdateClauses
	// row and raw share the query clause ordering.
	rowCallback := db.Callback().Row()
	rowCallback.Register("gorm:row", RowQuery)
	rowCallback.Clauses = config.QueryClauses
	rawCallback := db.Callback().Raw()
	rawCallback.Register("gorm:raw", RawExec)
	rawCallback.Clauses = config.QueryClauses
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/update.go | callbacks/update.go | package callbacks
import (
"reflect"
"sort"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils"
)
// SetupUpdateReflectValue points Statement.ReflectValue at the model instance
// when the update destination differs from the model (e.g.
// Model(&user).Updates(map)), so hooks and value assignment operate on the
// model. For map destinations it also copies belongs-to association values
// from the map onto the model.
func SetupUpdateReflectValue(db *gorm.DB) {
	if db.Error == nil && db.Statement.Schema != nil {
		if !db.Statement.ReflectValue.CanAddr() || db.Statement.Model != db.Statement.Dest {
			db.Statement.ReflectValue = reflect.ValueOf(db.Statement.Model)
			// unwrap pointer chains down to the concrete model value.
			for db.Statement.ReflectValue.Kind() == reflect.Ptr {
				db.Statement.ReflectValue = db.Statement.ReflectValue.Elem()
			}
			if dest, ok := db.Statement.Dest.(map[string]interface{}); ok {
				for _, rel := range db.Statement.Schema.Relationships.BelongsTo {
					if _, ok := dest[rel.Name]; ok {
						db.AddError(rel.Field.Set(db.Statement.Context, db.Statement.ReflectValue, dest[rel.Name]))
					}
				}
			}
		}
	}
}
// BeforeUpdate runs BeforeSave and BeforeUpdate hooks (in that order) on the
// destination value(s) when the schema declares them and hooks are enabled.
func BeforeUpdate(db *gorm.DB) {
	if db.Error != nil || db.Statement.Schema == nil || db.Statement.SkipHooks {
		return
	}
	if !db.Statement.Schema.BeforeSave && !db.Statement.Schema.BeforeUpdate {
		return
	}
	callMethod(db, func(value interface{}, tx *gorm.DB) (called bool) {
		if db.Statement.Schema.BeforeSave {
			if hook, ok := value.(BeforeSaveInterface); ok {
				called = true
				db.AddError(hook.BeforeSave(tx))
			}
		}
		if db.Statement.Schema.BeforeUpdate {
			if hook, ok := value.(BeforeUpdateInterface); ok {
				called = true
				db.AddError(hook.BeforeUpdate(tx))
			}
		}
		return called
	})
}
// Update returns the default update callback. It builds the UPDATE statement
// (converting the destination into SET assignments when needed), guards
// against missing WHERE conditions, and executes either via QueryContext
// (when a RETURNING clause is in play and supported) or ExecContext.
func Update(config *Config) func(db *gorm.DB) {
	supportReturning := utils.Contains(config.UpdateClauses, "RETURNING")
	return func(db *gorm.DB) {
		if db.Error != nil {
			return
		}
		if db.Statement.Schema != nil {
			for _, c := range db.Statement.Schema.UpdateClauses {
				db.Statement.AddClause(c)
			}
		}
		if db.Statement.SQL.Len() == 0 {
			db.Statement.SQL.Grow(180)
			db.Statement.AddClauseIfNotExists(clause.Update{})
			if _, ok := db.Statement.Clauses["SET"]; !ok {
				if set := ConvertToAssignments(db.Statement); len(set) != 0 {
					// the SET clause is statement-specific; drop it on exit so
					// the cached clauses are not polluted for later statements.
					defer delete(db.Statement.Clauses, "SET")
					db.Statement.AddClause(set)
				} else {
					// nothing to update.
					return
				}
			}
			db.Statement.Build(db.Statement.BuildClauses...)
		}
		checkMissingWhereConditions(db)
		if !db.DryRun && db.Error == nil {
			if ok, mode := hasReturning(db, supportReturning); ok {
				// RETURNING path: run as a query and scan returned rows back
				// into the model value.
				if rows, err := db.Statement.ConnPool.QueryContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...); db.AddError(err) == nil {
					dest := db.Statement.Dest
					db.Statement.Dest = db.Statement.ReflectValue.Addr().Interface()
					gorm.Scan(rows, db, mode)
					db.Statement.Dest = dest
					db.AddError(rows.Close())
					if db.Statement.Result != nil {
						db.Statement.Result.RowsAffected = db.RowsAffected
					}
				}
			} else {
				result, err := db.Statement.ConnPool.ExecContext(db.Statement.Context, db.Statement.SQL.String(), db.Statement.Vars...)
				if db.AddError(err) == nil {
					db.RowsAffected, _ = result.RowsAffected()
				}
				if db.Statement.Result != nil {
					db.Statement.Result.Result = result
					db.Statement.Result.RowsAffected = db.RowsAffected
				}
			}
		}
	}
}
// AfterUpdate runs AfterUpdate and AfterSave hooks (in that order) on the
// destination value(s) when the schema declares them and hooks are enabled.
func AfterUpdate(db *gorm.DB) {
	if db.Error != nil || db.Statement.Schema == nil || db.Statement.SkipHooks {
		return
	}
	if !db.Statement.Schema.AfterSave && !db.Statement.Schema.AfterUpdate {
		return
	}
	callMethod(db, func(value interface{}, tx *gorm.DB) (called bool) {
		if db.Statement.Schema.AfterUpdate {
			if hook, ok := value.(AfterUpdateInterface); ok {
				called = true
				db.AddError(hook.AfterUpdate(tx))
			}
		}
		if db.Statement.Schema.AfterSave {
			if hook, ok := value.(AfterSaveInterface); ok {
				called = true
				db.AddError(hook.AfterSave(tx))
			}
		}
		return called
	})
}
// ConvertToAssignments converts stmt.Dest (a map[string]interface{} or a
// struct) into the SET assignments for an UPDATE. It also mirrors assigned
// values back onto stmt.ReflectValue, adds primary-key WHERE conditions when
// updating through Model(...), honors Select/Omit restrictions, and touches
// AutoUpdateTime fields unless hooks are skipped.
func ConvertToAssignments(stmt *gorm.Statement) (set clause.Set) {
	var (
		selectColumns, restricted = stmt.SelectAndOmitColumns(false, true)
		assignValue               func(field *schema.Field, value interface{})
	)
	// assignValue writes a field value back to the in-memory destination so
	// the model reflects what was sent to the database.
	switch stmt.ReflectValue.Kind() {
	case reflect.Slice, reflect.Array:
		assignValue = func(field *schema.Field, value interface{}) {
			for i := 0; i < stmt.ReflectValue.Len(); i++ {
				if stmt.ReflectValue.CanAddr() {
					field.Set(stmt.Context, stmt.ReflectValue.Index(i), value)
				}
			}
		}
	case reflect.Struct:
		assignValue = func(field *schema.Field, value interface{}) {
			if stmt.ReflectValue.CanAddr() {
				field.Set(stmt.Context, stmt.ReflectValue, value)
			}
		}
	default:
		// non-addressable / unsupported destination: assignments are a no-op.
		assignValue = func(field *schema.Field, value interface{}) {
		}
	}
	updatingValue := reflect.ValueOf(stmt.Dest)
	for updatingValue.Kind() == reflect.Ptr {
		updatingValue = updatingValue.Elem()
	}
	// When Dest is not the model itself, derive WHERE conditions from the
	// model's non-zero primary keys.
	if !updatingValue.CanAddr() || stmt.Dest != stmt.Model {
		switch stmt.ReflectValue.Kind() {
		case reflect.Slice, reflect.Array:
			if size := stmt.ReflectValue.Len(); size > 0 {
				var isZero bool
				for i := 0; i < size; i++ {
					for _, field := range stmt.Schema.PrimaryFields {
						_, isZero = field.ValueOf(stmt.Context, stmt.ReflectValue.Index(i))
						if !isZero {
							break
						}
					}
				}
				if !isZero {
					_, primaryValues := schema.GetIdentityFieldValuesMap(stmt.Context, stmt.ReflectValue, stmt.Schema.PrimaryFields)
					column, values := schema.ToQueryValues("", stmt.Schema.PrimaryFieldDBNames, primaryValues)
					stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.IN{Column: column, Values: values}}})
				}
			}
		case reflect.Struct:
			for _, field := range stmt.Schema.PrimaryFields {
				if value, isZero := field.ValueOf(stmt.Context, stmt.ReflectValue); !isZero {
					stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.Eq{Column: field.DBName, Value: value}}})
				}
			}
		}
	}
	switch value := updatingValue.Interface().(type) {
	case map[string]interface{}:
		// map destination: sort keys for deterministic SQL output.
		set = make([]clause.Assignment, 0, len(value))
		keys := make([]string, 0, len(value))
		for k := range value {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		for _, k := range keys {
			kv := value[k]
			// a *gorm.DB value is a subquery; wrap so it is treated as such.
			if _, ok := kv.(*gorm.DB); ok {
				kv = []interface{}{kv}
			}
			if stmt.Schema != nil {
				if field := stmt.Schema.LookUpField(k); field != nil {
					if field.DBName != "" {
						if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && !restricted) {
							set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: kv})
							assignValue(field, value[k])
						}
					} else if v, ok := selectColumns[field.Name]; (ok && v) || (!ok && !restricted) {
						// field without a column (e.g. ignored): only assign in memory.
						assignValue(field, value[k])
					}
					continue
				}
			}
			// unknown key: pass through as a raw column assignment.
			if v, ok := selectColumns[k]; (ok && v) || (!ok && !restricted) {
				set = append(set, clause.Assignment{Column: clause.Column{Name: k}, Value: kv})
			}
		}
		// touch AutoUpdateTime fields not explicitly present in the map.
		if !stmt.SkipHooks && stmt.Schema != nil {
			for _, dbName := range stmt.Schema.DBNames {
				field := stmt.Schema.LookUpField(dbName)
				if field.AutoUpdateTime > 0 && value[field.Name] == nil && value[field.DBName] == nil {
					if v, ok := selectColumns[field.DBName]; (ok && v) || !ok {
						now := stmt.DB.NowFunc()
						assignValue(field, now)
						if field.AutoUpdateTime == schema.UnixNanosecond {
							set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now.UnixNano()})
						} else if field.AutoUpdateTime == schema.UnixMillisecond {
							set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now.UnixMilli()})
						} else if field.AutoUpdateTime == schema.UnixSecond {
							set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now.Unix()})
						} else {
							set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: now})
						}
					}
				}
			}
		}
	default:
		updatingSchema := stmt.Schema
		var isDiffSchema bool
		if !updatingValue.CanAddr() || stmt.Dest != stmt.Model {
			// different schema
			updatingStmt := &gorm.Statement{DB: stmt.DB}
			if err := updatingStmt.Parse(stmt.Dest); err == nil {
				updatingSchema = updatingStmt.Schema
				isDiffSchema = true
			}
		}
		switch updatingValue.Kind() {
		case reflect.Struct:
			set = make([]clause.Assignment, 0, len(stmt.Schema.FieldsByDBName))
			for _, dbName := range stmt.Schema.DBNames {
				if field := updatingSchema.LookUpField(dbName); field != nil {
					if !field.PrimaryKey || !updatingValue.CanAddr() || stmt.Dest != stmt.Model {
						if v, ok := selectColumns[field.DBName]; (ok && v) || (!ok && (!restricted || (!stmt.SkipHooks && field.AutoUpdateTime > 0))) {
							value, isZero := field.ValueOf(stmt.Context, updatingValue)
							if !stmt.SkipHooks && field.AutoUpdateTime > 0 {
								if field.AutoUpdateTime == schema.UnixNanosecond {
									value = stmt.DB.NowFunc().UnixNano()
								} else if field.AutoUpdateTime == schema.UnixMillisecond {
									value = stmt.DB.NowFunc().UnixMilli()
								} else if field.AutoUpdateTime == schema.UnixSecond {
									value = stmt.DB.NowFunc().Unix()
								} else {
									value = stmt.DB.NowFunc()
								}
								isZero = false
							}
							// zero values are skipped unless the column was explicitly selected.
							if (ok || !isZero) && field.Updatable {
								set = append(set, clause.Assignment{Column: clause.Column{Name: field.DBName}, Value: value})
								assignField := field
								if isDiffSchema {
									if originField := stmt.Schema.LookUpField(dbName); originField != nil {
										assignField = originField
									}
								}
								assignValue(assignField, value)
							}
						}
					} else {
						// primary key on the model itself becomes a WHERE condition.
						if value, isZero := field.ValueOf(stmt.Context, updatingValue); !isZero {
							stmt.AddClause(clause.Where{Exprs: []clause.Expression{clause.Eq{Column: field.DBName, Value: value}}})
						}
					}
				}
			}
		default:
			stmt.AddError(gorm.ErrInvalidData)
		}
	}
	return
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/callbacks/row.go | callbacks/row.go | package callbacks
import (
"gorm.io/gorm"
)
// RowQuery builds the SELECT statement and executes it for Row/Rows calls,
// stashing the resulting *sql.Row or *sql.Rows in Statement.Dest.
func RowQuery(db *gorm.DB) {
	if db.Error != nil {
		return
	}
	BuildQuerySQL(db)
	if db.DryRun || db.Error != nil {
		return
	}
	stmt := db.Statement
	if wantRows, ok := db.Get("rows"); ok && wantRows.(bool) {
		stmt.Settings.Delete("rows")
		stmt.Dest, db.Error = stmt.ConnPool.QueryContext(stmt.Context, stmt.SQL.String(), stmt.Vars...)
	} else {
		stmt.Dest = stmt.ConnPool.QueryRowContext(stmt.Context, stmt.SQL.String(), stmt.Vars...)
	}
	// row queries do not report an affected-row count.
	db.RowsAffected = -1
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/internal/lru/lru.go | internal/lru/lru.go | package lru
// golang -lru
// https://github.com/hashicorp/golang-lru
import (
"sync"
"time"
)
// EvictCallback is used to get a callback when a cache entry is evicted
type EvictCallback[K comparable, V any] func(key K, value V)

// LRU implements a thread-safe LRU with expirable entries.
type LRU[K comparable, V any] struct {
	// size is the maximum number of entries; 0 disables the size limit.
	size int
	// evictList orders entries by recency (front = most recent).
	evictList *LruList[K, V]
	items     map[K]*Entry[K, V]
	onEvict   EvictCallback[K, V]
	// expirable options
	mu  sync.RWMutex
	ttl time.Duration
	// done signals the expiry goroutine to stop (never closed in this version).
	done chan struct{}
	// buckets for expiration
	buckets []bucket[K, V]
	// uint8 because it's number between 0 and numBuckets
	nextCleanupBucket uint8
}

// bucket is a container for holding entries to be expired
type bucket[K comparable, V any] struct {
	entries     map[K]*Entry[K, V]
	newestEntry time.Time
}

// noEvictionTTL - very long ttl to prevent eviction
const noEvictionTTL = time.Hour * 24 * 365 * 10

// because of uint8 usage for nextCleanupBucket, should not exceed 256.
// casting it as uint8 explicitly requires type conversions in multiple places
const numBuckets = 100
// NewLRU returns a new thread-safe cache with expirable entries.
//
// Size parameter set to 0 makes cache of unlimited size, e.g. turns LRU mechanism off.
//
// Providing 0 TTL turns expiring off.
//
// Delete expired entries every 1/100th of ttl value. Goroutine which deletes expired entries runs indefinitely.
func NewLRU[K comparable, V any](size int, onEvict EvictCallback[K, V], ttl time.Duration) *LRU[K, V] {
	if size < 0 {
		size = 0
	}
	if ttl <= 0 {
		ttl = noEvictionTTL
	}
	cache := &LRU[K, V]{
		ttl:       ttl,
		size:      size,
		evictList: NewList[K, V](),
		items:     map[K]*Entry[K, V]{},
		onEvict:   onEvict,
		done:      make(chan struct{}),
	}
	// initialize the expiration buckets
	cache.buckets = make([]bucket[K, V], numBuckets)
	for i := range cache.buckets {
		cache.buckets[i] = bucket[K, V]{entries: map[K]*Entry[K, V]{}}
	}
	// Spawn the cleanup goroutine only when expiration is enabled.
	//
	// Important: done channel is never closed, so deleteExpired() goroutine will never exit,
	// it's decided to add functionality to close it in the version later than v2.
	if cache.ttl != noEvictionTTL {
		go func(done <-chan struct{}) {
			ticker := time.NewTicker(cache.ttl / numBuckets)
			defer ticker.Stop()
			for {
				select {
				case <-done:
					return
				case <-ticker.C:
					cache.deleteExpired()
				}
			}
		}(cache.done)
	}
	return cache
}
// Purge clears the cache completely.
// onEvict is called for each evicted key.
func (c *LRU[K, V]) Purge() {
	c.mu.Lock()
	defer c.mu.Unlock()
	for key, ent := range c.items {
		if c.onEvict != nil {
			c.onEvict(key, ent.Value)
		}
		delete(c.items, key)
	}
	// Empty every expiry bucket; the maps are retained and reused.
	for i := range c.buckets {
		for key := range c.buckets[i].entries {
			delete(c.buckets[i].entries, key)
		}
	}
	c.evictList.Init()
}
// Add adds a value to the cache. Returns true if an eviction occurred.
// Returns false if there was no eviction: the item was already in the cache,
// or the size was not exceeded.
func (c *LRU[K, V]) Add(key K, value V) (evicted bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	now := time.Now()
	// Check for existing item
	if ent, ok := c.items[key]; ok {
		c.evictList.MoveToFront(ent)
		c.removeFromBucket(ent) // remove the entry from its current bucket as expiresAt is renewed
		ent.Value = value
		ent.ExpiresAt = now.Add(c.ttl)
		c.addToBucket(ent)
		// Updating an existing key never evicts.
		return false
	}
	// Add new item
	ent := c.evictList.PushFrontExpirable(key, value, now.Add(c.ttl))
	c.items[key] = ent
	c.addToBucket(ent) // adds the entry to the appropriate bucket and sets entry.expireBucket
	evict := c.size > 0 && c.evictList.Length() > c.size
	// Verify size not exceeded
	if evict {
		c.removeOldest()
	}
	return evict
}
// Get looks up a key's value from the cache, promoting the entry to
// most-recently-used on a hit. Expired entries are reported as missing.
func (c *LRU[K, V]) Get(key K) (value V, ok bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	ent, found := c.items[key]
	if !found {
		return
	}
	// An entry past its deadline is treated as absent (cleanup is lazy).
	if time.Now().After(ent.ExpiresAt) {
		return value, false
	}
	c.evictList.MoveToFront(ent)
	return ent.Value, true
}
// Contains checks if a key is in the cache, without updating the recent-ness
// or deleting it for being stale.
func (c *LRU[K, V]) Contains(key K) (ok bool) {
	c.mu.RLock()
	_, ok = c.items[key]
	c.mu.RUnlock()
	return
}
// Peek returns the key value (or the zero value if not found) without
// updating the "recently used"-ness of the key. Expired entries are
// reported as missing.
func (c *LRU[K, V]) Peek(key K) (value V, ok bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	if ent, found := c.items[key]; found && !time.Now().After(ent.ExpiresAt) {
		return ent.Value, true
	}
	return
}
// Remove removes the provided key from the cache, returning whether the
// key was contained.
func (c *LRU[K, V]) Remove(key K) bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	ent, ok := c.items[key]
	if !ok {
		return false
	}
	c.removeElement(ent)
	return true
}
// RemoveOldest removes the oldest item from the cache, reporting what was
// removed (ok is false when the cache is empty).
func (c *LRU[K, V]) RemoveOldest() (key K, value V, ok bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	ent := c.evictList.Back()
	if ent == nil {
		return
	}
	c.removeElement(ent)
	return ent.Key, ent.Value, true
}
// GetOldest returns the oldest entry without removing or promoting it
// (ok is false when the cache is empty).
func (c *LRU[K, V]) GetOldest() (key K, value V, ok bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	ent := c.evictList.Back()
	if ent == nil {
		return
	}
	return ent.Key, ent.Value, true
}
// KeyValues returns a snapshot of the cache as a key->value map.
// Expired entries are filtered out.
func (c *LRU[K, V]) KeyValues() map[K]V {
	c.mu.RLock()
	defer c.mu.RUnlock()
	// Pre-size to the item count; some entries may be expired, so this is an
	// upper bound.
	maps := make(map[K]V, len(c.items))
	now := time.Now()
	for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() {
		if now.After(ent.ExpiresAt) {
			continue
		}
		maps[ent.Key] = ent.Value
	}
	return maps
}
// Keys returns a slice of the keys in the cache, from oldest to newest.
// Expired entries are filtered out.
func (c *LRU[K, V]) Keys() []K {
	c.mu.RLock()
	defer c.mu.RUnlock()
	out := make([]K, 0, len(c.items))
	cutoff := time.Now()
	for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() {
		if !cutoff.After(ent.ExpiresAt) {
			out = append(out, ent.Key)
		}
	}
	return out
}
// Values returns a slice of the values in the cache, from oldest to newest.
// Expired entries are filtered out.
func (c *LRU[K, V]) Values() []V {
	c.mu.RLock()
	defer c.mu.RUnlock()
	out := make([]V, 0, len(c.items))
	cutoff := time.Now()
	for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() {
		if !cutoff.After(ent.ExpiresAt) {
			out = append(out, ent.Value)
		}
	}
	return out
}
// Len returns the number of items in the cache (including not-yet-reaped
// expired entries).
func (c *LRU[K, V]) Len() int {
	c.mu.RLock()
	n := c.evictList.Length()
	c.mu.RUnlock()
	return n
}
// Resize changes the cache size, evicting oldest entries as needed to fit.
// Size of 0 means unlimited. Returns the number of entries evicted.
func (c *LRU[K, V]) Resize(size int) (evicted int) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if size <= 0 {
		c.size = 0
		return 0
	}
	evicted = c.evictList.Length() - size
	if evicted < 0 {
		evicted = 0
	}
	for i := 0; i < evicted; i++ {
		c.removeOldest()
	}
	c.size = size
	return evicted
}
// Close destroys cleanup goroutine. To clean up the cache, run Purge() before Close().
// func (c *LRU[K, V]) Close() {
// c.mu.Lock()
// defer c.mu.Unlock()
// select {
// case <-c.done:
// return
// default:
// }
// close(c.done)
// }
// removeOldest removes the oldest item from the cache. Has to be called with lock!
func (c *LRU[K, V]) removeOldest() {
	ent := c.evictList.Back()
	if ent != nil {
		c.removeElement(ent)
	}
}
// removeElement removes a given list element from the recency list, the item
// map, and its expiry bucket, firing the eviction callback last.
// Has to be called with lock!
func (c *LRU[K, V]) removeElement(e *Entry[K, V]) {
	c.evictList.Remove(e)
	delete(c.items, e.Key)
	c.removeFromBucket(e)
	if cb := c.onEvict; cb != nil {
		cb(e.Key, e.Value)
	}
}
// deleteExpired deletes expired records from the oldest bucket, waiting for the newest entry
// in it to expire first.
func (c *LRU[K, V]) deleteExpired() {
	c.mu.Lock()
	bucketIdx := c.nextCleanupBucket
	timeToExpire := time.Until(c.buckets[bucketIdx].newestEntry)
	// wait for newest entry to expire before cleanup without holding lock
	if timeToExpire > 0 {
		c.mu.Unlock()
		time.Sleep(timeToExpire)
		c.mu.Lock()
	}
	// By now every entry in this bucket has expired, so the whole bucket can
	// be dropped without per-entry deadline checks. Entries renewed by Add
	// while the lock was released have already been moved to another bucket.
	for _, ent := range c.buckets[bucketIdx].entries {
		c.removeElement(ent)
	}
	// Advance the cleanup cursor; wraps at numBuckets.
	c.nextCleanupBucket = (c.nextCleanupBucket + 1) % numBuckets
	c.mu.Unlock()
}
// addToBucket adds entry to expire bucket so that it will be cleaned up when the time comes. Has to be called with lock!
func (c *LRU[K, V]) addToBucket(e *Entry[K, V]) {
	// A fresh entry has the longest remaining TTL, so it goes into the bucket
	// that will be cleaned last: the one just "behind" nextCleanupBucket.
	// numBuckets is added before subtracting 1 to keep the uint8 math from
	// wrapping when nextCleanupBucket is 0.
	bucketID := (numBuckets + c.nextCleanupBucket - 1) % numBuckets
	e.ExpireBucket = bucketID
	c.buckets[bucketID].entries[e.Key] = e
	// Track the latest expiry in the bucket so deleteExpired knows how long
	// to wait before sweeping it.
	if c.buckets[bucketID].newestEntry.Before(e.ExpiresAt) {
		c.buckets[bucketID].newestEntry = e.ExpiresAt
	}
}
// removeFromBucket removes the entry from its corresponding expiry bucket.
// Has to be called with lock!
func (c *LRU[K, V]) removeFromBucket(e *Entry[K, V]) {
	b := &c.buckets[e.ExpireBucket]
	delete(b.entries, e.Key)
}
// Cap returns the capacity of the cache (0 means unlimited).
// The read is taken under the read lock because Resize mutates size while
// holding the write lock; an unsynchronized read would be a data race.
func (c *LRU[K, V]) Cap() int {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return c.size
}
// Entry is an LRU Entry
type Entry[K comparable, V any] struct {
	// Next and previous pointers in the doubly-linked list of elements.
	// To simplify the implementation, internally a list l is implemented
	// as a ring, such that &l.root is both the next element of the last
	// list element (l.Back()) and the previous element of the first list
	// element (l.Front()).
	next, prev *Entry[K, V]

	// The list to which this element belongs.
	list *LruList[K, V]

	// The LRU Key of this element.
	Key K

	// The Value stored with this element.
	Value V

	// The time this element would be cleaned up, optional
	ExpiresAt time.Time

	// The expiry bucket item was put in, optional
	ExpireBucket uint8
}

// PrevEntry returns the previous list element or nil.
func (e *Entry[K, V]) PrevEntry() *Entry[K, V] {
	// The sentinel root marks the end of the ring, so reaching it means
	// there is no previous element.
	if p := e.prev; e.list != nil && p != &e.list.root {
		return p
	}
	return nil
}
// LruList represents a doubly linked list.
// The zero Value for LruList is an empty list ready to use.
type LruList[K comparable, V any] struct {
	root Entry[K, V] // sentinel list element, only &root, root.prev, and root.next are used
	len  int         // current list Length excluding (this) sentinel element
}

// Init initializes or clears list l.
func (l *LruList[K, V]) Init() *LruList[K, V] {
	// Point the sentinel at itself: an empty ring.
	l.root.next = &l.root
	l.root.prev = &l.root
	l.len = 0
	return l
}

// NewList returns an initialized list.
func NewList[K comparable, V any]() *LruList[K, V] { return new(LruList[K, V]).Init() }

// Length returns the number of elements of list l.
// The complexity is O(1).
func (l *LruList[K, V]) Length() int { return l.len }

// Back returns the last element of list l or nil if the list is empty.
func (l *LruList[K, V]) Back() *Entry[K, V] {
	if l.len == 0 {
		return nil
	}
	return l.root.prev
}

// lazyInit lazily initializes a zero List Value.
func (l *LruList[K, V]) lazyInit() {
	if l.root.next == nil {
		l.Init()
	}
}
// insert inserts e after at, increments l.len, and returns e.
func (l *LruList[K, V]) insert(e, at *Entry[K, V]) *Entry[K, V] {
	// Splice e into the ring between at and at.next.
	e.prev = at
	e.next = at.next
	e.prev.next = e
	e.next.prev = e
	e.list = l
	l.len++
	return e
}

// insertValue is a convenience wrapper for insert(&Entry{Value: v, ExpiresAt: ExpiresAt}, at).
func (l *LruList[K, V]) insertValue(k K, v V, expiresAt time.Time, at *Entry[K, V]) *Entry[K, V] {
	return l.insert(&Entry[K, V]{Value: v, Key: k, ExpiresAt: expiresAt}, at)
}

// Remove removes e from its list, decrements l.len, and returns the removed value.
func (l *LruList[K, V]) Remove(e *Entry[K, V]) V {
	e.prev.next = e.next
	e.next.prev = e.prev
	e.next = nil // avoid memory leaks
	e.prev = nil // avoid memory leaks
	e.list = nil
	l.len--
	return e.Value
}

// move moves e to next to at.
func (l *LruList[K, V]) move(e, at *Entry[K, V]) {
	if e == at {
		return
	}
	// Unlink e, then re-splice it right after at.
	e.prev.next = e.next
	e.next.prev = e.prev

	e.prev = at
	e.next = at.next
	e.prev.next = e
	e.next.prev = e
}

// PushFront inserts a new element e with value v at the front of list l and returns e.
func (l *LruList[K, V]) PushFront(k K, v V) *Entry[K, V] {
	l.lazyInit()
	return l.insertValue(k, v, time.Time{}, &l.root)
}

// PushFrontExpirable inserts a new expirable element e with Value v at the front of list l and returns e.
func (l *LruList[K, V]) PushFrontExpirable(k K, v V, expiresAt time.Time) *Entry[K, V] {
	l.lazyInit()
	return l.insertValue(k, v, expiresAt, &l.root)
}

// MoveToFront moves element e to the front of list l.
// If e is not an element of l, the list is not modified.
// The element must not be nil.
func (l *LruList[K, V]) MoveToFront(e *Entry[K, V]) {
	if e.list != l || l.root.next == e {
		return
	}
	// see comment in List.Remove about initialization of l
	l.move(e, &l.root)
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/internal/stmt_store/stmt_store.go | internal/stmt_store/stmt_store.go | package stmt_store
import (
"context"
"database/sql"
"math"
"sync"
"time"
"gorm.io/gorm/internal/lru"
)
// Stmt wraps a prepared *sql.Stmt together with its preparation state, so
// concurrent users can wait until preparation has finished.
type Stmt struct {
	*sql.Stmt
	// Transaction reports whether the statement was prepared as part of a transaction.
	Transaction bool
	// prepared is closed once preparation has finished, successfully or not.
	prepared chan struct{}
	// prepareErr records the error returned by PrepareContext, if any.
	prepareErr error
}

// Error returns the preparation error, or nil if preparation succeeded.
func (stmt *Stmt) Error() error {
	return stmt.prepareErr
}

// Close blocks until preparation has finished, then closes the underlying
// sql.Stmt if one was successfully prepared.
func (stmt *Stmt) Close() error {
	<-stmt.prepared
	if stmt.Stmt != nil {
		return stmt.Stmt.Close()
	}
	return nil
}
// Store defines an interface for managing the caching operations of SQL statements (Stmt).
// This interface provides methods for creating new statements, retrieving all cache keys,
// getting cached statements, setting cached statements, and deleting cached statements.
type Store interface {
	// New creates a new Stmt object and caches it.
	// Parameters:
	//   ctx: The context for the request, which can carry deadlines, cancellation signals, etc.
	//   key: The key representing the SQL query, used for caching and preparing the statement.
	//   isTransaction: Indicates whether this operation is part of a transaction, which may affect the caching strategy.
	//   connPool: A connection pool that provides database connections.
	//   locker: A synchronization lock that is unlocked after initialization to avoid deadlocks.
	// Returns:
	//   *Stmt: A newly created statement object for executing SQL operations.
	//   error: An error if the statement preparation fails.
	New(ctx context.Context, key string, isTransaction bool, connPool ConnPool, locker sync.Locker) (*Stmt, error)

	// Keys returns a slice of all cache keys in the store.
	Keys() []string

	// Get retrieves a Stmt object from the store based on the given key.
	// Parameters:
	//   key: The key used to look up the Stmt object.
	// Returns:
	//   *Stmt: The found Stmt object, or nil if not found.
	//   bool: Indicates whether the corresponding Stmt object was successfully found.
	Get(key string) (*Stmt, bool)

	// Set stores the given Stmt object in the store and associates it with the specified key.
	// Parameters:
	//   key: The key used to associate the Stmt object.
	//   value: The Stmt object to be stored.
	Set(key string, value *Stmt)

	// Delete removes the Stmt object corresponding to the specified key from the store.
	// Parameters:
	//   key: The key associated with the Stmt object to be deleted.
	Delete(key string)
}

// defaultMaxSize defines the default maximum capacity of the cache.
// Its value is the maximum value of the int type (on 64-bit platforms,
// (1 << 63) - 1), which means that when the cache size is not specified,
// the cache can theoretically store as many elements as possible.
const (
	defaultMaxSize = math.MaxInt
	// defaultTTL defines the default time-to-live (TTL) for each cache entry.
	// When the TTL for cache entries is not specified, each cache entry will expire after 24 hours.
	defaultTTL = time.Hour * 24
)
// New creates and returns a new Store instance.
//
// size is the maximum capacity of the cache; values <= 0 fall back to
// defaultMaxSize. ttl is the lifetime of each cache entry; values <= 0 fall
// back to defaultTTL.
//
// Evicted statements are closed on a separate goroutine so that eviction
// never blocks on the database.
//
// Returns a Store backed by an expirable LRU cache with the given size,
// eviction callback, and TTL.
func New(size int, ttl time.Duration) Store {
	if size <= 0 {
		size = defaultMaxSize
	}
	if ttl <= 0 {
		ttl = defaultTTL
	}
	closeOnEvict := func(_ string, stmt *Stmt) {
		if stmt != nil {
			go stmt.Close()
		}
	}
	return &lruStore{lru: lru.NewLRU[string, *Stmt](size, closeOnEvict, ttl)}
}
// lruStore implements Store on top of an expirable LRU cache.
type lruStore struct {
	lru *lru.LRU[string, *Stmt]
}

// Keys returns all cache keys currently held by the underlying LRU.
func (s *lruStore) Keys() []string {
	return s.lru.Keys()
}

// Get retrieves a cached statement and, on a hit, blocks until its
// preparation has finished (the prepared channel is closed by New).
func (s *lruStore) Get(key string) (*Stmt, bool) {
	stmt, ok := s.lru.Get(key)
	if ok && stmt != nil {
		<-stmt.prepared
	}
	return stmt, ok
}

// Set stores value under key in the underlying LRU.
func (s *lruStore) Set(key string, value *Stmt) {
	s.lru.Add(key, value)
}

// Delete removes the statement cached under key, if any.
func (s *lruStore) Delete(key string) {
	s.lru.Remove(key)
}

// ConnPool is the minimal connection-pool surface needed to prepare statements.
type ConnPool interface {
	PrepareContext(ctx context.Context, query string) (*sql.Stmt, error)
}
// New creates a new Stmt object for executing SQL queries.
// It caches the Stmt object for future use and handles preparation and error states.
// Parameters:
//
//	ctx: Context for the request, used to carry deadlines, cancellation signals, etc.
//	key: The key representing the SQL query, used for caching and preparing the statement.
//	isTransaction: Indicates whether this operation is part of a transaction, affecting cache strategy.
//	conn: A connection pool that provides database connections.
//	locker: A synchronization lock that is unlocked after initialization to avoid deadlocks.
//
// Returns:
//
//	*Stmt: A newly created statement object for executing SQL operations.
//	error: An error if the statement preparation fails.
func (s *lruStore) New(ctx context.Context, key string, isTransaction bool, conn ConnPool, locker sync.Locker) (_ *Stmt, err error) {
	// Create a Stmt object and set its Transaction property.
	// The prepared channel is used to synchronize the statement preparation state.
	cacheStmt := &Stmt{
		Transaction: isTransaction,
		prepared:    make(chan struct{}),
	}
	// Cache the Stmt object with the associated key.
	s.Set(key, cacheStmt)
	// Unlock after completing initialization to prevent deadlocks.
	locker.Unlock()

	// Ensure the prepared channel is closed after the function execution completes.
	defer close(cacheStmt.prepared)

	// Prepare the SQL statement using the provided connection.
	cacheStmt.Stmt, err = conn.PrepareContext(ctx, key)
	if err != nil {
		// If statement preparation fails, record the error and remove the invalid Stmt object from the cache.
		cacheStmt.prepareErr = err
		s.Delete(key)
		// Return cacheStmt (whose prepared channel is closed by the deferred
		// close above) rather than a fresh &Stmt{}: a fresh Stmt has a nil
		// prepared channel, so calling Close() on it would block forever.
		return cacheStmt, err
	}

	// Return the successfully prepared Stmt object.
	return cacheStmt, nil
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/constraint_test.go | schema/constraint_test.go | package schema_test
import (
"reflect"
"sync"
"testing"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils/tests"
)
// UserCheck exercises the three supported spellings of the `check` tag:
// a named constraint, a bare expression, and an empty name before the expression.
type UserCheck struct {
	Name  string `gorm:"check:name_checker,name <> 'jinzhu'"`
	Name2 string `gorm:"check:name <> 'jinzhu'"`
	Name3 string `gorm:"check:,name <> 'jinzhu'"`
}
// TestParseCheck verifies that check constraints declared via struct tags are
// parsed with the expected (possibly auto-generated) names and expressions.
func TestParseCheck(t *testing.T) {
	user, err := schema.Parse(&UserCheck{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse user check, got error %v", err)
	}

	results := map[string]schema.CheckConstraint{
		"name_checker": {
			Name:       "name_checker",
			Constraint: "name <> 'jinzhu'",
		},
		"chk_user_checks_name2": {
			Name:       "chk_user_checks_name2",
			Constraint: "name <> 'jinzhu'",
		},
		"chk_user_checks_name3": {
			Name:       "chk_user_checks_name3",
			Constraint: "name <> 'jinzhu'",
		},
	}

	checks := user.ParseCheckConstraints()
	for k, result := range results {
		v, ok := checks[k]
		if !ok {
			t.Errorf("Failed to found check %v from parsed checks %+v", k, checks)
			// Skip field comparison: diffing against the zero value would only
			// produce misleading follow-up failures for the same miss.
			continue
		}
		for _, name := range []string{"Name", "Constraint"} {
			if reflect.ValueOf(result).FieldByName(name).Interface() != reflect.ValueOf(v).FieldByName(name).Interface() {
				t.Errorf(
					"check %v %v should equal, expects %v, got %v",
					k, name, reflect.ValueOf(result).FieldByName(name).Interface(), reflect.ValueOf(v).FieldByName(name).Interface(),
				)
			}
		}
	}
}
// TestParseUniqueConstraints verifies that a `unique` tag produces a named
// unique constraint, while `uniqueIndex` does not.
func TestParseUniqueConstraints(t *testing.T) {
	type UserUnique struct {
		Name1 string `gorm:"unique"`
		Name2 string `gorm:"uniqueIndex"`
	}

	user, err := schema.Parse(&UserUnique{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse user unique, got error %v", err)
	}
	constraints := user.ParseUniqueConstraints()

	results := map[string]schema.UniqueConstraint{
		"uni_user_uniques_name1": {
			Name:  "uni_user_uniques_name1",
			Field: &schema.Field{Name: "Name1", Unique: true},
		},
	}
	for k, result := range results {
		v, ok := constraints[k]
		if !ok {
			t.Errorf("Failed to found unique constraint %v from parsed constraints %+v", k, constraints)
			// Skip the assertions below: v is the zero value here and v.Field
			// is nil, so comparing fields would only add misleading failures.
			continue
		}
		tests.AssertObjEqual(t, result, v, "Name")
		tests.AssertObjEqual(t, result.Field, v.Field, "Name", "Unique", "UniqueIndex")
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/index.go | schema/index.go | package schema
import (
"fmt"
"sort"
"strconv"
"strings"
)
// Index describes a parsed database index declared via struct tags.
type Index struct {
	Name    string
	Class   string // UNIQUE | FULLTEXT | SPATIAL
	Type    string // btree, hash, gist, spgist, gin, and brin
	Where   string
	Comment string
	Option  string        // WITH PARSER parser_name
	Fields  []IndexOption // Note: IndexOption's Field maybe the same
}

// IndexOption describes one column's participation in an index.
type IndexOption struct {
	*Field
	Expression string
	Sort       string // DESC, ASC
	Collate    string
	Length     int
	Priority   int
}
// ParseIndexes parse schema indexes from the `index`/`uniqueIndex` field tags,
// merging fields that share an index name into a single composite Index.
func (schema *Schema) ParseIndexes() []*Index {
	indexesByName := map[string]*Index{}
	indexes := []*Index{}

	for _, field := range schema.Fields {
		if field.TagSettings["INDEX"] != "" || field.TagSettings["UNIQUEINDEX"] != "" {
			fieldIndexes, err := parseFieldIndexes(field)
			if err != nil {
				schema.err = err
				break
			}
			for _, index := range fieldIndexes {
				// Reuse the accumulated Index for this name so multiple fields
				// merge into one composite index.
				idx := indexesByName[index.Name]
				if idx == nil {
					idx = &Index{Name: index.Name}
					indexesByName[index.Name] = idx
					indexes = append(indexes, idx)
				}
				idx.Name = index.Name
				// First field to specify each attribute wins.
				if idx.Class == "" {
					idx.Class = index.Class
				}
				if idx.Type == "" {
					idx.Type = index.Type
				}
				if idx.Where == "" {
					idx.Where = index.Where
				}
				if idx.Comment == "" {
					idx.Comment = index.Comment
				}
				if idx.Option == "" {
					idx.Option = index.Option
				}

				idx.Fields = append(idx.Fields, index.Fields...)
				// Keep composite columns ordered by their declared priority.
				sort.Slice(idx.Fields, func(i, j int) bool {
					return idx.Fields[i].Priority < idx.Fields[j].Priority
				})
			}
		}
	}
	for _, index := range indexes {
		// A single-column unique index marks that column as uniquely indexed.
		if index.Class == "UNIQUE" && len(index.Fields) == 1 {
			index.Fields[0].Field.UniqueIndex = index.Name
		}
	}
	return indexes
}
// LookIndex looks up a parsed index by its name, or by the name of one of its
// fields; it returns nil when nothing matches (or the receiver is nil).
func (schema *Schema) LookIndex(name string) *Index {
	if schema == nil {
		return nil
	}
	for _, index := range schema.ParseIndexes() {
		if index.Name == name {
			return index
		}
		for _, field := range index.Fields {
			if field.Name == name {
				return index
			}
		}
	}
	return nil
}
// parseFieldIndexes extracts the index definitions declared on a single field's
// `gorm` tag. Each `index:`/`uniqueIndex:` setting yields one Index entry; an
// empty name is replaced by a generated one (optionally using the `composite`
// sub-setting as the base name).
func parseFieldIndexes(field *Field) (indexes []Index, err error) {
	for _, value := range strings.Split(field.Tag.Get("gorm"), ";") {
		if value != "" {
			v := strings.Split(value, ":")
			k := strings.TrimSpace(strings.ToUpper(v[0]))
			if k == "INDEX" || k == "UNIQUEINDEX" {
				var (
					name string
					// Rejoin in case the tag value itself contained ':'.
					tag        = strings.Join(v[1:], ":")
					idx        = strings.IndexByte(tag, ',')
					tagSetting = strings.Join(strings.Split(tag, ",")[1:], ",")
					settings   = ParseTagSetting(tagSetting, ",")
					length, _  = strconv.Atoi(settings["LENGTH"])
				)

				// No comma: the whole tag is the index name.
				if idx == -1 {
					idx = len(tag)
				}

				name = tag[0:idx]
				if name == "" {
					subName := field.Name
					const key = "COMPOSITE"
					if composite, found := settings[key]; found {
						if len(composite) == 0 || composite == key {
							err = fmt.Errorf(
								"the composite tag of %s.%s cannot be empty",
								field.Schema.Name,
								field.Name)
							return
						}
						subName = composite
					}
					name = field.Schema.namer.IndexName(
						field.Schema.Table, subName)
				}

				if (k == "UNIQUEINDEX") || settings["UNIQUE"] != "" {
					settings["CLASS"] = "UNIQUE"
				}

				// Missing/invalid priority defaults to 10.
				priority, err := strconv.Atoi(settings["PRIORITY"])
				if err != nil {
					priority = 10
				}

				indexes = append(indexes, Index{
					Name:    name,
					Class:   settings["CLASS"],
					Type:    settings["TYPE"],
					Where:   settings["WHERE"],
					Comment: settings["COMMENT"],
					Option:  settings["OPTION"],
					Fields: []IndexOption{{
						Field:      field,
						Expression: settings["EXPRESSION"],
						Sort:       settings["SORT"],
						Collate:    settings["COLLATE"],
						Length:     length,
						Priority:   priority,
					}},
				})
			}
		}
	}

	// The named return err is only set on the early-return path above;
	// clear it here so a shadowed inner err cannot be confused with it.
	err = nil
	return
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/schema_helper_test.go | schema/schema_helper_test.go | package schema_test
import (
"context"
"fmt"
"reflect"
"strings"
"testing"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils/tests"
)
// checkSchema asserts that schema s matches the expected schema v and that
// each name in primaryFields is a primary field of s; the first entry must
// also be the prioritized primary field.
func checkSchema(t *testing.T, s *schema.Schema, v *schema.Schema, primaryFields []string) {
	t.Run("CheckSchema/"+s.Name, func(t *testing.T) {
		tests.AssertObjEqual(t, s, v, "Name", "Table")

		for idx, field := range primaryFields {
			found := false
			for _, f := range s.PrimaryFields {
				if f.Name == field {
					found = true
				}
			}

			if idx == 0 && field != s.PrioritizedPrimaryField.Name {
				t.Errorf("schema %v prioritized primary field should be %v, but got %v", s, field, s.PrioritizedPrimaryField.Name)
			}

			if !found {
				t.Errorf("schema %v failed to found primary key: %v", s, field)
			}
		}
	})
}
// checkSchemaField asserts that field f (optionally mutated by fc first) can be
// looked up in schema s by DB name or Go name, and that the parsed field's
// attributes match f. Primary-key fields must also appear in s.PrimaryFields.
func checkSchemaField(t *testing.T, s *schema.Schema, f *schema.Field, fc func(*schema.Field)) {
	t.Run("CheckField/"+f.Name, func(t *testing.T) {
		if fc != nil {
			fc(f)
		}

		// Derive TagSettings from the tag when the expectation didn't set it,
		// so the comparison below is apples-to-apples.
		if f.TagSettings == nil {
			if f.Tag != "" {
				f.TagSettings = schema.ParseTagSetting(f.Tag.Get("gorm"), ";")
			} else {
				f.TagSettings = map[string]string{}
			}
		}

		// Prefer the DB-name lookup; fall back to the Go field name.
		parsedField, ok := s.FieldsByDBName[f.DBName]
		if !ok {
			parsedField, ok = s.FieldsByName[f.Name]
		}

		if !ok {
			t.Errorf("schema %v failed to look up field with name %v", s, f.Name)
		} else {
			tests.AssertObjEqual(t, parsedField, f, "Name", "DBName", "BindNames", "DataType", "PrimaryKey", "AutoIncrement", "Creatable", "Updatable", "Readable", "HasDefaultValue", "DefaultValue", "NotNull", "Unique", "Comment", "Size", "Precision", "TagSettings")

			if f.DBName != "" {
				if field, ok := s.FieldsByDBName[f.DBName]; !ok || parsedField != field {
					t.Errorf("schema %v failed to look up field with dbname %v", s, f.DBName)
				}
			}

			// LookUpField must resolve both the DB name and the Go name.
			for _, name := range []string{f.DBName, f.Name} {
				if name != "" {
					if field := s.LookUpField(name); field == nil || (field.Name != name && field.DBName != name) {
						t.Errorf("schema %v failed to look up field with dbname %v", s, f.DBName)
					}
				}
			}

			if f.PrimaryKey {
				var found bool
				for _, primaryField := range s.PrimaryFields {
					if primaryField == parsedField {
						found = true
					}
				}

				if !found {
					t.Errorf("schema %v doesn't include field %v", s, f.Name)
				}
			}
		}
	})
}
// Relation is the expected shape of a parsed schema.Relationship in tests.
type Relation struct {
	Name        string
	Type        schema.RelationshipType
	Schema      string // owning schema name
	FieldSchema string // related schema name
	Polymorphic Polymorphic
	JoinTable   JoinTable
	References  []Reference
}

// Polymorphic is the expected polymorphic configuration of a relation.
type Polymorphic struct {
	ID    string
	Type  string
	Value string
}

// JoinTable is the expected join table of a many-to-many relation.
type JoinTable struct {
	Name   string
	Table  string
	Fields []schema.Field
}

// Reference is the expected foreign-key reference of a relation.
type Reference struct {
	PrimaryKey    string
	PrimarySchema string
	ForeignKey    string
	ForeignSchema string
	PrimaryValue  string
	OwnPrimaryKey bool
}
// checkSchemaRelation asserts that schema s contains a relationship matching
// the expected Relation: name, type, schemas, polymorphic setup, join table,
// and the full set of references.
func checkSchemaRelation(t *testing.T, s *schema.Schema, relation Relation) {
	t.Run("CheckRelation/"+relation.Name, func(t *testing.T) {
		if r, ok := s.Relationships.Relations[relation.Name]; ok {
			if r.Name != relation.Name {
				t.Errorf("schema %v relation name expects %v, but got %v", s, r.Name, relation.Name)
			}

			if r.Type != relation.Type {
				t.Errorf("schema %v relation name expects %v, but got %v", s, r.Type, relation.Type)
			}

			if r.Schema.Name != relation.Schema {
				t.Errorf("schema %v relation's schema expects %v, but got %v", s, relation.Schema, r.Schema.Name)
			}

			if r.FieldSchema.Name != relation.FieldSchema {
				t.Errorf("schema %v field relation's schema expects %v, but got %v", s, relation.FieldSchema, r.FieldSchema.Name)
			}

			if r.Polymorphic != nil {
				if r.Polymorphic.PolymorphicID.Name != relation.Polymorphic.ID {
					t.Errorf("schema %v relation's polymorphic id field expects %v, but got %v", s, relation.Polymorphic.ID, r.Polymorphic.PolymorphicID.Name)
				}

				if r.Polymorphic.PolymorphicType.Name != relation.Polymorphic.Type {
					t.Errorf("schema %v relation's polymorphic type field expects %v, but got %v", s, relation.Polymorphic.Type, r.Polymorphic.PolymorphicType.Name)
				}

				if r.Polymorphic.Value != relation.Polymorphic.Value {
					t.Errorf("schema %v relation's polymorphic value expects %v, but got %v", s, relation.Polymorphic.Value, r.Polymorphic.Value)
				}
			}

			if r.JoinTable != nil {
				if r.JoinTable.Name != relation.JoinTable.Name {
					t.Errorf("schema %v relation's join table name expects %v, but got %v", s, relation.JoinTable.Name, r.JoinTable.Name)
				}

				if r.JoinTable.Table != relation.JoinTable.Table {
					t.Errorf("schema %v relation's join table tablename expects %v, but got %v", s, relation.JoinTable.Table, r.JoinTable.Table)
				}

				for i := range relation.JoinTable.Fields {
					checkSchemaField(t, r.JoinTable, &relation.JoinTable.Fields[i], nil)
				}
			}

			if len(relation.References) != len(r.References) {
				t.Errorf("schema %v relation's reference's count doesn't match, expects %v, but got %v", s, len(relation.References), len(r.References))
			}

			// Each expected reference must match at least one parsed reference
			// on all of its attributes (a nil PrimaryKey matches any).
			for _, ref := range relation.References {
				var found bool
				for _, rf := range r.References {
					if (rf.PrimaryKey == nil || (rf.PrimaryKey.Name == ref.PrimaryKey && rf.PrimaryKey.Schema.Name == ref.PrimarySchema)) && (rf.PrimaryValue == ref.PrimaryValue) && (rf.ForeignKey.Name == ref.ForeignKey && rf.ForeignKey.Schema.Name == ref.ForeignSchema) && (rf.OwnPrimaryKey == ref.OwnPrimaryKey) {
						found = true
					}
				}

				if !found {
					// Render all parsed references to make the failure actionable.
					var refs []string
					for _, rf := range r.References {
						var primaryKey, primaryKeySchema string
						if rf.PrimaryKey != nil {
							primaryKey, primaryKeySchema = rf.PrimaryKey.Name, rf.PrimaryKey.Schema.Name
						}
						refs = append(refs, fmt.Sprintf(
							"{PrimaryKey: %v PrimaryKeySchame: %v ForeignKey: %v ForeignKeySchema: %v PrimaryValue: %v OwnPrimaryKey: %v}",
							primaryKey, primaryKeySchema, rf.ForeignKey.Name, rf.ForeignKey.Schema.Name, rf.PrimaryValue, rf.OwnPrimaryKey,
						))
					}
					t.Errorf("schema %v relation %v failed to found reference %+v, has %v", s, relation.Name, ref, strings.Join(refs, ", "))
				}
			}
		} else {
			t.Errorf("schema %v failed to find relations by name %v", s, relation.Name)
		}
	})
}
// EmbeddedRelations is the expected, possibly nested, relation layout of an
// embedded struct: direct relations plus recursively embedded ones.
type EmbeddedRelations struct {
	Relations         map[string]Relation
	EmbeddedRelations map[string]EmbeddedRelations
}
// checkEmbeddedRelations recursively asserts that the parsed embedded
// relationships (actual) match the expected layout, comparing counts at each
// level and each named relation via checkSchemaRelation.
func checkEmbeddedRelations(t *testing.T, actual map[string]*schema.Relationships, expected map[string]EmbeddedRelations) {
	for name, relations := range actual {
		rs := expected[name]
		t.Run("CheckEmbeddedRelations/"+name, func(t *testing.T) {
			if len(relations.Relations) != len(rs.Relations) {
				t.Errorf("schema relations count don't match, expects %d, got %d", len(rs.Relations), len(relations.Relations))
			}
			if len(relations.EmbeddedRelations) != len(rs.EmbeddedRelations) {
				t.Errorf("schema embedded relations count don't match, expects %d, got %d", len(rs.EmbeddedRelations), len(relations.EmbeddedRelations))
			}
			for n, rel := range relations.Relations {
				if r, ok := rs.Relations[n]; !ok {
					t.Errorf("failed to find relation by name %s", n)
				} else {
					// Wrap the single relation in a throwaway Schema so
					// checkSchemaRelation can be reused for the comparison.
					checkSchemaRelation(t, &schema.Schema{
						Relationships: schema.Relationships{
							Relations: map[string]*schema.Relationship{n: rel},
						},
					}, r)
				}
			}
			// Recurse into nested embedded structs.
			checkEmbeddedRelations(t, relations.EmbeddedRelations, rs.EmbeddedRelations)
		})
	}
}
// checkField asserts that, for each DB column name in values, reading that
// field from value yields the expected result.
func checkField(t *testing.T, s *schema.Schema, value reflect.Value, values map[string]interface{}) {
	for dbName, expected := range values {
		t.Run("CheckField/"+dbName, func(t *testing.T) {
			actual, _ := s.FieldsByDBName[dbName].ValueOf(context.Background(), value)
			tests.AssertEqual(t, expected, actual)
		})
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/naming.go | schema/naming.go | package schema
import (
"crypto/sha1"
"encoding/hex"
"regexp"
"strings"
"unicode/utf8"
"github.com/jinzhu/inflection"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
// Namer is the naming-strategy interface used to derive database identifiers
// (tables, columns, constraints, indexes) from Go names.
type Namer interface {
	TableName(table string) string
	SchemaName(table string) string
	ColumnName(table, column string) string
	JoinTableName(joinTable string) string
	RelationshipFKName(Relationship) string
	CheckerName(table, column string) string
	IndexName(table, column string) string
	UniqueName(table, column string) string
}

// Replacer replacer interface like strings.Replacer
type Replacer interface {
	Replace(name string) string
}

// Compile-time assertion that NamingStrategy satisfies Namer.
var _ Namer = (*NamingStrategy)(nil)

// NamingStrategy tables, columns naming strategy
type NamingStrategy struct {
	TablePrefix         string   // prepended to every table name
	SingularTable       bool     // keep table names singular instead of pluralizing
	NameReplacer        Replacer // optional pre-processing applied to names
	NoLowerCase         bool     // skip snake_case conversion
	IdentifierMaxLength int      // max identifier length; 0 means the default of 64
}
// TableName convert string to table name
func (ns NamingStrategy) TableName(str string) string {
	name := ns.toDBName(str)
	if !ns.SingularTable {
		name = inflection.Plural(name)
	}
	return ns.TablePrefix + name
}
// SchemaName generate schema name from table name, don't guarantee it is the reverse value of TableName
func (ns NamingStrategy) SchemaName(table string) string {
	table = strings.TrimPrefix(table, ns.TablePrefix)
	if !ns.SingularTable {
		table = inflection.Singular(table)
	}
	return ns.toSchemaName(table)
}
// ColumnName convert string to column name
func (ns NamingStrategy) ColumnName(table, column string) string {
	return ns.toDBName(column)
}

// JoinTableName convert string to join table name
func (ns NamingStrategy) JoinTableName(str string) string {
	// Already-lowercase names are taken as explicit table names and passed
	// through unchanged (apart from the prefix).
	if !ns.NoLowerCase && strings.ToLower(str) == str {
		return ns.TablePrefix + str
	}

	if ns.SingularTable {
		return ns.TablePrefix + ns.toDBName(str)
	}
	return ns.TablePrefix + inflection.Plural(ns.toDBName(str))
}

// RelationshipFKName generate fk name for relation
func (ns NamingStrategy) RelationshipFKName(rel Relationship) string {
	return ns.formatName("fk", rel.Schema.Table, ns.toDBName(rel.Name))
}

// CheckerName generate checker name
func (ns NamingStrategy) CheckerName(table, column string) string {
	return ns.formatName("chk", table, column)
}

// IndexName generate index name
func (ns NamingStrategy) IndexName(table, column string) string {
	return ns.formatName("idx", table, ns.toDBName(column))
}

// UniqueName generate unique constraint name
func (ns NamingStrategy) UniqueName(table, column string) string {
	return ns.formatName("uni", table, ns.toDBName(column))
}
// formatName joins prefix, table and name with underscores (replacing
// '.' as well) and, if the result exceeds IdentifierMaxLength runes
// (default 64), truncates it and appends the first 8 hex chars of its
// SHA-1 to keep the identifier unique.
// NOTE(review): length is counted in runes but truncation slices bytes —
// confirm inputs are ASCII before relying on multi-byte names.
func (ns NamingStrategy) formatName(prefix, table, name string) string {
	raw := strings.ReplaceAll(prefix+"_"+table+"_"+name, ".", "_")

	maxLen := ns.IdentifierMaxLength
	if maxLen == 0 {
		maxLen = 64
	}

	if utf8.RuneCountInString(raw) > maxLen {
		digest := sha1.Sum([]byte(raw))
		raw = raw[:maxLen-8] + hex.EncodeToString(digest[:])[:8]
	}
	return raw
}
var (
	// commonInitialisms is golint's list of well-known initialisms; each is
	// treated as a single word when converting CamelCase to snake_case.
	// https://github.com/golang/lint/blob/master/lint.go#L770
	commonInitialisms = []string{"API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "LHS", "QPS", "RAM", "RHS", "RPC", "SLA", "SMTP", "SSH", "TLS", "TTL", "UID", "UI", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XSRF", "XSS"}
	commonInitialismsReplacer *strings.Replacer
)

// init builds a replacer that rewrites each initialism (e.g. "ID") into its
// title-cased form (e.g. "Id") so toDBName can treat it as one word.
func init() {
	commonInitialismsForReplacer := make([]string, 0, len(commonInitialisms))
	for _, initialism := range commonInitialisms {
		commonInitialismsForReplacer = append(commonInitialismsForReplacer, initialism, cases.Title(language.Und).String(initialism))
	}
	commonInitialismsReplacer = strings.NewReplacer(commonInitialismsForReplacer...)
}
// toDBName converts a Go identifier to its snake_case database name.
// Order of operations: apply NameReplacer (keeping the original name if
// the replacement comes back empty), honor NoLowerCase, collapse known
// initialisms via commonInitialismsReplacer, then split the CamelCase
// word at upper-case boundaries.
// NOTE(review): case checks are byte-based (ASCII only); non-ASCII runes
// take the lower-case path unchanged.
func (ns NamingStrategy) toDBName(name string) string {
	if name == "" {
		return ""
	}

	if ns.NameReplacer != nil {
		tmpName := ns.NameReplacer.Replace(name)

		// an empty replacement would yield an unusable name; keep the original
		if tmpName == "" {
			return name
		}

		name = tmpName
	}

	if ns.NoLowerCase {
		return name
	}

	var (
		value                          = commonInitialismsReplacer.Replace(name)
		buf                            strings.Builder
		lastCase, nextCase, nextNumber bool // upper case == true
		curCase                        = value[0] <= 'Z' && value[0] >= 'A'
	)

	// walk all but the final byte; each step looks one byte ahead
	for i, v := range value[:len(value)-1] {
		nextCase = value[i+1] <= 'Z' && value[i+1] >= 'A'
		nextNumber = value[i+1] >= '0' && value[i+1] <= '9'

		if curCase {
			if lastCase && (nextCase || nextNumber) {
				// inside a run of upper-case letters: just lower-case (+32 = ASCII downcase)
				buf.WriteRune(v + 32)
			} else {
				// word boundary: insert '_' unless adjacent to an existing one
				if i > 0 && value[i-1] != '_' && value[i+1] != '_' {
					buf.WriteByte('_')
				}
				buf.WriteRune(v + 32)
			}
		} else {
			buf.WriteRune(v)
		}

		lastCase = curCase
		curCase = nextCase
	}

	// handle the final byte, which the loop above deliberately skips
	if curCase {
		if !lastCase && len(value) > 1 {
			buf.WriteByte('_')
		}
		buf.WriteByte(value[len(value)-1] + 32)
	} else {
		buf.WriteByte(value[len(value)-1])
	}
	ret := buf.String()
	return ret
}
// toSchemaName converts a snake_case database name back to a CamelCase
// schema (Go-style) name, restoring known initialisms (e.g. "Id" -> "ID"
// when followed by an upper-case letter, '_' or end of string).
// NOTE(review): regexp.MustCompile runs once per initialism on every
// call; hot callers may want these precompiled — confirm before changing.
func (ns NamingStrategy) toSchemaName(name string) string {
	result := strings.ReplaceAll(cases.Title(language.Und, cases.NoLower).String(strings.ReplaceAll(name, "_", " ")), " ", "")
	for _, initialism := range commonInitialisms {
		result = regexp.MustCompile(cases.Title(language.Und, cases.NoLower).String(strings.ToLower(initialism))+"([A-Z]|$|_)").ReplaceAllString(result, initialism+"$1")
	}
	return result
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/interfaces.go | schema/interfaces.go | package schema
import (
"gorm.io/gorm/clause"
)
// ConstraintInterface is implemented by database constraints that can
// render themselves to SQL.
type ConstraintInterface interface {
	GetName() string
	Build() (sql string, vars []interface{})
}

// GormDataTypeInterface lets a Go type declare its GORM data type.
type GormDataTypeInterface interface {
	GormDataType() string
}

// FieldNewValuePool is a pool of fresh scan destinations for a field.
type FieldNewValuePool interface {
	Get() interface{}
	Put(interface{})
}

// CreateClausesInterface lets a field value contribute clauses to CREATE statements.
type CreateClausesInterface interface {
	CreateClauses(*Field) []clause.Interface
}

// QueryClausesInterface lets a field value contribute clauses to SELECT statements.
type QueryClausesInterface interface {
	QueryClauses(*Field) []clause.Interface
}

// UpdateClausesInterface lets a field value contribute clauses to UPDATE statements.
type UpdateClausesInterface interface {
	UpdateClauses(*Field) []clause.Interface
}

// DeleteClausesInterface lets a field value contribute clauses to DELETE statements.
type DeleteClausesInterface interface {
	DeleteClauses(*Field) []clause.Interface
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/utils.go | schema/utils.go | package schema
import (
"context"
"fmt"
"reflect"
"regexp"
"strings"
"gorm.io/gorm/clause"
"gorm.io/gorm/utils"
)
// embeddedCacheKey is the settings key under which parsed embedded-struct
// schemas are cached.
var embeddedCacheKey = "embedded_cache_store"
// ParseTagSetting parses a gorm-style tag string into a settings map.
// Entries are separated by sep; a trailing backslash escapes the
// separator so it can appear inside a value. Keys are upper-cased and
// trimmed; "key:value" entries keep the value (with `\"` unescaped),
// while bare flags map to themselves.
func ParseTagSetting(str string, sep string) map[string]string {
	settings := map[string]string{}

	// re-join segments whose predecessor ended with the escape character
	segments := strings.Split(str, sep)
	merged := make([]string, 0, len(segments))
	for i := 0; i < len(segments); i++ {
		current := segments[i]
		for strings.HasSuffix(current, "\\") && i+1 < len(segments) {
			i++
			current = current[:len(current)-1] + sep + segments[i]
		}
		merged = append(merged, current)
	}

	for _, entry := range merged {
		parts := strings.Split(entry, ":")
		key := strings.TrimSpace(strings.ToUpper(parts[0]))

		switch {
		case len(parts) >= 2:
			settings[key] = strings.ReplaceAll(strings.Join(parts[1:], ":"), `\"`, `"`)
		case key != "":
			settings[key] = key
		}
	}

	return settings
}
// toColumns splits a comma-separated column list into trimmed names.
// An empty input yields a nil slice.
func toColumns(val string) (results []string) {
	if val == "" {
		return
	}
	for _, part := range strings.Split(val, ",") {
		results = append(results, strings.TrimSpace(part))
	}
	return
}
// removeSettingFromTag strips each named setting (case-insensitively,
// with an optional ":value" part) from the gorm section of a struct tag,
// preserving the separator or closing quote that followed it.
func removeSettingFromTag(tag reflect.StructTag, names ...string) reflect.StructTag {
	result := string(tag)
	for _, name := range names {
		pattern := regexp.MustCompile(`(?i)(gorm:.*?)(` + name + `(:.*?)?)(;|("))`)
		result = pattern.ReplaceAllString(result, "${1}${5}")
	}
	return reflect.StructTag(result)
}
// appendSettingFromTag prepends value to the gorm section of a struct
// tag, unless the section already contains it (substring match).
func appendSettingFromTag(tag reflect.StructTag, value string) reflect.StructTag {
	existing := tag.Get("gorm")
	if strings.Contains(existing, value) {
		return tag
	}
	return reflect.StructTag(`gorm:"` + value + `;` + existing + `"`)
}
// GetRelationsValues collects the related model values reachable from
// reflectValue by walking rels in order: each relationship's non-zero
// field values are gathered into a []*ModelType slice, which then becomes
// the input for the next relationship, so the result holds the values at
// the end of the chain.
func GetRelationsValues(ctx context.Context, reflectValue reflect.Value, rels []*Relationship) (reflectResults reflect.Value) {
	for _, rel := range rels {
		reflectResults = reflect.MakeSlice(reflect.SliceOf(reflect.PointerTo(rel.FieldSchema.ModelType)), 0, 1)

		// appendToResults adds the relation field of one record (skipping
		// zero values); slice-valued relations contribute each element.
		appendToResults := func(value reflect.Value) {
			if _, isZero := rel.Field.ValueOf(ctx, value); !isZero {
				result := reflect.Indirect(rel.Field.ReflectValueOf(ctx, value))
				switch result.Kind() {
				case reflect.Struct:
					reflectResults = reflect.Append(reflectResults, result.Addr())
				case reflect.Slice, reflect.Array:
					for i := 0; i < result.Len(); i++ {
						// ensure appended elements are always pointers
						if elem := result.Index(i); elem.Kind() == reflect.Ptr {
							reflectResults = reflect.Append(reflectResults, elem)
						} else {
							reflectResults = reflect.Append(reflectResults, elem.Addr())
						}
					}
				}
			}
		}

		switch reflectValue.Kind() {
		case reflect.Struct:
			appendToResults(reflectValue)
		case reflect.Slice:
			for i := 0; i < reflectValue.Len(); i++ {
				appendToResults(reflectValue.Index(i))
			}
		}

		// feed this level's results into the next relationship in the chain
		reflectValue = reflectResults
	}

	return
}
// GetIdentityFieldValuesMap extracts the values of the given fields from
// reflectValue (a map, struct, or slice/array of structs) and returns:
// a map from the stringified value tuple to the reflect.Values carrying
// it, and the list of distinct value tuples. A struct whose fields are
// all zero yields (nil, nil); all-zero slice elements are skipped.
func GetIdentityFieldValuesMap(ctx context.Context, reflectValue reflect.Value, fields []*Field) (map[string][]reflect.Value, [][]interface{}) {
	var (
		results       = [][]interface{}{}
		dataResults   = map[string][]reflect.Value{}
		loaded        = map[interface{}]bool{}
		notZero, zero bool
	)

	if reflectValue.Kind() == reflect.Ptr ||
		reflectValue.Kind() == reflect.Interface {
		reflectValue = reflectValue.Elem()
	}

	switch reflectValue.Kind() {
	case reflect.Map:
		results = [][]interface{}{make([]interface{}, len(fields))}

		for idx, field := range fields {
			// prefer the DB column name as map key, fall back to the Go field name
			mapValue := reflectValue.MapIndex(reflect.ValueOf(field.DBName))
			if mapValue.IsZero() {
				mapValue = reflectValue.MapIndex(reflect.ValueOf(field.Name))
			}
			results[0][idx] = mapValue.Interface()
		}

		dataResults[utils.ToStringKey(results[0]...)] = []reflect.Value{reflectValue}
	case reflect.Struct:
		results = [][]interface{}{make([]interface{}, len(fields))}

		for idx, field := range fields {
			results[0][idx], zero = field.ValueOf(ctx, reflectValue)
			notZero = notZero || !zero
		}

		if !notZero {
			return nil, nil
		}

		dataResults[utils.ToStringKey(results[0]...)] = []reflect.Value{reflectValue}
	case reflect.Slice, reflect.Array:
		for i := 0; i < reflectValue.Len(); i++ {
			elem := reflectValue.Index(i)
			elemKey := elem.Interface()
			if elem.Kind() != reflect.Ptr && elem.CanAddr() {
				elemKey = elem.Addr().Interface()
			}

			// skip elements already processed (dedup by identity)
			if _, ok := loaded[elemKey]; ok {
				continue
			}
			loaded[elemKey] = true

			fieldValues := make([]interface{}, len(fields))
			notZero = false
			for idx, field := range fields {
				fieldValues[idx], zero = field.ValueOf(ctx, elem)
				notZero = notZero || !zero
			}

			if notZero {
				dataKey := utils.ToStringKey(fieldValues...)
				if _, ok := dataResults[dataKey]; !ok {
					results = append(results, fieldValues)
					dataResults[dataKey] = []reflect.Value{elem}
				} else {
					dataResults[dataKey] = append(dataResults[dataKey], elem)
				}
			}
		}
	}

	return dataResults, results
}
// GetIdentityFieldValuesMapFromValues merges the identity maps of several
// values: each value is dereferenced and processed by
// GetIdentityFieldValuesMap, and the per-value results are concatenated.
func GetIdentityFieldValuesMapFromValues(ctx context.Context, values []interface{}, fields []*Field) (map[string][]reflect.Value, [][]interface{}) {
	mergedMap := map[string][]reflect.Value{}
	merged := [][]interface{}{}

	for _, value := range values {
		valueMap, valueResults := GetIdentityFieldValuesMap(ctx, reflect.Indirect(reflect.ValueOf(value)), fields)
		for key, refs := range valueMap {
			mergedMap[key] = append(mergedMap[key], refs...)
		}
		merged = append(merged, valueResults...)
	}

	return mergedMap, merged
}
// ToQueryValues builds the column expression and value list used to
// query by foreign keys: a single key yields one clause.Column with
// scalar values, while composite keys yield a []clause.Column with
// tuple values.
func ToQueryValues(table string, foreignKeys []string, foreignValues [][]interface{}) (interface{}, []interface{}) {
	values := make([]interface{}, len(foreignValues))

	if len(foreignKeys) == 1 {
		for i, row := range foreignValues {
			values[i] = row[0]
		}
		return clause.Column{Table: table, Name: foreignKeys[0]}, values
	}

	for i, row := range foreignValues {
		values[i] = row
	}

	columns := make([]clause.Column, len(foreignKeys))
	for i, key := range foreignKeys {
		columns[i] = clause.Column{Table: table, Name: key}
	}
	return columns, values
}
// embeddedNamer wraps a Namer with a fixed table name, used when parsing
// embedded structs whose fields belong to the parent's table.
type embeddedNamer struct {
	Table string
	Namer
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/field_test.go | schema/field_test.go | package schema_test
import (
"context"
"database/sql"
"reflect"
"sync"
"testing"
"time"
"gorm.io/gorm"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils/tests"
)
// TestFieldValuerAndSetter verifies that schema field Value/Set round-trip
// plain values, nil pointers, and sql.Null*/custom valuer types.
func TestFieldValuerAndSetter(t *testing.T) {
	var (
		userSchema, _ = schema.Parse(&tests.User{}, &sync.Map{}, schema.NamingStrategy{})
		user          = tests.User{
			Model: gorm.Model{
				ID:        10,
				CreatedAt: time.Now(),
				UpdatedAt: time.Now(),
				DeletedAt: gorm.DeletedAt{Time: time.Now(), Valid: true},
			},
			Name:     "valuer_and_setter",
			Age:      18,
			Birthday: tests.Now(),
			Active:   true,
		}
		reflectValue = reflect.ValueOf(&user)
	)

	// test valuer
	values := map[string]interface{}{
		"name":       user.Name,
		"id":         user.ID,
		"created_at": user.CreatedAt,
		"updated_at": user.UpdatedAt,
		"deleted_at": user.DeletedAt,
		"age":        user.Age,
		"birthday":   user.Birthday,
		"active":     true,
	}
	checkField(t, userSchema, reflectValue, values)

	var f *bool
	// test setter: nil values should reset fields to their zero value
	newValues := map[string]interface{}{
		"name":       "valuer_and_setter_2",
		"id":         2,
		"created_at": time.Now(),
		"updated_at": nil,
		"deleted_at": time.Now(),
		"age":        20,
		"birthday":   time.Now(),
		"active":     f,
	}

	for k, v := range newValues {
		if err := userSchema.FieldsByDBName[k].Set(context.Background(), reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	// nil assignments are expected to leave zero values behind
	newValues["updated_at"] = time.Time{}
	newValues["active"] = false
	checkField(t, userSchema, reflectValue, newValues)

	// test valuer and other type
	age := myint(10)
	var nilTime *time.Time
	newValues2 := map[string]interface{}{
		"name":       sql.NullString{String: "valuer_and_setter_3", Valid: true},
		"id":         &sql.NullInt64{Int64: 3, Valid: true},
		"created_at": tests.Now(),
		"updated_at": nilTime,
		"deleted_at": time.Now(),
		"age":        &age,
		"birthday":   mytime(time.Now()),
		"active":     mybool(true),
	}

	for k, v := range newValues2 {
		if err := userSchema.FieldsByDBName[k].Set(context.Background(), reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	newValues2["updated_at"] = time.Time{}
	checkField(t, userSchema, reflectValue, newValues2)
}
// TestPointerFieldValuerAndSetter mirrors TestFieldValuerAndSetter for a
// model whose fields are pointers.
func TestPointerFieldValuerAndSetter(t *testing.T) {
	var (
		userSchema, _      = schema.Parse(&User{}, &sync.Map{}, schema.NamingStrategy{})
		name               = "pointer_field_valuer_and_setter"
		age           uint = 18
		active             = true
		user               = User{
			Model: &gorm.Model{
				ID:        10,
				CreatedAt: time.Now(),
				DeletedAt: gorm.DeletedAt{Time: time.Now(), Valid: true},
			},
			Name:     &name,
			Age:      &age,
			Birthday: tests.Now(),
			Active:   &active,
		}
		reflectValue = reflect.ValueOf(&user)
	)

	// test valuer
	values := map[string]interface{}{
		"name":       user.Name,
		"id":         user.ID,
		"created_at": user.CreatedAt,
		"deleted_at": user.DeletedAt,
		"age":        user.Age,
		"birthday":   user.Birthday,
		"active":     true,
	}
	checkField(t, userSchema, reflectValue, values)

	// test setter
	newValues := map[string]interface{}{
		"name":       "valuer_and_setter_2",
		"id":         2,
		"created_at": time.Now(),
		"deleted_at": time.Now(),
		"age":        20,
		"birthday":   time.Now(),
		"active":     false,
	}

	for k, v := range newValues {
		if err := userSchema.FieldsByDBName[k].Set(context.Background(), reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	checkField(t, userSchema, reflectValue, newValues)

	// test valuer and other type
	age2 := myint(10)
	newValues2 := map[string]interface{}{
		"name":       sql.NullString{String: "valuer_and_setter_3", Valid: true},
		"id":         &sql.NullInt64{Int64: 3, Valid: true},
		"created_at": tests.Now(),
		"deleted_at": time.Now(),
		"age":        &age2,
		"birthday":   mytime(time.Now()),
		"active":     mybool(true),
	}

	for k, v := range newValues2 {
		if err := userSchema.FieldsByDBName[k].Set(context.Background(), reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	checkField(t, userSchema, reflectValue, newValues2)
}
// TestAdvancedDataTypeValuerAndSetter verifies Value/Set for fields
// declared with sql.Null* and custom valuer types (plus pointers to them).
func TestAdvancedDataTypeValuerAndSetter(t *testing.T) {
	var (
		userSchema, _ = schema.Parse(&AdvancedDataTypeUser{}, &sync.Map{}, schema.NamingStrategy{})
		name          = "advanced_data_type_valuer_and_setter"
		deletedAt     = mytime(time.Now())
		isAdmin       = mybool(false)
		user          = AdvancedDataTypeUser{
			ID:           sql.NullInt64{Int64: 10, Valid: true},
			Name:         &sql.NullString{String: name, Valid: true},
			Birthday:     sql.NullTime{Time: time.Now(), Valid: true},
			RegisteredAt: mytime(time.Now()),
			DeletedAt:    &deletedAt,
			Active:       mybool(true),
			Admin:        &isAdmin,
		}
		reflectValue = reflect.ValueOf(&user)
	)

	// test valuer
	values := map[string]interface{}{
		"id":            user.ID,
		"name":          user.Name,
		"birthday":      user.Birthday,
		"registered_at": user.RegisteredAt,
		"deleted_at":    user.DeletedAt,
		"active":        user.Active,
		"admin":         user.Admin,
	}
	checkField(t, userSchema, reflectValue, values)

	// test setter with same-typed values
	newDeletedAt := mytime(time.Now())
	newIsAdmin := mybool(true)
	newValues := map[string]interface{}{
		"id":            sql.NullInt64{Int64: 1, Valid: true},
		"name":          &sql.NullString{String: name + "rename", Valid: true},
		"birthday":      time.Now(),
		"registered_at": mytime(time.Now()),
		"deleted_at":    &newDeletedAt,
		"active":        mybool(false),
		"admin":         &newIsAdmin,
	}

	for k, v := range newValues {
		if err := userSchema.FieldsByDBName[k].Set(context.Background(), reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	checkField(t, userSchema, reflectValue, newValues)

	// test setter with plain Go values requiring conversion
	newValues2 := map[string]interface{}{
		"id":            5,
		"name":          name + "rename2",
		"birthday":      time.Now(),
		"registered_at": time.Now(),
		"deleted_at":    time.Now(),
		"active":        true,
		"admin":         false,
	}

	for k, v := range newValues2 {
		if err := userSchema.FieldsByDBName[k].Set(context.Background(), reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	checkField(t, userSchema, reflectValue, newValues2)
}
// UserWithPermissionControl exercises every field permission tag form:
// ignored (-), read-only (->), write-only, create-only, update-only,
// combined, and migration-ignored.
type UserWithPermissionControl struct {
	ID    uint
	Name  string `gorm:"-"`
	Name2 string `gorm:"->"`
	Name3 string `gorm:"<-"`
	Name4 string `gorm:"<-:create"`
	Name5 string `gorm:"<-:update"`
	Name6 string `gorm:"<-:create,update"`
	Name7 string `gorm:"->:false;<-:create,update"`
	Name8 string `gorm:"->;-:migration"`
}
// TestParseFieldWithPermission checks that each permission tag produces
// the expected Creatable/Updatable/Readable/IgnoreMigration flags.
func TestParseFieldWithPermission(t *testing.T) {
	user, err := schema.Parse(&UserWithPermissionControl{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("Failed to parse user with permission, got error %v", err)
	}

	fields := []*schema.Field{
		{Name: "ID", DBName: "id", BindNames: []string{"ID"}, DataType: schema.Uint, PrimaryKey: true, Size: 64, Creatable: true, Updatable: true, Readable: true, HasDefaultValue: true, AutoIncrement: true},
		{Name: "Name", DBName: "", BindNames: []string{"Name"}, DataType: "", Tag: `gorm:"-"`, Creatable: false, Updatable: false, Readable: false},
		{Name: "Name2", DBName: "name2", BindNames: []string{"Name2"}, DataType: schema.String, Tag: `gorm:"->"`, Creatable: false, Updatable: false, Readable: true},
		{Name: "Name3", DBName: "name3", BindNames: []string{"Name3"}, DataType: schema.String, Tag: `gorm:"<-"`, Creatable: true, Updatable: true, Readable: true},
		{Name: "Name4", DBName: "name4", BindNames: []string{"Name4"}, DataType: schema.String, Tag: `gorm:"<-:create"`, Creatable: true, Updatable: false, Readable: true},
		{Name: "Name5", DBName: "name5", BindNames: []string{"Name5"}, DataType: schema.String, Tag: `gorm:"<-:update"`, Creatable: false, Updatable: true, Readable: true},
		{Name: "Name6", DBName: "name6", BindNames: []string{"Name6"}, DataType: schema.String, Tag: `gorm:"<-:create,update"`, Creatable: true, Updatable: true, Readable: true},
		{Name: "Name7", DBName: "name7", BindNames: []string{"Name7"}, DataType: schema.String, Tag: `gorm:"->:false;<-:create,update"`, Creatable: true, Updatable: true, Readable: false},
		{Name: "Name8", DBName: "name8", BindNames: []string{"Name8"}, DataType: schema.String, Tag: `gorm:"->;-:migration"`, Creatable: false, Updatable: false, Readable: true, IgnoreMigration: true},
	}

	for _, f := range fields {
		checkSchemaField(t, user, f, func(f *schema.Field) {})
	}
}
// Named aliases of every basic kind, embedded into TypeAlias to verify
// that schema parsing resolves defined types to their underlying kinds.
type (
	ID      int64
	INT     int
	INT8    int8
	INT16   int16
	INT32   int32
	INT64   int64
	UINT    uint
	UINT8   uint8
	UINT16  uint16
	UINT32  uint32
	UINT64  uint64
	FLOAT32 float32
	FLOAT64 float64
	BOOL    bool
	STRING  string
	TIME    time.Time
	BYTES   []byte

	TypeAlias struct {
		ID
		INT     `gorm:"column:fint"`
		INT8    `gorm:"column:fint8"`
		INT16   `gorm:"column:fint16"`
		INT32   `gorm:"column:fint32"`
		INT64   `gorm:"column:fint64"`
		UINT    `gorm:"column:fuint"`
		UINT8   `gorm:"column:fuint8"`
		UINT16  `gorm:"column:fuint16"`
		UINT32  `gorm:"column:fuint32"`
		UINT64  `gorm:"column:fuint64"`
		FLOAT32 `gorm:"column:ffloat32"`
		FLOAT64 `gorm:"column:ffloat64"`
		BOOL    `gorm:"column:fbool"`
		STRING  `gorm:"column:fstring"`
		TIME    `gorm:"column:ftime"`
		BYTES   `gorm:"column:fbytes"`
	}
)
// TestTypeAliasField checks that fields of defined (aliased) types parse
// with the correct underlying DataType and Size.
func TestTypeAliasField(t *testing.T) {
	alias, err := schema.Parse(&TypeAlias{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("Failed to parse TypeAlias with permission, got error %v", err)
	}

	fields := []*schema.Field{
		{Name: "ID", DBName: "id", BindNames: []string{"ID"}, DataType: schema.Int, Creatable: true, Updatable: true, Readable: true, Size: 64, PrimaryKey: true, HasDefaultValue: true, AutoIncrement: true},
		{Name: "INT", DBName: "fint", BindNames: []string{"INT"}, DataType: schema.Int, Creatable: true, Updatable: true, Readable: true, Size: 64, Tag: `gorm:"column:fint"`},
		{Name: "INT8", DBName: "fint8", BindNames: []string{"INT8"}, DataType: schema.Int, Creatable: true, Updatable: true, Readable: true, Size: 8, Tag: `gorm:"column:fint8"`},
		{Name: "INT16", DBName: "fint16", BindNames: []string{"INT16"}, DataType: schema.Int, Creatable: true, Updatable: true, Readable: true, Size: 16, Tag: `gorm:"column:fint16"`},
		{Name: "INT32", DBName: "fint32", BindNames: []string{"INT32"}, DataType: schema.Int, Creatable: true, Updatable: true, Readable: true, Size: 32, Tag: `gorm:"column:fint32"`},
		{Name: "INT64", DBName: "fint64", BindNames: []string{"INT64"}, DataType: schema.Int, Creatable: true, Updatable: true, Readable: true, Size: 64, Tag: `gorm:"column:fint64"`},
		{Name: "UINT", DBName: "fuint", BindNames: []string{"UINT"}, DataType: schema.Uint, Creatable: true, Updatable: true, Readable: true, Size: 64, Tag: `gorm:"column:fuint"`},
		{Name: "UINT8", DBName: "fuint8", BindNames: []string{"UINT8"}, DataType: schema.Uint, Creatable: true, Updatable: true, Readable: true, Size: 8, Tag: `gorm:"column:fuint8"`},
		{Name: "UINT16", DBName: "fuint16", BindNames: []string{"UINT16"}, DataType: schema.Uint, Creatable: true, Updatable: true, Readable: true, Size: 16, Tag: `gorm:"column:fuint16"`},
		{Name: "UINT32", DBName: "fuint32", BindNames: []string{"UINT32"}, DataType: schema.Uint, Creatable: true, Updatable: true, Readable: true, Size: 32, Tag: `gorm:"column:fuint32"`},
		{Name: "UINT64", DBName: "fuint64", BindNames: []string{"UINT64"}, DataType: schema.Uint, Creatable: true, Updatable: true, Readable: true, Size: 64, Tag: `gorm:"column:fuint64"`},
		{Name: "FLOAT32", DBName: "ffloat32", BindNames: []string{"FLOAT32"}, DataType: schema.Float, Creatable: true, Updatable: true, Readable: true, Size: 32, Tag: `gorm:"column:ffloat32"`},
		{Name: "FLOAT64", DBName: "ffloat64", BindNames: []string{"FLOAT64"}, DataType: schema.Float, Creatable: true, Updatable: true, Readable: true, Size: 64, Tag: `gorm:"column:ffloat64"`},
		{Name: "BOOL", DBName: "fbool", BindNames: []string{"BOOL"}, DataType: schema.Bool, Creatable: true, Updatable: true, Readable: true, Tag: `gorm:"column:fbool"`},
		{Name: "STRING", DBName: "fstring", BindNames: []string{"STRING"}, DataType: schema.String, Creatable: true, Updatable: true, Readable: true, Tag: `gorm:"column:fstring"`},
		{Name: "TIME", DBName: "ftime", BindNames: []string{"TIME"}, DataType: schema.Time, Creatable: true, Updatable: true, Readable: true, Tag: `gorm:"column:ftime"`},
		{Name: "BYTES", DBName: "fbytes", BindNames: []string{"BYTES"}, DataType: schema.Bytes, Creatable: true, Updatable: true, Readable: true, Tag: `gorm:"column:fbytes"`},
	}

	for _, f := range fields {
		checkSchemaField(t, alias, f, func(f *schema.Field) {})
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/naming_test.go | schema/naming_test.go | package schema
import (
"strings"
"testing"
)
// TestToDBName covers CamelCase-to-snake_case conversion (including
// initialisms) and the reverse SchemaName conversion.
func TestToDBName(t *testing.T) {
	maps := map[string]string{
		"":                          "",
		"x":                         "x",
		"X":                         "x",
		"userRestrictions":          "user_restrictions",
		"ThisIsATest":               "this_is_a_test",
		"PFAndESI":                  "pf_and_esi",
		"AbcAndJkl":                 "abc_and_jkl",
		"EmployeeID":                "employee_id",
		"SKU_ID":                    "sku_id",
		"FieldX":                    "field_x",
		"HTTPAndSMTP":               "http_and_smtp",
		"HTTPServerHandlerForURLID": "http_server_handler_for_url_id",
		"UUID":                      "uuid",
		"HTTPURL":                   "http_url",
		"HTTP_URL":                  "http_url",
		"SHA256Hash":                "sha256_hash",
		"SHA256HASH":                "sha256_hash",
		"ThisIsActuallyATestSoWeMayBeAbleToUseThisCodeInGormPackageAlsoIdCanBeUsedAtTheEndAsID": "this_is_actually_a_test_so_we_may_be_able_to_use_this_code_in_gorm_package_also_id_can_be_used_at_the_end_as_id",
	}

	ns := NamingStrategy{}
	for key, value := range maps {
		if ns.toDBName(key) != value {
			t.Errorf("%v toName should equal %v, but got %v", key, value, ns.toDBName(key))
		}
	}

	// reverse direction: snake_case back to CamelCase schema names
	maps = map[string]string{
		"x":                              "X",
		"user_restrictions":              "UserRestriction",
		"this_is_a_test":                 "ThisIsATest",
		"abc_and_jkl":                    "AbcAndJkl",
		"employee_id":                    "EmployeeID",
		"field_x":                        "FieldX",
		"http_and_smtp":                  "HTTPAndSMTP",
		"http_server_handler_for_url_id": "HTTPServerHandlerForURLID",
		"uuid":                           "UUID",
		"http_url":                       "HTTPURL",
		"sha256_hash":                    "Sha256Hash",
		"this_is_actually_a_test_so_we_may_be_able_to_use_this_code_in_gorm_package_also_id_can_be_used_at_the_end_as_id": "ThisIsActuallyATestSoWeMayBeAbleToUseThisCodeInGormPackageAlsoIDCanBeUsedAtTheEndAsID",
	}
	for key, value := range maps {
		if ns.SchemaName(key) != value {
			t.Errorf("%v schema name should equal %v, but got %v", key, value, ns.SchemaName(key))
		}
	}
}
// TestNamingStrategy exercises a prefixed, singular strategy with a
// NameReplacer across index/checker/join-table/table/column naming.
func TestNamingStrategy(t *testing.T) {
	ns := NamingStrategy{
		TablePrefix:   "public.",
		SingularTable: true,
		NameReplacer:  strings.NewReplacer("CID", "Cid"),
	}

	if got := ns.IndexName("public.table", "name"); got != "idx_public_table_name" {
		t.Errorf("invalid index name generated, got %v", got)
	}

	if got := ns.CheckerName("public.table", "name"); got != "chk_public_table_name" {
		t.Errorf("invalid checker name generated, got %v", got)
	}

	if got := ns.JoinTableName("user_languages"); got != "public.user_languages" {
		t.Errorf("invalid join table generated, got %v", got)
	}

	if got := ns.JoinTableName("UserLanguage"); got != "public.user_language" {
		t.Errorf("invalid join table generated, got %v", got)
	}

	if got := ns.TableName("Company"); got != "public.company" {
		t.Errorf("invalid table name generated, got %v", got)
	}

	if got := ns.ColumnName("", "NameCID"); got != "name_cid" {
		t.Errorf("invalid column name generated, got %v", got)
	}
}
// CustomReplacer adapts a plain function to the Replacer interface.
type CustomReplacer struct {
	f func(string) string
}

func (r CustomReplacer) Replace(name string) string {
	return r.f(name)
}
// TestCustomReplacer verifies that a function-backed Replacer is applied
// before snake_case conversion (with NoLowerCase disabled).
func TestCustomReplacer(t *testing.T) {
	ns := NamingStrategy{
		TablePrefix:   "public.",
		SingularTable: true,
		NameReplacer: CustomReplacer{
			func(name string) string {
				replaced := "REPLACED_" + strings.ToUpper(name)
				return strings.NewReplacer("CID", "_Cid").Replace(replaced)
			},
		},
		NoLowerCase: false,
	}

	idxName := ns.IndexName("public.table", "name")
	if idxName != "idx_public_table_replaced_name" {
		t.Errorf("invalid index name generated, got %v", idxName)
	}

	// CheckerName does not run the column through the replacer
	chkName := ns.CheckerName("public.table", "name")
	if chkName != "chk_public_table_name" {
		t.Errorf("invalid checker name generated, got %v", chkName)
	}

	joinTable := ns.JoinTableName("user_languages")
	if joinTable != "public.user_languages" { // Seems like a bug in NamingStrategy to skip the Replacer when the name is lowercase here.
		t.Errorf("invalid join table generated, got %v", joinTable)
	}

	joinTable2 := ns.JoinTableName("UserLanguage")
	if joinTable2 != "public.replaced_userlanguage" {
		t.Errorf("invalid join table generated, got %v", joinTable2)
	}

	tableName := ns.TableName("Company")
	if tableName != "public.replaced_company" {
		t.Errorf("invalid table name generated, got %v", tableName)
	}

	columdName := ns.ColumnName("", "NameCID")
	if columdName != "replaced_name_cid" {
		t.Errorf("invalid column name generated, got %v", columdName)
	}
}
// TestCustomReplacerWithNoLowerCase verifies that with NoLowerCase set the
// replacer output is kept verbatim (no snake_case conversion).
func TestCustomReplacerWithNoLowerCase(t *testing.T) {
	ns := NamingStrategy{
		TablePrefix:   "public.",
		SingularTable: true,
		NameReplacer: CustomReplacer{
			func(name string) string {
				replaced := "REPLACED_" + strings.ToUpper(name)
				return strings.NewReplacer("CID", "_Cid").Replace(replaced)
			},
		},
		NoLowerCase: true,
	}

	idxName := ns.IndexName("public.table", "name")
	if idxName != "idx_public_table_REPLACED_NAME" {
		t.Errorf("invalid index name generated, got %v", idxName)
	}

	// CheckerName does not run the column through the replacer
	chkName := ns.CheckerName("public.table", "name")
	if chkName != "chk_public_table_name" {
		t.Errorf("invalid checker name generated, got %v", chkName)
	}

	joinTable := ns.JoinTableName("user_languages")
	if joinTable != "public.REPLACED_USER_LANGUAGES" {
		t.Errorf("invalid join table generated, got %v", joinTable)
	}

	joinTable2 := ns.JoinTableName("UserLanguage")
	if joinTable2 != "public.REPLACED_USERLANGUAGE" {
		t.Errorf("invalid join table generated, got %v", joinTable2)
	}

	tableName := ns.TableName("Company")
	if tableName != "public.REPLACED_COMPANY" {
		t.Errorf("invalid table name generated, got %v", tableName)
	}

	columdName := ns.ColumnName("", "NameCID")
	if columdName != "REPLACED_NAME_Cid" {
		t.Errorf("invalid column name generated, got %v", columdName)
	}
}
// TestFormatNameWithStringLongerThan63Characters checks hash-suffixed
// truncation at a 63-character limit.
func TestFormatNameWithStringLongerThan63Characters(t *testing.T) {
	ns := NamingStrategy{IdentifierMaxLength: 63}

	if got := ns.formatName("prefix", "table", "thisIsAVeryVeryVeryVeryVeryVeryVeryVeryVeryLongString"); got != "prefix_table_thisIsAVeryVeryVeryVeryVeryVeryVeryVeryVer180f2c67" {
		t.Errorf("invalid formatted name generated, got %v", got)
	}
}
// TestFormatNameWithStringLongerThan64Characters checks hash-suffixed
// truncation at a 64-character limit.
func TestFormatNameWithStringLongerThan64Characters(t *testing.T) {
	ns := NamingStrategy{IdentifierMaxLength: 64}

	if got := ns.formatName("prefix", "table", "thisIsAVeryVeryVeryVeryVeryVeryVeryVeryVeryLongString"); got != "prefix_table_thisIsAVeryVeryVeryVeryVeryVeryVeryVeryVery180f2c67" {
		t.Errorf("invalid formatted name generated, got %v", got)
	}
}
// TestReplaceEmptyTableName verifies that a replacer producing an empty
// string falls back to the original name instead of an empty table name.
func TestReplaceEmptyTableName(t *testing.T) {
	ns := NamingStrategy{
		SingularTable: true,
		NameReplacer:  strings.NewReplacer("Model", ""),
	}

	if got := ns.TableName("Model"); got != "Model" {
		t.Errorf("invalid table name generated, got %v", got)
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/index_test.go | schema/index_test.go | package schema_test
import (
"sync"
"testing"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils/tests"
)
// UserIndex exercises every index tag form: plain/unique/typed indexes,
// sort/collate/length/where options, FULLTEXT class, expressions,
// priorities, multiple indexes per field, and composite indexes
// (flattened, nested via embedding, and unique with priorities).
type UserIndex struct {
	Name         string `gorm:"index"`
	Name2        string `gorm:"index:idx_name,unique"`
	Name3        string `gorm:"index:,sort:desc,collate:utf8,type:btree,length:10,where:name3 != 'jinzhu'"`
	Name4        string `gorm:"uniqueIndex"`
	Name5        int64  `gorm:"index:,class:FULLTEXT,comment:hello \\, world,where:age > 10"`
	Name6        int64  `gorm:"index:profile,comment:hello \\, world,where:age > 10"`
	Age          int64  `gorm:"index:profile,expression:ABS(age),option:WITH PARSER parser_name"`
	OID          int64  `gorm:"index:idx_id;index:idx_oid,unique"`
	MemberNumber string `gorm:"index:idx_id,priority:1"`
	Name7        string `gorm:"index:type"`
	Name8        string `gorm:"index:,length:10;index:,collate:utf8"`

	CompName1 string `gorm:"index:,unique,composite:idx_compname_1,option:NULLS NOT DISTINCT;not null"`
	CompName2 string `gorm:"index:,composite:idx_compname_1"`

	// Composite Index: Flattened structure.
	Data0A string `gorm:"index:,composite:comp_id0"`
	Data0B string `gorm:"index:,composite:comp_id0"`

	// Composite Index: Nested structure.
	Data1A string `gorm:"index:,composite:comp_id1"`
	CompIdxLevel1C

	// Composite Index: Unique and priority.
	Data2A string `gorm:"index:,unique,composite:comp_id2,priority:2"`
	CompIdxLevel2C
}
// Nested embedded structs contributing columns to the comp_id1 and
// comp_id2 composite indexes (comp_id2 additionally sets priorities).
type CompIdxLevel1C struct {
	CompIdxLevel1B
	Data1C string `gorm:"index:,composite:comp_id1"`
}

type CompIdxLevel1B struct {
	Data1B string `gorm:"index:,composite:comp_id1"`
}

type CompIdxLevel2C struct {
	CompIdxLevel2B
	Data2C string `gorm:"index:,unique,composite:comp_id2,priority:1"`
}

type CompIdxLevel2B struct {
	Data2B string `gorm:"index:,unique,composite:comp_id2,priority:3"`
}
// TestParseIndex parses UserIndex and compares every resulting index
// (name, class, type, where/comment/option, field order and options)
// against the expected set.
func TestParseIndex(t *testing.T) {
	user, err := schema.Parse(&UserIndex{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse user index, got error %v", err)
	}

	results := []*schema.Index{
		{
			Name:   "idx_user_indices_name",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name"}}},
		},
		{
			Name:   "idx_name",
			Class:  "UNIQUE",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name2", UniqueIndex: "idx_name"}}},
		},
		{
			Name:  "idx_user_indices_name3",
			Type:  "btree",
			Where: "name3 != 'jinzhu'",
			Fields: []schema.IndexOption{{
				Field:   &schema.Field{Name: "Name3"},
				Sort:    "desc",
				Collate: "utf8",
				Length:  10,
			}},
		},
		{
			Name:   "idx_user_indices_name4",
			Class:  "UNIQUE",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name4", UniqueIndex: "idx_user_indices_name4"}}},
		},
		{
			Name:    "idx_user_indices_name5",
			Class:   "FULLTEXT",
			Comment: "hello , world",
			Where:   "age > 10",
			Fields:  []schema.IndexOption{{Field: &schema.Field{Name: "Name5"}}},
		},
		{
			Name:    "profile",
			Comment: "hello , world",
			Where:   "age > 10",
			Option:  "WITH PARSER parser_name",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name6"}}, {
				Field:      &schema.Field{Name: "Age"},
				Expression: "ABS(age)",
			}},
		},
		{
			// MemberNumber first due to priority:1
			Name:   "idx_id",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "MemberNumber"}}, {Field: &schema.Field{Name: "OID", UniqueIndex: "idx_oid"}}},
		},
		{
			Name:   "idx_oid",
			Class:  "UNIQUE",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "OID", UniqueIndex: "idx_oid"}}},
		},
		{
			Name:   "type",
			Type:   "",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name7"}}},
		},
		{
			Name: "idx_user_indices_name8",
			Type: "",
			Fields: []schema.IndexOption{
				{Field: &schema.Field{Name: "Name8"}, Length: 10},
				// Note: Duplicate Columns
				{Field: &schema.Field{Name: "Name8"}, Collate: "utf8"},
			},
		},
		{
			Class:  "UNIQUE",
			Name:   "idx_user_indices_idx_compname_1",
			Option: "NULLS NOT DISTINCT",
			Fields: []schema.IndexOption{
				{Field: &schema.Field{Name: "CompName1", NotNull: true}},
				{Field: &schema.Field{Name: "CompName2"}},
			},
		},
		{
			Name: "idx_user_indices_comp_id0",
			Type: "",
			Fields: []schema.IndexOption{{
				Field: &schema.Field{Name: "Data0A"},
			}, {
				Field: &schema.Field{Name: "Data0B"},
			}},
		},
		{
			Name: "idx_user_indices_comp_id1",
			Fields: []schema.IndexOption{{
				Field: &schema.Field{Name: "Data1A"},
			}, {
				Field: &schema.Field{Name: "Data1B"},
			}, {
				Field: &schema.Field{Name: "Data1C"},
			}},
		},
		{
			// ordered by priority: Data2C(1), Data2A(2), Data2B(3)
			Name:  "idx_user_indices_comp_id2",
			Class: "UNIQUE",
			Fields: []schema.IndexOption{{
				Field: &schema.Field{Name: "Data2C"},
			}, {
				Field: &schema.Field{Name: "Data2A"},
			}, {
				Field: &schema.Field{Name: "Data2B"},
			}},
		},
	}

	CheckIndices(t, results, user.ParseIndexes())
}
// TestParseIndexWithUniqueIndexAndUnique verifies how the `unique`,
// `uniqueIndex` and `index` tags interact when combined on the same field:
// a plain `unique` marks the field Unique without creating an index class,
// `index:,unique` / `uniqueIndex` produce UNIQUE-class indexes, and named
// uniqueIndex tags on multiple fields merge into one multi-column index.
func TestParseIndexWithUniqueIndexAndUnique(t *testing.T) {
	type IndexTest struct {
		FieldA string `gorm:"unique;index"` // unique and index
		FieldB string `gorm:"unique"`       // unique
		FieldC string `gorm:"index:,unique"` // uniqueIndex
		FieldD string `gorm:"uniqueIndex;index"` // uniqueIndex and index

		FieldE1 string `gorm:"uniqueIndex:uniq_field_e1_e2"` // mul uniqueIndex
		FieldE2 string `gorm:"uniqueIndex:uniq_field_e1_e2"`

		FieldF1 string `gorm:"uniqueIndex:uniq_field_f1_f2;index"` // mul uniqueIndex and index
		FieldF2 string `gorm:"uniqueIndex:uniq_field_f1_f2;"`

		FieldG string `gorm:"unique;uniqueIndex"` // unique and uniqueIndex

		FieldH1 string `gorm:"unique;uniqueIndex:uniq_field_h1_h2"` // unique and mul uniqueIndex
		FieldH2 string `gorm:"uniqueIndex:uniq_field_h1_h2"`        // unique and mul uniqueIndex
	}
	indexSchema, err := schema.Parse(&IndexTest{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse user index, got error %v", err)
	}
	indices := indexSchema.ParseIndexes()

	// Expected indexes; note FieldB yields no index at all (pure `unique`).
	expectedIndices := []*schema.Index{
		{
			Name:   "idx_index_tests_field_a",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldA", Unique: true}}},
		},
		{
			Name:   "idx_index_tests_field_c",
			Class:  "UNIQUE",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldC", UniqueIndex: "idx_index_tests_field_c"}}},
		},
		{
			Name:  "idx_index_tests_field_d",
			Class: "UNIQUE",
			Fields: []schema.IndexOption{
				{Field: &schema.Field{Name: "FieldD"}},
				// Note: Duplicate Columns
				{Field: &schema.Field{Name: "FieldD"}},
			},
		},
		{
			Name:  "uniq_field_e1_e2",
			Class: "UNIQUE",
			Fields: []schema.IndexOption{
				{Field: &schema.Field{Name: "FieldE1"}},
				{Field: &schema.Field{Name: "FieldE2"}},
			},
		},
		{
			Name:  "uniq_field_f1_f2",
			Class: "UNIQUE",
			Fields: []schema.IndexOption{
				{Field: &schema.Field{Name: "FieldF1"}},
				{Field: &schema.Field{Name: "FieldF2"}},
			},
		},
		{
			Name:   "idx_index_tests_field_f1",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldF1"}}},
		},
		{
			Name:   "idx_index_tests_field_g",
			Class:  "UNIQUE",
			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldG", Unique: true, UniqueIndex: "idx_index_tests_field_g"}}},
		},
		{
			Name:  "uniq_field_h1_h2",
			Class: "UNIQUE",
			Fields: []schema.IndexOption{
				{Field: &schema.Field{Name: "FieldH1", Unique: true}},
				{Field: &schema.Field{Name: "FieldH2"}},
			},
		},
	}
	CheckIndices(t, expectedIndices, indices)
}
// CheckIndices asserts that the actual indices match the expected ones:
// same count, then per index (as a named subtest) the index-level
// attributes and every field's attributes, compared in order.
func CheckIndices(t *testing.T, expected, actual []*schema.Index) {
	if len(expected) != len(actual) {
		t.Errorf("expected %d indices, but got %d", len(expected), len(actual))
		return
	}

	for idx, want := range expected {
		t.Run(want.Name, func(t *testing.T) {
			got := actual[idx]
			tests.AssertObjEqual(t, got, want, "Name", "Class", "Type", "Where", "Comment", "Option")

			if len(want.Fields) != len(got.Fields) {
				t.Errorf("expected index %q field length is %d but actual %d", want.Name, len(want.Fields), len(got.Fields))
				return
			}
			for fieldIdx, wantField := range want.Fields {
				gotField := got.Fields[fieldIdx]
				tests.AssertObjEqual(t, gotField, wantField, "Name", "Unique", "UniqueIndex", "Expression", "Sort", "Collate", "Length", "NotNull")
			}
		})
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/relationship.go | schema/relationship.go | package schema
import (
"context"
"fmt"
"reflect"
"strings"
"sync"
"github.com/jinzhu/inflection"
"golang.org/x/text/cases"
"golang.org/x/text/language"
"gorm.io/gorm/clause"
)
// RelationshipType relationship type
type RelationshipType string

const (
	HasOne    RelationshipType = "has_one"      // HasOneRel has one relationship
	HasMany   RelationshipType = "has_many"     // HasManyRel has many relationship
	BelongsTo RelationshipType = "belongs_to"   // BelongsToRel belongs to relationship
	Many2Many RelationshipType = "many_to_many" // Many2ManyRel many to many relationship
	has       RelationshipType = "has"          // internal placeholder; refined to HasOne/HasMany in parseRelation
)
// Relationships holds every relation of a schema, both grouped by kind and
// in a name-keyed map, plus relations reachable through embedded structs.
// Mux guards concurrent mutation while related schemas are being parsed.
type Relationships struct {
	HasOne    []*Relationship
	BelongsTo []*Relationship
	HasMany   []*Relationship
	Many2Many []*Relationship
	Relations map[string]*Relationship

	EmbeddedRelations map[string]*Relationships

	Mux sync.RWMutex
}

// Relationship describes a single association between two schemas.
type Relationship struct {
	Name        string           // name of the declaring field
	Type        RelationshipType // has_one / has_many / belongs_to / many_to_many
	Field       *Field           // schema field that declares the relation
	Polymorphic *Polymorphic     // non-nil for polymorphic relations
	References  []*Reference     // key pairs joining the two sides
	Schema      *Schema          // owning schema
	FieldSchema *Schema          // schema of the related model
	JoinTable   *Schema          // generated join table (many2many only)

	// raw column names from the FOREIGNKEY / REFERENCES tag settings
	foreignKeys, primaryKeys []string
}

// Polymorphic carries the type/id field pair and the discriminator value
// used by polymorphic relations.
type Polymorphic struct {
	PolymorphicID   *Field
	PolymorphicType *Field
	Value           string
}

// Reference pairs a primary key with the foreign key column that stores it.
// PrimaryValue is set instead of PrimaryKey for polymorphic discriminator
// columns; OwnPrimaryKey reports that the primary key belongs to the
// relation's owning side.
type Reference struct {
	PrimaryKey    *Field
	PrimaryValue  string
	ForeignKey    *Field
	OwnPrimaryKey bool
}
// parseRelation builds the Relationship for a schema field. It parses the
// related model's schema, then dispatches on tags (polymorphic, many2many,
// belongsTo) or, failing those, guesses the relation kind from the field's
// reflect kind. On success the relation is registered on the schema's
// Relationships; nil is returned when the related type cannot be parsed.
func (schema *Schema) parseRelation(field *Field) *Relationship {
	var (
		err        error
		fieldValue = reflect.New(field.IndirectFieldType).Interface()
		relation   = &Relationship{
			Name:        field.Name,
			Field:       field,
			Schema:      schema,
			foreignKeys: toColumns(field.TagSettings["FOREIGNKEY"]),
			primaryKeys: toColumns(field.TagSettings["REFERENCES"]),
		}
	)

	// parse (or fetch from cache) the related model's schema
	if relation.FieldSchema, err = getOrParse(fieldValue, schema.cacheStore, schema.namer); err != nil {
		schema.err = fmt.Errorf("failed to parse field: %s, error: %w", field.Name, err)
		return nil
	}

	if hasPolymorphicRelation(field.TagSettings) {
		schema.buildPolymorphicRelation(relation, field)
	} else if many2many := field.TagSettings["MANY2MANY"]; many2many != "" {
		schema.buildMany2ManyRelation(relation, field, many2many)
	} else if belongsTo := field.TagSettings["BELONGSTO"]; belongsTo != "" {
		schema.guessRelation(relation, field, guessBelongs)
	} else {
		// no explicit tag: a struct field may be has-one or belongs-to,
		// a slice field is guessed as has-many
		switch field.IndirectFieldType.Kind() {
		case reflect.Struct:
			schema.guessRelation(relation, field, guessGuess)
		case reflect.Slice:
			schema.guessRelation(relation, field, guessHas)
		default:
			schema.err = fmt.Errorf("unsupported data type %v for %v on field %s", relation.FieldSchema, schema,
				field.Name)
		}
	}

	if relation.Type == has {
		// register a back-reference on the related schema (skipped for
		// self-joins, polymorphic relations and embedded fields)
		if relation.FieldSchema != relation.Schema && relation.Polymorphic == nil && field.OwnerSchema == nil {
			relation.FieldSchema.Relationships.Mux.Lock()
			relation.FieldSchema.Relationships.Relations["_"+relation.Schema.Name+"_"+relation.Name] = relation
			relation.FieldSchema.Relationships.Mux.Unlock()
		}

		// refine the internal "has" placeholder into HasOne/HasMany
		switch field.IndirectFieldType.Kind() {
		case reflect.Struct:
			relation.Type = HasOne
		case reflect.Slice:
			relation.Type = HasMany
		}
	}

	if schema.err == nil {
		schema.setRelation(relation)
		switch relation.Type {
		case HasOne:
			schema.Relationships.HasOne = append(schema.Relationships.HasOne, relation)
		case HasMany:
			schema.Relationships.HasMany = append(schema.Relationships.HasMany, relation)
		case BelongsTo:
			schema.Relationships.BelongsTo = append(schema.Relationships.BelongsTo, relation)
		case Many2Many:
			schema.Relationships.Many2Many = append(schema.Relationships.Many2Many, relation)
		}
	}

	return relation
}
// hasPolymorphicRelation reports whether the tag settings describe a
// polymorphic relation: either the `POLYMORPHIC` shorthand is present, or
// both `POLYMORPHICTYPE` and `POLYMORPHICID` are set explicitly.
func hasPolymorphicRelation(tagSettings map[string]string) bool {
	if _, found := tagSettings["POLYMORPHIC"]; found {
		return true
	}

	if _, typeSet := tagSettings["POLYMORPHICTYPE"]; !typeSet {
		return false
	}
	_, idSet := tagSettings["POLYMORPHICID"]
	return idSet
}
// setRelation stores the relation in schema.Relationships.Relations and,
// for fields promoted from embedded structs, mirrors it into the nested
// EmbeddedRelations tree keyed by each embedding level's name.
func (schema *Schema) setRelation(relation *Relationship) {
	schema.Relationships.Mux.Lock()
	defer schema.Relationships.Mux.Unlock()
	// set non-embedded relation
	if rel := schema.Relationships.Relations[relation.Name]; rel != nil {
		// an existing entry is only replaced when it came from an embedded
		// field (multiple bind names); direct fields keep precedence
		if len(rel.Field.BindNames) > 1 {
			schema.Relationships.Relations[relation.Name] = relation
		}
	} else {
		schema.Relationships.Relations[relation.Name] = relation
	}

	// set embedded relation
	if len(relation.Field.EmbeddedBindNames) <= 1 {
		return
	}
	relationships := &schema.Relationships
	for i, name := range relation.Field.EmbeddedBindNames {
		if i < len(relation.Field.EmbeddedBindNames)-1 {
			// intermediate embedding level: descend, creating nodes lazily
			if relationships.EmbeddedRelations == nil {
				relationships.EmbeddedRelations = map[string]*Relationships{}
			}
			if r := relationships.EmbeddedRelations[name]; r == nil {
				relationships.EmbeddedRelations[name] = &Relationships{}
			}
			relationships = relationships.EmbeddedRelations[name]
		} else {
			// leaf level: record the relation itself
			if relationships.Relations == nil {
				relationships.Relations = map[string]*Relationship{}
			}
			relationships.Relations[relation.Name] = relation
		}
	}
}
// buildPolymorphicRelation configures a polymorphic relation: the related
// schema must carry a "<prefix>Type" discriminator column and a
// "<prefix>ID" column (names overridable via POLYMORPHICTYPE /
// POLYMORPHICID), and references are built against the owner's primary key.
//
// User has many Toys, its `Polymorphic` is `Owner`, Pet has one Toy, its `Polymorphic` is `Owner`
//
//	type User struct {
//	  Toys []Toy `gorm:"polymorphic:Owner;"`
//	}
//	type Pet struct {
//	  Toy Toy `gorm:"polymorphic:Owner;"`
//	}
//	type Toy struct {
//	  OwnerID   int
//	  OwnerType string
//	}
func (schema *Schema) buildPolymorphicRelation(relation *Relationship, field *Field) {
	polymorphic := field.TagSettings["POLYMORPHIC"]

	relation.Polymorphic = &Polymorphic{
		Value: schema.Table, // default discriminator value is the owner's table name
	}

	var (
		typeName = polymorphic + "Type"
		typeId   = polymorphic + "ID"
	)

	if value, ok := field.TagSettings["POLYMORPHICTYPE"]; ok {
		typeName = strings.TrimSpace(value)
	}

	if value, ok := field.TagSettings["POLYMORPHICID"]; ok {
		typeId = strings.TrimSpace(value)
	}

	relation.Polymorphic.PolymorphicType = relation.FieldSchema.FieldsByName[typeName]
	relation.Polymorphic.PolymorphicID = relation.FieldSchema.FieldsByName[typeId]

	if value, ok := field.TagSettings["POLYMORPHICVALUE"]; ok {
		relation.Polymorphic.Value = strings.TrimSpace(value)
	}

	if relation.Polymorphic.PolymorphicType == nil {
		schema.err = fmt.Errorf("invalid polymorphic type %v for %v on field %s, missing field %s",
			relation.FieldSchema, schema, field.Name, polymorphic+"Type")
	}

	if relation.Polymorphic.PolymorphicID == nil {
		schema.err = fmt.Errorf("invalid polymorphic type %v for %v on field %s, missing field %s",
			relation.FieldSchema, schema, field.Name, polymorphic+"ID")
	}

	if schema.err == nil {
		// reference 1: discriminator column must equal the polymorphic value
		relation.References = append(relation.References, &Reference{
			PrimaryValue: relation.Polymorphic.Value,
			ForeignKey:   relation.Polymorphic.PolymorphicType,
		})

		primaryKeyField := schema.PrioritizedPrimaryField
		if len(relation.foreignKeys) > 0 {
			// a FOREIGNKEY tag may override the owner-side key, but only a
			// single column is supported for polymorphic relations
			if primaryKeyField = schema.LookUpField(relation.foreignKeys[0]); primaryKeyField == nil || len(relation.foreignKeys) > 1 {
				schema.err = fmt.Errorf("invalid polymorphic foreign keys %+v for %v on field %s", relation.foreignKeys,
					schema, field.Name)
			}
		}

		if primaryKeyField == nil {
			schema.err = fmt.Errorf("invalid polymorphic type %v for %v on field %s, missing primaryKey field",
				relation.FieldSchema, schema, field.Name)
			return
		}

		// use same data type for foreign keys
		if copyableDataType(primaryKeyField.DataType) {
			relation.Polymorphic.PolymorphicID.DataType = primaryKeyField.DataType
		}
		relation.Polymorphic.PolymorphicID.GORMDataType = primaryKeyField.GORMDataType
		if relation.Polymorphic.PolymorphicID.Size == 0 {
			relation.Polymorphic.PolymorphicID.Size = primaryKeyField.Size
		}

		// reference 2: owner primary key -> polymorphic id column
		relation.References = append(relation.References, &Reference{
			PrimaryKey:    primaryKeyField,
			ForeignKey:    relation.Polymorphic.PolymorphicID,
			OwnPrimaryKey: true,
		})
	}

	relation.Type = has
}
// buildMany2ManyRelation configures a many-to-many relation for field by
// synthesizing a join-table schema named by the many2many tag. The join
// table's columns mirror the primary keys of both sides (or the columns
// named by FOREIGNKEY / REFERENCES), optionally renamed via JOINFOREIGNKEY
// / JOINREFERENCES, and two belongs-to relations plus the relation's
// References are wired against it.
func (schema *Schema) buildMany2ManyRelation(relation *Relationship, field *Field, many2many string) {
	relation.Type = Many2Many

	var (
		err             error
		joinTableFields []reflect.StructField
		fieldsMap       = map[string]*Field{}
		ownFieldsMap    = map[string]*Field{} // fix self join many2many
		referFieldsMap  = map[string]*Field{}
		joinForeignKeys = toColumns(field.TagSettings["JOINFOREIGNKEY"])
		joinReferences  = toColumns(field.TagSettings["JOINREFERENCES"])
	)

	ownForeignFields := schema.PrimaryFields
	refForeignFields := relation.FieldSchema.PrimaryFields

	// FOREIGNKEY tag overrides the owner-side key columns
	if len(relation.foreignKeys) > 0 {
		ownForeignFields = []*Field{}
		for _, foreignKey := range relation.foreignKeys {
			if field := schema.LookUpField(foreignKey); field != nil {
				ownForeignFields = append(ownForeignFields, field)
			} else {
				schema.err = fmt.Errorf("invalid foreign key: %s", foreignKey)
				return
			}
		}
	}

	// REFERENCES tag overrides the related-side key columns
	if len(relation.primaryKeys) > 0 {
		refForeignFields = []*Field{}
		for _, foreignKey := range relation.primaryKeys {
			if field := relation.FieldSchema.LookUpField(foreignKey); field != nil {
				refForeignFields = append(refForeignFields, field)
			} else {
				schema.err = fmt.Errorf("invalid foreign key: %s", foreignKey)
				return
			}
		}
	}

	// owner-side join columns, e.g. "UserID" (or JOINFOREIGNKEY names)
	for idx, ownField := range ownForeignFields {
		joinFieldName := cases.Title(language.Und, cases.NoLower).String(schema.Name) + ownField.Name
		if len(joinForeignKeys) > idx {
			joinFieldName = cases.Title(language.Und, cases.NoLower).String(joinForeignKeys[idx])
		}

		ownFieldsMap[joinFieldName] = ownField
		fieldsMap[joinFieldName] = ownField
		joinTableFields = append(joinTableFields, reflect.StructField{
			Name:    joinFieldName,
			PkgPath: ownField.StructField.PkgPath,
			Type:    ownField.StructField.Type,
			Tag: removeSettingFromTag(appendSettingFromTag(ownField.StructField.Tag, "primaryKey"),
				"column", "autoincrement", "index", "unique", "uniqueindex"),
		})
	}

	// related-side join columns; name collisions (self join) are
	// disambiguated with the field name or a "Reference" suffix
	for idx, relField := range refForeignFields {
		joinFieldName := cases.Title(language.Und, cases.NoLower).String(relation.FieldSchema.Name) + relField.Name

		if _, ok := ownFieldsMap[joinFieldName]; ok {
			if field.Name != relation.FieldSchema.Name {
				joinFieldName = inflection.Singular(field.Name) + relField.Name
			} else {
				joinFieldName += "Reference"
			}
		}

		if len(joinReferences) > idx {
			joinFieldName = cases.Title(language.Und, cases.NoLower).String(joinReferences[idx])
		}

		referFieldsMap[joinFieldName] = relField

		if _, ok := fieldsMap[joinFieldName]; !ok {
			fieldsMap[joinFieldName] = relField
			joinTableFields = append(joinTableFields, reflect.StructField{
				Name:    joinFieldName,
				PkgPath: relField.StructField.PkgPath,
				Type:    relField.StructField.Type,
				Tag: removeSettingFromTag(appendSettingFromTag(relField.StructField.Tag, "primaryKey"),
					"column", "autoincrement", "index", "unique", "uniqueindex"),
			})
		}
	}

	// ignored marker field keeps the generated struct type unique per relation
	joinTableFields = append(joinTableFields, reflect.StructField{
		Name: cases.Title(language.Und, cases.NoLower).String(schema.Name) + field.Name,
		Type: schema.ModelType,
		Tag:  `gorm:"-"`,
	})

	if relation.JoinTable, err = Parse(reflect.New(reflect.StructOf(joinTableFields)).Interface(), schema.cacheStore,
		schema.namer); err != nil {
		schema.err = err
		// Parse can fail with a nil schema; bail out before JoinTable is
		// dereferenced below, otherwise we would panic on a nil pointer.
		return
	}
	relation.JoinTable.Name = many2many
	relation.JoinTable.Table = schema.namer.JoinTableName(many2many)
	relation.JoinTable.PrimaryFields = make([]*Field, 0, len(relation.JoinTable.Fields))

	relName := relation.Schema.Name
	relRefName := relation.FieldSchema.Name
	if relName == relRefName {
		relRefName = relation.Field.Name
	}

	// belongs-to relation from the join table back to the owner
	if _, ok := relation.JoinTable.Relationships.Relations[relName]; !ok {
		relation.JoinTable.Relationships.Relations[relName] = &Relationship{
			Name:        relName,
			Type:        BelongsTo,
			Schema:      relation.JoinTable,
			FieldSchema: relation.Schema,
		}
	} else {
		relation.JoinTable.Relationships.Relations[relName].References = []*Reference{}
	}

	// belongs-to relation from the join table to the related model
	if _, ok := relation.JoinTable.Relationships.Relations[relRefName]; !ok {
		relation.JoinTable.Relationships.Relations[relRefName] = &Relationship{
			Name:        relRefName,
			Type:        BelongsTo,
			Schema:      relation.JoinTable,
			FieldSchema: relation.FieldSchema,
		}
	} else {
		relation.JoinTable.Relationships.Relations[relRefName].References = []*Reference{}
	}

	// build references
	for _, f := range relation.JoinTable.Fields {
		if f.Creatable || f.Readable || f.Updatable {
			// use same data type for foreign keys
			if copyableDataType(fieldsMap[f.Name].DataType) {
				f.DataType = fieldsMap[f.Name].DataType
			}
			f.GORMDataType = fieldsMap[f.Name].GORMDataType
			if f.Size == 0 {
				f.Size = fieldsMap[f.Name].Size
			}
			relation.JoinTable.PrimaryFields = append(relation.JoinTable.PrimaryFields, f)

			if of, ok := ownFieldsMap[f.Name]; ok {
				joinRel := relation.JoinTable.Relationships.Relations[relName]
				joinRel.Field = relation.Field
				joinRel.References = append(joinRel.References, &Reference{
					PrimaryKey: of,
					ForeignKey: f,
				})

				relation.References = append(relation.References, &Reference{
					PrimaryKey:    of,
					ForeignKey:    f,
					OwnPrimaryKey: true,
				})
			}

			if rf, ok := referFieldsMap[f.Name]; ok {
				joinRefRel := relation.JoinTable.Relationships.Relations[relRefName]
				if joinRefRel.Field == nil {
					joinRefRel.Field = relation.Field
				}
				joinRefRel.References = append(joinRefRel.References, &Reference{
					PrimaryKey: rf,
					ForeignKey: f,
				})

				relation.References = append(relation.References, &Reference{
					PrimaryKey: rf,
					ForeignKey: f,
				})
			}
		}
	}
}
// guessLevel enumerates the strategies guessRelation tries, in order, when
// the relation kind is not declared explicitly by tags.
type guessLevel int

const (
	guessGuess           guessLevel = iota // undecided: pick belongs/has from the field's schema
	guessBelongs                           // foreign key lives on the owner (belongs-to)
	guessEmbeddedBelongs                   // belongs-to resolved against the embedding owner schema
	guessHas                               // foreign key lives on the related schema (has-one/has-many)
	guessEmbeddedHas                       // has relation resolved against the embedding owner schema
)
// guessRelation infers the relation kind and its key pairs when no explicit
// relation tag is present. It tries the given guess level; when no matching
// foreign key fields are found it escalates to the next level via
// reguessOrErr (guess -> belongs -> embedded-belongs -> has -> embedded-has)
// and finally records an error on the schema.
func (schema *Schema) guessRelation(relation *Relationship, field *Field, cgl guessLevel) {
	var (
		primaryFields, foreignFields []*Field
		primarySchema, foreignSchema = schema, relation.FieldSchema
		gl                           = cgl
	)

	if gl == guessGuess {
		// self-referential struct fields are treated as belongs-to first
		if field.Schema == relation.FieldSchema {
			gl = guessBelongs
		} else {
			gl = guessHas
		}
	}

	// reguessOrErr retries with the next guess level, or records the final
	// error once all levels are exhausted
	reguessOrErr := func() {
		switch cgl {
		case guessGuess:
			schema.guessRelation(relation, field, guessBelongs)
		case guessBelongs:
			schema.guessRelation(relation, field, guessEmbeddedBelongs)
		case guessEmbeddedBelongs:
			schema.guessRelation(relation, field, guessHas)
		case guessHas:
			schema.guessRelation(relation, field, guessEmbeddedHas)
		// case guessEmbeddedHas:
		default:
			schema.err = fmt.Errorf("invalid field found for struct %v's field %s: define a valid foreign key for relations or implement the Valuer/Scanner interface",
				schema, field.Name)
		}
	}

	// decide which side holds the primary key vs the foreign key
	switch gl {
	case guessBelongs:
		primarySchema, foreignSchema = relation.FieldSchema, schema
	case guessEmbeddedBelongs:
		if field.OwnerSchema == nil {
			reguessOrErr()
			return
		}
		primarySchema, foreignSchema = relation.FieldSchema, field.OwnerSchema
	case guessHas:
	case guessEmbeddedHas:
		if field.OwnerSchema == nil {
			reguessOrErr()
			return
		}
		primarySchema, foreignSchema = field.OwnerSchema, relation.FieldSchema
	}

	if len(relation.foreignKeys) > 0 {
		// explicit FOREIGNKEY tag: every named column must exist
		for _, foreignKey := range relation.foreignKeys {
			f := foreignSchema.LookUpField(foreignKey)
			if f == nil {
				reguessOrErr()
				return
			}
			foreignFields = append(foreignFields, f)
		}
	} else {
		primarySchemaName := primarySchema.Name
		if primarySchemaName == "" {
			primarySchemaName = relation.FieldSchema.Name
		}

		if len(relation.primaryKeys) > 0 {
			for _, primaryKey := range relation.primaryKeys {
				if f := primarySchema.LookUpField(primaryKey); f != nil {
					primaryFields = append(primaryFields, f)
				}
			}
		} else {
			primaryFields = primarySchema.PrimaryFields
		}

	primaryFieldLoop:
		for _, primaryField := range primaryFields {
			// candidate foreign-key names: "<Schema><PK>", plus "...ID"/"...Id"
			// variants when there is a single primary key
			lookUpName := primarySchemaName + primaryField.Name
			if gl == guessBelongs {
				lookUpName = field.Name + primaryField.Name
			}

			lookUpNames := []string{lookUpName}
			if len(primaryFields) == 1 {
				lookUpNames = append(lookUpNames, strings.TrimSuffix(lookUpName, primaryField.Name)+"ID",
					strings.TrimSuffix(lookUpName, primaryField.Name)+"Id", schema.namer.ColumnName(foreignSchema.Table,
						strings.TrimSuffix(lookUpName, primaryField.Name)+"ID"))
			}

			for _, name := range lookUpNames {
				if f := foreignSchema.LookUpFieldByBindName(field.BindNames, name); f != nil {
					// NOTE(review): the matched primary field is re-appended so it
					// pairs positionally with foreignFields below
					foreignFields = append(foreignFields, f)
					primaryFields = append(primaryFields, primaryField)
					continue primaryFieldLoop
				}
			}
			for _, name := range lookUpNames {
				if f := foreignSchema.LookUpField(name); f != nil {
					foreignFields = append(foreignFields, f)
					primaryFields = append(primaryFields, primaryField)
					continue primaryFieldLoop
				}
			}
		}
	}

	switch {
	case len(foreignFields) == 0:
		reguessOrErr()
		return
	case len(relation.primaryKeys) > 0:
		// explicit REFERENCES tag: columns must exist and agree with any
		// already-resolved primary fields
		for idx, primaryKey := range relation.primaryKeys {
			if f := primarySchema.LookUpField(primaryKey); f != nil {
				if len(primaryFields) < idx+1 {
					primaryFields = append(primaryFields, f)
				} else if f != primaryFields[idx] {
					reguessOrErr()
					return
				}
			} else {
				reguessOrErr()
				return
			}
		}
	case len(primaryFields) == 0:
		// pair explicit foreign keys with the schema's primary key(s)
		if len(foreignFields) == 1 && primarySchema.PrioritizedPrimaryField != nil {
			primaryFields = append(primaryFields, primarySchema.PrioritizedPrimaryField)
		} else if len(primarySchema.PrimaryFields) == len(foreignFields) {
			primaryFields = append(primaryFields, primarySchema.PrimaryFields...)
		} else {
			reguessOrErr()
			return
		}
	}

	// build references
	for idx, foreignField := range foreignFields {
		// use same data type for foreign keys
		schema.Relationships.Mux.Lock()
		if schema != foreignField.Schema {
			foreignField.Schema.Relationships.Mux.Lock()
		}
		if copyableDataType(primaryFields[idx].DataType) {
			foreignField.DataType = primaryFields[idx].DataType
		}
		foreignField.GORMDataType = primaryFields[idx].GORMDataType
		if foreignField.Size == 0 {
			foreignField.Size = primaryFields[idx].Size
		}
		schema.Relationships.Mux.Unlock()
		if schema != foreignField.Schema {
			foreignField.Schema.Relationships.Mux.Unlock()
		}

		relation.References = append(relation.References, &Reference{
			PrimaryKey:    primaryFields[idx],
			ForeignKey:    foreignField,
			OwnPrimaryKey: (schema == primarySchema && gl == guessHas) || (field.OwnerSchema == primarySchema && gl == guessEmbeddedHas),
		})
	}

	if gl == guessHas || gl == guessEmbeddedHas {
		relation.Type = has
	} else {
		relation.Type = BelongsTo
	}
}
// Constraint is ForeignKey Constraint
type Constraint struct {
	Name            string   // constraint name (tag-provided or namer-generated)
	Field           *Field   // field that declared the relation
	Schema          *Schema  // schema the foreign key columns live on
	ForeignKeys     []*Field // referencing columns
	ReferenceSchema *Schema  // schema holding the referenced columns
	References      []*Field // referenced (primary key) columns
	OnDelete        string   // ON DELETE action from the CONSTRAINT tag
	OnUpdate        string   // ON UPDATE action from the CONSTRAINT tag
}
func (constraint *Constraint) GetName() string { return constraint.Name }
// Build composes the SQL fragment and bind variables for the constraint:
// "CONSTRAINT ? FOREIGN KEY ? REFERENCES ??" plus optional ON DELETE /
// ON UPDATE actions. The vars fill the placeholders in order: constraint
// name, foreign key columns, referenced table, referenced columns.
func (constraint *Constraint) Build() (sql string, vars []interface{}) {
	sql = "CONSTRAINT ? FOREIGN KEY ? REFERENCES ??"
	if constraint.OnDelete != "" {
		sql += " ON DELETE " + constraint.OnDelete
	}

	if constraint.OnUpdate != "" {
		sql += " ON UPDATE " + constraint.OnUpdate
	}

	foreignKeys := make([]interface{}, 0, len(constraint.ForeignKeys))
	for _, field := range constraint.ForeignKeys {
		foreignKeys = append(foreignKeys, clause.Column{Name: field.DBName})
	}

	references := make([]interface{}, 0, len(constraint.References))
	for _, field := range constraint.References {
		references = append(references, clause.Column{Name: field.DBName})
	}
	// clause.Table is used for the constraint name so it gets quoted like an identifier
	vars = append(vars, clause.Table{Name: constraint.Name}, foreignKeys, clause.Table{Name: constraint.ReferenceSchema.Table}, references)
	return
}
// ParseConstraint derives the foreign key Constraint for the relation from
// its CONSTRAINT tag. It returns nil when the tag is "-" (disabled) or when
// an equivalent constraint is already produced by the mirror relation on
// the other side (avoids duplicate constraints for belongs-to pairs).
func (rel *Relationship) ParseConstraint() *Constraint {
	str := rel.Field.TagSettings["CONSTRAINT"]
	if str == "-" {
		return nil
	}

	if rel.Type == BelongsTo {
		// skip if the related schema owns a has relation with identical
		// reference pairs — that side will emit the constraint
		for _, r := range rel.FieldSchema.Relationships.Relations {
			if r != rel && r.FieldSchema == rel.Schema && len(rel.References) == len(r.References) {
				matched := true
				for idx, ref := range r.References {
					if rel.References[idx].PrimaryKey != ref.PrimaryKey ||
						rel.References[idx].ForeignKey != ref.ForeignKey ||
						rel.References[idx].PrimaryValue != ref.PrimaryValue {
						matched = false
						break
					}
				}

				if matched {
					return nil
				}
			}
		}
	}

	var (
		name     string
		idx      = strings.IndexByte(str, ',')
		settings = ParseTagSetting(str, ",")
	)

	// optimize match english letters and midline
	// The following code is basically called in for.
	// In order to avoid the performance problems caused by repeated compilation of regular expressions,
	// it only needs to be done once outside, so optimization is done here.
	if idx != -1 && regEnLetterAndMidline.MatchString(str[0:idx]) {
		// the tag's first comma-separated segment is an explicit constraint name
		name = str[0:idx]
	} else {
		name = rel.Schema.namer.RelationshipFKName(*rel)
	}

	constraint := Constraint{
		Name:     name,
		Field:    rel.Field,
		OnUpdate: settings["ONUPDATE"],
		OnDelete: settings["ONDELETE"],
	}

	for _, ref := range rel.References {
		// polymorphic discriminator references (PrimaryKey == nil) and
		// non-owning join-table references are excluded
		if ref.PrimaryKey != nil && (rel.JoinTable == nil || ref.OwnPrimaryKey) {
			constraint.ForeignKeys = append(constraint.ForeignKeys, ref.ForeignKey)
			constraint.References = append(constraint.References, ref.PrimaryKey)

			if ref.OwnPrimaryKey {
				constraint.Schema = ref.ForeignKey.Schema
				constraint.ReferenceSchema = rel.Schema
			} else {
				constraint.Schema = rel.Schema
				constraint.ReferenceSchema = ref.PrimaryKey.Schema
			}
		}
	}

	return &constraint
}
// ToQueryConditions builds the WHERE conditions that select related rows
// for the given owner value(s): fixed equality for polymorphic
// discriminators, column-to-column equality for join tables, and an IN
// clause over the owner's key values extracted from reflectValue.
func (rel *Relationship) ToQueryConditions(ctx context.Context, reflectValue reflect.Value) (conds []clause.Expression) {
	table := rel.FieldSchema.Table
	foreignFields := []*Field{}
	relForeignKeys := []string{}

	if rel.JoinTable != nil {
		table = rel.JoinTable.Table
		for _, ref := range rel.References {
			if ref.OwnPrimaryKey {
				// owner primary key values feed the IN clause on the join table
				foreignFields = append(foreignFields, ref.PrimaryKey)
				relForeignKeys = append(relForeignKeys, ref.ForeignKey.DBName)
			} else if ref.PrimaryValue != "" {
				// polymorphic discriminator: fixed value
				conds = append(conds, clause.Eq{
					Column: clause.Column{Table: rel.JoinTable.Table, Name: ref.ForeignKey.DBName},
					Value:  ref.PrimaryValue,
				})
			} else {
				// join-table column must match the related table's key column
				conds = append(conds, clause.Eq{
					Column: clause.Column{Table: rel.JoinTable.Table, Name: ref.ForeignKey.DBName},
					Value:  clause.Column{Table: rel.FieldSchema.Table, Name: ref.PrimaryKey.DBName},
				})
			}
		}
	} else {
		for _, ref := range rel.References {
			if ref.OwnPrimaryKey {
				relForeignKeys = append(relForeignKeys, ref.ForeignKey.DBName)
				foreignFields = append(foreignFields, ref.PrimaryKey)
			} else if ref.PrimaryValue != "" {
				conds = append(conds, clause.Eq{
					Column: clause.Column{Table: rel.FieldSchema.Table, Name: ref.ForeignKey.DBName},
					Value:  ref.PrimaryValue,
				})
			} else {
				// belongs-to: the owner's foreign key values select by primary key
				relForeignKeys = append(relForeignKeys, ref.PrimaryKey.DBName)
				foreignFields = append(foreignFields, ref.ForeignKey)
			}
		}
	}

	_, foreignValues := GetIdentityFieldValuesMap(ctx, reflectValue, foreignFields)
	column, values := ToQueryValues(table, relForeignKeys, foreignValues)
	conds = append(conds, clause.IN{Column: column, Values: values})
	return
}
// copyableDataType reports whether a primary key's data type may be copied
// verbatim onto its foreign key; types containing an auto-increment or an
// inline primary-key clause must not be propagated.
func copyableDataType(str DataType) bool {
	normalized := strings.ToLower(string(str))
	return !strings.Contains(normalized, "auto_increment") &&
		!strings.Contains(normalized, "primary key")
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/utils_test.go | schema/utils_test.go | package schema
import (
"reflect"
"testing"
)
// TestRemoveSettingFromTag verifies that removeSettingFromTag strips the
// "column" setting from the gorm tag only, leaving other settings and other
// struct-tag keys untouched, across trailing-semicolon, whitespace, and
// value-less ("column" without ":db") variants.
func TestRemoveSettingFromTag(t *testing.T) {
	// map of input tag -> expected tag after removing "column"
	tags := map[string]string{
		`gorm:"before:value;column:db;after:value" other:"before:value;column:db;after:value"`:  `gorm:"before:value;after:value" other:"before:value;column:db;after:value"`,
		`gorm:"before:value;column:db;" other:"before:value;column:db;after:value"`:             `gorm:"before:value;" other:"before:value;column:db;after:value"`,
		`gorm:"before:value;column:db" other:"before:value;column:db;after:value"`:              `gorm:"before:value;" other:"before:value;column:db;after:value"`,
		`gorm:"column:db" other:"before:value;column:db;after:value"`:                           `gorm:"" other:"before:value;column:db;after:value"`,
		`gorm:"before:value;column:db ;after:value" other:"before:value;column:db;after:value"`: `gorm:"before:value;after:value" other:"before:value;column:db;after:value"`,
		`gorm:"before:value;column:db; after:value" other:"before:value;column:db;after:value"`: `gorm:"before:value; after:value" other:"before:value;column:db;after:value"`,
		`gorm:"before:value;column; after:value" other:"before:value;column:db;after:value"`:    `gorm:"before:value; after:value" other:"before:value;column:db;after:value"`,
	}

	for k, v := range tags {
		if string(removeSettingFromTag(reflect.StructTag(k), "column")) != v {
			t.Errorf("%v after removeSettingFromTag should equal %v, but got %v", k, v, removeSettingFromTag(reflect.StructTag(k), "column"))
		}
	}
}
// TestParseTagSettingWithDoubleQuoteEscape verifies that ParseTagSetting
// unescapes \" sequences inside a gorm tag value, so SQL expressions with
// quoted identifiers survive tag parsing intact.
func TestParseTagSettingWithDoubleQuoteEscape(t *testing.T) {
	tag := `gorm:"expression:to_tsvector('english', \"Name\")"`
	settings := ParseTagSetting(reflect.StructTag(tag).Get("gorm"), ";")
	if v, ok := settings["EXPRESSION"]; !ok || v != `to_tsvector('english', "Name")` {
		t.Errorf("ParseTagSetting did not handle escaped double quotes correctly: got %#v", v)
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/serializer.go | schema/serializer.go | package schema
import (
"bytes"
"context"
"database/sql"
"database/sql/driver"
"encoding/gob"
"encoding/json"
"fmt"
"math"
"reflect"
"strings"
"sync"
"time"
)
// serializerMap maps lower-cased serializer names to SerializerInterface
// implementations; sync.Map makes registration and lookup safe for
// concurrent use.
var serializerMap = sync.Map{}

// RegisterSerializer register serializer under the given name
// (case-insensitive); a later registration replaces an earlier one.
func RegisterSerializer(name string, serializer SerializerInterface) {
	serializerMap.Store(strings.ToLower(name), serializer)
}
// GetSerializer looks up a registered serializer by its case-insensitive
// name, reporting whether a matching serializer was found.
func GetSerializer(name string) (serializer SerializerInterface, ok bool) {
	stored, found := serializerMap.Load(strings.ToLower(name))
	if !found {
		return nil, false
	}
	serializer, ok = stored.(SerializerInterface)
	return serializer, ok
}
// init registers the built-in serializers available via the `serializer`
// tag: json, unixtime and gob.
func init() {
	RegisterSerializer("json", JSONSerializer{})
	RegisterSerializer("unixtime", UnixSecondSerializer{})
	RegisterSerializer("gob", GobSerializer{})
}
// Serializer field value serializer
type serializer struct {
	Field           *Field                    // schema field being (de)serialized
	Serializer      SerializerInterface       // serializer used for scanning
	SerializeValuer SerializerValuerInterface // serializer used for valuing
	Destination     reflect.Value             // destination struct value
	Context         context.Context
	value           interface{} // raw database value captured by Scan
	fieldValue      interface{} // field value passed through to Value
}

// Scan implements sql.Scanner interface; it only captures the raw database
// value so the serializer can decode it later.
func (s *serializer) Scan(value interface{}) error {
	s.value = value
	return nil
}

// Value implements driver.Valuer interface by delegating to the configured
// SerializerValuerInterface.
func (s serializer) Value() (driver.Value, error) {
	return s.SerializeValuer.Value(s.Context, s.Field, s.Destination, s.fieldValue)
}

// SerializerInterface serializer interface: decodes database values into
// struct fields and encodes field values for storage.
type SerializerInterface interface {
	Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) error
	SerializerValuerInterface
}

// SerializerValuerInterface serializer valuer interface
type SerializerValuerInterface interface {
	Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (interface{}, error)
}
// JSONSerializer json serializer
type JSONSerializer struct{}

// Scan implements serializer interface: it JSON-decodes the database value
// ([]byte or string; anything else is first marshalled to JSON) into a new
// value of the field's type and assigns it to dst. A nil dbValue leaves the
// field at its zero value.
func (JSONSerializer) Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) (err error) {
	fieldValue := reflect.New(field.FieldType)

	if dbValue != nil {
		var bytes []byte
		switch v := dbValue.(type) {
		case []byte:
			bytes = v
		case string:
			bytes = []byte(v)
		default:
			// non-textual driver values are round-tripped through JSON
			bytes, err = json.Marshal(v)
			if err != nil {
				return err
			}
		}

		if len(bytes) > 0 {
			err = json.Unmarshal(bytes, fieldValue.Interface())
		}
	}

	field.ReflectValueOf(ctx, dst).Set(fieldValue.Elem())
	return
}
// Value implements serializer interface: it JSON-encodes the field value.
// A value marshalling to "null" is stored as SQL NULL, except for NOT NULL
// columns which get an empty string instead.
func (JSONSerializer) Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (interface{}, error) {
	result, err := json.Marshal(fieldValue)
	if string(result) == "null" {
		if field.TagSettings["NOT NULL"] != "" {
			return "", nil
		}
		return nil, err
	}
	return string(result), err
}
// UnixSecondSerializer stores time columns as unix seconds in integer fields.
type UnixSecondSerializer struct{}

// Scan implements serializer interface: the database time value is parsed
// via sql.NullTime and its unix-seconds representation assigned to the
// field; NULL values leave the field untouched.
func (UnixSecondSerializer) Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) (err error) {
	t := sql.NullTime{}
	if err = t.Scan(dbValue); err == nil && t.Valid {
		err = field.Set(ctx, dst, t.Time.Unix())
	}

	return
}
// Value implements serializer interface: the integer field value (any
// signed/unsigned width, or pointer thereto) is converted to a UTC
// time.Time at that unix second. Nil pointers store NULL; unsigned values
// above MaxInt64 and non-integer types produce an error.
func (UnixSecondSerializer) Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (result interface{}, err error) {
	rv := reflect.ValueOf(fieldValue)
	switch fieldValue.(type) {
	case int, int8, int16, int32, int64:
		result = time.Unix(rv.Int(), 0).UTC()
	case uint, uint8, uint16, uint32, uint64:
		// guard against overflow when narrowing uint64 -> int64
		if uv := rv.Uint(); uv > math.MaxInt64 {
			err = fmt.Errorf("integer overflow conversion uint64(%d) -> int64", uv)
		} else {
			result = time.Unix(int64(uv), 0).UTC() //nolint:gosec
		}
	case *int, *int8, *int16, *int32, *int64:
		// nil pointer stores NULL
		if rv.IsZero() {
			return nil, nil
		}
		result = time.Unix(rv.Elem().Int(), 0).UTC()
	case *uint, *uint8, *uint16, *uint32, *uint64:
		if rv.IsZero() {
			return nil, nil
		}
		if uv := rv.Elem().Uint(); uv > math.MaxInt64 {
			err = fmt.Errorf("integer overflow conversion uint64(%d) -> int64", uv)
		} else {
			result = time.Unix(int64(uv), 0).UTC() //nolint:gosec
		}
	default:
		err = fmt.Errorf("invalid field type %#v for UnixSecondSerializer, only int, uint supported", fieldValue)
	}
	return
}
// GobSerializer stores field values as encoding/gob binary blobs.
type GobSerializer struct{}
// Scan implements serializer interface.
// It gob-decodes a []byte database value into a fresh instance of the field's
// type, then assigns that instance to the destination field. A non-[]byte
// dbValue is an error; a nil or empty value leaves the zero value in place.
func (GobSerializer) Scan(ctx context.Context, field *Field, dst reflect.Value, dbValue interface{}) (err error) {
	fieldValue := reflect.New(field.FieldType)
	if dbValue != nil {
		var bytesValue []byte
		switch v := dbValue.(type) {
		case []byte:
			bytesValue = v
		default:
			return fmt.Errorf("failed to unmarshal gob value: %#v", dbValue)
		}
		if len(bytesValue) > 0 {
			decoder := gob.NewDecoder(bytes.NewBuffer(bytesValue))
			err = decoder.Decode(fieldValue.Interface())
		}
	}
	// Always set the field (zero value when dbValue was nil/empty).
	field.ReflectValueOf(ctx, dst).Set(fieldValue.Elem())
	return
}
// Value implements serializer interface.
// It gob-encodes fieldValue and returns the raw bytes for the driver.
func (GobSerializer) Value(ctx context.Context, field *Field, dst reflect.Value, fieldValue interface{}) (interface{}, error) {
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(fieldValue); err != nil {
		return buf.Bytes(), err
	}
	return buf.Bytes(), nil
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/field.go | schema/field.go | package schema
import (
"context"
"database/sql"
"database/sql/driver"
"fmt"
"reflect"
"strconv"
"strings"
"sync"
"time"
"github.com/jinzhu/now"
"gorm.io/gorm/clause"
"gorm.io/gorm/utils"
)
// special types' reflect type, cached once so field parsing can compare
// against them without re-deriving the types on every call.
var (
	TimeReflectType    = reflect.TypeOf(time.Time{})  // time.Time
	TimePtrReflectType = reflect.TypeOf(&time.Time{}) // *time.Time
	ByteReflectType    = reflect.TypeOf(uint8(0))     // byte / uint8
)
type (
	// DataType GORM data type (e.g. "int", "string", or a raw column type).
	DataType string
	// TimeType GORM time type; selects the auto-timestamp representation.
	TimeType int64
)
// GORM time types: how auto create/update timestamps are stored.
const (
	UnixTime        TimeType = 1 // as time.Time
	UnixSecond      TimeType = 2 // as Unix seconds
	UnixMillisecond TimeType = 3 // as Unix milliseconds
	UnixNanosecond  TimeType = 4 // as Unix nanoseconds
)
// GORM fields types: the canonical data types a field can resolve to.
const (
	Bool   DataType = "bool"
	Int    DataType = "int"
	Uint   DataType = "uint"
	Float  DataType = "float"
	String DataType = "string"
	Time   DataType = "time"
	Bytes  DataType = "bytes"
)
const DefaultAutoIncrementIncrement int64 = 1
// Field is the representation of model schema's field
type Field struct {
	Name                   string            // struct field name
	DBName                 string            // database column name
	BindNames              []string          // path of field names from the root struct (for embedded fields)
	EmbeddedBindNames      []string          // like BindNames, but only through explicit EMBEDDED / non-anonymous hops
	DataType               DataType          // resolved (possibly dialect-specific) data type
	GORMDataType           DataType          // canonical GORM data type before TYPE-tag overrides
	PrimaryKey             bool
	AutoIncrement          bool
	AutoIncrementIncrement int64
	Creatable              bool // permission flags derived from "-", "->", "<-" tags
	Updatable              bool
	Readable               bool
	AutoCreateTime         TimeType // non-zero when the field auto-populates on create
	AutoUpdateTime         TimeType // non-zero when the field auto-populates on update
	HasDefaultValue        bool
	DefaultValue           string
	DefaultValueInterface  interface{} // parsed DefaultValue, typed per DataType
	NotNull                bool
	Unique                 bool
	Comment                string
	Size                   int
	Precision              int
	Scale                  int
	IgnoreMigration        bool // "-:migration" tag: skip during auto-migration
	FieldType              reflect.Type
	IndirectFieldType      reflect.Type // FieldType with pointer indirections stripped
	StructField            reflect.StructField
	Tag                    reflect.StructTag
	TagSettings            map[string]string
	Schema                 *Schema
	EmbeddedSchema         *Schema
	OwnerSchema            *Schema
	ReflectValueOf         func(context.Context, reflect.Value) reflect.Value                // returns the addressable reflect value of this field in a model value
	ValueOf                func(context.Context, reflect.Value) (value interface{}, zero bool) // returns the field value and whether it is the zero value
	Set                    func(context.Context, reflect.Value, interface{}) error             // assigns a (possibly differently-typed) value to the field
	Serializer             SerializerInterface
	NewValuePool           FieldNewValuePool

	// In some db (e.g. MySQL), Unique and UniqueIndex are indistinguishable.
	// When a column has a (not Mul) UniqueIndex, Migrator always reports its gorm.ColumnType is Unique.
	// It causes field unnecessarily migration.
	// Therefore, we need to record the UniqueIndex on this column (exclude Mul UniqueIndex) for MigrateColumnUnique.
	UniqueIndex string
}
// BindName returns the field's full binding path: its BindNames joined with ".".
func (field *Field) BindName() string {
	var sb strings.Builder
	for i, name := range field.BindNames {
		if i > 0 {
			sb.WriteByte('.')
		}
		sb.WriteString(name)
	}
	return sb.String()
}
// ParseField parses reflect.StructField to Field.
// It reads the `gorm` struct tag, resolves the field's data type (unwrapping
// pointers, driver.Valuer wrappers, and serializers), parses default values,
// auto-timestamp settings, sizes and permissions, and recursively parses
// embedded structs into field.EmbeddedSchema.
func (schema *Schema) ParseField(fieldStruct reflect.StructField) *Field {
	var (
		err        error
		tagSetting = ParseTagSetting(fieldStruct.Tag.Get("gorm"), ";")
	)

	field := &Field{
		Name:              fieldStruct.Name,
		DBName:            tagSetting["COLUMN"],
		BindNames:         []string{fieldStruct.Name},
		EmbeddedBindNames: []string{fieldStruct.Name},
		FieldType:         fieldStruct.Type,
		IndirectFieldType: fieldStruct.Type,
		StructField:       fieldStruct,
		Tag:               fieldStruct.Tag,
		TagSettings:       tagSetting,
		Schema:            schema,
		Creatable:         true,
		Updatable:         true,
		Readable:          true,
		PrimaryKey:        utils.CheckTruth(tagSetting["PRIMARYKEY"], tagSetting["PRIMARY_KEY"]),
		AutoIncrement:     utils.CheckTruth(tagSetting["AUTOINCREMENT"]),
		// auto-increment columns implicitly have a database-generated default
		HasDefaultValue:        utils.CheckTruth(tagSetting["AUTOINCREMENT"]),
		NotNull:                utils.CheckTruth(tagSetting["NOT NULL"], tagSetting["NOTNULL"]),
		Unique:                 utils.CheckTruth(tagSetting["UNIQUE"]),
		Comment:                tagSetting["COMMENT"],
		AutoIncrementIncrement: DefaultAutoIncrementIncrement,
	}

	// strip pointer indirections to get the underlying type
	for field.IndirectFieldType.Kind() == reflect.Ptr {
		field.IndirectFieldType = field.IndirectFieldType.Elem()
	}

	fieldValue := reflect.New(field.IndirectFieldType)
	// if field is valuer, used its value or first field as data type
	valuer, isValuer := fieldValue.Interface().(driver.Valuer)
	if isValuer {
		if _, ok := fieldValue.Interface().(GormDataTypeInterface); !ok {
			if v, err := valuer.Value(); reflect.ValueOf(v).IsValid() && err == nil {
				fieldValue = reflect.ValueOf(v)
			}

			// Use the field struct's first field type as data type, e.g: use `string` for sql.NullString
			var getRealFieldValue func(reflect.Value)
			getRealFieldValue = func(v reflect.Value) {
				var (
					rv     = reflect.Indirect(v)
					rvType = rv.Type()
				)

				if rv.Kind() == reflect.Struct && !rvType.ConvertibleTo(TimeReflectType) {
					// merge tag settings declared on the wrapper struct's fields
					// (existing settings take precedence)
					for i := 0; i < rvType.NumField(); i++ {
						for key, value := range ParseTagSetting(rvType.Field(i).Tag.Get("gorm"), ";") {
							if _, ok := field.TagSettings[key]; !ok {
								field.TagSettings[key] = value
							}
						}
					}

					for i := 0; i < rvType.NumField(); i++ {
						newFieldType := rvType.Field(i).Type
						for newFieldType.Kind() == reflect.Ptr {
							newFieldType = newFieldType.Elem()
						}

						fieldValue = reflect.New(newFieldType)
						if rvType != reflect.Indirect(fieldValue).Type() {
							getRealFieldValue(fieldValue)
						}

						if fieldValue.IsValid() {
							return
						}
					}
				}
			}

			getRealFieldValue(fieldValue)
		}
	}

	// serializer resolution: either the value implements SerializerInterface
	// directly, or a `json`/`serializer` tag names a registered serializer
	if v, isSerializer := fieldValue.Interface().(SerializerInterface); isSerializer {
		field.DataType = String
		field.Serializer = v
	} else {
		serializerName := field.TagSettings["JSON"]
		if serializerName == "" {
			serializerName = field.TagSettings["SERIALIZER"]
		}
		if serializerName != "" {
			if serializer, ok := GetSerializer(serializerName); ok {
				// Set default data type to string for serializer
				field.DataType = String
				field.Serializer = serializer
			} else {
				schema.err = fmt.Errorf("invalid serializer type %v", serializerName)
			}
		}
	}

	if num, ok := field.TagSettings["AUTOINCREMENTINCREMENT"]; ok {
		field.AutoIncrementIncrement, _ = strconv.ParseInt(num, 10, 64)
	}

	if v, ok := field.TagSettings["DEFAULT"]; ok {
		field.HasDefaultValue = true
		field.DefaultValue = v
	}

	if num, ok := field.TagSettings["SIZE"]; ok {
		if field.Size, err = strconv.Atoi(num); err != nil {
			field.Size = -1
		}
	}

	if p, ok := field.TagSettings["PRECISION"]; ok {
		field.Precision, _ = strconv.Atoi(p)
	}

	if s, ok := field.TagSettings["SCALE"]; ok {
		field.Scale, _ = strconv.Atoi(s)
	}

	// default value is function or null or blank (primary keys)
	field.DefaultValue = strings.TrimSpace(field.DefaultValue)
	skipParseDefaultValue := strings.Contains(field.DefaultValue, "(") &&
		strings.Contains(field.DefaultValue, ")") || strings.ToLower(field.DefaultValue) == "null" || field.DefaultValue == ""

	// derive DataType from the Go kind and parse the default value into a
	// typed DefaultValueInterface where possible
	switch reflect.Indirect(fieldValue).Kind() {
	case reflect.Bool:
		field.DataType = Bool
		if field.HasDefaultValue && !skipParseDefaultValue {
			if field.DefaultValueInterface, err = strconv.ParseBool(field.DefaultValue); err != nil {
				schema.err = fmt.Errorf("failed to parse %s as default value for bool, got error: %v", field.DefaultValue, err)
			}
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		field.DataType = Int
		if field.HasDefaultValue && !skipParseDefaultValue {
			if field.DefaultValueInterface, err = strconv.ParseInt(field.DefaultValue, 0, 64); err != nil {
				schema.err = fmt.Errorf("failed to parse %s as default value for int, got error: %v", field.DefaultValue, err)
			}
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		field.DataType = Uint
		if field.HasDefaultValue && !skipParseDefaultValue {
			if field.DefaultValueInterface, err = strconv.ParseUint(field.DefaultValue, 0, 64); err != nil {
				schema.err = fmt.Errorf("failed to parse %s as default value for uint, got error: %v", field.DefaultValue, err)
			}
		}
	case reflect.Float32, reflect.Float64:
		field.DataType = Float
		if field.HasDefaultValue && !skipParseDefaultValue {
			if field.DefaultValueInterface, err = strconv.ParseFloat(field.DefaultValue, 64); err != nil {
				schema.err = fmt.Errorf("failed to parse %s as default value for float, got error: %v", field.DefaultValue, err)
			}
		}
	case reflect.String:
		field.DataType = String
		if field.HasDefaultValue && !skipParseDefaultValue {
			field.DefaultValue = strings.Trim(field.DefaultValue, "'")
			field.DefaultValue = strings.Trim(field.DefaultValue, `"`)
			field.DefaultValueInterface = field.DefaultValue
		}
	case reflect.Struct:
		if _, ok := fieldValue.Interface().(*time.Time); ok {
			field.DataType = Time
		} else if fieldValue.Type().ConvertibleTo(TimeReflectType) {
			field.DataType = Time
		} else if fieldValue.Type().ConvertibleTo(TimePtrReflectType) {
			field.DataType = Time
		}
		if field.HasDefaultValue && !skipParseDefaultValue && field.DataType == Time {
			if t, err := now.Parse(field.DefaultValue); err == nil {
				field.DefaultValueInterface = t
			}
		}
	case reflect.Array, reflect.Slice:
		if reflect.Indirect(fieldValue).Type().Elem() == ByteReflectType && field.DataType == "" {
			field.DataType = Bytes
		}
	}

	// explicit GormDataType() overrides the kind-derived data type
	if dataTyper, ok := fieldValue.Interface().(GormDataTypeInterface); ok {
		field.DataType = DataType(dataTyper.GormDataType())
	}

	// auto-create timestamp: explicit tag, or the conventional CreatedAt name
	if v, ok := field.TagSettings["AUTOCREATETIME"]; (ok && utils.CheckTruth(v)) || (!ok && field.Name == "CreatedAt" && (field.DataType == Time || field.DataType == Int || field.DataType == Uint)) {
		if field.DataType == Time {
			field.AutoCreateTime = UnixTime
		} else if strings.ToUpper(v) == "NANO" {
			field.AutoCreateTime = UnixNanosecond
		} else if strings.ToUpper(v) == "MILLI" {
			field.AutoCreateTime = UnixMillisecond
		} else {
			field.AutoCreateTime = UnixSecond
		}
	}

	// auto-update timestamp: explicit tag, or the conventional UpdatedAt name
	if v, ok := field.TagSettings["AUTOUPDATETIME"]; (ok && utils.CheckTruth(v)) || (!ok && field.Name == "UpdatedAt" && (field.DataType == Time || field.DataType == Int || field.DataType == Uint)) {
		if field.DataType == Time {
			field.AutoUpdateTime = UnixTime
		} else if strings.ToUpper(v) == "NANO" {
			field.AutoUpdateTime = UnixNanosecond
		} else if strings.ToUpper(v) == "MILLI" {
			field.AutoUpdateTime = UnixMillisecond
		} else {
			field.AutoUpdateTime = UnixSecond
		}
	}

	if field.GORMDataType == "" {
		field.GORMDataType = field.DataType
	}

	// TYPE tag overrides the data type (canonical names normalized, anything
	// else kept verbatim as a dialect-specific column type)
	if val, ok := field.TagSettings["TYPE"]; ok {
		lowerVal := DataType(strings.ToLower(val))
		switch lowerVal {
		case Bool, Int, Uint, Float, String, Time, Bytes:
			field.DataType = lowerVal
		default:
			field.DataType = DataType(val)
		}
	}

	// default bit size per Go kind when no SIZE tag was given
	if field.Size == 0 {
		switch reflect.Indirect(fieldValue).Kind() {
		case reflect.Int, reflect.Int64, reflect.Uint, reflect.Uint64, reflect.Float64:
			field.Size = 64
		case reflect.Int8, reflect.Uint8:
			field.Size = 8
		case reflect.Int16, reflect.Uint16:
			field.Size = 16
		case reflect.Int32, reflect.Uint32, reflect.Float32:
			field.Size = 32
		}
	}

	// setup permission
	if val, ok := field.TagSettings["-"]; ok {
		val = strings.ToLower(strings.TrimSpace(val))
		switch val {
		case "-":
			field.Creatable = false
			field.Updatable = false
			field.Readable = false
			field.DataType = ""
		case "all":
			field.Creatable = false
			field.Updatable = false
			field.Readable = false
			field.DataType = ""
			field.IgnoreMigration = true
		case "migration":
			field.IgnoreMigration = true
		}
	}

	// "->": read-only (read permission can itself be disabled with "->:false")
	if v, ok := field.TagSettings["->"]; ok {
		field.Creatable = false
		field.Updatable = false
		if strings.ToLower(v) == "false" {
			field.Readable = false
		} else {
			field.Readable = true
		}
	}

	// "<-": write permission, optionally restricted to create/update
	if v, ok := field.TagSettings["<-"]; ok {
		field.Creatable = true
		field.Updatable = true

		if v != "<-" {
			if !strings.Contains(v, "create") {
				field.Creatable = false
			}

			if !strings.Contains(v, "update") {
				field.Updatable = false
			}
		}
	}

	// Normal anonymous field or having `EMBEDDED` tag
	if _, ok := field.TagSettings["EMBEDDED"]; ok || (field.GORMDataType != Time && field.GORMDataType != Bytes && !isValuer &&
		fieldStruct.Anonymous && (field.Creatable || field.Updatable || field.Readable)) {
		kind := reflect.Indirect(fieldValue).Kind()
		switch kind {
		case reflect.Struct:
			var err error
			field.Creatable = false
			field.Updatable = false
			field.Readable = false

			cacheStore := &sync.Map{}
			cacheStore.Store(embeddedCacheKey, true)
			if field.EmbeddedSchema, err = getOrParse(fieldValue.Interface(), cacheStore, embeddedNamer{Table: schema.Table, Namer: schema.namer}); err != nil {
				schema.err = err
			}

			// NOTE(review): field.EmbeddedSchema may be nil when getOrParse
			// errored above — the loop below assumes it is non-nil; verify.
			for _, ef := range field.EmbeddedSchema.Fields {
				ef.Schema = schema
				ef.OwnerSchema = field.EmbeddedSchema
				ef.BindNames = append([]string{fieldStruct.Name}, ef.BindNames...)
				if _, ok := field.TagSettings["EMBEDDED"]; ok || !fieldStruct.Anonymous {
					ef.EmbeddedBindNames = append([]string{fieldStruct.Name}, ef.EmbeddedBindNames...)
				}
				// index is negative means is pointer
				if field.FieldType.Kind() == reflect.Struct {
					ef.StructField.Index = append([]int{fieldStruct.Index[0]}, ef.StructField.Index...)
				} else {
					ef.StructField.Index = append([]int{-fieldStruct.Index[0] - 1}, ef.StructField.Index...)
				}

				if prefix, ok := field.TagSettings["EMBEDDEDPREFIX"]; ok && ef.DBName != "" {
					ef.DBName = prefix + ef.DBName
				}

				if ef.PrimaryKey {
					if !utils.CheckTruth(ef.TagSettings["PRIMARYKEY"], ef.TagSettings["PRIMARY_KEY"]) {
						ef.PrimaryKey = false

						if val, ok := ef.TagSettings["AUTOINCREMENT"]; !ok || !utils.CheckTruth(val) {
							ef.AutoIncrement = false
						}

						if !ef.AutoIncrement && ef.DefaultValue == "" {
							ef.HasDefaultValue = false
						}
					}
				}

				for k, v := range field.TagSettings {
					ef.TagSettings[k] = v
				}
			}
		case reflect.Invalid, reflect.Uintptr, reflect.Array, reflect.Chan, reflect.Func, reflect.Interface,
			reflect.Map, reflect.Ptr, reflect.Slice, reflect.UnsafePointer, reflect.Complex64, reflect.Complex128:
			schema.err = fmt.Errorf("invalid embedded struct for %s's field %s, should be struct, but got %v", field.Schema.Name, field.Name, field.FieldType)
		}
	}

	return field
}
// create valuer, setter when parse struct.
// setupValuerAndSetter installs the field's ValueOf, ReflectValueOf and Set
// closures. Fast single-index paths are used for plain fields; the general
// paths walk StructField.Index, where a negative index encodes "dereference
// a pointer to an embedded struct" (allocating it on write when nil).
// Serializer-backed fields get both their ValueOf and Set wrapped.
func (field *Field) setupValuerAndSetter(modelType reflect.Type) {
	// Setup NewValuePool
	field.setupNewValuePool()

	// ValueOf returns field's value and if it is zero
	fieldIndex := field.StructField.Index[0]
	switch {
	case len(field.StructField.Index) == 1 && fieldIndex >= 0:
		// fast path: direct (non-embedded, non-pointer) field access
		field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) {
			v = reflect.Indirect(v)
			fieldValue := v.Field(fieldIndex)
			return fieldValue.Interface(), fieldValue.IsZero()
		}
	default:
		// general path: walk the index chain, treating nil embedded pointers
		// as "zero value present"
		field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) {
			v = reflect.Indirect(v)
			for _, fieldIdx := range field.StructField.Index {
				if fieldIdx >= 0 {
					v = v.Field(fieldIdx)
				} else {
					v = v.Field(-fieldIdx - 1)

					if !v.IsNil() {
						v = v.Elem()
					} else {
						return nil, true
					}
				}
			}

			fv, zero := v.Interface(), v.IsZero()
			return fv, zero
		}
	}

	if field.Serializer != nil {
		// wrap ValueOf so callers receive a *serializer carrying the raw
		// field value, deferring actual serialization to statement building
		oldValuerOf := field.ValueOf
		field.ValueOf = func(ctx context.Context, v reflect.Value) (interface{}, bool) {
			value, zero := oldValuerOf(ctx, v)

			s, ok := value.(SerializerValuerInterface)
			if !ok {
				s = field.Serializer
			}

			return &serializer{
				Field:           field,
				SerializeValuer: s,
				Destination:     v,
				Context:         ctx,
				fieldValue:      value,
			}, zero
		}
	}

	// ReflectValueOf returns field's reflect value
	switch {
	case len(field.StructField.Index) == 1 && fieldIndex >= 0:
		field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value {
			v = reflect.Indirect(v)
			return v.Field(fieldIndex)
		}
	default:
		field.ReflectValueOf = func(ctx context.Context, v reflect.Value) reflect.Value {
			v = reflect.Indirect(v)
			for idx, fieldIdx := range field.StructField.Index {
				if fieldIdx >= 0 {
					v = v.Field(fieldIdx)
				} else {
					v = v.Field(-fieldIdx - 1)

					// allocate intermediate embedded pointers on demand
					if v.IsNil() {
						v.Set(reflect.New(v.Type().Elem()))
					}

					if idx < len(field.StructField.Index)-1 {
						v = v.Elem()
					}
				}
			}
			return v
		}
	}

	// fallbackSetter handles assignment when the typed switch below has no
	// direct conversion: assignable/convertible values, pointer targets,
	// pointer sources, driver.Valuer sources, and clause.Expr pass-through.
	fallbackSetter := func(ctx context.Context, value reflect.Value, v interface{}, setter func(context.Context, reflect.Value, interface{}) error) (err error) {
		if v == nil {
			field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem())
		} else {
			reflectV := reflect.ValueOf(v)
			// Optimal value type acquisition for v
			reflectValType := reflectV.Type()

			if reflectValType.AssignableTo(field.FieldType) {
				if reflectV.Kind() == reflect.Ptr && reflectV.Elem().Kind() == reflect.Ptr {
					reflectV = reflect.Indirect(reflectV)
				}
				field.ReflectValueOf(ctx, value).Set(reflectV)
				return
			} else if reflectValType.ConvertibleTo(field.FieldType) {
				field.ReflectValueOf(ctx, value).Set(reflectV.Convert(field.FieldType))
				return
			} else if field.FieldType.Kind() == reflect.Ptr {
				fieldValue := field.ReflectValueOf(ctx, value)
				fieldType := field.FieldType.Elem()

				if reflectValType.AssignableTo(fieldType) {
					if !fieldValue.IsValid() {
						fieldValue = reflect.New(fieldType)
					} else if fieldValue.IsNil() {
						fieldValue.Set(reflect.New(fieldType))
					}
					fieldValue.Elem().Set(reflectV)
					return
				} else if reflectValType.ConvertibleTo(fieldType) {
					if fieldValue.IsNil() {
						fieldValue.Set(reflect.New(fieldType))
					}

					fieldValue.Elem().Set(reflectV.Convert(fieldType))
					return
				}
			}

			if reflectV.Kind() == reflect.Ptr {
				if reflectV.IsNil() {
					field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem())
				} else if reflectV.Type().Elem().AssignableTo(field.FieldType) {
					field.ReflectValueOf(ctx, value).Set(reflectV.Elem())
					return
				} else {
					// retry with the dereferenced value
					err = setter(ctx, value, reflectV.Elem().Interface())
				}
			} else if valuer, ok := v.(driver.Valuer); ok {
				if v, err = valuer.Value(); err == nil {
					err = setter(ctx, value, v)
				}
			} else if _, ok := v.(clause.Expr); !ok {
				return fmt.Errorf("failed to set value %#v to field %s", v, field.Name)
			}
		}

		return
	}

	// Set
	switch field.FieldType.Kind() {
	case reflect.Bool:
		field.Set = func(ctx context.Context, value reflect.Value, v interface{}) error {
			switch data := v.(type) {
			case **bool:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetBool(**data)
				}
			case bool:
				field.ReflectValueOf(ctx, value).SetBool(data)
			case int64:
				field.ReflectValueOf(ctx, value).SetBool(data > 0)
			case string:
				b, _ := strconv.ParseBool(data)
				field.ReflectValueOf(ctx, value).SetBool(b)
			default:
				return fallbackSetter(ctx, value, v, field.Set)
			}
			return nil
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
			switch data := v.(type) {
			case **int64:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetInt(**data)
				}
			case **int:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetInt(int64(**data))
				}
			case **int8:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetInt(int64(**data))
				}
			case **int16:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetInt(int64(**data))
				}
			case **int32:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetInt(int64(**data))
				}
			case int64:
				field.ReflectValueOf(ctx, value).SetInt(data)
			case int:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case int8:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case int16:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case int32:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case uint:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case uint8:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case uint16:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case uint32:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case uint64:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case float32:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case float64:
				field.ReflectValueOf(ctx, value).SetInt(int64(data))
			case []byte:
				return field.Set(ctx, value, string(data))
			case string:
				if i, err := strconv.ParseInt(data, 0, 64); err == nil {
					field.ReflectValueOf(ctx, value).SetInt(i)
				} else {
					return err
				}
			case time.Time:
				// auto-timestamp integer fields store the time at the
				// configured resolution
				if field.AutoCreateTime == UnixNanosecond || field.AutoUpdateTime == UnixNanosecond {
					field.ReflectValueOf(ctx, value).SetInt(data.UnixNano())
				} else if field.AutoCreateTime == UnixMillisecond || field.AutoUpdateTime == UnixMillisecond {
					field.ReflectValueOf(ctx, value).SetInt(data.UnixMilli())
				} else {
					field.ReflectValueOf(ctx, value).SetInt(data.Unix())
				}
			case *time.Time:
				if data != nil {
					if field.AutoCreateTime == UnixNanosecond || field.AutoUpdateTime == UnixNanosecond {
						field.ReflectValueOf(ctx, value).SetInt(data.UnixNano())
					} else if field.AutoCreateTime == UnixMillisecond || field.AutoUpdateTime == UnixMillisecond {
						field.ReflectValueOf(ctx, value).SetInt(data.UnixMilli())
					} else {
						field.ReflectValueOf(ctx, value).SetInt(data.Unix())
					}
				} else {
					field.ReflectValueOf(ctx, value).SetInt(0)
				}
			default:
				return fallbackSetter(ctx, value, v, field.Set)
			}
			return err
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
			switch data := v.(type) {
			case **uint64:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetUint(**data)
				}
			case **uint:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetUint(uint64(**data))
				}
			case **uint8:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetUint(uint64(**data))
				}
			case **uint16:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetUint(uint64(**data))
				}
			case **uint32:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetUint(uint64(**data))
				}
			case uint64:
				field.ReflectValueOf(ctx, value).SetUint(data)
			case uint:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case uint8:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case uint16:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case uint32:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case int64:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case int:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case int8:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case int16:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case int32:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case float32:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case float64:
				field.ReflectValueOf(ctx, value).SetUint(uint64(data))
			case []byte:
				return field.Set(ctx, value, string(data))
			case time.Time:
				if field.AutoCreateTime == UnixNanosecond || field.AutoUpdateTime == UnixNanosecond {
					field.ReflectValueOf(ctx, value).SetUint(uint64(data.UnixNano()))
				} else if field.AutoCreateTime == UnixMillisecond || field.AutoUpdateTime == UnixMillisecond {
					field.ReflectValueOf(ctx, value).SetUint(uint64(data.UnixMilli()))
				} else {
					field.ReflectValueOf(ctx, value).SetUint(uint64(data.Unix()))
				}
			case string:
				if i, err := strconv.ParseUint(data, 0, 64); err == nil {
					field.ReflectValueOf(ctx, value).SetUint(i)
				} else {
					return err
				}
			default:
				return fallbackSetter(ctx, value, v, field.Set)
			}
			return err
		}
	case reflect.Float32, reflect.Float64:
		field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
			switch data := v.(type) {
			case **float64:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetFloat(**data)
				}
			case **float32:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetFloat(float64(**data))
				}
			case float64:
				field.ReflectValueOf(ctx, value).SetFloat(data)
			case float32:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case int64:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case int:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case int8:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case int16:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case int32:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case uint:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case uint8:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case uint16:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case uint32:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case uint64:
				field.ReflectValueOf(ctx, value).SetFloat(float64(data))
			case []byte:
				return field.Set(ctx, value, string(data))
			case string:
				if i, err := strconv.ParseFloat(data, 64); err == nil {
					field.ReflectValueOf(ctx, value).SetFloat(i)
				} else {
					return err
				}
			default:
				return fallbackSetter(ctx, value, v, field.Set)
			}
			return err
		}
	case reflect.String:
		field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
			switch data := v.(type) {
			case **string:
				if data != nil && *data != nil {
					field.ReflectValueOf(ctx, value).SetString(**data)
				}
			case string:
				field.ReflectValueOf(ctx, value).SetString(data)
			case []byte:
				field.ReflectValueOf(ctx, value).SetString(string(data))
			case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
				field.ReflectValueOf(ctx, value).SetString(utils.ToString(data))
			case float64, float32:
				// format floats with the field's configured precision
				field.ReflectValueOf(ctx, value).SetString(fmt.Sprintf("%."+strconv.Itoa(field.Precision)+"f", data))
			default:
				return fallbackSetter(ctx, value, v, field.Set)
			}
			return err
		}
	default:
		fieldValue := reflect.New(field.FieldType)
		switch fieldValue.Elem().Interface().(type) {
		case time.Time:
			field.Set = func(ctx context.Context, value reflect.Value, v interface{}) error {
				switch data := v.(type) {
				case **time.Time:
					if data != nil && *data != nil {
						field.Set(ctx, value, *data)
					}
				case time.Time:
					field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(v))
				case *time.Time:
					if data != nil {
						field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(data).Elem())
					} else {
						field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(time.Time{}))
					}
				case string:
					if t, err := now.Parse(data); err == nil {
						field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(t))
					} else {
						return fmt.Errorf("failed to set string %v to time.Time field %s, failed to parse it as time, got error %v", v, field.Name, err)
					}
				default:
					return fallbackSetter(ctx, value, v, field.Set)
				}
				return nil
			}
		case *time.Time:
			field.Set = func(ctx context.Context, value reflect.Value, v interface{}) error {
				switch data := v.(type) {
				case **time.Time:
					if data != nil && *data != nil {
						field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(*data))
					}
				case time.Time:
					fieldValue := field.ReflectValueOf(ctx, value)
					if fieldValue.IsNil() {
						fieldValue.Set(reflect.New(field.FieldType.Elem()))
					}
					fieldValue.Elem().Set(reflect.ValueOf(v))
				case *time.Time:
					field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(v))
				case string:
					if t, err := now.Parse(data); err == nil {
						fieldValue := field.ReflectValueOf(ctx, value)
						if fieldValue.IsNil() {
							// empty string on a nil *time.Time stays nil
							if v == "" {
								return nil
							}
							fieldValue.Set(reflect.New(field.FieldType.Elem()))
						}
						fieldValue.Elem().Set(reflect.ValueOf(t))
					} else {
						return fmt.Errorf("failed to set string %v to time.Time field %s, failed to parse it as time, got error %v", v, field.Name, err)
					}
				default:
					return fallbackSetter(ctx, value, v, field.Set)
				}
				return nil
			}
		default:
			if _, ok := fieldValue.Elem().Interface().(sql.Scanner); ok {
				// pointer scanner
				field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
					reflectV := reflect.ValueOf(v)
					if !reflectV.IsValid() {
						field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem())
					} else if reflectV.Kind() == reflect.Ptr && reflectV.IsNil() {
						return
					} else if reflectV.Type().AssignableTo(field.FieldType) {
						field.ReflectValueOf(ctx, value).Set(reflectV)
					} else if reflectV.Kind() == reflect.Ptr {
						return field.Set(ctx, value, reflectV.Elem().Interface())
					} else {
						fieldValue := field.ReflectValueOf(ctx, value)
						if fieldValue.IsNil() {
							fieldValue.Set(reflect.New(field.FieldType.Elem()))
						}

						if valuer, ok := v.(driver.Valuer); ok {
							v, _ = valuer.Value()
						}

						err = fieldValue.Interface().(sql.Scanner).Scan(v)
					}
					return
				}
			} else if _, ok := fieldValue.Interface().(sql.Scanner); ok {
				// struct scanner
				field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
					reflectV := reflect.ValueOf(v)
					if !reflectV.IsValid() {
						field.ReflectValueOf(ctx, value).Set(reflect.New(field.FieldType).Elem())
					} else if reflectV.Kind() == reflect.Ptr && reflectV.IsNil() {
						return
					} else if reflectV.Type().AssignableTo(field.FieldType) {
						field.ReflectValueOf(ctx, value).Set(reflectV)
					} else if reflectV.Kind() == reflect.Ptr {
						return field.Set(ctx, value, reflectV.Elem().Interface())
					} else {
						if valuer, ok := v.(driver.Valuer); ok {
							v, _ = valuer.Value()
						}

						err = field.ReflectValueOf(ctx, value).Addr().Interface().(sql.Scanner).Scan(v)
					}
					return
				}
			} else {
				field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
					return fallbackSetter(ctx, value, v, field.Set)
				}
			}
		}
	}

	if field.Serializer != nil {
		// wrap Set so incoming *serializer wrappers (produced by the ValueOf
		// wrapper above) are unpacked and scanned through the serializer
		var (
			oldFieldSetter = field.Set
			sameElemType   bool
			sameType       = field.FieldType == reflect.ValueOf(field.Serializer).Type()
		)

		if reflect.ValueOf(field.Serializer).Kind() == reflect.Ptr {
			sameElemType = field.FieldType == reflect.ValueOf(field.Serializer).Type().Elem()
		}

		serializerValue := reflect.Indirect(reflect.ValueOf(field.Serializer))
		serializerType := serializerValue.Type()
		field.Set = func(ctx context.Context, value reflect.Value, v interface{}) (err error) {
			if s, ok := v.(*serializer); ok {
				if s.fieldValue == nil && s.Serializer == nil {
					// nothing to scan: reset the field to its zero value
					rv := field.ReflectValueOf(ctx, value)
					if rv.IsValid() && rv.CanSet() {
						rv.Set(reflect.Zero(field.FieldType))
					}
					return nil
				}
				if s.fieldValue != nil {
					err = oldFieldSetter(ctx, value, s.fieldValue)
				} else if err = s.Serializer.Scan(ctx, field, value, s.value); err == nil {
					if sameElemType {
						field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(s.Serializer).Elem())
					} else if sameType {
						field.ReflectValueOf(ctx, value).Set(reflect.ValueOf(s.Serializer))
					}
					// give the serializer wrapper a fresh copy so later scans
					// don't share state with this field's value
					si := reflect.New(serializerType)
					si.Elem().Set(serializerValue)
					s.Serializer = si.Interface().(SerializerInterface)
				}
			} else {
				err = oldFieldSetter(ctx, value, v)
			}
			return
		}
	}
}
func (field *Field) setupNewValuePool() {
if field.Serializer != nil {
serializerValue := reflect.Indirect(reflect.ValueOf(field.Serializer))
serializerType := serializerValue.Type()
field.NewValuePool = &sync.Pool{
New: func() interface{} {
si := reflect.New(serializerType)
si.Elem().Set(serializerValue)
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | true |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/pool.go | schema/pool.go | package schema
import (
"reflect"
"sync"
)
// sync pools.
// normalPool maps a reflect.Type to a *sync.Pool of freshly allocated values
// of that type; poolInitializer returns the (lazily created) pool for a type,
// so scanning can reuse per-type value pools across statements.
var (
	normalPool      sync.Map
	poolInitializer = func(reflectType reflect.Type) FieldNewValuePool {
		v, _ := normalPool.LoadOrStore(reflectType, &sync.Pool{
			New: func() interface{} {
				// each pooled value is a pointer to a zero value of reflectType
				return reflect.New(reflectType).Interface()
			},
		})
		return v.(FieldNewValuePool)
	}
)
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/model_test.go | schema/model_test.go | package schema_test
import (
"database/sql"
"time"
"gorm.io/gorm"
"gorm.io/gorm/utils/tests"
)
// User is a schema-test fixture exercising pointer fields and every
// relationship kind: has-one, has-many, polymorphic, belongs-to,
// self-referential, and many-to-many.
type User struct {
	*gorm.Model
	Name      *string
	Age       *uint
	Birthday  *time.Time
	Account   *tests.Account                  // has one
	Pets      []*tests.Pet                    // has many
	Toys      []*tests.Toy `gorm:"polymorphic:Owner"` // polymorphic has many
	CompanyID *int
	Company   *tests.Company // belongs to
	ManagerID *uint
	Manager   *User                              // self-referential belongs to
	Team      []*User           `gorm:"foreignkey:ManagerID"` // self-referential has many
	Languages []*tests.Language `gorm:"many2many:UserSpeak"`  // many to many with custom join table
	Friends   []*User           `gorm:"many2many:user_friends"`
	Active    *bool
}
// Named and aliased basic types used to verify that schema parsing resolves
// defined types (mytime, myint) and type aliases (mybool) correctly.
type (
	mytime time.Time
	myint  int
	mybool = bool
)
// AdvancedDataTypeUser is a fixture mixing database/sql null wrappers,
// defined types, aliases, and pointers to each.
type AdvancedDataTypeUser struct {
	ID           sql.NullInt64
	Name         *sql.NullString
	Birthday     sql.NullTime
	RegisteredAt mytime
	DeletedAt    *mytime
	Active       mybool
	Admin        *mybool
}
// BaseModel is an embeddable fixture with audit columns and a belongs-to
// relation (Created) declared on the embedded struct itself.
type BaseModel struct {
	ID        uint
	CreatedAt time.Time
	CreatedBy *int
	Created   *VersionUser `gorm:"foreignKey:CreatedBy"`
	UpdatedAt time.Time
	DeletedAt gorm.DeletedAt `gorm:"index"`
}
// VersionModel embeds BaseModel and adds a version counter, providing a
// two-level embedding fixture.
type VersionModel struct {
	BaseModel
	Version int
}
// VersionUser embeds VersionModel, giving a three-level embedded-struct
// fixture for field-index and bind-name tests.
type VersionUser struct {
	VersionModel
	Name     string
	Age      uint
	Birthday *time.Time
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/relationship_test.go | schema/relationship_test.go | package schema_test
import (
"sync"
"testing"
"time"
"gorm.io/gorm"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils/tests"
)
// checkStructRelation parses data into a schema and asserts that each of the
// expected relations is present on it. On a parse failure it reports the
// error and performs no relation checks.
func checkStructRelation(t *testing.T, data interface{}, relations ...Relation) {
	s, err := schema.Parse(data, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Errorf("Failed to parse schema, got error %v", err)
		return
	}
	for _, expected := range relations {
		checkSchemaRelation(t, s, expected)
	}
}
// TestBelongsToOverrideForeignKey verifies that the ForeignKey tag overrides
// the default foreign key of a belongs-to relation.
func TestBelongsToOverrideForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name string
	}

	type User struct {
		gorm.Model
		Profile      Profile `gorm:"ForeignKey:ProfileRefer"`
		ProfileRefer int
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.BelongsTo, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"ID", "Profile", "ProfileRefer", "User", "", false}},
	})
}

// TestBelongsToOverrideReferences verifies that ForeignKey and References tags
// together select a non-primary reference column on the owner side.
func TestBelongsToOverrideReferences(t *testing.T) {
	type Profile struct {
		gorm.Model
		Refer string
		Name  string
	}

	type User struct {
		gorm.Model
		Profile   Profile `gorm:"ForeignKey:ProfileID;References:Refer"`
		ProfileID int
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.BelongsTo, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "Profile", "ProfileID", "User", "", false}},
	})
}

// TestBelongsToWithOnlyReferences verifies that a lone References tag resolves
// the foreign key by the <Type><Refer> naming convention (ProfileRefer).
func TestBelongsToWithOnlyReferences(t *testing.T) {
	type Profile struct {
		gorm.Model
		Refer string
		Name  string
	}

	type User struct {
		gorm.Model
		Profile      Profile `gorm:"References:Refer"`
		ProfileRefer int
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.BelongsTo, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "Profile", "ProfileRefer", "User", "", false}},
	})
}

// TestBelongsToWithOnlyReferences2 is the same as above but with the
// conventional <Type>ID foreign key field (ProfileID).
func TestBelongsToWithOnlyReferences2(t *testing.T) {
	type Profile struct {
		gorm.Model
		Refer string
		Name  string
	}

	type User struct {
		gorm.Model
		Profile   Profile `gorm:"References:Refer"`
		ProfileID int
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.BelongsTo, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "Profile", "ProfileID", "User", "", false}},
	})
}

// TestSelfReferentialBelongsTo verifies a belongs-to relation whose target is
// the declaring struct itself (User.Creator -> User).
func TestSelfReferentialBelongsTo(t *testing.T) {
	type User struct {
		ID        int32 `gorm:"primaryKey"`
		Name      string
		CreatorID *int32
		Creator   *User
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Creator", Type: schema.BelongsTo, Schema: "User", FieldSchema: "User",
		References: []Reference{{"ID", "User", "CreatorID", "User", "", false}},
	})
}

// TestSelfReferentialBelongsToOverrideReferences verifies a self-referential
// belongs-to with explicit foreignKey and references tags.
func TestSelfReferentialBelongsToOverrideReferences(t *testing.T) {
	type User struct {
		ID        int32 `gorm:"primaryKey"`
		Name      string
		CreatedBy *int32
		Creator   *User `gorm:"foreignKey:CreatedBy;references:ID"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Creator", Type: schema.BelongsTo, Schema: "User", FieldSchema: "User",
		References: []Reference{{"ID", "User", "CreatedBy", "User", "", false}},
	})
}

// TestBelongsToWithMixin verifies that a belongs-to declared inside an
// embedded (mixin) struct is promoted onto the embedding schema.
func TestBelongsToWithMixin(t *testing.T) {
	type Profile struct {
		gorm.Model
		Refer string
		Name  string
	}

	type ProfileMixin struct {
		Profile      Profile `gorm:"References:Refer"`
		ProfileRefer int
	}

	type User struct {
		gorm.Model
		ProfileMixin
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.BelongsTo, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "Profile", "ProfileRefer", "User", "", false}},
	})
}
// TestHasOneOverrideForeignKey verifies that the ForeignKey tag overrides the
// default foreign key of a has-one relation (key lives on the Profile side).
func TestHasOneOverrideForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Profile Profile `gorm:"ForeignKey:UserRefer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasOne, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"ID", "User", "UserRefer", "Profile", "", true}},
	})
}

// TestHasOneOverrideReferences verifies ForeignKey + References tags pointing
// a has-one at a non-primary column of the owner (User.Refer).
func TestHasOneOverrideReferences(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name   string
		UserID uint
	}

	type User struct {
		gorm.Model
		Refer   string
		Profile Profile `gorm:"ForeignKey:UserID;References:Refer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasOne, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "User", "UserID", "Profile", "", true}},
	})
}

// TestHasOneOverrideReferences2 verifies a has-one where the owner's
// ProfileID references the related record's primary key (foreignKey:ID).
func TestHasOneOverrideReferences2(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name string
	}

	type User struct {
		gorm.Model
		ProfileID uint     `gorm:"column:profile_id"`
		Profile   *Profile `gorm:"foreignKey:ID;references:ProfileID"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasOne, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"ProfileID", "User", "ID", "Profile", "", true}},
	})
}

// TestHasOneWithOnlyReferences verifies that a lone References tag resolves
// the foreign key by the <Type><Refer> convention (UserRefer).
func TestHasOneWithOnlyReferences(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Refer   string
		Profile Profile `gorm:"References:Refer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasOne, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "User", "UserRefer", "Profile", "", true}},
	})
}

// TestHasOneWithOnlyReferences2 is the same as above but with the
// conventional <Type>ID foreign key field (UserID).
func TestHasOneWithOnlyReferences2(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name   string
		UserID uint
	}

	type User struct {
		gorm.Model
		Refer   string
		Profile Profile `gorm:"References:Refer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasOne, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "User", "UserID", "Profile", "", true}},
	})
}

// TestHasManyOverrideForeignKey verifies that the ForeignKey tag overrides
// the default foreign key of a has-many relation.
func TestHasManyOverrideForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Profile []Profile `gorm:"ForeignKey:UserRefer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasMany, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"ID", "User", "UserRefer", "Profile", "", true}},
	})
}
// TestHasManyOverrideReferences verifies ForeignKey + References tags on a
// has-many pointing at a non-primary owner column (User.Refer).
func TestHasManyOverrideReferences(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name   string
		UserID uint
	}

	type User struct {
		gorm.Model
		Refer   string
		Profile []Profile `gorm:"ForeignKey:UserID;References:Refer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasMany, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"Refer", "User", "UserID", "Profile", "", true}},
	})
}

// TestMany2ManyOverrideForeignKeyAndReferences verifies that all four
// many2many override tags (ForeignKey/JoinForeignKey/References/
// JoinReferences) are honored, in both CamelCase and snake_case spellings.
func TestMany2ManyOverrideForeignKeyAndReferences(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Profiles  []Profile `gorm:"many2many:user_profiles;ForeignKey:Refer;JoinForeignKey:UserReferID;References:UserRefer;JoinReferences:ProfileRefer"`
		Profiles2 []Profile `gorm:"many2many:user_profiles2;ForeignKey:refer;JoinForeignKey:user_refer_id;References:user_refer;JoinReferences:profile_refer"`
		Refer     uint
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profiles", Type: schema.Many2Many, Schema: "User", FieldSchema: "Profile",
		JoinTable: JoinTable{Name: "user_profiles", Table: "user_profiles"},
		References: []Reference{
			{"Refer", "User", "UserReferID", "user_profiles", "", true},
			{"UserRefer", "Profile", "ProfileRefer", "user_profiles", "", false},
		},
	}, Relation{
		Name: "Profiles2", Type: schema.Many2Many, Schema: "User", FieldSchema: "Profile",
		JoinTable: JoinTable{Name: "user_profiles2", Table: "user_profiles2"},
		References: []Reference{
			{"Refer", "User", "User_refer_id", "user_profiles2", "", true},
			{"UserRefer", "Profile", "Profile_refer", "user_profiles2", "", false},
		},
	})
}

// TestMany2ManyOverrideForeignKey verifies ForeignKey/References overrides on
// a many2many; join-table column names fall back to convention.
func TestMany2ManyOverrideForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Profiles []Profile `gorm:"many2many:user_profiles;ForeignKey:Refer;References:UserRefer"`
		Refer    uint
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profiles", Type: schema.Many2Many, Schema: "User", FieldSchema: "Profile",
		JoinTable: JoinTable{Name: "user_profiles", Table: "user_profiles"},
		References: []Reference{
			{"Refer", "User", "UserRefer", "user_profiles", "", true},
			{"UserRefer", "Profile", "ProfileUserRefer", "user_profiles", "", false},
		},
	})
}

// TestMany2ManySharedForeignKey verifies composite many2many keys where one
// column (Kind) is shared between both sides of the join table.
func TestMany2ManySharedForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name         string
		Kind         string
		ProfileRefer uint
	}

	type User struct {
		gorm.Model
		Profiles []Profile `gorm:"many2many:user_profiles;foreignKey:Refer,Kind;joinForeignKey:UserRefer,Kind;References:ProfileRefer,Kind;joinReferences:ProfileR,Kind"`
		Kind     string
		Refer    uint
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profiles", Type: schema.Many2Many, Schema: "User", FieldSchema: "Profile",
		JoinTable: JoinTable{Name: "user_profiles", Table: "user_profiles"},
		References: []Reference{
			{"Refer", "User", "UserRefer", "user_profiles", "", true},
			{"Kind", "User", "Kind", "user_profiles", "", true},
			{"ProfileRefer", "Profile", "ProfileR", "user_profiles", "", false},
			{"Kind", "Profile", "Kind", "user_profiles", "", false},
		},
	})
}

// TestMany2ManyOverrideJoinForeignKey verifies JoinForeignKey/JoinReferences
// overrides while both primary keys stay conventional (ID).
func TestMany2ManyOverrideJoinForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Profiles []Profile `gorm:"many2many:user_profile;JoinForeignKey:UserReferID;JoinReferences:ProfileRefer"`
		Refer    uint
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profiles", Type: schema.Many2Many, Schema: "User", FieldSchema: "Profile",
		JoinTable: JoinTable{Name: "user_profile", Table: "user_profile"},
		References: []Reference{
			{"ID", "User", "UserReferID", "user_profile", "", true},
			{"ID", "Profile", "ProfileRefer", "user_profile", "", false},
		},
	})
}

// TestBuildReadonlyMany2ManyRelation verifies that a read-only ("->") field
// still builds the same many2many relation as a writable one.
func TestBuildReadonlyMany2ManyRelation(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		Profiles []Profile `gorm:"->;many2many:user_profile;JoinForeignKey:UserReferID;JoinReferences:ProfileRefer"`
		Refer    uint
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profiles", Type: schema.Many2Many, Schema: "User", FieldSchema: "Profile",
		JoinTable: JoinTable{Name: "user_profile", Table: "user_profile"},
		References: []Reference{
			{"ID", "User", "UserReferID", "user_profile", "", true},
			{"ID", "Profile", "ProfileRefer", "user_profile", "", false},
		},
	})
}
// TestMany2ManyWithMultiPrimaryKeys verifies many2many relations between
// models with composite primary keys: default (all PK columns), restricted
// to one column (ForeignKey:id;References:id), and asymmetric column sets.
func TestMany2ManyWithMultiPrimaryKeys(t *testing.T) {
	type Tag struct {
		ID     uint   `gorm:"primary_key"`
		Locale string `gorm:"primary_key"`
		Value  string
	}

	type Blog struct {
		ID         uint   `gorm:"primary_key"`
		Locale     string `gorm:"primary_key"`
		Subject    string
		Body       string
		Tags       []Tag `gorm:"many2many:blog_tags;"`
		SharedTags []Tag `gorm:"many2many:shared_blog_tags;ForeignKey:id;References:id"`
		LocaleTags []Tag `gorm:"many2many:locale_blog_tags;ForeignKey:id,locale;References:id"`
	}

	checkStructRelation(t, &Blog{},
		Relation{
			Name: "Tags", Type: schema.Many2Many, Schema: "Blog", FieldSchema: "Tag",
			JoinTable: JoinTable{Name: "blog_tags", Table: "blog_tags"},
			References: []Reference{
				{"ID", "Blog", "BlogID", "blog_tags", "", true},
				{"Locale", "Blog", "BlogLocale", "blog_tags", "", true},
				{"ID", "Tag", "TagID", "blog_tags", "", false},
				{"Locale", "Tag", "TagLocale", "blog_tags", "", false},
			},
		},
		Relation{
			Name: "SharedTags", Type: schema.Many2Many, Schema: "Blog", FieldSchema: "Tag",
			JoinTable: JoinTable{Name: "shared_blog_tags", Table: "shared_blog_tags"},
			References: []Reference{
				{"ID", "Blog", "BlogID", "shared_blog_tags", "", true},
				{"ID", "Tag", "TagID", "shared_blog_tags", "", false},
			},
		},
		Relation{
			Name: "LocaleTags", Type: schema.Many2Many, Schema: "Blog", FieldSchema: "Tag",
			JoinTable: JoinTable{Name: "locale_blog_tags", Table: "locale_blog_tags"},
			References: []Reference{
				{"ID", "Blog", "BlogID", "locale_blog_tags", "", true},
				{"Locale", "Blog", "BlogLocale", "locale_blog_tags", "", true},
				{"ID", "Tag", "TagID", "locale_blog_tags", "", false},
			},
		},
	)
}

// TestMultipleMany2Many verifies two independent many2many relations between
// the same pair of models, each with its own join table.
func TestMultipleMany2Many(t *testing.T) {
	type Thing struct {
		ID int
	}

	type Person struct {
		ID       int
		Likes    []Thing `gorm:"many2many:likes"`
		Dislikes []Thing `gorm:"many2many:dislikes"`
	}

	checkStructRelation(t, &Person{},
		Relation{
			Name: "Likes", Type: schema.Many2Many, Schema: "Person", FieldSchema: "Thing",
			JoinTable: JoinTable{Name: "likes", Table: "likes"},
			References: []Reference{
				{"ID", "Person", "PersonID", "likes", "", true},
				{"ID", "Thing", "ThingID", "likes", "", false},
			},
		},
		Relation{
			Name: "Dislikes", Type: schema.Many2Many, Schema: "Person", FieldSchema: "Thing",
			JoinTable: JoinTable{Name: "dislikes", Table: "dislikes"},
			References: []Reference{
				{"ID", "Person", "PersonID", "dislikes", "", true},
				{"ID", "Thing", "ThingID", "dislikes", "", false},
			},
		},
	)
}

// TestSelfReferentialMany2Many checks a self-referential has-many (parsed via
// foreignKey:CreatedBy) and additionally asserts that the relation's
// FieldSchema is the very same *schema.Schema instance as the parsed schema.
func TestSelfReferentialMany2Many(t *testing.T) {
	type User struct {
		ID         int32 `gorm:"primaryKey"`
		Name       string
		CreatedBy  int32
		Creators   []User      `gorm:"foreignKey:CreatedBy"`
		AnotherPro interface{} `gorm:"-"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Creators", Type: schema.HasMany, Schema: "User", FieldSchema: "User",
		References: []Reference{{"ID", "User", "CreatedBy", "User", "", true}},
	})

	user, err := schema.Parse(&User{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse schema")
	}

	relSchema := user.Relationships.Relations["Creators"].FieldSchema
	if user != relSchema {
		t.Fatalf("schema should be same, expects %p but got %p", user, relSchema)
	}
}
// CreatedByModel is an embeddable mixin declaring a belongs-to relation back
// to the embedding CreatedUser type.
type CreatedByModel struct {
	CreatedByID uint
	CreatedBy   *CreatedUser
}

// CreatedUser embeds CreatedByModel, producing a self-referential belongs-to
// through an embedded struct.
type CreatedUser struct {
	gorm.Model
	CreatedByModel
}

// TestEmbeddedRelation verifies that a relation declared in an embedded
// struct is promoted, and that its FieldSchema is the identical schema
// instance (no duplicate schema is created for the self-reference).
func TestEmbeddedRelation(t *testing.T) {
	checkStructRelation(t, &CreatedUser{}, Relation{
		Name: "CreatedBy", Type: schema.BelongsTo, Schema: "CreatedUser", FieldSchema: "CreatedUser",
		References: []Reference{
			{"ID", "CreatedUser", "CreatedByID", "CreatedUser", "", false},
		},
	})

	userSchema, err := schema.Parse(&CreatedUser{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse schema, got error %v", err)
	}

	if len(userSchema.Relationships.Relations) != 1 {
		t.Fatalf("expects 1 relations, but got %v", len(userSchema.Relationships.Relations))
	}

	if createdByRel, ok := userSchema.Relationships.Relations["CreatedBy"]; ok {
		if createdByRel.FieldSchema != userSchema {
			t.Fatalf("expects same field schema, but got new %p, old %p", createdByRel.FieldSchema, userSchema)
		}
	} else {
		t.Fatalf("expects created by relations, but not found")
	}
}

// TestEmbeddedHas verifies polymorphic has-one/has-many relations declared
// inside an embedded ("Cat", tagged embedded) anonymous struct, checked via
// the schema's EmbeddedRelations map.
func TestEmbeddedHas(t *testing.T) {
	type Toy struct {
		ID        int
		Name      string
		OwnerID   int
		OwnerType string
	}
	type User struct {
		ID  int
		Cat struct {
			Name string
			Toy  Toy   `gorm:"polymorphic:Owner;"`
			Toys []Toy `gorm:"polymorphic:Owner;"`
		} `gorm:"embedded;embeddedPrefix:cat_"`
		Dog struct {
			ID     int
			Name   string
			UserID int
			Toy    Toy   `gorm:"polymorphic:Owner;"`
			Toys   []Toy `gorm:"polymorphic:Owner;"`
		}
		Toys []Toy `gorm:"polymorphic:Owner;"`
	}

	s, err := schema.Parse(&User{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("Failed to parse schema, got error %v", err)
	}

	// NOTE(review): the expected References below repeat the same OwnerType
	// entry twice; this mirrors what the checker is given here — confirm
	// against checkEmbeddedRelations' matching semantics.
	checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
		"Cat": {
			Relations: map[string]Relation{
				"Toy": {
					Name:        "Toy",
					Type:        schema.HasOne,
					Schema:      "User",
					FieldSchema: "Toy",
					Polymorphic: Polymorphic{ID: "OwnerID", Type: "OwnerType", Value: "users"},
					References: []Reference{
						{ForeignKey: "OwnerType", ForeignSchema: "Toy", PrimaryValue: "users"},
						{ForeignKey: "OwnerType", ForeignSchema: "Toy", PrimaryValue: "users"},
					},
				},
				"Toys": {
					Name:        "Toys",
					Type:        schema.HasMany,
					Schema:      "User",
					FieldSchema: "Toy",
					Polymorphic: Polymorphic{ID: "OwnerID", Type: "OwnerType", Value: "users"},
					References: []Reference{
						{ForeignKey: "OwnerType", ForeignSchema: "Toy", PrimaryValue: "users"},
						{ForeignKey: "OwnerType", ForeignSchema: "Toy", PrimaryValue: "users"},
					},
				},
			},
		},
	})
}
// TestPolymorphic covers polymorphic has-one and has-many parsing: the
// default Owner naming, custom polymorphicType/polymorphicId overrides, and
// a type-only override. Each sub-test builds a fresh model pair and checks
// the resulting relation via checkEmbeddedRelations.
func TestPolymorphic(t *testing.T) {
	t.Run("has one", func(t *testing.T) {
		type Toy struct {
			ID        int
			Name      string
			OwnerID   int
			OwnerType string
		}

		type Cat struct {
			ID   int
			Name string
			Toy  Toy `gorm:"polymorphic:Owner;"`
		}

		s, err := schema.Parse(&Cat{}, &sync.Map{}, schema.NamingStrategy{})
		if err != nil {
			t.Fatalf("Failed to parse schema, got error %v", err)
		}

		checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
			"Cat": {
				Relations: map[string]Relation{
					"Toy": {
						Name:        "Toy",
						Type:        schema.HasOne,
						Schema:      "User",
						FieldSchema: "Toy",
						Polymorphic: Polymorphic{ID: "OwnerID", Type: "OwnerType", Value: "users"},
						References: []Reference{
							{ForeignKey: "OwnerType", ForeignSchema: "Toy", PrimaryValue: "users"},
						},
					},
				},
			},
		})
	})

	t.Run("has one with custom polymorphic type and id", func(t *testing.T) {
		type Toy struct {
			ID    int
			Name  string
			RefId int
			Type  string
		}

		type Cat struct {
			ID   int
			Name string
			Toy  Toy `gorm:"polymorphic:Owner;polymorphicType:Type;polymorphicId:RefId"`
		}

		s, err := schema.Parse(&Cat{}, &sync.Map{}, schema.NamingStrategy{})
		if err != nil {
			t.Fatalf("Failed to parse schema, got error %v", err)
		}

		checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
			"Cat": {
				Relations: map[string]Relation{
					"Toy": {
						Name:        "Toy",
						Type:        schema.HasOne,
						Schema:      "User",
						FieldSchema: "Toy",
						Polymorphic: Polymorphic{ID: "ref_id", Type: "Type", Value: "users"},
						References: []Reference{
							{ForeignKey: "Type", ForeignSchema: "Toy", PrimaryValue: "users"},
						},
					},
				},
			},
		})
	})

	t.Run("has one with only polymorphic type", func(t *testing.T) {
		type Toy struct {
			ID      int
			Name    string
			OwnerID int
			Type    string
		}

		type Cat struct {
			ID   int
			Name string
			Toy  Toy `gorm:"polymorphic:Owner;polymorphicType:Type"`
		}

		s, err := schema.Parse(&Cat{}, &sync.Map{}, schema.NamingStrategy{})
		if err != nil {
			t.Fatalf("Failed to parse schema, got error %v", err)
		}

		checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
			"Cat": {
				Relations: map[string]Relation{
					"Toy": {
						Name:        "Toy",
						Type:        schema.HasOne,
						Schema:      "User",
						FieldSchema: "Toy",
						Polymorphic: Polymorphic{ID: "owner_id", Type: "Type", Value: "users"},
						References: []Reference{
							{ForeignKey: "Type", ForeignSchema: "Toy", PrimaryValue: "users"},
						},
					},
				},
			},
		})
	})

	t.Run("has many", func(t *testing.T) {
		type Toy struct {
			ID        int
			Name      string
			OwnerID   int
			OwnerType string
		}

		type Cat struct {
			ID   int
			Name string
			Toys []Toy `gorm:"polymorphic:Owner;"`
		}

		s, err := schema.Parse(&Cat{}, &sync.Map{}, schema.NamingStrategy{})
		if err != nil {
			t.Fatalf("Failed to parse schema, got error %v", err)
		}

		checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
			"Cat": {
				Relations: map[string]Relation{
					"Toys": {
						Name:        "Toys",
						Type:        schema.HasMany,
						Schema:      "User",
						FieldSchema: "Toy",
						Polymorphic: Polymorphic{ID: "OwnerID", Type: "OwnerType", Value: "users"},
						References: []Reference{
							{ForeignKey: "OwnerType", ForeignSchema: "Toy", PrimaryValue: "users"},
						},
					},
				},
			},
		})
	})

	// NOTE(review): unlike the other sub-tests, this tag has no
	// "polymorphic:Owner" prefix — presumably polymorphicType/polymorphicId
	// alone are sufficient to trigger a polymorphic relation; confirm.
	t.Run("has many with custom polymorphic type and id", func(t *testing.T) {
		type Toy struct {
			ID    int
			Name  string
			RefId int
			Type  string
		}

		type Cat struct {
			ID   int
			Name string
			Toys []Toy `gorm:"polymorphicType:Type;polymorphicId:RefId"`
		}

		s, err := schema.Parse(&Cat{}, &sync.Map{}, schema.NamingStrategy{})
		if err != nil {
			t.Fatalf("Failed to parse schema, got error %v", err)
		}

		checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
			"Cat": {
				Relations: map[string]Relation{
					"Toys": {
						Name:        "Toys",
						Type:        schema.HasMany,
						Schema:      "User",
						FieldSchema: "Toy",
						Polymorphic: Polymorphic{ID: "ref_id", Type: "Type", Value: "users"},
						References: []Reference{
							{ForeignKey: "Type", ForeignSchema: "Toy", PrimaryValue: "users"},
						},
					},
				},
			},
		})
	})
}
// TestEmbeddedBelongsTo verifies belongs-to relations reached through tagged
// embedded structs (with prefixes), nested embedding, a pointer embedded
// struct, and a plain Go-embedded mixin.
func TestEmbeddedBelongsTo(t *testing.T) {
	type Country struct {
		ID   int `gorm:"primaryKey"`
		Name string
	}
	type Address struct {
		CountryID int
		Country   Country
	}
	type NestedAddress struct {
		Address
	}
	type CountryMixin struct {
		CountryID int
		Country   Country
	}
	type Org struct {
		ID              int
		PostalAddress   Address `gorm:"embedded;embeddedPrefix:postal_address_"`
		VisitingAddress Address `gorm:"embedded;embeddedPrefix:visiting_address_"`
		AddressID       int
		Address         struct {
			ID int
			Address
		}
		NestedAddress *NestedAddress `gorm:"embedded;embeddedPrefix:nested_address_"`
		CountryMixin
	}

	s, err := schema.Parse(&Org{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Errorf("Failed to parse schema, got error %v", err)
	}

	checkEmbeddedRelations(t, s.Relationships.EmbeddedRelations, map[string]EmbeddedRelations{
		"PostalAddress": {
			Relations: map[string]Relation{
				"Country": {
					Name: "Country", Type: schema.BelongsTo, Schema: "Org", FieldSchema: "Country",
					References: []Reference{
						{PrimaryKey: "ID", PrimarySchema: "Country", ForeignKey: "CountryID", ForeignSchema: "Org"},
					},
				},
			},
		},
		"VisitingAddress": {
			Relations: map[string]Relation{
				"Country": {
					Name: "Country", Type: schema.BelongsTo, Schema: "Org", FieldSchema: "Country",
					References: []Reference{
						{PrimaryKey: "ID", PrimarySchema: "Country", ForeignKey: "CountryID", ForeignSchema: "Org"},
					},
				},
			},
		},
		"NestedAddress": {
			Relations: map[string]Relation{
				"Country": {
					Name: "Country", Type: schema.BelongsTo, Schema: "Org", FieldSchema: "Country",
					References: []Reference{
						{PrimaryKey: "ID", PrimarySchema: "Country", ForeignKey: "CountryID", ForeignSchema: "Org"},
					},
				},
			},
		},
	})
}

// TestVariableRelation verifies parsing an anonymous struct variable that
// embeds User: the schema name is empty but relations still resolve.
func TestVariableRelation(t *testing.T) {
	var result struct {
		User
	}

	checkStructRelation(t, &result, Relation{
		Name: "Account", Type: schema.HasOne, Schema: "", FieldSchema: "Account",
		References: []Reference{
			{"ID", "", "UserID", "Account", "", true},
		},
	})

	checkStructRelation(t, &result, Relation{
		Name: "Company", Type: schema.BelongsTo, Schema: "", FieldSchema: "Company",
		References: []Reference{
			{"ID", "Company", "CompanyID", "", "", false},
		},
	})
}
// TestSameForeignKey verifies a has-one where the foreign key and reference
// share the same field name (UUID) on both sides.
func TestSameForeignKey(t *testing.T) {
	type UserAux struct {
		gorm.Model
		Aux  string
		UUID string
	}

	type User struct {
		gorm.Model
		Name string
		UUID string
		Aux  *UserAux `gorm:"foreignkey:UUID;references:UUID"`
	}

	checkStructRelation(t, &User{},
		Relation{
			Name: "Aux", Type: schema.HasOne, Schema: "User", FieldSchema: "UserAux",
			References: []Reference{
				{"UUID", "User", "UUID", "UserAux", "", true},
			},
		},
	)
}

// TestBelongsToSameForeignKey verifies that the explicit belongsTo tag forces
// a belongs-to when both sides carry the same key name (UUID).
func TestBelongsToSameForeignKey(t *testing.T) {
	type User struct {
		gorm.Model
		Name string
		UUID string
	}

	type UserAux struct {
		gorm.Model
		Aux  string
		UUID string
		User User `gorm:"ForeignKey:UUID;references:UUID;belongsTo"`
	}

	checkStructRelation(t, &UserAux{},
		Relation{
			Name: "User", Type: schema.BelongsTo, Schema: "UserAux", FieldSchema: "User",
			References: []Reference{
				{"UUID", "User", "UUID", "UserAux", "", false},
			},
		},
	)
}

// TestHasOneWithSameForeignKey verifies a has-one keyed on the related
// model's ID referencing the owner's ProfileRefer field.
func TestHasOneWithSameForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name         string
		ProfileRefer int // not used in relationship
	}

	type User struct {
		gorm.Model
		Profile      Profile `gorm:"ForeignKey:ID;references:ProfileRefer"`
		ProfileRefer int
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasOne, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"ProfileRefer", "User", "ID", "Profile", "", true}},
	})
}

// TestHasManySameForeignKey verifies a has-many whose foreign key name
// (UserRefer) also exists as a field on the owner.
func TestHasManySameForeignKey(t *testing.T) {
	type Profile struct {
		gorm.Model
		Name      string
		UserRefer uint
	}

	type User struct {
		gorm.Model
		UserRefer uint
		Profile   []Profile `gorm:"ForeignKey:UserRefer"`
	}

	checkStructRelation(t, &User{}, Relation{
		Name: "Profile", Type: schema.HasMany, Schema: "User", FieldSchema: "Profile",
		References: []Reference{{"ID", "User", "UserRefer", "Profile", "", true}},
	})
}
// Author is a minimal related model for constraint-name tests.
type Author struct {
	gorm.Model
}

// Book belongs to Author and maps to a schema-qualified, deliberately long
// table name (see TableName) to exercise identifier truncation.
type Book struct {
	gorm.Model
	Author   Author
	AuthorID uint
}

// TableName maps Book to a schema-qualified table whose name exceeds the
// configured identifier length limit.
func (Book) TableName() string {
	return "my_schema.a_very_very_very_very_very_very_very_very_long_table_name"
}

// TestParseConstraintNameWithSchemaQualifiedLongTableName verifies that
// foreign-key constraint names derived from over-long, schema-qualified
// table names are truncated to IdentifierMaxLength with a hash suffix.
func TestParseConstraintNameWithSchemaQualifiedLongTableName(t *testing.T) {
	s, err := schema.Parse(
		&Book{},
		&sync.Map{},
		schema.NamingStrategy{IdentifierMaxLength: 64},
	)
	if err != nil {
		t.Fatalf("Failed to parse schema")
	}

	expectedConstraintName := "fk_my_schema_a_very_very_very_very_very_very_very_very_l4db13eec"
	constraint := s.Relationships.Relations["Author"].ParseConstraint()

	if constraint.Name != expectedConstraintName {
		t.Fatalf(
			"expected constraint name %s, got %s",
			expectedConstraintName,
			constraint.Name,
		)
	}
}
// InfoRelation, Info1 and Info2 form a mutually-referential model graph
// (keyed by the non-primary Code column) used by TestDataRace to exercise
// concurrent schema parsing over a shared cache.
type InfoRelation struct {
	ID   int
	Code string

	Info1 []*Info1 `gorm:"foreignkey:Code;references:Code"`
	Info2 []*Info2 `gorm:"foreignkey:Code;references:Code"`
}

// Info1 refers back to InfoRelation by Code.
type Info1 struct {
	CreatedAt time.Time
	UpdatedAt time.Time
	Code      string

	Relation []*InfoRelation `gorm:"foreignkey:Code;references:Code"`
}

// Info2 refers back to InfoRelation by Code.
type Info2 struct {
	CreatedAt time.Time
	UpdatedAt time.Time
	Code      string

	Relation []*InfoRelation `gorm:"foreignkey:Code;references:Code"`
}
// TestDataRace parses several mutually-referential schemas concurrently over
// one shared cache map so that `go test -race` can surface unsynchronized
// access inside schema.Parse.
//
// Fix: the original fired goroutines without ever waiting for them, so the
// test could return (and the whole test binary could exit) before any parse
// ran, silently weakening the race check. A WaitGroup now guarantees every
// goroutine completes before the test returns.
func TestDataRace(t *testing.T) {
	syncMap := &sync.Map{}

	var wg sync.WaitGroup
	// parse runs one schema.Parse against the shared cache. Errors are
	// deliberately not asserted: the test exists to detect data races, not
	// parse failures.
	parse := func(value interface{}) {
		defer wg.Done()
		schema.Parse(value, syncMap, schema.NamingStrategy{IdentifierMaxLength: 64})
	}

	for i := 0; i < 10; i++ {
		wg.Add(4)
		go parse(&Info1{})
		go parse(&Info2{})
		go parse(&User{})
		go parse(&tests.Account{})
	}
	wg.Wait()
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/callbacks_test.go | schema/callbacks_test.go | package schema_test
import (
"reflect"
"sync"
"testing"
"gorm.io/gorm"
"gorm.io/gorm/schema"
)
// UserWithCallback implements exactly two gorm callback hooks so TestCallback
// can verify which hook flags the parser sets on the schema.
type UserWithCallback struct{}

// BeforeSave is a no-op hook; its presence alone is what the test checks.
func (UserWithCallback) BeforeSave(*gorm.DB) error {
	return nil
}

// AfterCreate is a no-op hook; its presence alone is what the test checks.
func (UserWithCallback) AfterCreate(*gorm.DB) error {
	return nil
}

// TestCallback parses UserWithCallback and asserts, via reflection on the
// schema's boolean fields, that exactly the implemented hooks (BeforeSave,
// AfterCreate) are flagged true and all other hook flags are false.
func TestCallback(t *testing.T) {
	user, err := schema.Parse(&UserWithCallback{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse user with callback, got error %v", err)
	}

	for _, str := range []string{"BeforeSave", "AfterCreate"} {
		if !reflect.Indirect(reflect.ValueOf(user)).FieldByName(str).Interface().(bool) {
			t.Errorf("%v should be true", str)
		}
	}

	for _, str := range []string{"BeforeCreate", "BeforeUpdate", "AfterUpdate", "AfterSave", "BeforeDelete", "AfterDelete", "AfterFind"} {
		if reflect.Indirect(reflect.ValueOf(user)).FieldByName(str).Interface().(bool) {
			t.Errorf("%v should be false", str)
		}
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/constraint.go | schema/constraint.go | package schema
import (
"regexp"
"strings"
"gorm.io/gorm/clause"
)
// reg match english letters and midline
var regEnLetterAndMidline = regexp.MustCompile(`^[\w-]+$`)

// CheckConstraint describes a SQL CHECK constraint parsed from a field's
// `check` tag.
type CheckConstraint struct {
	Name       string
	Constraint string // length(phone) >= 10
	*Field
}

// GetName returns the constraint's name.
func (chk *CheckConstraint) GetName() string { return chk.Name }

// Build returns the SQL fragment and bind variables for creating the
// constraint (name as a quoted column, body as a raw expression).
func (chk *CheckConstraint) Build() (sql string, vars []interface{}) {
	return "CONSTRAINT ? CHECK (?)", []interface{}{clause.Column{Name: chk.Name}, clause.Expr{SQL: chk.Constraint}}
}
// ParseCheckConstraints parse schema check constraints.
//
// A tag of the form "name,expr" (where name matches [\w-]+) uses the explicit
// name; otherwise the constraint is named via the namer's CheckerName and the
// whole tag value (minus a leading empty segment) is used as the expression.
func (schema *Schema) ParseCheckConstraints() map[string]CheckConstraint {
	checks := map[string]CheckConstraint{}
	for _, field := range schema.FieldsByDBName {
		chk := field.TagSettings["CHECK"]
		if chk == "" {
			continue
		}

		parts := strings.Split(chk, ",")
		if len(parts) > 1 && regEnLetterAndMidline.MatchString(parts[0]) {
			// Explicitly named constraint: first segment is the name,
			// the remainder is the expression.
			checks[parts[0]] = CheckConstraint{Name: parts[0], Constraint: strings.Join(parts[1:], ","), Field: field}
			continue
		}

		if parts[0] == "" {
			// Leading comma: drop the empty segment, keep the rest verbatim.
			chk = strings.Join(parts[1:], ",")
		}
		name := schema.namer.CheckerName(schema.Table, field.DBName)
		checks[name] = CheckConstraint{Name: name, Constraint: chk, Field: field}
	}
	return checks
}
// UniqueConstraint describes a single-column UNIQUE constraint.
type UniqueConstraint struct {
	Name  string
	Field *Field
}

// GetName returns the constraint's name.
func (uni *UniqueConstraint) GetName() string { return uni.Name }

// Build returns the SQL fragment and bind variables for creating the
// constraint over the field's database column.
func (uni *UniqueConstraint) Build() (sql string, vars []interface{}) {
	return "CONSTRAINT ? UNIQUE (?)", []interface{}{clause.Column{Name: uni.Name}, clause.Column{Name: uni.Field.DBName}}
}
// ParseUniqueConstraints parse schema unique constraints.
//
// Every field flagged Unique yields one constraint, named via the namer's
// UniqueName over the table and the field's database column.
func (schema *Schema) ParseUniqueConstraints() map[string]UniqueConstraint {
	uniques := make(map[string]UniqueConstraint)
	for _, f := range schema.Fields {
		if !f.Unique {
			continue
		}
		name := schema.namer.UniqueName(schema.Table, f.DBName)
		uniques[name] = UniqueConstraint{Name: name, Field: f}
	}
	return uniques
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/serializer_test.go | schema/serializer_test.go | package schema
import (
"context"
"math"
"reflect"
"testing"
"time"
)
// TestUnixSecondSerializer_Value is a table test covering every integer kind
// (signed/unsigned, value and pointer), overflow beyond math.MaxInt64, nil
// pointers, and an invalid type.
//
// NOTE(review): `intValue = math.MaxInt64` and `uintValue = uint(math.MaxInt64)`
// assume a 64-bit platform — on 32-bit these constants overflow int/uint and
// the file will not compile; confirm whether 32-bit is a supported test target.
func TestUnixSecondSerializer_Value(t *testing.T) {
	var (
		intValue         = math.MaxInt64
		int8Value        = int8(math.MaxInt8)
		int16Value       = int16(math.MaxInt16)
		int32Value       = int32(math.MaxInt32)
		int64Value       = int64(math.MaxInt64)
		uintValue        = uint(math.MaxInt64)
		uint8Value       = uint8(math.MaxUint8)
		uint16Value      = uint16(math.MaxUint16)
		uint32Value      = uint32(math.MaxUint32)
		uint64Value      = uint64(math.MaxInt64)
		maxInt64Plus1    = uint64(math.MaxInt64 + 1)
		intPtrValue      = &intValue
		int8PtrValue     = &int8Value
		int16PtrValue    = &int16Value
		int32PtrValue    = &int32Value
		int64PtrValue    = &int64Value
		uintPtrValue     = &uintValue
		uint8PtrValue    = &uint8Value
		uint16PtrValue   = &uint16Value
		uint32PtrValue   = &uint32Value
		uint64PtrValue   = &uint64Value
		maxInt64Plus1Ptr = &maxInt64Plus1
	)

	tests := []struct {
		name    string
		value   interface{}
		want    interface{}
		wantErr bool
	}{
		{
			name:    "int",
			value:   intValue,
			want:    time.Unix(int64(intValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "int8",
			value:   int8Value,
			want:    time.Unix(int64(int8Value), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "int16",
			value:   int16Value,
			want:    time.Unix(int64(int16Value), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "int32",
			value:   int32Value,
			want:    time.Unix(int64(int32Value), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "int64",
			value:   int64Value,
			want:    time.Unix(int64Value, 0).UTC(),
			wantErr: false,
		},
		{
			name:    "uint",
			value:   uintValue,
			want:    time.Unix(int64(uintValue), 0).UTC(), //nolint:gosec
			wantErr: false,
		},
		{
			name:    "uint8",
			value:   uint8Value,
			want:    time.Unix(int64(uint8Value), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "uint16",
			value:   uint16Value,
			want:    time.Unix(int64(uint16Value), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "uint32",
			value:   uint32Value,
			want:    time.Unix(int64(uint32Value), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "uint64",
			value:   uint64Value,
			want:    time.Unix(int64(uint64Value), 0).UTC(), //nolint:gosec
			wantErr: false,
		},
		{
			// uint64 values above MaxInt64 must be rejected, not wrapped.
			name:    "maxInt64+1",
			value:   maxInt64Plus1,
			want:    nil,
			wantErr: true,
		},
		{
			name:    "*int",
			value:   intPtrValue,
			want:    time.Unix(int64(*intPtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*int8",
			value:   int8PtrValue,
			want:    time.Unix(int64(*int8PtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*int16",
			value:   int16PtrValue,
			want:    time.Unix(int64(*int16PtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*int32",
			value:   int32PtrValue,
			want:    time.Unix(int64(*int32PtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*int64",
			value:   int64PtrValue,
			want:    time.Unix(*int64PtrValue, 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*uint",
			value:   uintPtrValue,
			want:    time.Unix(int64(*uintPtrValue), 0).UTC(), //nolint:gosec
			wantErr: false,
		},
		{
			name:    "*uint8",
			value:   uint8PtrValue,
			want:    time.Unix(int64(*uint8PtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*uint16",
			value:   uint16PtrValue,
			want:    time.Unix(int64(*uint16PtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*uint32",
			value:   uint32PtrValue,
			want:    time.Unix(int64(*uint32PtrValue), 0).UTC(),
			wantErr: false,
		},
		{
			name:    "*uint64",
			value:   uint64PtrValue,
			want:    time.Unix(int64(*uint64PtrValue), 0).UTC(), //nolint:gosec
			wantErr: false,
		},
		{
			name:    "pointer to maxInt64+1",
			value:   maxInt64Plus1Ptr,
			want:    nil,
			wantErr: true,
		},
		{
			// Typed nil pointer serializes to nil without error.
			name:    "nil pointer",
			value:   (*int)(nil),
			want:    nil,
			wantErr: false,
		},
		{
			name:    "invalid type",
			value:   "invalid",
			want:    nil,
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := UnixSecondSerializer{}.Value(context.Background(), nil, reflect.Value{}, tt.value)
			if (err != nil) != tt.wantErr {
				t.Fatalf("UnixSecondSerializer.Value() error = %v, wantErr %v", err, tt.wantErr)
			}
			if err != nil {
				return
			}

			if tt.want == nil && got == nil {
				return
			}
			if tt.want == nil {
				t.Fatalf("UnixSecondSerializer.Value() = %v, want nil", got)
			}
			if got == nil {
				t.Fatalf("UnixSecondSerializer.Value() = nil, want %v", tt.want)
			}

			// Compare with time.Time.Equal, not ==, so location differences
			// don't cause false failures.
			if gotTime, ok := got.(time.Time); !ok {
				t.Errorf("UnixSecondSerializer.Value() returned %T, expected time.Time", got)
			} else if !tt.want.(time.Time).Equal(gotTime) {
				t.Errorf("UnixSecondSerializer.Value() = %v, want %v", got, tt.want)
			}
		})
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/schema_test.go | schema/schema_test.go | package schema_test
import (
"strings"
"sync"
"testing"
"gorm.io/gorm"
"gorm.io/gorm/schema"
"gorm.io/gorm/utils/tests"
)
// TestParseSchema verifies that parsing tests.User with the default naming
// strategy produces the expected schema (fields, primary key, relations).
func TestParseSchema(t *testing.T) {
	parsed, parseErr := schema.Parse(&tests.User{}, &sync.Map{}, schema.NamingStrategy{})
	if parseErr != nil {
		t.Fatalf("failed to parse user, got error %v", parseErr)
	}

	checkUserSchema(t, parsed)
}
// TestParseSchemaWithMap verifies that an explicit `type` tag on a map field
// is carried through to the parsed field's DataType.
func TestParseSchemaWithMap(t *testing.T) {
	type User struct {
		tests.User
		Attrs map[string]string `gorm:"type:Map(String,String);"`
	}

	parsed, parseErr := schema.Parse(&User{}, &sync.Map{}, schema.NamingStrategy{})
	if parseErr != nil {
		t.Fatalf("failed to parse user with map, got error %v", parseErr)
	}

	attrsField := parsed.FieldsByName["Attrs"]
	if attrsField.DataType != "Map(String,String)" {
		t.Errorf("failed to parse user field Attrs")
	}
}
// TestParseSchemaWithPointerFields verifies that the pointer-field variant of
// the User model parses to the same schema layout as the value-based model.
func TestParseSchemaWithPointerFields(t *testing.T) {
	parsed, parseErr := schema.Parse(&User{}, &sync.Map{}, schema.NamingStrategy{})
	if parseErr != nil {
		t.Fatalf("failed to parse pointer user, got error %v", parseErr)
	}

	checkUserSchema(t, parsed)
}
// checkUserSchema asserts that a parsed User schema matches the expected
// table name, primary key, field definitions, and relationships. It is shared
// by the value-based and pointer-based parsing tests.
func checkUserSchema(t *testing.T, user *schema.Schema) {
	// check schema
	checkSchema(t, user, &schema.Schema{Name: "User", Table: "users"}, []string{"ID"})

	// check fields
	fields := []schema.Field{
		{Name: "ID", DBName: "id", BindNames: []string{"Model", "ID"}, DataType: schema.Uint, PrimaryKey: true, Tag: `gorm:"primarykey"`, TagSettings: map[string]string{"PRIMARYKEY": "PRIMARYKEY"}, Size: 64, HasDefaultValue: true, AutoIncrement: true},
		{Name: "CreatedAt", DBName: "created_at", BindNames: []string{"Model", "CreatedAt"}, DataType: schema.Time},
		{Name: "UpdatedAt", DBName: "updated_at", BindNames: []string{"Model", "UpdatedAt"}, DataType: schema.Time},
		{Name: "DeletedAt", DBName: "deleted_at", BindNames: []string{"Model", "DeletedAt"}, Tag: `gorm:"index"`, DataType: schema.Time},
		{Name: "Name", DBName: "name", BindNames: []string{"Name"}, DataType: schema.String},
		{Name: "Age", DBName: "age", BindNames: []string{"Age"}, DataType: schema.Uint, Size: 64},
		{Name: "Birthday", DBName: "birthday", BindNames: []string{"Birthday"}, DataType: schema.Time},
		{Name: "CompanyID", DBName: "company_id", BindNames: []string{"CompanyID"}, DataType: schema.Int, Size: 64},
		{Name: "ManagerID", DBName: "manager_id", BindNames: []string{"ManagerID"}, DataType: schema.Uint, Size: 64},
		{Name: "Active", DBName: "active", BindNames: []string{"Active"}, DataType: schema.Bool},
	}

	// Every field is expected to be creatable/updatable/readable; the setter
	// closure applies those flags before comparison.
	for i := range fields {
		checkSchemaField(t, user, &fields[i], func(f *schema.Field) {
			f.Creatable = true
			f.Updatable = true
			f.Readable = true
		})
	}

	// check relations (has-one, has-many, polymorphic, belongs-to, many2many)
	relations := []Relation{
		{
			Name: "Account", Type: schema.HasOne, Schema: "User", FieldSchema: "Account",
			References: []Reference{{"ID", "User", "UserID", "Account", "", true}},
		},
		{
			Name: "Pets", Type: schema.HasMany, Schema: "User", FieldSchema: "Pet",
			References: []Reference{{"ID", "User", "UserID", "Pet", "", true}},
		},
		{
			Name: "Toys", Type: schema.HasMany, Schema: "User", FieldSchema: "Toy",
			Polymorphic: Polymorphic{ID: "OwnerID", Type: "OwnerType", Value: "users"},
			References:  []Reference{{"ID", "User", "OwnerID", "Toy", "", true}, {"", "", "OwnerType", "Toy", "users", false}},
		},
		{
			Name: "Company", Type: schema.BelongsTo, Schema: "User", FieldSchema: "Company",
			References: []Reference{{"ID", "Company", "CompanyID", "User", "", false}},
		},
		{
			Name: "Manager", Type: schema.BelongsTo, Schema: "User", FieldSchema: "User",
			References: []Reference{{"ID", "User", "ManagerID", "User", "", false}},
		},
		{
			Name: "Team", Type: schema.HasMany, Schema: "User", FieldSchema: "User",
			References: []Reference{{"ID", "User", "ManagerID", "User", "", true}},
		},
		{
			Name: "Languages", Type: schema.Many2Many, Schema: "User", FieldSchema: "Language",
			JoinTable: JoinTable{Name: "UserSpeak", Table: "user_speaks", Fields: []schema.Field{
				{
					Name: "UserID", DBName: "user_id", BindNames: []string{"UserID"}, DataType: schema.Uint,
					Tag: `gorm:"primarykey"`, Creatable: true, Updatable: true, Readable: true, PrimaryKey: true, Size: 64,
				},
				{
					Name: "LanguageCode", DBName: "language_code", BindNames: []string{"LanguageCode"}, DataType: schema.String,
					Tag: `gorm:"primarykey"`, Creatable: true, Updatable: true, Readable: true, PrimaryKey: true,
				},
			}},
			References: []Reference{{"ID", "User", "UserID", "UserSpeak", "", true}, {"Code", "Language", "LanguageCode", "UserSpeak", "", false}},
		},
		{
			Name: "Friends", Type: schema.Many2Many, Schema: "User", FieldSchema: "User",
			JoinTable: JoinTable{Name: "user_friends", Table: "user_friends", Fields: []schema.Field{
				{
					Name: "UserID", DBName: "user_id", BindNames: []string{"UserID"}, DataType: schema.Uint,
					Tag: `gorm:"primarykey"`, Creatable: true, Updatable: true, Readable: true, PrimaryKey: true, Size: 64,
				},
				{
					Name: "FriendID", DBName: "friend_id", BindNames: []string{"FriendID"}, DataType: schema.Uint,
					Tag: `gorm:"primarykey"`, Creatable: true, Updatable: true, Readable: true, PrimaryKey: true, Size: 64,
				},
			}},
			References: []Reference{{"ID", "User", "UserID", "user_friends", "", true}, {"ID", "User", "FriendID", "user_friends", "", false}},
		},
	}

	for _, relation := range relations {
		checkSchemaRelation(t, user, relation)
	}
}
// TestParseSchemaWithAdvancedDataType verifies parsing of a model that uses
// sql.Null* style and pointer field types (AdvancedDataTypeUser).
func TestParseSchemaWithAdvancedDataType(t *testing.T) {
	user, err := schema.Parse(&AdvancedDataTypeUser{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		// Fixed: the message was copy-pasted from the pointer-user test and
		// misidentified the model being parsed.
		t.Fatalf("failed to parse advanced data type user, got error %v", err)
	}

	// check schema
	checkSchema(t, user, &schema.Schema{Name: "AdvancedDataTypeUser", Table: "advanced_data_type_users"}, []string{"ID"})

	// check fields
	fields := []schema.Field{
		{Name: "ID", DBName: "id", BindNames: []string{"ID"}, DataType: schema.Int, PrimaryKey: true, Size: 64, HasDefaultValue: true, AutoIncrement: true},
		{Name: "Name", DBName: "name", BindNames: []string{"Name"}, DataType: schema.String},
		{Name: "Birthday", DBName: "birthday", BindNames: []string{"Birthday"}, DataType: schema.Time},
		{Name: "RegisteredAt", DBName: "registered_at", BindNames: []string{"RegisteredAt"}, DataType: schema.Time},
		{Name: "DeletedAt", DBName: "deleted_at", BindNames: []string{"DeletedAt"}, DataType: schema.Time},
		{Name: "Active", DBName: "active", BindNames: []string{"Active"}, DataType: schema.Bool},
		{Name: "Admin", DBName: "admin", BindNames: []string{"Admin"}, DataType: schema.Bool},
	}

	for i := range fields {
		checkSchemaField(t, user, &fields[i], func(f *schema.Field) {
			f.Creatable = true
			f.Updatable = true
			f.Readable = true
		})
	}
}
// CustomizeTable is a fixture model that overrides its table name through the
// Tabler interface.
type CustomizeTable struct{}

// TableName pins the table name to "customize" regardless of the naming
// strategy in use.
func (CustomizeTable) TableName() string {
	const fixedName = "customize"
	return fixedName
}
// TestCustomizeTableName verifies that a model implementing Tabler has its
// TableName() result used as the schema's table name.
func TestCustomizeTableName(t *testing.T) {
	customize, err := schema.Parse(&CustomizeTable{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		// Fixed: the message was copy-pasted from the pointer-user test and
		// misidentified the model being parsed.
		t.Fatalf("failed to parse customized table, got error %v", err)
	}

	if customize.Table != "customize" {
		t.Errorf("Failed to customize table with TableName method")
	}
}
// TestNestedModel verifies that fields promoted from nested anonymous models
// (VersionModel -> BaseModel) keep their full BindNames chain and parse with
// the expected column names and types.
func TestNestedModel(t *testing.T) {
	versionUser, err := schema.Parse(&VersionUser{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse nested user, got error %v", err)
	}

	fields := []schema.Field{
		{Name: "ID", DBName: "id", BindNames: []string{"VersionModel", "BaseModel", "ID"}, DataType: schema.Uint, PrimaryKey: true, Size: 64, HasDefaultValue: true, AutoIncrement: true},
		{Name: "CreatedBy", DBName: "created_by", BindNames: []string{"VersionModel", "BaseModel", "CreatedBy"}, DataType: schema.Uint, Size: 64},
		{Name: "Version", DBName: "version", BindNames: []string{"VersionModel", "Version"}, DataType: schema.Int, Size: 64},
	}

	// Index the slice directly (matching checkUserSchema) instead of taking
	// the address of the loop variable's per-iteration copy.
	for i := range fields {
		checkSchemaField(t, versionUser, &fields[i], func(f *schema.Field) {
			f.Creatable = true
			f.Updatable = true
			f.Readable = true
		})
	}
}
// TestEmbeddedStruct verifies that fields of an embedded struct tagged with
// `embedded;embeddedPrefix:company_` are flattened into the outer schema with
// the prefix applied to their column names, that the anonymous CorpBase
// fields are promoted unprefixed, and that `gorm:"-"` fields stay ignored.
func TestEmbeddedStruct(t *testing.T) {
	type CorpBase struct {
		gorm.Model
		OwnerID string
	}

	type Company struct {
		ID      int
		OwnerID int
		Name    string
		Ignored string `gorm:"-"`
	}

	type Corp struct {
		CorpBase
		Base Company `gorm:"embedded;embeddedPrefix:company_"`
	}

	cropSchema, err := schema.Parse(&Corp{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse embedded struct with primary key, got error %v", err)
	}

	// Note the two "ID" and two "OwnerID" entries: one from the embedded
	// Company (prefixed DB name) and one promoted from CorpBase.
	fields := []schema.Field{
		{Name: "ID", DBName: "id", BindNames: []string{"CorpBase", "Model", "ID"}, DataType: schema.Uint, PrimaryKey: true, Size: 64, HasDefaultValue: true, AutoIncrement: true, TagSettings: map[string]string{"PRIMARYKEY": "PRIMARYKEY"}},
		{Name: "ID", DBName: "company_id", BindNames: []string{"Base", "ID"}, DataType: schema.Int, Size: 64, TagSettings: map[string]string{"EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "Name", DBName: "company_name", BindNames: []string{"Base", "Name"}, DataType: schema.String, TagSettings: map[string]string{"EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "Ignored", BindNames: []string{"Base", "Ignored"}, TagSettings: map[string]string{"-": "-", "EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "OwnerID", DBName: "company_owner_id", BindNames: []string{"Base", "OwnerID"}, DataType: schema.Int, Size: 64, TagSettings: map[string]string{"EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "OwnerID", DBName: "owner_id", BindNames: []string{"CorpBase", "OwnerID"}, DataType: schema.String},
	}

	for _, f := range fields {
		checkSchemaField(t, cropSchema, &f, func(f *schema.Field) {
			// The "-" tagged field must not gain any permissions.
			if f.Name != "Ignored" {
				f.Creatable = true
				f.Updatable = true
				f.Readable = true
			}
		})
	}
}
// CustomizedNamingStrategy derives column names by prefixing the default
// column name with a short abbreviation of the table name.
type CustomizedNamingStrategy struct {
	schema.NamingStrategy
}

// ColumnName prepends a three-character abbreviation of the table name
// (e.g. "cor_" for table "corps") to the column name produced by the embedded
// NamingStrategy. An empty table name is passed through unprefixed.
func (ns CustomizedNamingStrategy) ColumnName(table, column string) string {
	base := ns.NamingStrategy.ColumnName(table, column)
	if table == "" {
		return base
	}

	parts := strings.Split(table, "_")

	// Build a 3-char prefix from up to three underscore-separated segments.
	var abbr string
	if len(parts) == 1 {
		abbr = parts[0][:3]
	} else if len(parts) == 2 {
		abbr = parts[0][:1] + parts[1][:2]
	} else {
		abbr = parts[0][:1] + parts[1][:1] + parts[2][:1]
	}

	return abbr + "_" + base
}
// TestEmbeddedStructForCustomizedNamingStrategy is the CustomizedNamingStrategy
// counterpart of TestEmbeddedStruct: the table abbreviation ("cor_" for table
// "corps") is applied by the namer, and the embedded prefix "company_" is
// layered on top of the namer-produced column name.
func TestEmbeddedStructForCustomizedNamingStrategy(t *testing.T) {
	type CorpBase struct {
		gorm.Model
		OwnerID string
	}

	type Company struct {
		ID      int
		OwnerID int
		Name    string
		Ignored string `gorm:"-"`
	}

	type Corp struct {
		CorpBase
		Base Company `gorm:"embedded;embeddedPrefix:company_"`
	}

	cropSchema, err := schema.Parse(&Corp{}, &sync.Map{}, CustomizedNamingStrategy{schema.NamingStrategy{}})
	if err != nil {
		t.Fatalf("failed to parse embedded struct with primary key, got error %v", err)
	}

	fields := []schema.Field{
		{Name: "ID", DBName: "cor_id", BindNames: []string{"CorpBase", "Model", "ID"}, DataType: schema.Uint, PrimaryKey: true, Size: 64, HasDefaultValue: true, AutoIncrement: true, TagSettings: map[string]string{"PRIMARYKEY": "PRIMARYKEY"}},
		{Name: "ID", DBName: "company_cor_id", BindNames: []string{"Base", "ID"}, DataType: schema.Int, Size: 64, TagSettings: map[string]string{"EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "Name", DBName: "company_cor_name", BindNames: []string{"Base", "Name"}, DataType: schema.String, TagSettings: map[string]string{"EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "Ignored", BindNames: []string{"Base", "Ignored"}, TagSettings: map[string]string{"-": "-", "EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "OwnerID", DBName: "company_cor_owner_id", BindNames: []string{"Base", "OwnerID"}, DataType: schema.Int, Size: 64, TagSettings: map[string]string{"EMBEDDED": "EMBEDDED", "EMBEDDEDPREFIX": "company_"}},
		{Name: "OwnerID", DBName: "cor_owner_id", BindNames: []string{"CorpBase", "OwnerID"}, DataType: schema.String},
	}

	for _, f := range fields {
		checkSchemaField(t, cropSchema, &f, func(f *schema.Field) {
			// The "-" tagged field must not gain any permissions.
			if f.Name != "Ignored" {
				f.Creatable = true
				f.Updatable = true
				f.Readable = true
			}
		})
	}
}
// TestCompositePrimaryKeyWithAutoIncrement verifies PrioritizedPrimaryField
// selection for composite keys: with an autoIncrement member it becomes the
// prioritized field; with autoIncrement explicitly disabled there is none.
func TestCompositePrimaryKeyWithAutoIncrement(t *testing.T) {
	type Product struct {
		ProductID    uint `gorm:"primaryKey;autoIncrement"`
		LanguageCode uint `gorm:"primaryKey"`
		Code         string
		Name         string
	}
	type ProductNonAutoIncrement struct {
		ProductID    uint `gorm:"primaryKey;autoIncrement:false"`
		LanguageCode uint `gorm:"primaryKey"`
		Code         string
		Name         string
	}

	product, err := schema.Parse(&Product{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse product struct with composite primary key, got error %v", err)
	}

	prioritizedPrimaryField := schema.Field{
		Name: "ProductID", DBName: "product_id", BindNames: []string{"ProductID"}, DataType: schema.Uint, PrimaryKey: true, Size: 64, HasDefaultValue: true, AutoIncrement: true, TagSettings: map[string]string{"PRIMARYKEY": "PRIMARYKEY", "AUTOINCREMENT": "AUTOINCREMENT"},
	}

	// Restrict the field list to just the prioritized field so that
	// checkSchemaField compares exactly that one.
	product.Fields = []*schema.Field{product.PrioritizedPrimaryField}

	checkSchemaField(t, product, &prioritizedPrimaryField, func(f *schema.Field) {
		f.Creatable = true
		f.Updatable = true
		f.Readable = true
	})

	productNonAutoIncrement, err := schema.Parse(&ProductNonAutoIncrement{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse productNonAutoIncrement struct with composite primary key, got error %v", err)
	}

	if productNonAutoIncrement.PrioritizedPrimaryField != nil {
		t.Fatalf("PrioritizedPrimaryField of non autoincrement composite key should be nil")
	}
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
go-gorm/gorm | https://github.com/go-gorm/gorm/blob/0d9141bad9772c6047ecfdb2819d8a52d27ceb65/schema/schema.go | schema/schema.go | package schema
import (
"context"
"errors"
"fmt"
"go/ast"
"path"
"reflect"
"strings"
"sync"
"gorm.io/gorm/clause"
"gorm.io/gorm/logger"
)
// callbackType names a GORM lifecycle hook method that a model may implement
// (probed via reflection during Parse).
type callbackType string

const (
	callbackTypeBeforeCreate callbackType = "BeforeCreate"
	callbackTypeBeforeUpdate callbackType = "BeforeUpdate"
	callbackTypeAfterCreate  callbackType = "AfterCreate"
	callbackTypeAfterUpdate  callbackType = "AfterUpdate"
	callbackTypeBeforeSave   callbackType = "BeforeSave"
	callbackTypeAfterSave    callbackType = "AfterSave"
	callbackTypeBeforeDelete callbackType = "BeforeDelete"
	callbackTypeAfterDelete  callbackType = "AfterDelete"
	callbackTypeAfterFind    callbackType = "AfterFind"
)

// ErrUnsupportedDataType unsupported data type: dest is not a struct and does
// not resolve to one after unwrapping pointers, slices, and arrays.
var ErrUnsupportedDataType = errors.New("unsupported data type")
// Schema is the parsed model definition for a Go struct: its table name,
// fields, primary-key information, relationships, and per-statement clauses
// contributed by field types.
type Schema struct {
	Name                     string
	ModelType                reflect.Type
	Table                    string
	PrioritizedPrimaryField  *Field
	DBNames                  []string
	PrimaryFields            []*Field
	PrimaryFieldDBNames      []string
	Fields                   []*Field
	FieldsByName             map[string]*Field
	FieldsByBindName         map[string]*Field // embedded fields is 'Embed.Field'
	FieldsByDBName           map[string]*Field
	FieldsWithDefaultDBValue []*Field // fields with default value assigned by database
	Relationships            Relationships
	CreateClauses            []clause.Interface
	QueryClauses             []clause.Interface
	UpdateClauses            []clause.Interface
	DeleteClauses            []clause.Interface
	// Hook flags: set during Parse when the model defines the matching
	// callback method with the expected `func(*gorm.DB) error` signature.
	BeforeCreate, AfterCreate bool
	BeforeUpdate, AfterUpdate bool
	BeforeDelete, AfterDelete bool
	BeforeSave, AfterSave     bool
	AfterFind                 bool
	err                       error
	// initialized is closed once parsing completes; concurrent Parse calls
	// that hit the cache wait on it before reading the schema.
	initialized chan struct{}
	namer       Namer
	cacheStore  *sync.Map
}
// String returns a human-readable identifier for the schema: the
// package-qualified model type when it has a name, otherwise "Name(table)".
func (schema *Schema) String() string {
	typeName := schema.ModelType.Name()
	if typeName != "" {
		return fmt.Sprintf("%s.%s", schema.ModelType.PkgPath(), typeName)
	}
	return fmt.Sprintf("%s(%s)", schema.Name, schema.Table)
}
// MakeSlice returns a reflect.Value holding a pointer to a freshly allocated
// []*ModelType with length 0 and capacity 20, ready to collect query results.
func (schema *Schema) MakeSlice() reflect.Value {
	elemType := reflect.PointerTo(schema.ModelType)
	slice := reflect.MakeSlice(reflect.SliceOf(elemType), 0, 20)
	ptr := reflect.New(slice.Type())
	ptr.Elem().Set(slice)
	return ptr
}
// LookUpField finds a field by database column name first, then by struct
// field name, and finally by the column name the configured namer would
// generate for the given name. It returns nil when nothing matches.
func (schema *Schema) LookUpField(name string) *Field {
	if byDBName, ok := schema.FieldsByDBName[name]; ok {
		return byDBName
	}
	if byName, ok := schema.FieldsByName[name]; ok {
		return byName
	}

	// Fall back to the namer-derived column name when a namer is configured.
	if schema.namer == nil {
		return nil
	}
	if derived, ok := schema.FieldsByDBName[schema.namer.ColumnName(schema.Table, name)]; ok {
		return derived
	}
	return nil
}
// LookUpFieldByBindName looks for the closest field in the embedded struct.
//
//	type Struct struct {
//		Embedded struct {
//			ID string // is selected by LookUpFieldByBindName([]string{"Embedded", "ID"}, "ID")
//		}
//		ID string // is selected by LookUpFieldByBindName([]string{"ID"}, "ID")
//	}
func (schema *Schema) LookUpFieldByBindName(bindNames []string, name string) *Field {
	// Walk from the deepest bindNames prefix outwards so the innermost
	// (closest) field shadows outer fields with the same name.
	for i := len(bindNames) - 1; i >= 0; i-- {
		// For i == 0 the candidate is the top-level field itself. Joining an
		// empty prefix with "." would produce ".<name>", which can never
		// match a FieldsByBindName key, so look up the bare name instead
		// (this makes the documented top-level case above actually resolve).
		find := name
		if i > 0 {
			find = strings.Join(bindNames[:i], ".") + "." + name
		}
		if field, ok := schema.FieldsByBindName[find]; ok {
			return field
		}
	}
	return nil
}
// Tabler lets a model customize its table name; the TableName result takes
// precedence over the naming strategy during Parse.
type Tabler interface {
	TableName() string
}

// TablerWithNamer is like Tabler but receives the configured Namer.
// NOTE(review): ParseWithSpecialTableName checks Tabler before
// TablerWithNamer, so a type implementing both uses plain TableName() —
// confirm this ordering is intended.
type TablerWithNamer interface {
	TableName(Namer) string
}
// callbackTypes lists every hook method name that Parse probes for on the
// model via reflection.
var callbackTypes = []callbackType{
	callbackTypeBeforeCreate, callbackTypeAfterCreate,
	callbackTypeBeforeUpdate, callbackTypeAfterUpdate,
	callbackTypeBeforeSave, callbackTypeAfterSave,
	callbackTypeBeforeDelete, callbackTypeAfterDelete,
	callbackTypeAfterFind,
}
// Parse get data type from dialector.
//
// Parse builds (or retrieves from cacheStore) the Schema for dest using the
// given namer; it is shorthand for ParseWithSpecialTableName with no
// table-name override.
func Parse(dest interface{}, cacheStore *sync.Map, namer Namer) (*Schema, error) {
	return ParseWithSpecialTableName(dest, cacheStore, namer, "")
}
// ParseWithSpecialTableName get data type from dialector with extra schema table.
//
// It resolves dest to its underlying struct type, determines the table name
// (override > embeddedNamer > Tabler > TablerWithNamer > naming strategy),
// parses all exported fields, selects the prioritized primary key, collects
// relationship fields and field-contributed statement clauses, detects hook
// methods, and caches the result in cacheStore keyed by the model type (or
// type+table when specialTableName is set). Concurrent callers for the same
// key wait on the cached schema's initialized channel.
func ParseWithSpecialTableName(dest interface{}, cacheStore *sync.Map, namer Namer, specialTableName string) (*Schema, error) {
	if dest == nil {
		return nil, fmt.Errorf("%w: %+v", ErrUnsupportedDataType, dest)
	}

	// Unwrap pointers/interfaces/slices/arrays until a struct type remains.
	modelType := reflect.ValueOf(dest).Type()
	if modelType.Kind() == reflect.Ptr {
		modelType = modelType.Elem()
	}

	if modelType.Kind() != reflect.Struct {
		if modelType.Kind() == reflect.Interface {
			modelType = reflect.Indirect(reflect.ValueOf(dest)).Elem().Type()
		}
		for modelType.Kind() == reflect.Slice || modelType.Kind() == reflect.Array || modelType.Kind() == reflect.Ptr {
			modelType = modelType.Elem()
		}

		if modelType.Kind() != reflect.Struct {
			if modelType.PkgPath() == "" {
				return nil, fmt.Errorf("%w: %+v", ErrUnsupportedDataType, dest)
			}
			return nil, fmt.Errorf("%w: %s.%s", ErrUnsupportedDataType, modelType.PkgPath(), modelType.Name())
		}
	}

	// Cache the Schema for performance,
	// Use the modelType or modelType + schemaTable (if it present) as cache key.
	// (%p formats the pointer inside the reflect.Type interface value, giving
	// a stable per-type identity for the composite key.)
	var schemaCacheKey interface{} = modelType
	if specialTableName != "" {
		schemaCacheKey = fmt.Sprintf("%p-%s", modelType, specialTableName)
	}

	// Load exist schema cache, return if exists
	if v, ok := cacheStore.Load(schemaCacheKey); ok {
		s := v.(*Schema)
		// Wait for the initialization of other goroutines to complete
		<-s.initialized
		return s, s.err
	}

	// Resolve the table name by precedence: explicit override, embedded
	// namer, Tabler, TablerWithNamer, then the naming strategy.
	var tableName string
	modelValue := reflect.New(modelType)
	if specialTableName != "" {
		tableName = specialTableName
	} else if en, ok := namer.(embeddedNamer); ok {
		tableName = en.Table
	} else if tabler, ok := modelValue.Interface().(Tabler); ok {
		tableName = tabler.TableName()
	} else if tabler, ok := modelValue.Interface().(TablerWithNamer); ok {
		tableName = tabler.TableName(namer)
	} else {
		tableName = namer.TableName(modelType.Name())
	}

	schema := &Schema{
		Name:             modelType.Name(),
		ModelType:        modelType,
		Table:            tableName,
		DBNames:          make([]string, 0, 10),
		Fields:           make([]*Field, 0, 10),
		FieldsByName:     make(map[string]*Field, 10),
		FieldsByBindName: make(map[string]*Field, 10),
		FieldsByDBName:   make(map[string]*Field, 10),
		Relationships:    Relationships{Relations: map[string]*Relationship{}},
		cacheStore:       cacheStore,
		namer:            namer,
		initialized:      make(chan struct{}),
	}
	// When the schema initialization is completed, the channel will be closed
	defer close(schema.initialized)

	// Load exist schema cache, return if exists
	if v, ok := cacheStore.Load(schemaCacheKey); ok {
		s := v.(*Schema)
		// Wait for the initialization of other goroutines to complete
		<-s.initialized
		return s, s.err
	}

	// Parse every exported field; an embedded schema contributes its fields
	// directly into the flat field list.
	for i := 0; i < modelType.NumField(); i++ {
		if fieldStruct := modelType.Field(i); ast.IsExported(fieldStruct.Name) {
			if field := schema.ParseField(fieldStruct); field.EmbeddedSchema != nil {
				schema.Fields = append(schema.Fields, field.EmbeddedSchema.Fields...)
			} else {
				schema.Fields = append(schema.Fields, field)
			}
		}
	}

	// Index fields by DB name, struct name, and bind name, resolving
	// conflicts between promoted/embedded fields.
	for _, field := range schema.Fields {
		if field.DBName == "" && field.DataType != "" {
			field.DBName = namer.ColumnName(schema.Table, field.Name)
		}

		bindName := field.BindName()
		if field.DBName != "" {
			// nonexistence or shortest path or first appear prioritized if has permission
			if v, ok := schema.FieldsByDBName[field.DBName]; !ok || ((field.Creatable || field.Updatable || field.Readable) && len(field.BindNames) < len(v.BindNames)) {
				if _, ok := schema.FieldsByDBName[field.DBName]; !ok {
					schema.DBNames = append(schema.DBNames, field.DBName)
				}
				schema.FieldsByDBName[field.DBName] = field
				schema.FieldsByName[field.Name] = field
				schema.FieldsByBindName[bindName] = field

				if v != nil && v.PrimaryKey {
					// remove the existing primary key field
					for idx, f := range schema.PrimaryFields {
						if f.DBName == v.DBName {
							schema.PrimaryFields = append(schema.PrimaryFields[0:idx], schema.PrimaryFields[idx+1:]...)
						}
					}
				}

				if field.PrimaryKey {
					schema.PrimaryFields = append(schema.PrimaryFields, field)
				}
			}
		}

		// Fields ignored via `gorm:"-"` may be shadowed by later fields with
		// the same name.
		if of, ok := schema.FieldsByName[field.Name]; !ok || of.TagSettings["-"] == "-" {
			schema.FieldsByName[field.Name] = field
		}
		if of, ok := schema.FieldsByBindName[bindName]; !ok || of.TagSettings["-"] == "-" {
			schema.FieldsByBindName[bindName] = field
		}

		field.setupValuerAndSetter(modelType)
	}

	// Prefer a field named "id"/"ID" as the prioritized primary key; promote
	// it to primary key if no explicit one exists.
	prioritizedPrimaryField := schema.LookUpField("id")
	if prioritizedPrimaryField == nil {
		prioritizedPrimaryField = schema.LookUpField("ID")
	}

	if prioritizedPrimaryField != nil {
		if prioritizedPrimaryField.PrimaryKey {
			schema.PrioritizedPrimaryField = prioritizedPrimaryField
		} else if len(schema.PrimaryFields) == 0 {
			prioritizedPrimaryField.PrimaryKey = true
			schema.PrioritizedPrimaryField = prioritizedPrimaryField
			schema.PrimaryFields = append(schema.PrimaryFields, prioritizedPrimaryField)
		}
	}

	if schema.PrioritizedPrimaryField == nil {
		if len(schema.PrimaryFields) == 1 {
			schema.PrioritizedPrimaryField = schema.PrimaryFields[0]
		} else if len(schema.PrimaryFields) > 1 {
			// If there are multiple primary keys, the AUTOINCREMENT field is prioritized
			for _, field := range schema.PrimaryFields {
				if field.AutoIncrement {
					schema.PrioritizedPrimaryField = field
					break
				}
			}
		}
	}

	for _, field := range schema.PrimaryFields {
		schema.PrimaryFieldDBNames = append(schema.PrimaryFieldDBNames, field.DBName)
	}

	// When parsing an embedded schema, relationship and clause collection is
	// skipped (handled by the owning schema).
	_, embedded := schema.cacheStore.Load(embeddedCacheKey)

	relationshipFields := []*Field{}

	for _, field := range schema.Fields {
		if field.DataType != "" && field.HasDefaultValue && field.DefaultValueInterface == nil {
			schema.FieldsWithDefaultDBValue = append(schema.FieldsWithDefaultDBValue, field)
		}

		if !embedded {
			// A typeless field with permissions is a relationship candidate.
			if field.DataType == "" && field.GORMDataType == "" && (field.Creatable || field.Updatable || field.Readable) {
				relationshipFields = append(relationshipFields, field)
				schema.FieldsByName[field.Name] = field
				schema.FieldsByBindName[field.BindName()] = field
			}

			// Let field types contribute statement clauses (e.g. soft delete).
			fieldValue := reflect.New(field.IndirectFieldType).Interface()
			if fc, ok := fieldValue.(CreateClausesInterface); ok {
				field.Schema.CreateClauses = append(field.Schema.CreateClauses, fc.CreateClauses(field)...)
			}

			if fc, ok := fieldValue.(QueryClausesInterface); ok {
				field.Schema.QueryClauses = append(field.Schema.QueryClauses, fc.QueryClauses(field)...)
			}

			if fc, ok := fieldValue.(UpdateClausesInterface); ok {
				field.Schema.UpdateClauses = append(field.Schema.UpdateClauses, fc.UpdateClauses(field)...)
			}

			if fc, ok := fieldValue.(DeleteClausesInterface); ok {
				field.Schema.DeleteClauses = append(field.Schema.DeleteClauses, fc.DeleteClauses(field)...)
			}
		}
	}

	// An int/uint prioritized primary key without an explicit AUTOINCREMENT
	// tag is treated as auto-incrementing with a database-assigned default.
	if field := schema.PrioritizedPrimaryField; field != nil {
		switch field.GORMDataType {
		case Int, Uint:
			if _, ok := field.TagSettings["AUTOINCREMENT"]; !ok {
				if !field.HasDefaultValue || field.DefaultValueInterface != nil {
					schema.FieldsWithDefaultDBValue = append(schema.FieldsWithDefaultDBValue, field)
				}

				field.HasDefaultValue = true
				field.AutoIncrement = true
			}
		}
	}

	// Cache the schema
	if v, loaded := cacheStore.LoadOrStore(schemaCacheKey, schema); loaded {
		s := v.(*Schema)
		// Wait for the initialization of other goroutines to complete
		<-s.initialized
		return s, s.err
	}

	defer func() {
		if schema.err != nil {
			logger.Default.Error(context.Background(), schema.err.Error())
			// NOTE(review): the entry was stored under schemaCacheKey, which
			// differs from modelType when specialTableName is set — confirm
			// whether this should delete schemaCacheKey instead.
			cacheStore.Delete(modelType)
		}
	}()

	// Detect lifecycle hook methods with the exact `func(*gorm.DB) error`
	// signature and set the matching Schema flag; mismatches only warn.
	for _, cbName := range callbackTypes {
		if methodValue := modelValue.MethodByName(string(cbName)); methodValue.IsValid() {
			switch methodValue.Type().String() {
			case "func(*gorm.DB) error":
				expectedPkgPath := path.Dir(reflect.TypeOf(schema).Elem().PkgPath())
				if inVarPkg := methodValue.Type().In(0).Elem().PkgPath(); inVarPkg == expectedPkgPath {
					reflect.Indirect(reflect.ValueOf(schema)).FieldByName(string(cbName)).SetBool(true)
				} else {
					logger.Default.Warn(context.Background(), "In model %v, the hook function `%v(*gorm.DB) error` has an incorrect parameter type. The expected parameter type is `%v`, but the provided type is `%v`.", schema, cbName, expectedPkgPath, inVarPkg)
					// PASS
				}
			default:
				logger.Default.Warn(context.Background(), "Model %v don't match %vInterface, should be `%v(*gorm.DB) error`. Please see https://gorm.io/docs/hooks.html", schema, cbName, cbName)
			}
		}
	}

	// parse relationships
	for _, field := range relationshipFields {
		if schema.parseRelation(field); schema.err != nil {
			return schema, schema.err
		}
	}

	return schema, schema.err
}
// getOrParse returns the cached Schema for dest's underlying struct type, or
// parses it on a cache miss. Non-struct destinations (after peeling slices,
// arrays, and pointers) yield ErrUnsupportedDataType.
func getOrParse(dest interface{}, cacheStore *sync.Map, namer Namer) (*Schema, error) {
	modelType := reflect.ValueOf(dest).Type()
	if modelType.Kind() != reflect.Struct {
		// Unwrap container and pointer kinds until a concrete element type
		// remains.
		for {
			kind := modelType.Kind()
			if kind != reflect.Slice && kind != reflect.Array && kind != reflect.Ptr {
				break
			}
			modelType = modelType.Elem()
		}

		if modelType.Kind() != reflect.Struct {
			if modelType.PkgPath() == "" {
				return nil, fmt.Errorf("%w: %+v", ErrUnsupportedDataType, dest)
			}
			return nil, fmt.Errorf("%w: %s.%s", ErrUnsupportedDataType, modelType.PkgPath(), modelType.Name())
		}
	}

	if cached, ok := cacheStore.Load(modelType); ok {
		return cached.(*Schema), nil
	}

	return Parse(dest, cacheStore, namer)
}
| go | MIT | 0d9141bad9772c6047ecfdb2819d8a52d27ceb65 | 2026-01-07T08:35:52.485253Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/api_impl_test.go | pkg/api/api_impl_test.go | package api
import (
"fmt"
"testing"
"github.com/evanw/esbuild/internal/test"
)
// TestStripDirPrefix checks stripDirPrefix against POSIX and Windows style
// paths: a matching prefix is removed (prefix "" and "/" both work), and a
// prefix that splits a path component or uses a slash kind not in
// allowedSlashes fails.
func TestStripDirPrefix(t *testing.T) {
	// expectSuccess runs a subtest asserting stripDirPrefix succeeds and
	// returns exactly "expected".
	expectSuccess := func(path string, prefix string, allowedSlashes string, expected string) {
		t.Helper()
		t.Run(fmt.Sprintf("path=%s prefix=%s slashes=%s", path, prefix, allowedSlashes), func(t *testing.T) {
			t.Helper()
			observed, ok := stripDirPrefix(path, prefix, allowedSlashes)
			if !ok {
				t.Fatalf("Unexpected failure")
			}
			test.AssertEqualWithDiff(t, observed, expected)
		})
	}

	// expectFailure runs a subtest asserting stripDirPrefix reports failure.
	expectFailure := func(path string, prefix string, allowedSlashes string) {
		t.Helper()
		t.Run(fmt.Sprintf("path=%s prefix=%s slashes=%s", path, prefix, allowedSlashes), func(t *testing.T) {
			t.Helper()
			_, ok := stripDirPrefix(path, prefix, allowedSlashes)
			if ok {
				t.Fatalf("Unexpected success")
			}
		})
	}

	// Note: People sometimes set "outdir" to "/" and expect that to work:
	// https://github.com/evanw/esbuild/issues/3027
	expectSuccess(`/foo/bar/baz`, ``, `/`, `/foo/bar/baz`)
	expectSuccess(`/foo/bar/baz`, `/`, `/`, `foo/bar/baz`)
	expectSuccess(`/foo/bar/baz`, `/foo`, `/`, `bar/baz`)
	expectSuccess(`/foo/bar/baz`, `/foo/bar`, `/`, `baz`)
	expectSuccess(`/foo/bar/baz`, `/foo/bar/baz`, `/`, ``)
	expectSuccess(`/foo/bar//baz`, `/foo/bar`, `/`, `/baz`)
	expectSuccess(`C:\foo\bar\baz`, ``, `\/`, `C:\foo\bar\baz`)
	expectSuccess(`C:\foo\bar\baz`, `C:`, `\/`, `foo\bar\baz`)
	expectSuccess(`C:\foo\bar\baz`, `C:\`, `\/`, `foo\bar\baz`)
	expectSuccess(`C:\foo\bar\baz`, `C:\foo`, `\/`, `bar\baz`)
	expectSuccess(`C:\foo\bar\baz`, `C:\foo\bar`, `\/`, `baz`)
	expectSuccess(`C:\foo\bar\baz`, `C:\foo\bar\baz`, `\/`, ``)
	expectSuccess(`C:\foo\bar\\baz`, `C:\foo\bar`, `\/`, `\baz`)
	expectSuccess(`C:\foo\bar/baz`, `C:\foo\bar`, `\/`, `baz`)

	// Prefixes that end mid-component, or that use a slash style not listed
	// in allowedSlashes, must not match.
	expectFailure(`/foo/bar`, `/foo/ba`, `/`)
	expectFailure(`/foo/bar`, `/fo`, `/`)
	expectFailure(`C:\foo\bar`, `C:\foo\ba`, `\/`)
	expectFailure(`C:\foo\bar`, `C:\fo`, `\/`)
	expectFailure(`C:/foo/bar`, `C:\foo`, `\/`)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/api.go | pkg/api/api.go | // This API exposes esbuild's two main operations: building and transforming.
// It's intended for integrating esbuild into other tools as a library.
//
// If you are just trying to run esbuild from Go without the overhead of
// creating a child process, there is also an API for the command-line
// interface itself: https://pkg.go.dev/github.com/evanw/esbuild/pkg/cli.
//
// # Build API
//
// This function runs an end-to-end build operation. It takes an array of file
// paths as entry points, parses them and all of their dependencies, and
// returns the output files to write to the file system. The available options
// roughly correspond to esbuild's command-line flags.
//
// Example usage:
//
// package main
//
// import (
// "os"
//
// "github.com/evanw/esbuild/pkg/api"
// )
//
// func main() {
// result := api.Build(api.BuildOptions{
// EntryPoints: []string{"input.js"},
// Outfile: "output.js",
// Bundle: true,
// Write: true,
// LogLevel: api.LogLevelInfo,
// })
//
// if len(result.Errors) > 0 {
// os.Exit(1)
// }
// }
//
// # Transform API
//
// This function transforms a string of source code into JavaScript. It can be
// used to minify JavaScript, convert TypeScript/JSX to JavaScript, or convert
// newer JavaScript to older JavaScript. The available options roughly
// correspond to esbuild's command-line flags.
//
// Example usage:
//
// package main
//
// import (
// "fmt"
// "os"
//
// "github.com/evanw/esbuild/pkg/api"
// )
//
// func main() {
// jsx := `
// import * as React from 'react'
// import * as ReactDOM from 'react-dom'
//
// ReactDOM.render(
// <h1>Hello, world!</h1>,
// document.getElementById('root')
// );
// `
//
// result := api.Transform(jsx, api.TransformOptions{
// Loader: api.LoaderJSX,
// })
//
// fmt.Printf("%d errors and %d warnings\n",
// len(result.Errors), len(result.Warnings))
//
// os.Stdout.Write(result.Code)
// }
package api
import (
"time"
"github.com/evanw/esbuild/internal/logger"
)
// SourceMap selects whether and how a source map is emitted: none, inline in
// the output, a linked external file, external only, or both inline and
// external.
type SourceMap uint8

const (
	SourceMapNone SourceMap = iota
	SourceMapInline
	SourceMapLinked
	SourceMapExternal
	SourceMapInlineAndExternal
)

// SourcesContent controls whether original source text is included in
// generated source maps or excluded from them.
type SourcesContent uint8

const (
	SourcesContentInclude SourcesContent = iota
	SourcesContentExclude
)

// LegalComments controls where legal comments end up: removed, kept inline,
// moved to the end of the file, or written to a linked/external file.
type LegalComments uint8

const (
	LegalCommentsDefault LegalComments = iota
	LegalCommentsNone
	LegalCommentsInline
	LegalCommentsEndOfFile
	LegalCommentsLinked
	LegalCommentsExternal
)

// JSX selects how JSX syntax is handled: transformed, preserved as-is, or
// compiled using the automatic JSX runtime.
type JSX uint8

const (
	JSXTransform JSX = iota
	JSXPreserve
	JSXAutomatic
)

// Target selects the JavaScript language version to compile the output for
// (ES5 through ES2024, or ESNext for no lowering).
type Target uint8

const (
	DefaultTarget Target = iota
	ESNext
	ES5
	ES2015
	ES2016
	ES2017
	ES2018
	ES2019
	ES2020
	ES2021
	ES2022
	ES2023
	ES2024
)
// Loader selects how an input file's contents are interpreted (JS, TS, JSX,
// CSS variants, JSON, text, binary/base64/data URL encodings, file copy, or
// empty).
type Loader uint16

const (
	LoaderNone Loader = iota
	LoaderBase64
	LoaderBinary
	LoaderCopy
	LoaderCSS
	LoaderDataURL
	LoaderDefault
	LoaderEmpty
	LoaderFile
	LoaderGlobalCSS
	LoaderJS
	LoaderJSON
	LoaderJSX
	LoaderLocalCSS
	LoaderText
	LoaderTS
	LoaderTSX
)

// Platform selects the target environment assumptions: browser, node, or
// neutral.
type Platform uint8

const (
	PlatformDefault Platform = iota
	PlatformBrowser
	PlatformNode
	PlatformNeutral
)

// Format selects the output module format: IIFE, CommonJS, or ES module.
type Format uint8

const (
	FormatDefault Format = iota
	FormatIIFE
	FormatCommonJS
	FormatESModule
)

// Packages controls whether package imports are bundled or kept external.
type Packages uint8

const (
	PackagesDefault Packages = iota
	PackagesBundle
	PackagesExternal
)
// Engine pairs an engine name with a version string, used with the Engines
// option to describe target environments.
type Engine struct {
	Name    EngineName
	Version string
}

// Location identifies the span of source text that a Message refers to.
type Location struct {
	File       string
	Namespace  string
	Line       int // 1-based
	Column     int // 0-based, in bytes
	Length     int // in bytes
	LineText   string
	Suggestion string
}

// Message is a single log message (error or warning) produced by a build or
// transform, optionally tied to a source location and supplementary notes.
type Message struct {
	ID         string
	PluginName string
	Text       string
	Location   *Location
	Notes      []Note

	// Optional user-specified data that is passed through unmodified. You can
	// use this to stash the original error, for example.
	Detail interface{}
}

// Note is supplementary text attached to a Message, optionally with its own
// source location.
type Note struct {
	Text     string
	Location *Location
}
// StderrColor controls whether log output to stderr uses terminal colors.
type StderrColor uint8

const (
	ColorIfTerminal StderrColor = iota
	ColorNever
	ColorAlways
)

// LogLevel filters which log messages are printed, from silent up through
// error-only.
type LogLevel uint8

const (
	LogLevelSilent LogLevel = iota
	LogLevelVerbose
	LogLevelDebug
	LogLevelInfo
	LogLevelWarning
	LogLevelError
)

// Charset controls whether output is restricted to ASCII or may contain
// UTF-8.
type Charset uint8

const (
	CharsetDefault Charset = iota
	CharsetASCII
	CharsetUTF8
)

// TreeShaking overrides the default tree-shaking behavior: force it off or
// force it on.
type TreeShaking uint8

const (
	TreeShakingDefault TreeShaking = iota
	TreeShakingFalse
	TreeShakingTrue
)

// Drop is a bit set of construct kinds (console calls, debugger statements)
// to remove from the output.
type Drop uint8

const (
	DropConsole Drop = 1 << iota
	DropDebugger
)

// MangleQuoted controls whether quoted property names are also mangled when
// property mangling is enabled.
type MangleQuoted uint8

const (
	MangleQuotedFalse MangleQuoted = iota
	MangleQuotedTrue
)

// AbsPaths is a bit set selecting where absolute file paths are used: in
// generated code, in log output, and/or in the metafile.
type AbsPaths uint8

const (
	CodeAbsPath AbsPaths = 1 << iota
	LogAbsPath
	MetafileAbsPath
)
////////////////////////////////////////////////////////////////////////////////
// Build API
// BuildOptions holds every option accepted by the Build and Context APIs.
// Each field links to its documentation; the zero value of the enum-typed
// fields selects esbuild's default behavior (each enum's *Default constant
// is 0).
//
// Documentation: https://esbuild.github.io/api/#build
type BuildOptions struct {
	Color       StderrColor         // Documentation: https://esbuild.github.io/api/#color
	LogLevel    LogLevel            // Documentation: https://esbuild.github.io/api/#log-level
	LogLimit    int                 // Documentation: https://esbuild.github.io/api/#log-limit
	LogOverride map[string]LogLevel // Documentation: https://esbuild.github.io/api/#log-override
	AbsPaths    AbsPaths            // Documentation: https://esbuild.github.io/api/#abs-path

	Sourcemap      SourceMap      // Documentation: https://esbuild.github.io/api/#sourcemap
	SourceRoot     string         // Documentation: https://esbuild.github.io/api/#source-root
	SourcesContent SourcesContent // Documentation: https://esbuild.github.io/api/#sources-content

	Target    Target          // Documentation: https://esbuild.github.io/api/#target
	Engines   []Engine        // Documentation: https://esbuild.github.io/api/#target
	Supported map[string]bool // Documentation: https://esbuild.github.io/api/#supported

	MangleProps  string                 // Documentation: https://esbuild.github.io/api/#mangle-props
	ReserveProps string                 // Documentation: https://esbuild.github.io/api/#mangle-props
	MangleQuoted MangleQuoted           // Documentation: https://esbuild.github.io/api/#mangle-props
	MangleCache  map[string]interface{} // Documentation: https://esbuild.github.io/api/#mangle-props

	Drop              Drop        // Documentation: https://esbuild.github.io/api/#drop
	DropLabels        []string    // Documentation: https://esbuild.github.io/api/#drop-labels
	MinifyWhitespace  bool        // Documentation: https://esbuild.github.io/api/#minify
	MinifyIdentifiers bool        // Documentation: https://esbuild.github.io/api/#minify
	MinifySyntax      bool        // Documentation: https://esbuild.github.io/api/#minify
	LineLimit         int         // Documentation: https://esbuild.github.io/api/#line-limit
	Charset           Charset     // Documentation: https://esbuild.github.io/api/#charset
	TreeShaking       TreeShaking // Documentation: https://esbuild.github.io/api/#tree-shaking
	IgnoreAnnotations bool        // Documentation: https://esbuild.github.io/api/#ignore-annotations
	LegalComments     LegalComments // Documentation: https://esbuild.github.io/api/#legal-comments

	JSX             JSX    // Documentation: https://esbuild.github.io/api/#jsx-mode
	JSXFactory      string // Documentation: https://esbuild.github.io/api/#jsx-factory
	JSXFragment     string // Documentation: https://esbuild.github.io/api/#jsx-fragment
	JSXImportSource string // Documentation: https://esbuild.github.io/api/#jsx-import-source
	JSXDev          bool   // Documentation: https://esbuild.github.io/api/#jsx-dev
	JSXSideEffects  bool   // Documentation: https://esbuild.github.io/api/#jsx-side-effects

	Define    map[string]string // Documentation: https://esbuild.github.io/api/#define
	Pure      []string          // Documentation: https://esbuild.github.io/api/#pure
	KeepNames bool              // Documentation: https://esbuild.github.io/api/#keep-names

	GlobalName        string   // Documentation: https://esbuild.github.io/api/#global-name
	Bundle            bool     // Documentation: https://esbuild.github.io/api/#bundle
	PreserveSymlinks  bool     // Documentation: https://esbuild.github.io/api/#preserve-symlinks
	Splitting         bool     // Documentation: https://esbuild.github.io/api/#splitting
	Outfile           string   // Documentation: https://esbuild.github.io/api/#outfile
	Metafile          bool     // Documentation: https://esbuild.github.io/api/#metafile
	Outdir            string   // Documentation: https://esbuild.github.io/api/#outdir
	Outbase           string   // Documentation: https://esbuild.github.io/api/#outbase
	AbsWorkingDir     string   // Documentation: https://esbuild.github.io/api/#working-directory
	Platform          Platform // Documentation: https://esbuild.github.io/api/#platform
	Format            Format   // Documentation: https://esbuild.github.io/api/#format
	External          []string // Documentation: https://esbuild.github.io/api/#external
	Packages          Packages // Documentation: https://esbuild.github.io/api/#packages
	Alias             map[string]string // Documentation: https://esbuild.github.io/api/#alias
	MainFields        []string // Documentation: https://esbuild.github.io/api/#main-fields
	Conditions        []string // Documentation: https://esbuild.github.io/api/#conditions
	Loader            map[string]Loader // Documentation: https://esbuild.github.io/api/#loader
	ResolveExtensions []string // Documentation: https://esbuild.github.io/api/#resolve-extensions
	Tsconfig          string   // Documentation: https://esbuild.github.io/api/#tsconfig
	TsconfigRaw       string   // Documentation: https://esbuild.github.io/api/#tsconfig-raw
	OutExtension      map[string]string // Documentation: https://esbuild.github.io/api/#out-extension
	PublicPath        string   // Documentation: https://esbuild.github.io/api/#public-path
	Inject            []string // Documentation: https://esbuild.github.io/api/#inject
	Banner            map[string]string // Documentation: https://esbuild.github.io/api/#banner
	Footer            map[string]string // Documentation: https://esbuild.github.io/api/#footer
	NodePaths         []string // Documentation: https://esbuild.github.io/api/#node-paths

	EntryNames string // Documentation: https://esbuild.github.io/api/#entry-names
	ChunkNames string // Documentation: https://esbuild.github.io/api/#chunk-names
	AssetNames string // Documentation: https://esbuild.github.io/api/#asset-names

	EntryPoints         []string     // Documentation: https://esbuild.github.io/api/#entry-points
	EntryPointsAdvanced []EntryPoint // Documentation: https://esbuild.github.io/api/#entry-points

	Stdin          *StdinOptions // Documentation: https://esbuild.github.io/api/#stdin
	Write          bool          // Documentation: https://esbuild.github.io/api/#write
	AllowOverwrite bool          // Documentation: https://esbuild.github.io/api/#allow-overwrite
	Plugins        []Plugin      // Documentation: https://esbuild.github.io/plugins/
}
// EntryPoint is an entry point with an explicit output path, used via
// BuildOptions.EntryPointsAdvanced.
//
// Documentation: https://esbuild.github.io/api/#entry-points
type EntryPoint struct {
	InputPath  string
	OutputPath string
}

// StdinOptions describes a virtual input file supplied from memory instead
// of from the file system.
//
// Documentation: https://esbuild.github.io/api/#stdin
type StdinOptions struct {
	Contents   string
	ResolveDir string
	Sourcefile string
	Loader     Loader
}

// BuildResult is returned by Build and BuildContext.Rebuild. The build
// failed if there are entries in Errors.
type BuildResult struct {
	Errors   []Message
	Warnings []Message

	OutputFiles []OutputFile
	Metafile    string
	MangleCache map[string]interface{}
}

// OutputFile is one generated output file, held in memory.
type OutputFile struct {
	Path     string
	Contents []byte
	Hash     string
}
// Build runs a single build with the given options and returns the result,
// printing a summary of the generated files to stderr when appropriate.
//
// Documentation: https://esbuild.github.io/api/#build
func Build(options BuildOptions) BuildResult {
	buildStart := time.Now()

	// Option validation failures come back as a result containing only errors
	ctx, validationErrors := contextImpl(options)
	if ctx == nil {
		return BuildResult{Errors: validationErrors}
	}
	defer ctx.Dispose()

	result := ctx.Rebuild()

	// Print a summary of the generated files to stderr. Except don't do
	// this if the terminal is already being used for something else.
	logOpts := ctx.args.logOptions
	if logOpts.LogLevel <= logger.LevelInfo && !ctx.args.options.WriteToStdout {
		printSummary(logOpts.Color, result.OutputFiles, buildStart)
	}
	return result
}
////////////////////////////////////////////////////////////////////////////////
// Transform API
// TransformOptions holds every option accepted by the Transform API. Each
// field links to its documentation; the zero value of the enum-typed fields
// selects esbuild's default behavior (each enum's *Default constant is 0).
//
// Documentation: https://esbuild.github.io/api/#transform
type TransformOptions struct {
	Color       StderrColor         // Documentation: https://esbuild.github.io/api/#color
	LogLevel    LogLevel            // Documentation: https://esbuild.github.io/api/#log-level
	LogLimit    int                 // Documentation: https://esbuild.github.io/api/#log-limit
	LogOverride map[string]LogLevel // Documentation: https://esbuild.github.io/api/#log-override
	AbsPaths    AbsPaths            // Documentation: https://esbuild.github.io/api/#abs-path

	Sourcemap      SourceMap      // Documentation: https://esbuild.github.io/api/#sourcemap
	SourceRoot     string         // Documentation: https://esbuild.github.io/api/#source-root
	SourcesContent SourcesContent // Documentation: https://esbuild.github.io/api/#sources-content

	Target    Target          // Documentation: https://esbuild.github.io/api/#target
	Engines   []Engine        // Documentation: https://esbuild.github.io/api/#target
	Supported map[string]bool // Documentation: https://esbuild.github.io/api/#supported

	Platform   Platform // Documentation: https://esbuild.github.io/api/#platform
	Format     Format   // Documentation: https://esbuild.github.io/api/#format
	GlobalName string   // Documentation: https://esbuild.github.io/api/#global-name

	MangleProps  string                 // Documentation: https://esbuild.github.io/api/#mangle-props
	ReserveProps string                 // Documentation: https://esbuild.github.io/api/#mangle-props
	MangleQuoted MangleQuoted           // Documentation: https://esbuild.github.io/api/#mangle-props
	MangleCache  map[string]interface{} // Documentation: https://esbuild.github.io/api/#mangle-props

	Drop              Drop          // Documentation: https://esbuild.github.io/api/#drop
	DropLabels        []string      // Documentation: https://esbuild.github.io/api/#drop-labels
	MinifyWhitespace  bool          // Documentation: https://esbuild.github.io/api/#minify
	MinifyIdentifiers bool          // Documentation: https://esbuild.github.io/api/#minify
	MinifySyntax      bool          // Documentation: https://esbuild.github.io/api/#minify
	LineLimit         int           // Documentation: https://esbuild.github.io/api/#line-limit
	Charset           Charset       // Documentation: https://esbuild.github.io/api/#charset
	TreeShaking       TreeShaking   // Documentation: https://esbuild.github.io/api/#tree-shaking
	IgnoreAnnotations bool          // Documentation: https://esbuild.github.io/api/#ignore-annotations
	LegalComments     LegalComments // Documentation: https://esbuild.github.io/api/#legal-comments

	JSX             JSX    // Documentation: https://esbuild.github.io/api/#jsx
	JSXFactory      string // Documentation: https://esbuild.github.io/api/#jsx-factory
	JSXFragment     string // Documentation: https://esbuild.github.io/api/#jsx-fragment
	JSXImportSource string // Documentation: https://esbuild.github.io/api/#jsx-import-source
	JSXDev          bool   // Documentation: https://esbuild.github.io/api/#jsx-dev
	JSXSideEffects  bool   // Documentation: https://esbuild.github.io/api/#jsx-side-effects

	TsconfigRaw string // Documentation: https://esbuild.github.io/api/#tsconfig-raw
	Banner      string // Documentation: https://esbuild.github.io/api/#banner
	Footer      string // Documentation: https://esbuild.github.io/api/#footer

	Define    map[string]string // Documentation: https://esbuild.github.io/api/#define
	Pure      []string          // Documentation: https://esbuild.github.io/api/#pure
	KeepNames bool              // Documentation: https://esbuild.github.io/api/#keep-names

	Sourcefile string // Documentation: https://esbuild.github.io/api/#sourcefile
	Loader     Loader // Documentation: https://esbuild.github.io/api/#loader
}
// TransformResult is returned by Transform. The transform failed if there
// are entries in Errors.
type TransformResult struct {
	Errors   []Message
	Warnings []Message

	Code []byte
	Map  []byte

	LegalComments []byte

	MangleCache map[string]interface{}
}

// Transform transforms a single string of input code according to the given
// options and returns the generated code together with any log messages.
//
// Documentation: https://esbuild.github.io/api/#transform
func Transform(input string, options TransformOptions) TransformResult {
	return transformImpl(input, options)
}
////////////////////////////////////////////////////////////////////////////////
// Context API
// ServeOptions configures the local development web server started by
// BuildContext.Serve.
//
// Documentation: https://esbuild.github.io/api/#serve-arguments
type ServeOptions struct {
	Port      int
	Host      string
	Servedir  string
	Keyfile   string // Path to the HTTPS private key file (never served; see the request handler)
	Certfile  string // Path to the HTTPS certificate file (never served)
	Fallback  string
	CORS      CORSOptions
	OnRequest func(ServeOnRequestArgs)
}

// CORSOptions configures cross-origin resource sharing for the web server.
//
// Documentation: https://esbuild.github.io/api/#cors
type CORSOptions struct {
	Origin []string
}

// ServeOnRequestArgs summarizes one HTTP request handled by the server. It
// is passed to the ServeOptions.OnRequest callback.
type ServeOnRequestArgs struct {
	RemoteAddress string
	Method        string
	Path          string
	Status        int
	TimeInMS      int // The time to generate the response, not to send it
}

// ServeResult reports where the web server ended up listening.
//
// Documentation: https://esbuild.github.io/api/#serve-return-values
type ServeResult struct {
	Port  uint16
	Hosts []string
}

// WatchOptions configures watch mode.
//
// Documentation: https://esbuild.github.io/api/#watch-arguments
type WatchOptions struct {
	Delay int // In milliseconds
}

// BuildContext is a long-lived build created by Context. It can be rebuilt
// repeatedly, and supports watch mode and a development server.
type BuildContext interface {
	// Documentation: https://esbuild.github.io/api/#rebuild
	Rebuild() BuildResult

	// Documentation: https://esbuild.github.io/api/#watch
	Watch(options WatchOptions) error

	// Documentation: https://esbuild.github.io/api/#serve
	Serve(options ServeOptions) (ServeResult, error)

	// Cancel cancels the current build, if one is in progress.
	Cancel()

	// Dispose releases the resources held by this context. The context
	// should not be used after calling this.
	Dispose()
}
// ContextError is returned by Context when the supplied build options fail
// validation.
type ContextError struct {
	Errors []Message // Option validation errors are returned here
}

// Error implements the error interface. It returns the text of the first
// validation error, or a generic message when there are none.
func (err *ContextError) Error() string {
	if len(err.Errors) == 0 {
		return "Context creation failed"
	}
	return err.Errors[0].Text
}
// Context creates a long-lived build context from the given options. A
// non-nil *ContextError means the options failed validation and no context
// was created.
//
// Documentation: https://esbuild.github.io/api/#build
func Context(buildOptions BuildOptions) (BuildContext, *ContextError) {
	ctx, validationErrors := contextImpl(buildOptions)
	if ctx != nil {
		return ctx, nil
	}
	return nil, &ContextError{Errors: validationErrors}
}
////////////////////////////////////////////////////////////////////////////////
// Plugin API
// SideEffects marks whether a plugin-resolved module should be treated as
// having side effects (see OnResolveResult.SideEffects). Note that the
// zero value is SideEffectsTrue.
type SideEffects uint8

const (
	SideEffectsTrue SideEffects = iota
	SideEffectsFalse
)

// Plugin is a user-defined plugin: a name plus a setup callback that
// registers hooks on the provided PluginBuild.
//
// Documentation: https://esbuild.github.io/plugins/
type Plugin struct {
	Name  string
	Setup func(PluginBuild)
}
// PluginBuild is the object passed to a plugin's Setup callback. Plugins
// register their hooks on it to participate in the build.
type PluginBuild struct {
	// Documentation: https://esbuild.github.io/plugins/#build-options
	InitialOptions *BuildOptions

	// Documentation: https://esbuild.github.io/plugins/#resolve
	Resolve func(path string, options ResolveOptions) ResolveResult

	// Documentation: https://esbuild.github.io/plugins/#on-start
	OnStart func(callback func() (OnStartResult, error))

	// Documentation: https://esbuild.github.io/plugins/#on-end
	OnEnd func(callback func(result *BuildResult) (OnEndResult, error))

	// Documentation: https://esbuild.github.io/plugins/#on-resolve
	OnResolve func(options OnResolveOptions, callback func(OnResolveArgs) (OnResolveResult, error))

	// Documentation: https://esbuild.github.io/plugins/#on-load
	OnLoad func(options OnLoadOptions, callback func(OnLoadArgs) (OnLoadResult, error))

	// Documentation: https://esbuild.github.io/plugins/#on-dispose
	OnDispose func(callback func())
}

// ResolveOptions is the input to PluginBuild.Resolve.
//
// Documentation: https://esbuild.github.io/plugins/#resolve-options
type ResolveOptions struct {
	PluginName string
	Importer   string
	Namespace  string
	ResolveDir string
	Kind       ResolveKind
	PluginData interface{}
	With       map[string]string
}
// ResolveResult is the output of PluginBuild.Resolve.
//
// Documentation: https://esbuild.github.io/plugins/#resolve-results
type ResolveResult struct {
	Errors   []Message
	Warnings []Message

	Path        string
	External    bool
	SideEffects bool
	Namespace   string
	Suffix      string
	PluginData  interface{}
}

// OnStartResult is returned by a plugin's OnStart callback and may carry
// additional log messages.
type OnStartResult struct {
	Errors   []Message
	Warnings []Message
}

// OnEndResult is returned by a plugin's OnEnd callback and may carry
// additional log messages.
type OnEndResult struct {
	Errors   []Message
	Warnings []Message
}
// OnResolveOptions filters which paths a plugin's OnResolve callback is
// invoked for.
//
// Documentation: https://esbuild.github.io/plugins/#on-resolve-options
type OnResolveOptions struct {
	Filter    string
	Namespace string
}

// OnResolveArgs describes the import path being resolved.
//
// Documentation: https://esbuild.github.io/plugins/#on-resolve-arguments
type OnResolveArgs struct {
	Path       string
	Importer   string
	Namespace  string
	ResolveDir string
	Kind       ResolveKind
	PluginData interface{}
	With       map[string]string
}

// OnResolveResult is returned by an OnResolve callback to customize how a
// path is resolved.
//
// Documentation: https://esbuild.github.io/plugins/#on-resolve-results
type OnResolveResult struct {
	PluginName string

	Errors   []Message
	Warnings []Message

	Path        string
	External    bool
	SideEffects SideEffects
	Namespace   string
	Suffix      string
	PluginData  interface{}

	WatchFiles []string
	WatchDirs  []string
}
// OnLoadOptions filters which paths a plugin's OnLoad callback is invoked
// for.
//
// Documentation: https://esbuild.github.io/plugins/#on-load-options
type OnLoadOptions struct {
	Filter    string
	Namespace string
}

// OnLoadArgs describes the file being loaded.
//
// Documentation: https://esbuild.github.io/plugins/#on-load-arguments
type OnLoadArgs struct {
	Path       string
	Namespace  string
	Suffix     string
	PluginData interface{}
	With       map[string]string
}

// OnLoadResult is returned by an OnLoad callback to provide a module's
// contents and how to interpret them.
//
// Documentation: https://esbuild.github.io/plugins/#on-load-results
type OnLoadResult struct {
	PluginName string

	Errors   []Message
	Warnings []Message

	Contents   *string // Pointer so the empty string is distinguishable from "not provided"
	ResolveDir string
	Loader     Loader
	PluginData interface{}

	WatchFiles []string
	WatchDirs  []string
}

// ResolveKind classifies why a path is being resolved: an entry point, a JS
// import/require, a CSS @import, and so on.
type ResolveKind uint8

const (
	ResolveNone ResolveKind = iota
	ResolveEntryPoint
	ResolveJSImportStatement
	ResolveJSRequireCall
	ResolveJSDynamicImport
	ResolveJSRequireResolve
	ResolveCSSImportRule
	ResolveCSSComposesFrom
	ResolveCSSURLToken
)
////////////////////////////////////////////////////////////////////////////////
// FormatMessages API
// MessageKind distinguishes errors from warnings when formatting messages.
type MessageKind uint8

const (
	ErrorMessage MessageKind = iota
	WarningMessage
)

// FormatMessagesOptions controls how FormatMessages renders log messages.
type FormatMessagesOptions struct {
	TerminalWidth int // Output is truncated to this many columns (the tests use 0 for "no limit")
	Kind          MessageKind
	Color         bool
}

// FormatMessages converts log messages into printable strings, one output
// string per input message.
func FormatMessages(msgs []Message, opts FormatMessagesOptions) []string {
	return formatMsgsImpl(msgs, opts)
}
////////////////////////////////////////////////////////////////////////////////
// AnalyzeMetafile API
// AnalyzeMetafileOptions controls the rendering done by AnalyzeMetafile.
type AnalyzeMetafileOptions struct {
	Color   bool
	Verbose bool
}

// AnalyzeMetafile renders a human-readable summary of a build metafile
// (see BuildOptions.Metafile and BuildResult.Metafile).
//
// Documentation: https://esbuild.github.io/api/#analyze
func AnalyzeMetafile(metafile string, opts AnalyzeMetafileOptions) string {
	return analyzeMetafileImpl(metafile, opts)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/api_test.go | pkg/api/api_test.go | package api_test
import (
"testing"
"github.com/evanw/esbuild/internal/test"
"github.com/evanw/esbuild/pkg/api"
)
// TestFormatMessages checks the text rendering produced by
// api.FormatMessages: error/warning prefixes, source location framing,
// UTF-8 and tab-stop handling, truncation at a fixed terminal width,
// multi-line line text, and attached notes.
func TestFormatMessages(t *testing.T) {
// check runs one named sub-test asserting that a single formatted message
// exactly matches the expected string.
check := func(name string, opts api.FormatMessagesOptions, msg api.Message, expected string) {
t.Helper()
t.Run(name, func(t *testing.T) {
test.AssertEqualWithDiff(t, api.FormatMessages([]api.Message{msg}, opts)[0], expected)
})
}
check("Error", api.FormatMessagesOptions{Kind: api.ErrorMessage}, api.Message{Text: "This is a test"}, "✘ [ERROR] This is a test\n\n")
check("Warning", api.FormatMessagesOptions{Kind: api.WarningMessage}, api.Message{Text: "This is a test"}, "▲ [WARNING] This is a test\n\n")
check("Basic location",
api.FormatMessagesOptions{},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 5, // 0-based
Length: 3,
LineText: "this.foo();",
Suggestion: "bar",
}},
`✘ [ERROR] This is a test
some file.js:100:5:
100 │ this.foo();
│ ~~~
╵ bar
`,
)
check("Unicode location",
api.FormatMessagesOptions{
Kind: api.WarningMessage,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 17, // In UTF-8 bytes
Length: 10, // In UTF-8 bytes
LineText: "𝓉𝒽𝒾𝓈.𝒻ℴℴ();",
Suggestion: "𝒷𝒶𝓇",
}},
`▲ [WARNING] This is a test
some file.js:100:17:
100 │ 𝓉𝒽𝒾𝓈.𝒻ℴℴ();
│ ~~~
╵ 𝒷𝒶𝓇
`,
)
check("Tab stop rendering",
api.FormatMessagesOptions{
Kind: api.WarningMessage,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 6,
Length: 4,
LineText: "0\t1\t23\t45\t678",
}},
`▲ [WARNING] This is a test
some file.js:100:6:
100 │ 0 1 23 45 678
╵ ~~~~~~
`,
)
check("Truncated location tail, zero length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 3,
Length: 0,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:3:
100 │ 012345678 abcdefg...
╵ ^
`,
)
check("Truncated location tail, nonzero length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 3,
Length: 6,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:3:
100 │ 012345678 abcdefg...
╵ ~~~~~~
`,
)
check("Truncated location tail, truncated length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 3,
Length: 100,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:3:
100 │ 012345678 abcdefg...
╵ ~~~~~~~~~~~~~~
`,
)
check("Truncated location head, zero length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 200,
Length: 0,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:59:
100 │ ...defghi ABCDEFGHI
╵ ^
`,
)
check("Truncated location head, nonzero length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 50,
Length: 200,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:50:
100 │ ...cdefghi ABCDEFGHI
╵ ~~~~~~~~~
`,
)
check("Truncated location head and tail, truncated length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 30,
Length: 30,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:30:
100 │ ... 012345678 abc...
╵ ~~~~~~~~~~~~~
`,
)
check("Truncated location head and tail, non-truncated length",
api.FormatMessagesOptions{
TerminalWidth: 32,
},
api.Message{Text: "This is a test", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 30,
Length: 9,
LineText: "012345678 abcdefghi ABCDEFGHI 012345678 abcdefghi ABCDEFGHI",
}},
`✘ [ERROR] This is a test
some file.js:100:30:
100 │ ...HI 012345678 a...
╵ ~~~~~~~~~
`,
)
check("Multi-line line text",
api.FormatMessagesOptions{},
api.Message{Text: "ReferenceError: Cannot access 'foo' before initialization", Location: &api.Location{
File: "some file.js",
Line: 100,
Column: 2,
LineText: ` foo();
at ModuleJob.run (node:internal/modules/esm/module_job:185:25)
at async Promise.all (index 0)
at async ESMLoader.import (node:internal/modules/esm/loader:281:24)
at async loadESM (node:internal/process/esm_loader:88:5)
at async handleMainPromise (node:internal/modules/run_main:65:12)`,
}},
`✘ [ERROR] ReferenceError: Cannot access 'foo' before initialization
some file.js:100:2:
100 │ foo();
╵ ^
at ModuleJob.run (node:internal/modules/esm/module_job:185:25)
at async Promise.all (index 0)
at async ESMLoader.import (node:internal/modules/esm/loader:281:24)
at async loadESM (node:internal/process/esm_loader:88:5)
at async handleMainPromise (node:internal/modules/run_main:65:12)
`,
)
check("Note formatting",
api.FormatMessagesOptions{
TerminalWidth: 40,
},
api.Message{
Text: "Why would you do this?",
Location: &api.Location{
File: "some file.js",
Line: 1,
Column: 10,
Length: 16,
LineText: "let ten = +([+!+[]]+[+[]]);",
},
Notes: []api.Note{{
Text: "This is 1:",
Location: &api.Location{
File: "some file.js",
Line: 1,
Column: 12,
Length: 7,
LineText: "let ten = +([+!+[]]+[+[]]);",
Suggestion: "'1'",
},
}, {
Text: "This is 0:",
Location: &api.Location{
File: "some file.js",
Line: 1,
Column: 20,
Length: 5,
LineText: "let ten = +([+!+[]]+[+[]]);",
Suggestion: "'0'",
},
}, {
Text: "The number 0 is created by +[], where [] is the empty array and + is the unary plus, " +
"used to convert the right side to a numeric value. The number 1 is formed as +!+[], where " +
"the boolean value true is converted into the numeric value 1 by the prepended plus sign.",
}},
},
`✘ [ERROR] Why would you do this?
some file.js:1:10:
1 │ let ten = +([+!+[]]+[+[]]);
╵ ~~~~~~~~~~~~~~~~
This is 1:
some file.js:1:12:
1 │ let ten = +([+!+[]]+[+[]]);
│ ~~~~~~~
╵ '1'
This is 0:
some file.js:1:20:
1 │ let ten = +([+!+[]]+[+[]]);
│ ~~~~~
╵ '0'
The number 0 is created by +[], where
[] is the empty array and + is the
unary plus, used to convert the right
side to a numeric value. The number 1
is formed as +!+[], where the boolean
value true is converted into the
numeric value 1 by the prepended plus
sign.
`,
)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/serve_other.go | pkg/api/serve_other.go | //go:build !js || !wasm
// +build !js !wasm
package api
// This file implements the "Serve()" function in esbuild's public API. It
// provides a basic web server that can serve a directory tree over HTTP. When
// a directory is visited the "index.html" will be served if present, otherwise
// esbuild will automatically generate a directory listing page with links for
// each file in the directory. If there is a build configured that generates
// output files, those output files are not written to disk but are instead
// "overlayed" virtually on top of the real file system. The server responds to
// HTTP requests for output files from the build with the latest in-memory
// build results.
import (
"errors"
"fmt"
"net"
"net/http"
"os"
"path"
"sort"
"strconv"
"strings"
"sync"
"sync/atomic"
"syscall"
"time"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/logger"
)
////////////////////////////////////////////////////////////////////////////////
// Serve API
// apiHandler implements http.Handler for the development server. It serves
// the latest in-memory build results "overlayed" on top of the optional
// "servedir" directory tree, and special-cases a "/esbuild" server-sent
// event stream.
type apiHandler struct {
	onRequest        func(ServeOnRequestArgs) // User callback; call sites run it on its own goroutine
	rebuild          func() BuildResult       // Returns the latest build result — TODO confirm whether it triggers a rebuild
	stop             func()
	fs               fs.FS
	absOutputDir     string
	outdirPathPrefix string
	publicPath       string
	servedir         string
	keyfileToLower   string // Lower-cased path of the HTTPS key file, which must never be served
	certfileToLower  string // Lower-cased path of the HTTPS certificate file, which must never be served
	fallback         string // Optional HTML file served when nothing else matches
	hosts            []string // Hosts accepted in the "Host" header (DNS rebinding protection)
	corsOrigin       []string // Allowed CORS origins; supports "*" and single-"*" wildcard patterns
	serveWaitGroup   sync.WaitGroup
	activeStreams    []chan serverSentEvent
	currentHashes    map[string]string
	mutex            sync.Mutex
}

// serverSentEvent is one event on the "/esbuild" event stream; the fields
// correspond to the "event:" and "data:" lines of text/event-stream.
type serverSentEvent struct {
	event string
	data  string
}
// escapeForHTML escapes the characters "&", "<", and ">" so that arbitrary
// text can be embedded in HTML element content. The ampersand must be
// replaced first so the entities introduced by the other replacements are
// not themselves re-escaped.
//
// Note: as previously written, each call replaced a character with itself
// (the entity names had been lost), making this function a no-op and
// leaving the generated directory-listing HTML unescaped.
func escapeForHTML(text string) string {
	text = strings.ReplaceAll(text, "&", "&amp;")
	text = strings.ReplaceAll(text, "<", "&lt;")
	text = strings.ReplaceAll(text, ">", "&gt;")
	return text
}
// escapeForAttribute escapes text so it can be embedded in a double- or
// single-quoted HTML attribute value. It performs the same escapes as
// escapeForHTML plus both quote characters. The ampersand is replaced
// first so the entities introduced by the later replacements are not
// themselves re-escaped.
//
// Note: as previously written, the quote replacements substituted each
// character with itself (the entity names had been lost), so attribute
// values were emitted unescaped.
func escapeForAttribute(text string) string {
	text = strings.ReplaceAll(text, "&", "&amp;")
	text = strings.ReplaceAll(text, "<", "&lt;")
	text = strings.ReplaceAll(text, ">", "&gt;")
	text = strings.ReplaceAll(text, "\"", "&quot;")
	text = strings.ReplaceAll(text, "'", "&#39;")
	return text
}
// notifyRequest invokes the user's request callback, if one was provided,
// with a summary of the request and the response status. Call sites invoke
// this on its own goroutine so the callback cannot block request handling.
func (h *apiHandler) notifyRequest(duration time.Duration, req *http.Request, status int) {
	if h.onRequest == nil {
		return
	}
	args := ServeOnRequestArgs{
		RemoteAddress: req.RemoteAddr,
		Method:        req.Method,
		Path:          req.URL.Path,
		Status:        status,
		TimeInMS:      int(duration.Milliseconds()),
	}
	h.onRequest(args)
}
// errorsToString renders build error messages as plain text, suitable for
// an HTTP error response body. At most five messages are printed in full;
// when there are more, a final line reports how many were shown.
func errorsToString(errors []Message) string {
	const maxShown = 5
	outputOptions := logger.OutputOptions{IncludeSource: true}
	terminal := logger.TerminalInfo{}

	var b strings.Builder
	for i, msg := range convertMessagesToInternal(nil, logger.Error, errors) {
		if i == maxShown {
			fmt.Fprintf(&b, "%d out of %d errors shown\n", maxShown, len(errors))
			break
		}
		b.WriteString(msg.String(outputOptions, terminal))
	}
	return b.String()
}
func (h *apiHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
start := time.Now()
// Add CORS headers to all relevant requests
if origin := req.Header.Get("Origin"); origin != "" {
for _, allowed := range h.corsOrigin {
if allowed == "*" {
res.Header().Set("Access-Control-Allow-Origin", "*")
break
} else if star := strings.IndexByte(allowed, '*'); star >= 0 {
prefix, suffix := allowed[:star], allowed[star+1:]
if len(origin) >= len(prefix)+len(suffix) && strings.HasPrefix(origin, prefix) && strings.HasSuffix(origin, suffix) {
res.Header().Set("Access-Control-Allow-Origin", origin)
break
}
} else if origin == allowed {
res.Header().Set("Access-Control-Allow-Origin", origin)
break
}
}
}
// HEAD requests omit the body
maybeWriteResponseBody := func(bytes []byte) { res.Write(bytes) }
isHEAD := req.Method == "HEAD"
if isHEAD {
maybeWriteResponseBody = func([]byte) { res.Write(nil) }
}
// Check the "Host" header to prevent DNS rebinding attacks
if strings.ContainsRune(req.Host, ':') {
// Try to strip off the port number
if host, _, err := net.SplitHostPort(req.Host); err == nil {
req.Host = host
}
}
if req.Host != "localhost" {
ok := false
for _, allowed := range h.hosts {
if req.Host == allowed {
ok = true
break
}
}
if !ok {
go h.notifyRequest(time.Since(start), req, http.StatusForbidden)
res.WriteHeader(http.StatusForbidden)
maybeWriteResponseBody([]byte(fmt.Sprintf("403 - Forbidden: The host %q is not allowed", req.Host)))
return
}
}
// Special-case the esbuild event stream
if req.Method == "GET" && req.URL.Path == "/esbuild" && req.Header.Get("Accept") == "text/event-stream" {
h.serveEventStream(start, req, res)
return
}
// Handle GET and HEAD requests
if (isHEAD || req.Method == "GET") && strings.HasPrefix(req.URL.Path, "/") {
queryPath := path.Clean(req.URL.Path)[1:]
result := h.rebuild()
// Requests fail if the build had errors
if len(result.Errors) > 0 {
res.Header().Set("Content-Type", "text/plain; charset=utf-8")
go h.notifyRequest(time.Since(start), req, http.StatusServiceUnavailable)
res.WriteHeader(http.StatusServiceUnavailable)
maybeWriteResponseBody([]byte(errorsToString(result.Errors)))
return
}
type fileToServe struct {
absPath string
contents fs.OpenedFile
}
var kind fs.EntryKind
var file fileToServe
dirEntries := make(map[string]bool)
fileEntries := make(map[string]bool)
// Check for a match with the results if we're within the output directory
if outdirQueryPath, ok := stripDirPrefix(queryPath, h.outdirPathPrefix, "/"); ok {
resultKind, inMemoryBytes, absPath, isImplicitIndexHTML := h.matchQueryPathToResult(outdirQueryPath, &result, dirEntries, fileEntries)
kind = resultKind
file = fileToServe{
absPath: absPath,
contents: &fs.InMemoryOpenedFile{Contents: inMemoryBytes},
}
if isImplicitIndexHTML {
queryPath = path.Join(queryPath, "index.html")
}
} else {
// Create a fake directory entry for the output path so that it appears to be a real directory
p := h.outdirPathPrefix
for p != "" {
var dir string
var base string
if slash := strings.IndexByte(p, '/'); slash == -1 {
base = p
} else {
dir = p[:slash]
base = p[slash+1:]
}
if dir == queryPath {
kind = fs.DirEntry
dirEntries[base] = true
break
}
p = dir
}
}
// Check for a file in the "servedir" directory
if h.servedir != "" && kind != fs.FileEntry {
absPath := h.fs.Join(h.servedir, queryPath)
if absDir := h.fs.Dir(absPath); absDir != absPath {
if entries, err, _ := h.fs.ReadDirectory(absDir); err == nil {
if entry, _ := entries.Get(h.fs.Base(absPath)); entry != nil && entry.Kind(h.fs) == fs.FileEntry {
if h.keyfileToLower != "" || h.certfileToLower != "" {
if toLower := strings.ToLower(absPath); toLower == h.keyfileToLower || toLower == h.certfileToLower {
// Don't serve the HTTPS key or certificate. This uses a case-
// insensitive check because some file systems are case-sensitive.
go h.notifyRequest(time.Since(start), req, http.StatusForbidden)
res.WriteHeader(http.StatusForbidden)
maybeWriteResponseBody([]byte("403 - Forbidden"))
return
}
}
if contents, err, _ := h.fs.OpenFile(absPath); err == nil {
defer contents.Close()
file = fileToServe{absPath: absPath, contents: contents}
kind = fs.FileEntry
} else if err != syscall.ENOENT {
go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
res.WriteHeader(http.StatusInternalServerError)
maybeWriteResponseBody([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
return
}
}
}
}
}
// Check for a directory in the "servedir" directory
var servedirIndexName string
if h.servedir != "" && kind != fs.FileEntry {
if entries, err, _ := h.fs.ReadDirectory(h.fs.Join(h.servedir, queryPath)); err == nil {
kind = fs.DirEntry
for _, name := range entries.SortedKeys() {
entry, _ := entries.Get(name)
switch entry.Kind(h.fs) {
case fs.DirEntry:
dirEntries[name] = true
case fs.FileEntry:
fileEntries[name] = true
if name == "index.html" {
servedirIndexName = name
}
}
}
} else if err != syscall.ENOENT {
go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
res.WriteHeader(http.StatusInternalServerError)
maybeWriteResponseBody([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
return
}
}
// Redirect to a trailing slash for directories
if kind == fs.DirEntry && !strings.HasSuffix(req.URL.Path, "/") {
res.Header().Set("Location", path.Clean(req.URL.Path)+"/")
go h.notifyRequest(time.Since(start), req, http.StatusFound)
res.WriteHeader(http.StatusFound)
maybeWriteResponseBody(nil)
return
}
// Serve an "index.html" file if present
if kind == fs.DirEntry && servedirIndexName != "" {
queryPath += "/" + servedirIndexName
absPath := h.fs.Join(h.servedir, queryPath)
if contents, err, _ := h.fs.OpenFile(absPath); err == nil {
defer contents.Close()
file = fileToServe{absPath: absPath, contents: contents}
kind = fs.FileEntry
} else if err != syscall.ENOENT {
go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
res.WriteHeader(http.StatusInternalServerError)
maybeWriteResponseBody([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
return
}
}
// Serve the fallback HTML page if one was provided
if kind != fs.FileEntry && h.fallback != "" {
if contents, err, _ := h.fs.OpenFile(h.fallback); err == nil {
defer contents.Close()
file = fileToServe{absPath: h.fallback, contents: contents}
kind = fs.FileEntry
} else if err != syscall.ENOENT {
go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
res.WriteHeader(http.StatusInternalServerError)
maybeWriteResponseBody([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
return
}
}
// Serve a file
if kind == fs.FileEntry {
// Default to serving the whole file
status := http.StatusOK
fileContentsLen := file.contents.Len()
begin := 0
end := fileContentsLen
isRange := false
// Handle range requests so that video playback works in Safari
if rangeBegin, rangeEnd, ok := parseRangeHeader(req.Header.Get("Range"), fileContentsLen); ok && rangeBegin < rangeEnd {
// Note: The content range is inclusive so subtract 1 from the end
isRange = true
begin = rangeBegin
end = rangeEnd
status = http.StatusPartialContent
}
// Try to read the range from the file, which may fail
fileBytes, err := file.contents.Read(begin, end)
if err != nil {
go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
res.WriteHeader(http.StatusInternalServerError)
maybeWriteResponseBody([]byte(fmt.Sprintf("500 - Internal server error: %s", err.Error())))
return
}
// If we get here, the request was successful
if contentType := helpers.MimeTypeByExtension(h.fs.Ext(file.absPath)); contentType != "" {
res.Header().Set("Content-Type", contentType)
} else {
res.Header().Set("Content-Type", "application/octet-stream")
}
if isRange {
res.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", begin, end-1, fileContentsLen))
}
res.Header().Set("Content-Length", fmt.Sprintf("%d", len(fileBytes)))
go h.notifyRequest(time.Since(start), req, status)
res.WriteHeader(status)
maybeWriteResponseBody(fileBytes)
return
}
// Serve a directory listing
if kind == fs.DirEntry {
html := respondWithDirList(queryPath, dirEntries, fileEntries)
res.Header().Set("Content-Type", "text/html; charset=utf-8")
res.Header().Set("Content-Length", fmt.Sprintf("%d", len(html)))
go h.notifyRequest(time.Since(start), req, http.StatusOK)
maybeWriteResponseBody(html)
return
}
}
// Satisfy requests for "favicon.ico" to avoid errors in Firefox developer tools
if req.Method == "GET" && req.URL.Path == "/favicon.ico" {
for _, encoding := range strings.Split(req.Header.Get("Accept-Encoding"), ",") {
if semi := strings.IndexByte(encoding, ';'); semi >= 0 {
encoding = encoding[:semi]
}
if strings.TrimSpace(encoding) == "gzip" {
res.Header().Set("Content-Encoding", "gzip")
res.Header().Set("Content-Type", "image/vnd.microsoft.icon")
go h.notifyRequest(time.Since(start), req, http.StatusOK)
maybeWriteResponseBody(favicon_ico_gz)
return
}
}
}
// Default to a 404
res.Header().Set("Content-Type", "text/plain; charset=utf-8")
go h.notifyRequest(time.Since(start), req, http.StatusNotFound)
res.WriteHeader(http.StatusNotFound)
maybeWriteResponseBody([]byte("404 - Not Found"))
}
// This exposes an event stream to clients using server-sent events:
// https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events
//
// The stream stays open until either the client disconnects or the server is
// stopped (which closes the per-client channel, see "stop"). While open, each
// value received on the channel is forwarded as one SSE message, and a
// comment-only keep-alive (":") is written after 30 seconds of inactivity.
func (h *apiHandler) serveEventStream(start time.Time, req *http.Request, res http.ResponseWriter) {
	if flusher, ok := res.(http.Flusher); ok {
		// NOTE(review): http.CloseNotifier is deprecated in favor of
		// "req.Context().Done()" — confirm whether compatibility with older
		// Go versions is the reason it is still used here.
		if closer, ok := res.(http.CloseNotifier); ok {
			// Add a new stream to the array of active streams
			stream := make(chan serverSentEvent)
			h.mutex.Lock()
			h.activeStreams = append(h.activeStreams, stream)
			h.mutex.Unlock()

			// Start the event stream
			res.Header().Set("Content-Type", "text/event-stream")
			res.Header().Set("Connection", "keep-alive")
			res.Header().Set("Cache-Control", "no-cache")
			go h.notifyRequest(time.Since(start), req, http.StatusOK)
			res.WriteHeader(http.StatusOK)
			// Tell the client to wait 500ms before reconnecting after a drop
			res.Write([]byte("retry: 500\n"))
			flusher.Flush()

			// Send incoming messages over the stream
			streamWasClosed := make(chan struct{}, 1)
			go func() {
				for {
					var msg []byte
					select {
					case next, ok := <-stream:
						if !ok {
							// The channel was closed (by "stop" or by the
							// cleanup code further down in this function)
							streamWasClosed <- struct{}{}
							return
						}
						msg = []byte(fmt.Sprintf("event: %s\ndata: %s\n\n", next.event, next.data))
					case <-time.After(30 * time.Second):
						// Send an occasional keep-alive
						msg = []byte(":\n\n")
					}
					if _, err := res.Write(msg); err != nil {
						// The write failed (e.g. the client hung up), so stop
						// forwarding messages for this stream
						return
					}
					flusher.Flush()
				}
			}()

			// When the stream is closed (either by them or by us), remove it
			// from the array and end the response body to clean up resources
			select {
			case <-closer.CloseNotify():
			case <-streamWasClosed:
			}
			h.mutex.Lock()
			for i := range h.activeStreams {
				if h.activeStreams[i] == stream {
					// Swap-remove: overwrite this entry with the last one
					end := len(h.activeStreams) - 1
					h.activeStreams[i] = h.activeStreams[end]
					h.activeStreams = h.activeStreams[:end]

					// Only close the stream if it's present in the list of active
					// streams. Stopping the server can also call close on this
					// stream and Go only lets you close a channel once before
					// panicking, so we don't want to close it twice.
					close(stream)
					break
				}
			}
			h.mutex.Unlock()
			return
		}
	}

	// If we get here, then event streaming isn't possible
	go h.notifyRequest(time.Since(start), req, http.StatusInternalServerError)
	res.WriteHeader(http.StatusInternalServerError)
	res.Write([]byte("500 - Event stream error"))
}
// broadcastBuildResult diffs the output files of the latest build against the
// previous one and, if anything was added, removed, or updated, broadcasts a
// JSON-encoded "change" event to every connected server-sent event stream.
func (h *apiHandler) broadcastBuildResult(result BuildResult, newHashes map[string]string) {
	h.mutex.Lock()

	var added []string
	var removed []string
	var updated []string

	// Convert an absolute output path into the URL a client would use to
	// fetch it, accounting for "publicPath" and the outdir path prefix.
	// Returns false for paths outside the output directory.
	urlForPath := func(absPath string) (string, bool) {
		if relPath, ok := stripDirPrefix(absPath, h.absOutputDir, "\\/"); ok {
			relPath = strings.ReplaceAll(relPath, "\\", "/")
			relPath = path.Join(h.outdirPathPrefix, relPath)
			publicPath := h.publicPath
			slash := "/"
			if publicPath != "" && strings.HasSuffix(h.publicPath, "/") {
				// Avoid a double slash when "publicPath" already ends in one
				slash = ""
			}
			return fmt.Sprintf("%s%s%s", publicPath, slash, relPath), true
		}
		return "", false
	}

	// Diff the old and new states, but only if the build succeeded. We shouldn't
	// make it appear as if all files were removed when there is a build error.
	if len(result.Errors) == 0 {
		oldHashes := h.currentHashes
		h.currentHashes = newHashes

		for absPath, newHash := range newHashes {
			if oldHash, ok := oldHashes[absPath]; !ok {
				// Present now but not before: added
				if url, ok := urlForPath(absPath); ok {
					added = append(added, url)
				}
			} else if newHash != oldHash {
				// Present in both builds but with different contents: updated
				if url, ok := urlForPath(absPath); ok {
					updated = append(updated, url)
				}
			}
		}

		for absPath := range oldHashes {
			if _, ok := newHashes[absPath]; !ok {
				// Present before but not now: removed
				if url, ok := urlForPath(absPath); ok {
					removed = append(removed, url)
				}
			}
		}
	}

	// Only notify listeners if there's a change that's worth sending. That way
	// you can implement a simple "reload on any change" script without having
	// to do this check in the script.
	if len(added) > 0 || len(removed) > 0 || len(updated) > 0 {
		// Sort so the event payload is deterministic
		sort.Strings(added)
		sort.Strings(removed)
		sort.Strings(updated)

		// Assemble the diff
		var sb strings.Builder
		sb.WriteString("{\"added\":[")
		for i, path := range added {
			if i > 0 {
				sb.WriteRune(',')
			}
			sb.Write(helpers.QuoteForJSON(path, false))
		}
		sb.WriteString("],\"removed\":[")
		for i, path := range removed {
			if i > 0 {
				sb.WriteRune(',')
			}
			sb.Write(helpers.QuoteForJSON(path, false))
		}
		sb.WriteString("],\"updated\":[")
		for i, path := range updated {
			if i > 0 {
				sb.WriteRune(',')
			}
			sb.Write(helpers.QuoteForJSON(path, false))
		}
		sb.WriteString("]}")
		json := sb.String()

		// Broadcast the diff to all streams. NOTE(review): these sends happen
		// while "h.mutex" is held, which relies on each stream's reader
		// goroutine (see "serveEventStream") staying responsive — confirm a
		// disconnected client cannot stall this loop.
		for _, stream := range h.activeStreams {
			stream <- serverSentEvent{event: "change", data: json}
		}
	}

	h.mutex.Unlock()
}
// Handle enough of the "Range" request header specification so that video
// playback works in Safari. On success this returns a half-open byte
// interval [begin, end) plus true. Multiple ranges ("bytes=0-1,5-9") and
// suffix ranges ("bytes=-500") are deliberately unsupported; returning false
// makes the caller fall back to serving the entire file with a 200, which is
// always a valid response to a "Range" request.
func parseRangeHeader(r string, contentLength int) (int, int, bool) {
	if strings.HasPrefix(r, "bytes=") {
		r = r[len("bytes="):]
		if dash := strings.IndexByte(r, '-'); dash != -1 {
			// Note: The range is inclusive so the limit is deliberately "length - 1"
			if begin, ok := parseRangeInt(r[:dash], contentLength-1); ok {
				if tail := r[dash+1:]; tail == "" {
					// An omitted last-byte-pos ("bytes=N-") means "through the
					// end of the representation" (RFC 7233 section 2.1). This
					// form is commonly sent by browsers when seeking in media.
					return begin, contentLength, true
				} else if end, ok := parseRangeInt(tail, contentLength-1); ok {
					// Note: The range is inclusive so a range of "0-1" is two bytes long
					return begin, end + 1, true
				}
			}
		}
	}
	return 0, 0, false
}

// parseRangeInt parses a non-negative decimal integer no greater than
// "maxValue". It rejects empty strings, non-digit characters, and values
// that exceed the limit (which also guards against integer overflow).
func parseRangeInt(text string, maxValue int) (int, bool) {
	if text == "" {
		return 0, false
	}
	value := 0
	for _, c := range text {
		if c < '0' || c > '9' {
			return 0, false
		}
		value = value*10 + int(c-'0')
		if value > maxValue {
			return 0, false
		}
	}
	return value, true
}
// matchQueryPathToResult checks the in-memory build output files for a match
// against "queryPath". It returns an exact file match, an implicit
// "index.html" match for a directory (the final result is true in that
// case), or a directory entry after filling "dirEntries"/"fileEntries" with
// the directory's immediate children.
func (h *apiHandler) matchQueryPathToResult(
	queryPath string,
	result *BuildResult,
	dirEntries map[string]bool,
	fileEntries map[string]bool,
) (fs.EntryKind, []byte, string, bool) {
	dirPrefix := queryPath
	if dirPrefix != "" {
		dirPrefix += "/"
	}
	foundDir := false

	// Check the output files for a match
	for _, outputFile := range result.OutputFiles {
		relPath, ok := h.fs.Rel(h.absOutputDir, outputFile.Path)
		if !ok {
			continue
		}
		relPath = strings.ReplaceAll(relPath, "\\", "/")

		// An exact match
		if relPath == queryPath {
			return fs.FileEntry, outputFile.Contents, outputFile.Path, false
		}

		// Serve an "index.html" file if present
		if dir, base := path.Split(relPath); base == "index.html" && dirPrefix == dir {
			return fs.FileEntry, outputFile.Contents, outputFile.Path, true
		}

		// A match inside this directory
		if strings.HasPrefix(relPath, dirPrefix) {
			foundDir = true
			entry := relPath[len(dirPrefix):]
			if slash := strings.IndexByte(entry, '/'); slash == -1 {
				fileEntries[entry] = true
			} else {
				dirEntries[entry[:slash]] = true
			}
		}
	}

	// Treat this as a directory if it's non-empty
	if foundDir {
		return fs.DirEntry, nil, "", false
	}

	return 0, nil, "", false
}
// respondWithDirList renders a simple HTML directory listing for "queryPath"
// with breadcrumb links for each ancestor directory, a "../" link to the
// parent, and one link per child directory and file.
//
// Fix: the original code declared a local variable named "strings", shadowing
// the "strings" package for the rest of the function; the local is now named
// "names" so the package identifier stays usable.
func respondWithDirList(queryPath string, dirEntries map[string]bool, fileEntries map[string]bool) []byte {
	queryPath = "/" + queryPath
	queryDir := queryPath
	if queryDir != "/" {
		queryDir += "/"
	}
	html := strings.Builder{}
	html.WriteString("<!doctype html>\n")
	html.WriteString("<meta charset=\"utf8\">\n")
	html.WriteString("<style>\n")
	html.WriteString("body { margin: 30px; color: #222; background: #fff; font: 16px/22px sans-serif; }\n")
	html.WriteString("a { color: inherit; text-decoration: none; }\n")
	html.WriteString("a:hover { text-decoration: underline; }\n")
	html.WriteString("a:visited { color: #777; }\n")
	html.WriteString("@media (prefers-color-scheme: dark) {\n")
	html.WriteString(" body { color: #fff; background: #222; }\n")
	html.WriteString(" a:visited { color: #aaa; }\n")
	html.WriteString("}\n")
	html.WriteString("</style>\n")
	html.WriteString("<title>Directory: ")
	html.WriteString(escapeForHTML(queryDir))
	html.WriteString("</title>\n")
	html.WriteString("<h1>Directory: ")

	// Render each path component as a breadcrumb; all but the last are links
	var parts []string
	if queryPath == "/" {
		parts = []string{""}
	} else {
		parts = strings.Split(queryPath, "/")
	}
	for i, part := range parts {
		if i+1 < len(parts) {
			html.WriteString("<a href=\"")
			html.WriteString(escapeForAttribute(strings.Join(parts[:i+1], "/")))
			html.WriteString("/\">")
		}
		html.WriteString(escapeForHTML(part))
		html.WriteString("/")
		if i+1 < len(parts) {
			html.WriteString("</a>")
		}
	}
	html.WriteString("</h1>\n")

	// Link to the parent directory
	if queryPath != "/" {
		parentDir := path.Dir(queryPath)
		if parentDir != "/" {
			parentDir += "/"
		}
		html.WriteString(fmt.Sprintf("<div>📁 <a href=\"%s\">../</a></div>\n", escapeForAttribute(parentDir)))
	}

	// Link to child directories
	names := make([]string, 0, len(dirEntries)+len(fileEntries))
	for entry := range dirEntries {
		names = append(names, entry)
	}
	sort.Strings(names)
	for _, entry := range names {
		html.WriteString(fmt.Sprintf("<div>📁 <a href=\"%s/\">%s/</a></div>\n", escapeForAttribute(path.Join(queryPath, entry)), escapeForHTML(entry)))
	}

	// Link to files in the directory
	names = names[:0]
	for entry := range fileEntries {
		names = append(names, entry)
	}
	sort.Strings(names)
	for _, entry := range names {
		html.WriteString(fmt.Sprintf("<div>📄 <a href=\"%s\">%s</a></div>\n", escapeForAttribute(path.Join(queryPath, entry)), escapeForHTML(entry)))
	}

	return []byte(html.String())
}
// This is used to make error messages platform-independent: absolute paths
// are rewritten relative to the current working directory with forward
// slashes, and paths that can't be made relative are returned unchanged.
func prettyPrintPath(fs fs.FS, path string) string {
	relPath, ok := fs.Rel(fs.Cwd(), path)
	if !ok {
		return path
	}
	return strings.ReplaceAll(relPath, "\\", "/")
}
// Serve starts esbuild's built-in development server for this build context.
// It validates the serve options, binds a TCP listener (preferring ports
// 8000-8009 when no port is given), constructs the request handler, and runs
// the HTTP(S) server plus the first build in background goroutines. It
// returns the bound port and the list of hosts the server is reachable at.
// Serve may only be called once per context.
func (ctx *internalContext) Serve(serveOptions ServeOptions) (ServeResult, error) {
	ctx.mutex.Lock()
	defer ctx.mutex.Unlock()

	// Ignore disposed contexts
	if ctx.didDispose {
		return ServeResult{}, errors.New("Cannot serve a disposed context")
	}

	// Don't allow starting serve mode multiple times
	if ctx.handler != nil {
		return ServeResult{}, errors.New("Serve mode has already been enabled")
	}

	// Require the HTTPS key and certificate to be specified together
	if (serveOptions.Keyfile != "") != (serveOptions.Certfile != "") {
		return ServeResult{}, errors.New("Must specify both key and certificate for HTTPS")
	}

	// Validate the "servedir" path
	if serveOptions.Servedir != "" {
		if absPath, ok := ctx.realFS.Abs(serveOptions.Servedir); ok {
			serveOptions.Servedir = absPath
		} else {
			return ServeResult{}, fmt.Errorf("Invalid serve path: %s", serveOptions.Servedir)
		}
	}

	// Validate the "fallback" path
	if serveOptions.Fallback != "" {
		if absPath, ok := ctx.realFS.Abs(serveOptions.Fallback); ok {
			serveOptions.Fallback = absPath
		} else {
			return ServeResult{}, fmt.Errorf("Invalid fallback path: %s", serveOptions.Fallback)
		}
	}

	// Validate the CORS origins: at most one "*" wildcard is allowed per origin
	for _, origin := range serveOptions.CORS.Origin {
		if star := strings.IndexByte(origin, '*'); star >= 0 && strings.ContainsRune(origin[star+1:], '*') {
			return ServeResult{}, fmt.Errorf("Invalid origin: %s", origin)
		}
	}

	// Stuff related to the output directory only matters if there are entry points
	outdirPathPrefix := ""
	if len(ctx.args.entryPoints) > 0 {
		// Don't allow serving when builds are written to stdout
		if ctx.args.options.WriteToStdout {
			what := "entry points"
			if len(ctx.args.entryPoints) == 1 {
				what = "an entry point"
			}
			return ServeResult{}, fmt.Errorf("Cannot serve %s without an output path", what)
		}

		// Compute the output path prefix
		if serveOptions.Servedir != "" && ctx.args.options.AbsOutputDir != "" {
			// Make sure the output directory is contained in the "servedir" directory
			relPath, ok := ctx.realFS.Rel(serveOptions.Servedir, ctx.args.options.AbsOutputDir)
			if !ok {
				return ServeResult{}, fmt.Errorf(
					"Cannot compute relative path from %q to %q\n", serveOptions.Servedir, ctx.args.options.AbsOutputDir)
			}
			relPath = strings.ReplaceAll(relPath, "\\", "/") // Fix paths on Windows
			if relPath == ".." || strings.HasPrefix(relPath, "../") {
				return ServeResult{}, fmt.Errorf(
					"Output directory %q must be contained in serve directory %q",
					prettyPrintPath(ctx.realFS, ctx.args.options.AbsOutputDir),
					prettyPrintPath(ctx.realFS, serveOptions.Servedir),
				)
			}
			if relPath != "." {
				outdirPathPrefix = relPath
			}
		}
	}

	// Determine the host
	var listener net.Listener
	network := "tcp4"
	host := "0.0.0.0"
	hostIsIP := true
	if serveOptions.Host != "" {
		host = serveOptions.Host
		ip := net.ParseIP(host)

		// Only use "tcp4" if this is an IPv4 address, otherwise use "tcp"
		if ip == nil || ip.To4() == nil {
			network = "tcp"
		}

		// Remember whether the host is a valid IP address or not
		if ip == nil {
			hostIsIP = false
		}
	}

	// Pick the port
	if serveOptions.Port == 0 {
		// Default to picking a "800X" port
		for port := 8000; port <= 8009; port++ {
			if result, err := net.Listen(network, net.JoinHostPort(host, fmt.Sprintf("%d", port))); err == nil {
				listener = result
				break
			}
		}
	}
	if listener == nil {
		// Otherwise pick the provided port. This branch is also reached when
		// all ten default ports above were unavailable, in which case the
		// port is 0 and the OS picks one.
		port := serveOptions.Port
		if port < 0 || port > 0xFFFF {
			port = 0 // Pick a random port if the provided port is out of range
		}
		if result, err := net.Listen(network, net.JoinHostPort(host, fmt.Sprintf("%d", port))); err != nil {
			return ServeResult{}, err
		} else {
			listener = result
		}
	}

	// The listener is now open; grab its actual address
	addr := listener.Addr().String()

	// Extract the real port in case we passed a port of "0"
	var result ServeResult
	var boundHost string
	if host, text, err := net.SplitHostPort(addr); err == nil {
		if port, err := strconv.ParseInt(text, 10, 32); err == nil {
			result.Port = uint16(port)
			boundHost = host
		}
	}

	// Build up a list of all hosts we use
	if ip := net.ParseIP(boundHost); ip != nil && ip.IsUnspecified() {
		// If this is "0.0.0.0" or "::", list all relevant IP addresses
		if addrs, err := net.InterfaceAddrs(); err == nil {
			for _, addr := range addrs {
				if addr, ok := addr.(*net.IPNet); ok && (addr.IP.To4() != nil) == (ip.To4() != nil) && !addr.IP.IsLinkLocalUnicast() {
					result.Hosts = append(result.Hosts, addr.IP.String())
				}
			}
		}
	} else {
		result.Hosts = append(result.Hosts, boundHost)
	}

	// If the host isn't a valid IP address, add it to the list of allowed hosts.
	// For example, mapping "local.example.com" to "127.0.0.1" in "/etc/hosts"
	// and then using "--serve=local.example.com:8000" should make it possible to
	// successfully visit "http://local.example.com:8000/" in a browser.
	if !hostIsIP {
		result.Hosts = append(result.Hosts, host)
	}

	// HTTPS-related files should be absolute paths
	isHTTPS := serveOptions.Keyfile != "" && serveOptions.Certfile != ""
	if isHTTPS {
		serveOptions.Keyfile, _ = ctx.realFS.Abs(serveOptions.Keyfile)
		serveOptions.Certfile, _ = ctx.realFS.Abs(serveOptions.Certfile)
	}

	var shouldStop int32

	// The first build will just build normally
	handler := &apiHandler{
		onRequest:        serveOptions.OnRequest,
		outdirPathPrefix: outdirPathPrefix,
		absOutputDir:     ctx.args.options.AbsOutputDir,
		publicPath:       ctx.args.options.PublicPath,
		servedir:         serveOptions.Servedir,
		keyfileToLower:   strings.ToLower(serveOptions.Keyfile),
		certfileToLower:  strings.ToLower(serveOptions.Certfile),
		fallback:         serveOptions.Fallback,
		hosts:            append([]string{}, result.Hosts...),
		corsOrigin:       append([]string{}, serveOptions.CORS.Origin...),
		rebuild: func() BuildResult {
			if atomic.LoadInt32(&shouldStop) != 0 {
				// Don't start more rebuilds if we were told to stop
				return BuildResult{}
			} else {
				return ctx.activeBuildOrRecentBuildOrRebuild()
			}
		},
		fs: ctx.realFS,
	}

	// Create the server
	server := &http.Server{Addr: addr, Handler: handler}

	// When stop is called, block further rebuilds and then close the server
	handler.stop = func() {
		atomic.StoreInt32(&shouldStop, 1)

		// Close the server and wait for it to close
		server.Close()

		// Close all open event streams
		handler.mutex.Lock()
		for _, stream := range handler.activeStreams {
			close(stream)
		}
		handler.activeStreams = nil
		handler.mutex.Unlock()
		handler.serveWaitGroup.Wait()
	}

	// HACK: Go's HTTP API doesn't appear to provide a way to separate argument
	// validation errors from eventual network errors. Specifically "ServeTLS"
	// blocks for an arbitrarily long time before returning an error. So we
	// intercept the first call to "Accept" on the listener and say that the
	// serve call succeeded without an error if we get to that point.
	hack := &hackListener{Listener: listener}
	hack.waitGroup.Add(1)

	// Start the server and signal on "serveWaitGroup" when it stops
	handler.serveWaitGroup.Add(1)
	go func() {
		var err error
		if isHTTPS {
			err = server.ServeTLS(hack, serveOptions.Certfile, serveOptions.Keyfile)
		} else {
			err = server.Serve(hack)
		}
		if err != http.ErrServerClosed {
			// The server failed before accepting its first connection; record
			// the error and release the "waitGroup.Wait" below exactly once
			hack.mutex.Lock()
			if !hack.done {
				hack.done = true
				hack.err = err
				hack.waitGroup.Done()
			}
			hack.mutex.Unlock()
		}
		handler.serveWaitGroup.Done()
	}()

	// Return an error if the server failed to start accepting connections
	hack.waitGroup.Wait()
	if hack.err != nil {
		return ServeResult{}, hack.err
	}

	// There appears to be some issue with Linux (but not with macOS) where
	// destroying and recreating a server with the same port as the previous
	// server had sometimes causes subsequent connections to fail with
	// ECONNRESET (shows up in node as "Error: socket hang up").
	//
	// I think the problem is sort of that Go sets SO_REUSEADDR to 1 for listener
	// sockets (specifically in "setDefaultListenerSockopts"). In some ways this
	// is good, because it's more convenient for the user if the port is the
	// same. However, I believe this sends a TCP RST packet to kill any previous
	// connections. That can then be received by clients attempting to connect
	// to the new server.
	//
	// As a hack to work around this problem, we wait for an additional short
	// amount of time before returning. I observed this problem even with a 5ms
	// timeout but I did not observe this problem with a 10ms timeout. So I'm
	// setting this timeout to 50ms to be extra safe.
	time.Sleep(50 * time.Millisecond)

	// Only set the context handler if the server started successfully
	ctx.handler = handler

	// Print the URL(s) that the server can be reached at
	if ctx.args.logOptions.LogLevel <= logger.LevelInfo {
		printURLs(handler.hosts, result.Port, isHTTPS, ctx.args.logOptions.Color)
	}

	// Start the first build shortly after this function returns (but not
	// immediately so that stuff we print right after this will come first).
	//
	// This also helps the CLI not do two builds when serve and watch mode
	// are enabled together. Watch mode is enabled after serve mode because
	// we want the stderr output for watch to come after the stderr output for
	// serve, but watch mode will do another build if the current build is
	// not a watch mode build.
	go func() {
		time.Sleep(10 * time.Millisecond)
		handler.rebuild()
	}()

	return result, nil
}
type hackListener struct {
net.Listener
mutex sync.Mutex
waitGroup sync.WaitGroup
err error
done bool
}
func (hack *hackListener) Accept() (net.Conn, error) {
hack.mutex.Lock()
if !hack.done {
hack.done = true
hack.waitGroup.Done()
}
hack.mutex.Unlock()
return hack.Listener.Accept()
}
func printURLs(hosts []string, port uint16, https bool, useColor logger.UseColor) {
logger.PrintTextWithColor(os.Stderr, useColor, func(colors logger.Colors) string {
sb := strings.Builder{}
sb.WriteString(colors.Reset)
// Determine the host kinds
kinds := make([]string, len(hosts))
maxLen := 0
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/api_js_table.go | pkg/api/api_js_table.go | // This file was automatically generated by "js_table.ts"
package api
import "github.com/evanw/esbuild/internal/compat"
// EngineName identifies a JavaScript engine that can be used as a
// compatibility target. Values map one-to-one onto the internal
// "compat.Engine" enum via "convertEngineName" below.
//
// NOTE: This file is generated by "js_table.ts" — regenerate it instead of
// editing by hand.
type EngineName uint8

const (
	EngineChrome EngineName = iota
	EngineDeno
	EngineEdge
	EngineFirefox
	EngineHermes
	EngineIE
	EngineIOS
	EngineNode
	EngineOpera
	EngineRhino
	EngineSafari
)
// convertEngineName maps a public API "EngineName" value to the corresponding
// internal "compat.Engine" value. It panics for values outside the enum,
// which indicates a programmer error rather than bad user input.
//
// NOTE: This file is generated by "js_table.ts" — regenerate it instead of
// editing by hand.
func convertEngineName(engine EngineName) compat.Engine {
	switch engine {
	case EngineChrome:
		return compat.Chrome
	case EngineDeno:
		return compat.Deno
	case EngineEdge:
		return compat.Edge
	case EngineFirefox:
		return compat.Firefox
	case EngineHermes:
		return compat.Hermes
	case EngineIE:
		return compat.IE
	case EngineIOS:
		return compat.IOS
	case EngineNode:
		return compat.Node
	case EngineOpera:
		return compat.Opera
	case EngineRhino:
		return compat.Rhino
	case EngineSafari:
		return compat.Safari
	default:
		panic("Invalid engine name")
	}
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/serve_wasm.go | pkg/api/serve_wasm.go | //go:build js && wasm
// +build js,wasm
package api
import "fmt"
// Remove the serve API in the WebAssembly build. This removes 2.7mb of stuff.
// Calling Serve in a WebAssembly build always fails with an explanatory error.
func (*internalContext) Serve(ServeOptions) (ServeResult, error) {
	var result ServeResult
	err := fmt.Errorf("The \"serve\" API is not supported when using WebAssembly")
	return result, err
}
// apiHandler is a stub for WebAssembly builds: it satisfies references from
// the rest of the package while the real HTTP-serving implementation is
// compiled out.
type apiHandler struct {
}

// broadcastBuildResult is a no-op in WebAssembly builds.
func (*apiHandler) broadcastBuildResult(BuildResult, map[string]string) {
}

// stop is a no-op in WebAssembly builds.
func (*apiHandler) stop() {
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/watcher.go | pkg/api/watcher.go | package api
// This file implements a polling file watcher for esbuild (i.e. it detects
// when files are changed by repeatedly checking their contents). Polling is
// used instead of more efficient platform-specific file system APIs because:
//
// * Go's standard library doesn't have built-in APIs for file watching
// * Using platform-specific APIs means using cgo, which I want to avoid
// * Polling is cross-platform and esbuild needs to work on 20+ platforms
// * Platform-specific APIs might be unreliable and could introduce bugs
//
// That said, this polling system is designed to use relatively little CPU vs.
// a more traditional polling system that scans the whole directory tree at
// once. The file system is still scanned regularly but each scan only checks
// a random subset of your files, which means a change to a file will be picked
// up soon after the change is made but not necessarily instantly.
//
// With the current heuristics, large projects should be completely scanned
// around every 2 seconds so in the worst case it could take up to 2 seconds
// for a change to be noticed. However, after a change has been noticed the
// change's path goes on a short list of recently changed paths which are
// checked on every scan, so further changes to recently changed files should
// be noticed almost instantly.
import (
"fmt"
"math/rand"
"os"
"sync"
"sync/atomic"
"time"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/resolver"
)
// The time to wait between watch intervals
const watchIntervalSleep = 100 * time.Millisecond

// The maximum number of recently-edited items to check every interval
const maxRecentItemCount = 16

// The minimum number of non-recent items to check every interval
const minItemCountPerIter = 64

// The maximum number of intervals before a change is detected. Together with
// "watchIntervalSleep" this bounds a full sweep at roughly 2 seconds.
const maxIntervalsBeforeUpdate = 20
// watcher holds the state for the polling file watcher described in the
// comment at the top of this file. Mutable scan state is guarded by "mutex";
// "shouldStop" is accessed atomically by the polling goroutine.
type watcher struct {
	// Paths from the last build along with their dirty-check closures
	data    fs.WatchData
	fs      fs.FS
	rebuild func() fs.WatchData

	// NOTE(review): despite the time.Duration type this appears to hold a raw
	// millisecond count (it's multiplied by time.Millisecond in "start" and
	// printed with "%dms" in "setWatchData") — confirm at the assignment site.
	delayInMS time.Duration

	// Recently-changed paths, re-checked on every interval
	recentItems []string

	// The shuffled remainder of the current full scan cycle
	itemsToScan       []string
	mutex             sync.Mutex
	itemsPerIteration int
	shouldStop        int32
	shouldLog         bool
	useColor          logger.UseColor
	pathStyle         logger.PathStyle
	stopWaitGroup     sync.WaitGroup
}
// setWatchData swaps in the watch data produced by the most recent build. It
// also prints the "build finished, watching for changes" banner after the
// very first build and discards recently-changed paths that are no longer
// part of the build.
func (w *watcher) setWatchData(data fs.WatchData) {
	w.mutex.Lock()
	defer w.mutex.Unlock()

	// Print something for the end of the first build
	if w.shouldLog && w.data.Paths == nil {
		logger.PrintTextWithColor(os.Stderr, w.useColor, func(colors logger.Colors) string {
			var delay string
			if w.delayInMS > 0 {
				delay = fmt.Sprintf(" with a %dms delay", w.delayInMS)
			}
			return fmt.Sprintf("%s[watch] build finished, watching for changes%s...%s\n", colors.Dim, delay, colors.Reset)
		})
	}

	w.data = data
	w.itemsToScan = w.itemsToScan[:0] // Reuse memory

	// Remove any recent items that weren't a part of the latest build,
	// filtering in place to reuse the backing array
	kept := w.recentItems[:0]
	for _, recent := range w.recentItems {
		if data.Paths[recent] != nil {
			kept = append(kept, recent)
		}
	}
	w.recentItems = kept
}
// start launches the background polling goroutine. It wakes up every
// "watchIntervalSleep", asks "tryToFindDirtyPath" whether anything changed,
// and if so runs a rebuild (with optional log output before and after). The
// goroutine runs until "stop" is called.
func (w *watcher) start() {
	w.stopWaitGroup.Add(1)

	go func() {
		// Note: Do not change these log messages without a breaking version change.
		// People want to run regexes over esbuild's stderr stream to look for these
		// messages instead of using esbuild's API.
		for atomic.LoadInt32(&w.shouldStop) == 0 {
			// Sleep for the watch interval
			time.Sleep(watchIntervalSleep)

			// Rebuild if we're dirty
			if absPath := w.tryToFindDirtyPath(); absPath != "" {
				// Optionally wait before rebuilding.
				// NOTE(review): "delayInMS" is a time.Duration multiplied by
				// time.Millisecond here, so it appears to hold a raw
				// millisecond count — confirm at the assignment site.
				if w.delayInMS > 0 {
					time.Sleep(w.delayInMS * time.Millisecond)
				}

				if w.shouldLog {
					logger.PrintTextWithColor(os.Stderr, w.useColor, func(colors logger.Colors) string {
						prettyPaths := resolver.MakePrettyPaths(w.fs, logger.Path{Text: absPath, Namespace: "file"})
						return fmt.Sprintf("%s[watch] build started (change: %q)%s\n",
							colors.Dim, prettyPaths.Select(w.pathStyle), colors.Reset)
					})
				}

				// Run the build
				w.setWatchData(w.rebuild())

				if w.shouldLog {
					logger.PrintTextWithColor(os.Stderr, w.useColor, func(colors logger.Colors) string {
						return fmt.Sprintf("%s[watch] build finished%s\n", colors.Dim, colors.Reset)
					})
				}
			}
		}
		w.stopWaitGroup.Done()
	}()
}
// stop asks the polling goroutine started by "start" to exit and blocks
// until it has done so.
func (w *watcher) stop() {
	atomic.StoreInt32(&w.shouldStop, 1)
	w.stopWaitGroup.Wait()
}
// tryToFindDirtyPath checks a subset of the watched files for modifications
// and returns the first dirty path found, or "" if nothing changed. Each call
// re-checks every recently-changed path plus a fixed-size chunk of the
// remaining paths, so a full sweep completes within roughly
// "maxIntervalsBeforeUpdate" calls while recent edits are noticed quickly.
func (w *watcher) tryToFindDirtyPath() string {
	defer w.mutex.Unlock()
	w.mutex.Lock()

	// If we ran out of items to scan, fill the items back up in a random order
	if len(w.itemsToScan) == 0 {
		items := w.itemsToScan[:0] // Reuse memory
		for path := range w.data.Paths {
			items = append(items, path)
		}
		// NOTE(review): rand.Seed is deprecated as of Go 1.20 (the global
		// generator is seeded automatically), and reseeding on every refill is
		// unnecessary — consider removing this call or using math/rand/v2.
		rand.Seed(time.Now().UnixNano())
		for i := int32(len(items) - 1); i > 0; i-- { // Fisher-Yates shuffle
			j := rand.Int31n(i + 1)
			items[i], items[j] = items[j], items[i]
		}
		w.itemsToScan = items

		// Determine how many items to check every iteration, rounded up
		perIter := (len(items) + maxIntervalsBeforeUpdate - 1) / maxIntervalsBeforeUpdate
		if perIter < minItemCountPerIter {
			perIter = minItemCountPerIter
		}
		w.itemsPerIteration = perIter
	}

	// Always check all recent items every iteration
	for i, path := range w.recentItems {
		if dirtyPath := w.data.Paths[path](); dirtyPath != "" {
			// Move this path to the back of the list (i.e. the "most recent" position)
			copy(w.recentItems[i:], w.recentItems[i+1:])
			w.recentItems[len(w.recentItems)-1] = path
			return dirtyPath
		}
	}

	// Check a constant number of items every iteration
	remainingCount := len(w.itemsToScan) - w.itemsPerIteration
	if remainingCount < 0 {
		remainingCount = 0
	}
	// Take the chunk from the end so the slice shrinks without copying
	toCheck, remaining := w.itemsToScan[remainingCount:], w.itemsToScan[:remainingCount]
	w.itemsToScan = remaining

	// Check if any of the entries in this iteration have been modified
	for _, path := range toCheck {
		if dirtyPath := w.data.Paths[path](); dirtyPath != "" {
			// Mark this item as recent by adding it to the back of the list
			w.recentItems = append(w.recentItems, path)
			if len(w.recentItems) > maxRecentItemCount {
				// Remove items from the front of the list when we hit the limit
				copy(w.recentItems, w.recentItems[1:])
				w.recentItems = w.recentItems[:maxRecentItemCount]
			}
			return dirtyPath
		}
	}

	return ""
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/favicon.go | pkg/api/favicon.go | package api
// This is the "favicon.ico" file used by esbuild's built-in development
// server. The bytes are gzip-compressed (note the 0x1F 0x8B gzip magic
// number) and are written verbatim with "Content-Encoding: gzip" to clients
// whose "Accept-Encoding" header includes gzip; see the "/favicon.ico"
// handling in the request handler.
var favicon_ico_gz = []byte{
	0x1F, 0x8B, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x03, 0x63, 0x60, 0x60, 0x64, 0x60, 0x62,
	0x10, 0x10, 0x60, 0x00, 0xD2, 0x0A, 0x0C, 0x19, 0x2C, 0x0C, 0x0C, 0x6A, 0x0C, 0x0C, 0x0C, 0x0A,
	0x0A, 0x10, 0xBE, 0x86, 0x20, 0x03, 0x43, 0x1F, 0x50, 0x4C, 0x03, 0x28, 0x26, 0x00, 0x12, 0x67,
	0x80, 0x88, 0x13, 0x04, 0xE7, 0xFF, 0xFF, 0x27, 0x09, 0xD3, 0x4A, 0xFF, 0xC9, 0xEF, 0xFF, 0x59,
	0x97, 0x9F, 0x81, 0xAB, 0x63, 0x5B, 0x72, 0xF2, 0x3F, 0xC3, 0x99, 0xDF, 0x44, 0xEB, 0xE7, 0x29,
	0xE9, 0xF9, 0x2F, 0xAE, 0xA8, 0x02, 0xD6, 0xC7, 0x74, 0xE0, 0xCD, 0x7F, 0x09, 0x45, 0xE5, 0xFF,
	0x02, 0xA1, 0xA9, 0x98, 0x66, 0xE0, 0xB1, 0x5F, 0xC8, 0x27, 0x12, 0x6E, 0x06, 0xFB, 0xEC, 0x7D,
	0xFF, 0x25, 0xE4, 0x14, 0x30, 0xCD, 0xC0, 0xE7, 0x7F, 0xA0, 0x19, 0xC2, 0x0E, 0xDE, 0x60, 0x33,
	0x58, 0x36, 0x5C, 0xFF, 0xCF, 0x31, 0x79, 0xF3, 0x7F, 0x49, 0x49, 0xC9, 0xFF, 0xFC, 0xB1, 0xF9,
	0x44, 0xE9, 0x47, 0xB6, 0x93, 0xF1, 0xD8, 0x17, 0x14, 0xF7, 0x10, 0xD2, 0x4F, 0x94, 0x5E, 0x02,
	0xFA, 0x05, 0x42, 0x53, 0xC0, 0x7E, 0x85, 0xE9, 0xC7, 0xD0, 0x4B, 0xCB, 0xF8, 0xA7, 0x85, 0xFE,
	0x9A, 0x99, 0x68, 0x78, 0x56, 0x3D, 0xF9, 0xFA, 0xB1, 0xE8, 0x25, 0x5A, 0x3F, 0x0E, 0xBD, 0x44,
	0xE9, 0xC7, 0xA3, 0x97, 0xA0, 0x7E, 0x02, 0x7A, 0x29, 0x00, 0x1A, 0xD0, 0x32, 0xC6, 0x81, 0xD8,
	0x72, 0x86, 0xDC, 0xF8, 0xA6, 0x34, 0x7D, 0x8C, 0xDA, 0x3F, 0x6A, 0x3F, 0x01, 0xFB, 0x99, 0x77,
	0x3C, 0xFA, 0x2F, 0xEC, 0x1E, 0x0C, 0xA6, 0xD1, 0xE5, 0x58, 0xD7, 0x5C, 0x06, 0xCB, 0x31, 0x1D,
	0xF9, 0x48, 0x13, 0xFB, 0x41, 0x76, 0x8A, 0x19, 0x98, 0x81, 0xEB, 0x09, 0x11, 0x1B, 0x57, 0x14,
	0x7B, 0x40, 0x76, 0x4B, 0xA8, 0xA8, 0x63, 0x95, 0xA3, 0x85, 0xFD, 0xE8, 0xF6, 0xE0, 0x93, 0xA3,
	0x76, 0xF8, 0x53, 0xCD, 0x0D, 0x64, 0xA6, 0x3F, 0xAA, 0xB9, 0x81, 0x82, 0xF4, 0x4F, 0x15, 0x37,
	0x50, 0x98, 0xFF, 0xD8, 0x16, 0x1E, 0x85, 0xDB, 0x01, 0xC2, 0x02, 0x71, 0x05, 0x70, 0x39, 0x8E,
	0x69, 0xDB, 0x71, 0xCA, 0x0D, 0x75, 0xFF, 0x0F, 0x64, 0xFC, 0x0F, 0x64, 0xFA, 0x1F, 0xE8, 0xFC,
	0x3F, 0xD0, 0xE5, 0xDF, 0x40, 0x97, 0xFF, 0xA3, 0xF5, 0xEF, 0xA8, 0xFD, 0x44, 0x61, 0x8C, 0xBE,
	0x0C, 0x36, 0x3C, 0xA7, 0x7E, 0xE0, 0xEC, 0x9F, 0x53, 0x4F, 0xD3, 0xF6, 0x3F, 0x35, 0xEC, 0xA6,
	0x89, 0xFD, 0x73, 0x48, 0xEB, 0x63, 0x51, 0xD5, 0xFE, 0x39, 0xA4, 0xF7, 0xEF, 0xA8, 0x66, 0xFF,
	0x1C, 0xF2, 0xFA, 0x96, 0x54, 0xB1, 0x7F, 0x0E, 0xF9, 0xFD, 0x5A, 0x8A, 0xED, 0x9F, 0x43, 0x59,
	0x9F, 0x9A, 0x22, 0xFB, 0xE7, 0x50, 0xDE, 0x9F, 0x27, 0xDB, 0xFE, 0x39, 0x34, 0x1B, 0x4B, 0x18,
	0xCE, 0x00, 0x00, 0xDA, 0xEB, 0x61, 0xFD, 0xB6, 0x15, 0x00, 0x00,
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/api/api_impl.go | pkg/api/api_impl.go | package api
// This file implements most of the API. This includes the "Build", "Transform",
// "FormatMessages", and "AnalyzeMetafile" functions.
import (
"bytes"
"encoding/base64"
"encoding/binary"
"errors"
"fmt"
"io/ioutil"
"math"
"os"
"path"
"regexp"
"sort"
"strconv"
"strings"
"sync"
"time"
"unicode/utf8"
"github.com/evanw/esbuild/internal/api_helpers"
"github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/bundler"
"github.com/evanw/esbuild/internal/cache"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/graph"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_parser"
"github.com/evanw/esbuild/internal/linker"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/resolver"
"github.com/evanw/esbuild/internal/xxhash"
)
// validatePathTemplate parses an output path template string (e.g.
// "[dir]/[name]-[hash]") into an ordered list of literal-text parts, each
// optionally followed by one placeholder. Returns nil for an empty template.
func validatePathTemplate(template string) []config.PathTemplate {
	if template == "" {
		return nil
	}
	// Normalize Windows-style separators and make the path explicitly relative
	template = "./" + strings.ReplaceAll(template, "\\", "/")

	parts := make([]config.PathTemplate, 0, 4)
	search := 0

	// Split by placeholders
	for search < len(template) {
		// Jump to the next "["
		if found := strings.IndexByte(template[search:], '['); found == -1 {
			break
		} else {
			search += found
		}
		head, tail := template[:search], template[search:]
		placeholder := config.NoPlaceholder

		// Check for a placeholder
		switch {
		case strings.HasPrefix(tail, "[dir]"):
			placeholder = config.DirPlaceholder
			search += len("[dir]")

		case strings.HasPrefix(tail, "[name]"):
			placeholder = config.NamePlaceholder
			search += len("[name]")

		case strings.HasPrefix(tail, "[hash]"):
			placeholder = config.HashPlaceholder
			search += len("[hash]")

		case strings.HasPrefix(tail, "[ext]"):
			placeholder = config.ExtPlaceholder
			search += len("[ext]")

		default:
			// Skip past the "[" so we don't find it again
			search++
			continue
		}

		// Add a part for everything up to and including this placeholder
		parts = append(parts, config.PathTemplate{
			Data:        head,
			Placeholder: placeholder,
		})

		// Reset the search after this placeholder
		template = template[search:]
		search = 0
	}

	// Append any remaining data as a part without a placeholder
	if search < len(template) {
		parts = append(parts, config.PathTemplate{
			Data:        template,
			Placeholder: config.NoPlaceholder,
		})
	}

	return parts
}
// validatePlatform maps the public Platform enum onto its internal
// config.Platform representation. An unrecognized value is a programmer
// error and panics.
func validatePlatform(value Platform) config.Platform {
	switch value {
	case PlatformNode:
		return config.PlatformNode
	case PlatformNeutral:
		return config.PlatformNeutral
	case PlatformDefault, PlatformBrowser:
		// The browser is the default platform
		return config.PlatformBrowser
	}
	panic("Invalid platform")
}
// validateFormat maps the public Format enum onto its internal
// config.Format representation and panics on an unrecognized value.
func validateFormat(value Format) config.Format {
	switch value {
	case FormatIIFE:
		return config.FormatIIFE
	case FormatCommonJS:
		return config.FormatCommonJS
	case FormatESModule:
		return config.FormatESModule
	case FormatDefault:
		// No explicit format requested
		return config.FormatPreserve
	}
	panic("Invalid format")
}
// validateSourceMap maps the public SourceMap enum onto its internal
// config.SourceMap representation and panics on an unrecognized value.
func validateSourceMap(value SourceMap) config.SourceMap {
	switch value {
	case SourceMapLinked:
		return config.SourceMapLinkedWithComment
	case SourceMapInline:
		return config.SourceMapInline
	case SourceMapExternal:
		return config.SourceMapExternalWithoutComment
	case SourceMapInlineAndExternal:
		return config.SourceMapInlineAndExternal
	case SourceMapNone:
		return config.SourceMapNone
	}
	panic("Invalid source map")
}
// validateLegalComments maps the public LegalComments enum onto its internal
// representation. The default depends on whether we're bundling: when
// bundling, legal comments are collected at the end of the file (files are
// concatenated); otherwise they stay inline.
func validateLegalComments(value LegalComments, bundle bool) config.LegalComments {
	switch value {
	case LegalCommentsDefault:
		if bundle {
			return config.LegalCommentsEndOfFile
		} else {
			return config.LegalCommentsInline
		}
	case LegalCommentsNone:
		return config.LegalCommentsNone
	case LegalCommentsInline:
		return config.LegalCommentsInline
	case LegalCommentsEndOfFile:
		return config.LegalCommentsEndOfFile
	case LegalCommentsLinked:
		return config.LegalCommentsLinkedWithComment
	case LegalCommentsExternal:
		return config.LegalCommentsExternalWithoutComment
	default:
		// Fix: this message previously read "Invalid source map", copy-pasted
		// from validateSourceMap, which would be misleading if it ever fired
		panic("Invalid legal comments")
	}
}
// validateColor maps the public StderrColor enum onto the logger's color
// setting and panics on an unrecognized value.
func validateColor(value StderrColor) logger.UseColor {
	switch value {
	case ColorNever:
		return logger.ColorNever
	case ColorAlways:
		return logger.ColorAlways
	case ColorIfTerminal:
		// Default: only colorize when stderr is a terminal
		return logger.ColorIfTerminal
	}
	panic("Invalid color")
}
// validateLogLevel maps the public LogLevel enum onto the logger's level
// type and panics on an unrecognized value.
func validateLogLevel(value LogLevel) logger.LogLevel {
	switch value {
	case LogLevelSilent:
		return logger.LevelSilent
	case LogLevelError:
		return logger.LevelError
	case LogLevelWarning:
		return logger.LevelWarning
	case LogLevelInfo:
		return logger.LevelInfo
	case LogLevelDebug:
		return logger.LevelDebug
	case LogLevelVerbose:
		return logger.LevelVerbose
	}
	panic("Invalid log level")
}
// validateASCIIOnly reports whether output should be escaped to ASCII.
// Panics on an unrecognized Charset value.
func validateASCIIOnly(value Charset) bool {
	switch value {
	case CharsetUTF8:
		return false
	case CharsetDefault, CharsetASCII:
		// ASCII escaping is the default
		return true
	}
	panic("Invalid charset")
}
// validateExternalPackages reports whether all packages should be treated
// as external. Panics on an unrecognized Packages value.
func validateExternalPackages(value Packages) bool {
	switch value {
	case PackagesExternal:
		return true
	case PackagesDefault, PackagesBundle:
		// Bundling packages is the default
		return false
	}
	panic("Invalid packages")
}
// validateTreeShaking resolves the TreeShaking setting to a boolean.
// Panics on an unrecognized value.
func validateTreeShaking(value TreeShaking, bundle bool, format Format) bool {
	switch value {
	case TreeShakingTrue:
		return true
	case TreeShakingFalse:
		return false
	case TreeShakingDefault:
		// Tree shaking defaults to on only when nothing can safely be
		// concatenated after our output: an IIFE is self-contained, and when
		// bundling, additional code should be included in the bundle rather
		// than appended afterward. Otherwise we assume it's not safe.
		return bundle || format == FormatIIFE
	}
	panic("Invalid tree shaking")
}
// validateLoader maps the public Loader enum onto its internal
// config.Loader representation and panics on an unrecognized value.
func validateLoader(value Loader) config.Loader {
	switch value {
	case LoaderDefault:
		return config.LoaderDefault
	case LoaderNone:
		return config.LoaderNone
	case LoaderEmpty:
		return config.LoaderEmpty
	case LoaderJS:
		return config.LoaderJS
	case LoaderJSX:
		return config.LoaderJSX
	case LoaderTS:
		return config.LoaderTS
	case LoaderTSX:
		return config.LoaderTSX
	case LoaderCSS:
		return config.LoaderCSS
	case LoaderGlobalCSS:
		return config.LoaderGlobalCSS
	case LoaderLocalCSS:
		return config.LoaderLocalCSS
	case LoaderJSON:
		return config.LoaderJSON
	case LoaderText:
		return config.LoaderText
	case LoaderBase64:
		return config.LoaderBase64
	case LoaderDataURL:
		return config.LoaderDataURL
	case LoaderFile:
		return config.LoaderFile
	case LoaderBinary:
		return config.LoaderBinary
	case LoaderCopy:
		return config.LoaderCopy
	}
	panic("Invalid loader")
}
// extractPathStyle returns the absolute-path style when the given flag bit
// is set in the bitmask, and the relative-path style otherwise.
func extractPathStyle(absPaths AbsPaths, flag AbsPaths) logger.PathStyle {
	style := logger.RelPath
	if absPaths&flag != 0 {
		style = logger.AbsPath
	}
	return style
}
// versionRegex matches version strings of the form "X", "X.Y", or "X.Y.Z"
// with an optional "-prerelease.suffix" (e.g. "12", "1.2.3", "1.0.0-beta.1").
// Capture groups: 1=major, 2=minor, 3=patch, 4=pre-release (with leading "-").
var versionRegex = regexp.MustCompile(`^([0-9]+)(?:\.([0-9]+))?(?:\.([0-9]+))?(-[A-Za-z0-9]+(?:\.[A-Za-z0-9]+)*)?$`)
// validateFeatures converts the "target" and "engines" settings into bitsets
// of unsupported JS and CSS features, CSS prefix data, and a human-readable
// description of the target environment. Returns all zero values when no
// target and no engines were specified.
func validateFeatures(log logger.Log, target Target, engines []Engine) (compat.JSFeature, compat.CSSFeature, map[css_ast.D]compat.CSSPrefix, string) {
	if target == DefaultTarget && len(engines) == 0 {
		return 0, 0, nil, ""
	}

	constraints := make(map[compat.Engine]compat.Semver)
	targets := make([]string, 0, 1+len(engines))

	switch target {
	case ES5:
		constraints[compat.ES] = compat.Semver{Parts: []int{5}}
	case ES2015:
		constraints[compat.ES] = compat.Semver{Parts: []int{2015}}
	case ES2016:
		constraints[compat.ES] = compat.Semver{Parts: []int{2016}}
	case ES2017:
		constraints[compat.ES] = compat.Semver{Parts: []int{2017}}
	case ES2018:
		constraints[compat.ES] = compat.Semver{Parts: []int{2018}}
	case ES2019:
		constraints[compat.ES] = compat.Semver{Parts: []int{2019}}
	case ES2020:
		constraints[compat.ES] = compat.Semver{Parts: []int{2020}}
	case ES2021:
		constraints[compat.ES] = compat.Semver{Parts: []int{2021}}
	case ES2022:
		constraints[compat.ES] = compat.Semver{Parts: []int{2022}}
	case ES2023:
		constraints[compat.ES] = compat.Semver{Parts: []int{2023}}
	case ES2024:
		constraints[compat.ES] = compat.Semver{Parts: []int{2024}}
	case ESNext, DefaultTarget:
		// No constraint on the ES language version
	default:
		panic("Invalid target")
	}

	for _, engine := range engines {
		if match := versionRegex.FindStringSubmatch(engine.Version); match != nil {
			if major, err := strconv.Atoi(match[1]); err == nil {
				parts := []int{major}
				// Minor and patch are optional: a missing capture group is an
				// empty string and fails Atoi, truncating the parts list
				if minor, err := strconv.Atoi(match[2]); err == nil {
					parts = append(parts, minor)
					if patch, err := strconv.Atoi(match[3]); err == nil {
						parts = append(parts, patch)
					}
				}
				constraints[convertEngineName(engine.Name)] = compat.Semver{
					Parts:      parts,
					PreRelease: match[4],
				}
				continue
			}
		}

		text := "All version numbers passed to esbuild must be in the format \"X\", \"X.Y\", or \"X.Y.Z\" where X, Y, and Z are non-negative integers."
		log.AddErrorWithNotes(nil, logger.Range{}, fmt.Sprintf("Invalid version: %q", engine.Version),
			[]logger.MsgData{{Text: text}})
	}

	// Build a sorted, quoted, comma-separated description of the targets for
	// use in error messages
	for engine, version := range constraints {
		targets = append(targets, engine.String()+version.String())
	}
	if target == ESNext {
		targets = append(targets, "esnext")
	}

	sort.Strings(targets)
	targetEnv := helpers.StringArrayToQuotedCommaSeparatedString(targets)

	return compat.UnsupportedJSFeatures(constraints), compat.UnsupportedCSSFeatures(constraints), compat.CSSPrefixData(constraints), targetEnv
}
// validateSupported splits the user-level "supported" map into JS and CSS
// feature bitsets. For each recognized feature name the corresponding mask
// bit is set; the feature bit itself is set only when the feature is marked
// as NOT supported (these bitsets track unsupported features). Unknown names
// are reported as errors.
func validateSupported(log logger.Log, supported map[string]bool) (
	jsFeature compat.JSFeature,
	jsMask compat.JSFeature,
	cssFeature compat.CSSFeature,
	cssMask compat.CSSFeature,
) {
	for name, isSupported := range supported {
		if feature, ok := compat.StringToJSFeature[name]; ok {
			jsMask |= feature
			if !isSupported {
				jsFeature |= feature
			}
			continue
		}
		if feature, ok := compat.StringToCSSFeature[name]; ok {
			cssMask |= feature
			if !isSupported {
				cssFeature |= feature
			}
			continue
		}
		log.AddError(nil, logger.Range{}, fmt.Sprintf("%q is not a valid feature name for the \"supported\" setting", name))
	}
	return
}
// validateGlobalName parses a global name setting (e.g. "window.lib.mod")
// into its dot-separated parts. Returns nil when the text is empty or fails
// to parse; parse errors are reported through the log by the parser itself.
func validateGlobalName(log logger.Log, text string, path string) []string {
	if text == "" {
		return nil
	}
	source := logger.Source{
		KeyPath:     logger.Path{Text: path},
		PrettyPaths: logger.PrettyPaths{Abs: path, Rel: path},
		Contents:    text,
	}
	result, ok := js_parser.ParseGlobalName(log, source)
	if !ok {
		return nil
	}
	return result
}
// validateRegex compiles a user-provided Go regular expression. An empty
// pattern yields nil; an invalid pattern is logged as an error and also
// yields nil.
func validateRegex(log logger.Log, what string, value string) *regexp.Regexp {
	if value == "" {
		return nil
	}
	if regex, err := regexp.Compile(value); err == nil {
		return regex
	}
	log.AddError(nil, logger.Range{},
		fmt.Sprintf("The %q setting is not a valid Go regular expression: %s", what, value))
	return nil
}
// validateExternals converts the list of external paths into matchers that
// are applied both before resolution (against the import path as written)
// and, for non-package paths, after resolution (against the resolved
// absolute path). A path may contain at most one "*" wildcard.
func validateExternals(log logger.Log, fs fs.FS, paths []string) config.ExternalSettings {
	result := config.ExternalSettings{
		PreResolve:  config.ExternalMatchers{Exact: make(map[string]bool)},
		PostResolve: config.ExternalMatchers{Exact: make(map[string]bool)},
	}

	for _, path := range paths {
		if index := strings.IndexByte(path, '*'); index != -1 {
			// Wildcard behavior
			if strings.ContainsRune(path[index+1:], '*') {
				log.AddError(nil, logger.Range{}, fmt.Sprintf("External path %q cannot have more than one \"*\" wildcard", path))
			} else {
				result.PreResolve.Patterns = append(result.PreResolve.Patterns, config.WildcardPattern{Prefix: path[:index], Suffix: path[index+1:]})

				// Non-package (file-system) wildcard paths are additionally
				// matched after resolution using their absolute form, provided
				// the absolute form still has exactly one wildcard
				if !resolver.IsPackagePath(path) {
					if absPath := validatePath(log, fs, path, "external path"); absPath != "" {
						if absIndex := strings.IndexByte(absPath, '*'); absIndex != -1 && !strings.ContainsRune(absPath[absIndex+1:], '*') {
							result.PostResolve.Patterns = append(result.PostResolve.Patterns, config.WildcardPattern{Prefix: absPath[:absIndex], Suffix: absPath[absIndex+1:]})
						}
					}
				}
			}
		} else {
			// Non-wildcard behavior
			result.PreResolve.Exact[path] = true
			if resolver.IsPackagePath(path) {
				// Also treat subpaths of an external package as external
				result.PreResolve.Patterns = append(result.PreResolve.Patterns, config.WildcardPattern{Prefix: path + "/"})
			} else if absPath := validatePath(log, fs, path, "external path"); absPath != "" {
				result.PostResolve.Exact[absPath] = true
			}
		}
	}

	return result
}
// validateAlias filters the alias map down to entries whose name is a valid
// bare package-style path and whose substitution is non-empty, logging an
// error for each rejected entry.
func validateAlias(log logger.Log, fs fs.FS, alias map[string]string) map[string]string {
	valid := make(map[string]string, len(alias))

	for old, new := range alias {
		if new == "" {
			log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid alias substitution: %q", new))
			continue
		}

		// Valid alias names:
		//   "foo"
		//   "foo/bar"
		//   "@foo"
		//   "@foo/bar"
		//   "@foo/bar/baz"
		//
		// Invalid alias names:
		//   "./foo"
		//   "../foo"
		//   "/foo"
		//   "C:\\foo"
		//   ".foo"
		//   "foo/"
		//   "@foo/"
		//   "foo/../bar"
		//
		// The path.Clean round-trip rejects trailing slashes, "..", and other
		// non-canonical forms in one check.
		if !strings.HasPrefix(old, ".") && !strings.HasPrefix(old, "/") && !fs.IsAbs(old) && path.Clean(strings.ReplaceAll(old, "\\", "/")) == old {
			valid[old] = new
			continue
		}

		log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid alias name: %q", old))
	}

	return valid
}
// isValidExtension reports whether ext looks like a file extension: it must
// begin with "." followed by at least one character, and must not end with
// "." (which also rejects "." and "..").
func isValidExtension(ext string) bool {
	if len(ext) < 2 {
		return false
	}
	return strings.HasPrefix(ext, ".") && !strings.HasSuffix(ext, ".")
}
// validateResolveExtensions validates each user-provided implicit extension,
// returning the default extension order when none was provided at all.
func validateResolveExtensions(log logger.Log, order []string) []string {
	if order == nil {
		return []string{".tsx", ".ts", ".jsx", ".js", ".css", ".json"}
	}
	for _, ext := range order {
		if isValidExtension(ext) {
			continue
		}
		log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid file extension: %q", ext))
	}
	return order
}
// validateLoaders merges user-configured extension-to-loader mappings on top
// of the built-in defaults, validating each extension along the way.
func validateLoaders(log logger.Log, loaders map[string]Loader) map[string]config.Loader {
	result := bundler.DefaultExtensionToLoaderMap()
	for ext, loader := range loaders {
		// An empty key is deliberately exempt from extension validation
		if ext != "" && !isValidExtension(ext) {
			log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid file extension: %q", ext))
		}
		result[ext] = validateLoader(loader)
	}
	return result
}
// validateJSXExpr parses a JSX factory/fragment setting into a DefineExpr.
// A dotted identifier path is accepted for either setting; a constant
// expression is additionally accepted when name is "fragment". Invalid
// input is logged and yields the zero DefineExpr, as does empty input.
func validateJSXExpr(log logger.Log, text string, name string) config.DefineExpr {
	if text != "" {
		if expr, _ := js_parser.ParseDefineExpr(text); len(expr.Parts) > 0 || (name == "fragment" && expr.Constant != nil) {
			return expr
		}
		log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid JSX %s: %q", name, text))
	}
	return config.DefineExpr{}
}
// mapKeyForDefine returns a string that uniquely identifies an array of
// strings, for use as a map key. Each part is encoded as a 4-byte
// little-endian length prefix followed by the part itself, so distinct
// arrays can never collide (unlike joining with a separator character).
func mapKeyForDefine(parts []string) string {
	var key strings.Builder
	var length [4]byte
	for _, part := range parts {
		binary.LittleEndian.PutUint32(length[:], uint32(len(part)))
		key.Write(length[:])
		key.WriteString(part)
	}
	return key.String()
}
// validateDefines converts the user-level "define" map and "pure" function
// list into processed define data shared by all parsers, plus the list of
// complex expressions that must be injected into each file. Simple values
// become direct substitutions; complex JS literals become injected defines
// referenced by index. It also applies the browser "process.env.NODE_ENV"
// convention and the "drop console" flag.
func validateDefines(
	log logger.Log,
	defines map[string]string,
	pureFns []string,
	platform config.Platform,
	isBuildAPI bool,
	minify bool,
	drop Drop,
) (*config.ProcessedDefines, []config.InjectedDefine) {
	// Sort injected defines for determinism, since the imports will be injected
	// into every file in the order that we return them from this function
	sortedKeys := make([]string, 0, len(defines))
	for key := range defines {
		sortedKeys = append(sortedKeys, key)
	}
	sort.Strings(sortedKeys)

	rawDefines := make(map[string]config.DefineData)
	nodeEnvParts := []string{"process", "env", "NODE_ENV"}
	nodeEnvMapKey := mapKeyForDefine(nodeEnvParts)
	var injectedDefines []config.InjectedDefine

	for _, key := range sortedKeys {
		value := defines[key]
		// Invalid define names are reported by validateGlobalName; skip them
		keyParts := validateGlobalName(log, key, "(define name)")
		if keyParts == nil {
			continue
		}
		mapKey := mapKeyForDefine(keyParts)

		// Parse the value
		defineExpr, injectExpr := js_parser.ParseDefineExpr(value)

		// Define simple expressions
		if defineExpr.Constant != nil || len(defineExpr.Parts) > 0 {
			rawDefines[mapKey] = config.DefineData{KeyParts: keyParts, DefineExpr: &defineExpr}

			// Try to be helpful for common mistakes: defining
			// "process.env.NODE_ENV" as a bare identifier almost certainly
			// meant to be a quoted string. The warning location is synthesized
			// per API flavor so the caret lines up with how the user wrote it.
			if len(defineExpr.Parts) == 1 && mapKey == nodeEnvMapKey {
				data := logger.MsgData{
					Text: fmt.Sprintf("%q is defined as an identifier instead of a string (surround %q with quotes to get a string)", key, value),
				}
				part := defineExpr.Parts[0]

				switch logger.API {
				case logger.CLIAPI:
					data.Location = &logger.MsgLocation{
						File:       logger.PrettyPaths{Abs: "<cli>", Rel: "<cli>"},
						Line:       1,
						Column:     30,
						Length:     len(part),
						LineText:   fmt.Sprintf("--define:process.env.NODE_ENV=%s", part),
						Suggestion: fmt.Sprintf("\\\"%s\\\"", part),
					}

				case logger.JSAPI:
					data.Location = &logger.MsgLocation{
						File:       logger.PrettyPaths{Abs: "<js>", Rel: "<js>"},
						Line:       1,
						Column:     34,
						Length:     len(part) + 2,
						LineText:   fmt.Sprintf("define: { 'process.env.NODE_ENV': '%s' }", part),
						Suggestion: fmt.Sprintf("'\"%s\"'", part),
					}

				case logger.GoAPI:
					data.Location = &logger.MsgLocation{
						File:       logger.PrettyPaths{Abs: "<go>", Rel: "<go>"},
						Line:       1,
						Column:     50,
						Length:     len(part) + 2,
						LineText:   fmt.Sprintf("Define: map[string]string{\"process.env.NODE_ENV\": \"%s\"}", part),
						Suggestion: fmt.Sprintf("\"\\\"%s\\\"\"", part),
					}
				}

				log.AddMsgID(logger.MsgID_JS_SuspiciousDefine, logger.Msg{
					Kind: logger.Warning,
					Data: data,
				})
			}
			continue
		}

		// Inject complex expressions
		if injectExpr != nil {
			index := ast.MakeIndex32(uint32(len(injectedDefines)))
			injectedDefines = append(injectedDefines, config.InjectedDefine{
				Source: logger.Source{Contents: value},
				Data:   injectExpr,
				Name:   key,
			})
			rawDefines[mapKey] = config.DefineData{KeyParts: keyParts, DefineExpr: &config.DefineExpr{InjectedDefineIndex: index}}
			continue
		}

		// Anything else is unsupported
		log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid define value (must be an entity name or JS literal): %s", value))
	}

	// If we're bundling for the browser, add a special-cased define for
	// "process.env.NODE_ENV" that is "development" when not minifying and
	// "production" when minifying. This is a convention from the React world
	// that must be handled to avoid all React code crashing instantly. This
	// is only done if it's not already defined so that you can override it if
	// necessary.
	if isBuildAPI && platform == config.PlatformBrowser {
		if _, process := rawDefines[mapKeyForDefine([]string{"process"})]; !process {
			if _, processEnv := rawDefines[mapKeyForDefine([]string{"process.env"})]; !processEnv {
				if _, processEnvNodeEnv := rawDefines[nodeEnvMapKey]; !processEnvNodeEnv {
					var value []uint16
					if minify {
						value = helpers.StringToUTF16("production")
					} else {
						value = helpers.StringToUTF16("development")
					}
					rawDefines[nodeEnvMapKey] = config.DefineData{KeyParts: nodeEnvParts, DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: value}}}
				}
			}
		}
	}

	// If we're dropping all console API calls, replace each one with undefined
	if (drop & DropConsole) != 0 {
		consoleParts := []string{"console"}
		consoleMapKey := mapKeyForDefine(consoleParts)
		define := rawDefines[consoleMapKey]
		define.KeyParts = consoleParts
		define.Flags |= config.MethodCallsMustBeReplacedWithUndefined
		rawDefines[consoleMapKey] = define
	}

	for _, key := range pureFns {
		keyParts := validateGlobalName(log, key, "(pure name)")
		if keyParts == nil {
			continue
		}
		mapKey := mapKeyForDefine(keyParts)

		// Merge with any previously-specified defines
		define := rawDefines[mapKey]
		define.KeyParts = keyParts
		define.Flags |= config.CallCanBeUnwrappedIfUnused
		rawDefines[mapKey] = define
	}

	// Processing defines is expensive. Process them once here so the same object
	// can be shared between all parsers we create using these arguments.
	definesArray := make([]config.DefineData, 0, len(rawDefines))
	for _, define := range rawDefines {
		definesArray = append(definesArray, define)
	}
	processed := config.ProcessDefines(definesArray)
	return &processed, injectedDefines
}
// validateLogOverrides converts the public per-message log level overrides
// into their internal representation. A single user-facing name may expand
// to multiple internal message IDs.
func validateLogOverrides(input map[string]LogLevel) (output map[logger.MsgID]logger.LogLevel) {
	// Fix: allocate with the declared key type instead of the raw "uint8"
	// the alias resolves to, and pre-size with a capacity hint
	output = make(map[logger.MsgID]logger.LogLevel, len(input))
	for k, v := range input {
		logger.StringToMsgIDs(k, validateLogLevel(v), output)
	}
	return
}
// validatePath converts a relative path into an absolute path using the
// given file system, logging an error when the conversion fails. An empty
// input yields an empty output. Note that whatever fs.Abs returned is
// passed through even on failure.
func validatePath(log logger.Log, fs fs.FS, relPath string, pathKind string) string {
	if relPath == "" {
		return ""
	}
	absPath, ok := fs.Abs(relPath)
	if !ok {
		log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid %s: %s", pathKind, relPath))
	}
	return absPath
}
// validateOutputExtensions validates the output extension map, which may
// only remap the ".js" and ".css" output extensions. Both the key and the
// replacement value are checked, and the chosen replacements are returned.
func validateOutputExtensions(log logger.Log, outExtensions map[string]string) (js string, css string) {
	for key, value := range outExtensions {
		if !isValidExtension(value) {
			log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid output extension: %q", value))
		}
		switch key {
		case ".js":
			js = value
		case ".css":
			css = value
		default:
			log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid output extension: %q (valid: .css, .js)", key))
		}
	}
	return
}
// validateBannerOrFooter splits a banner/footer map into its JS and CSS
// entries, logging an error for any key other than "js" or "css". The name
// argument ("banner" or "footer") is only used in the error message.
func validateBannerOrFooter(log logger.Log, name string, values map[string]string) (js string, css string) {
	for key, value := range values {
		switch key {
		case "js":
			js = value
		case "css":
			css = value
		default:
			log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid %s file type: %q (valid: css, js)", name, key))
		}
	}
	return
}
// validateKeepNames rejects the "keep names" setting for target environments
// where "Function.prototype.name" is not configurable, since assigning to it
// there would throw at runtime.
func validateKeepNames(log logger.Log, options *config.Options) {
	if options.KeepNames && options.UnsupportedJSFeatures.Has(compat.FunctionNameConfigurable) {
		where := config.PrettyPrintTargetEnvironment(options.OriginalTargetEnv, options.UnsupportedJSFeatureOverridesMask)
		log.AddErrorWithNotes(nil, logger.Range{}, fmt.Sprintf("The \"keep names\" setting cannot be used with %s", where), []logger.MsgData{{
			Text: "In this environment, the \"Function.prototype.name\" property is not configurable and assigning to it will throw an error. " +
				"Either use a newer target environment or disable the \"keep names\" setting."}})
	}
}
// convertLocationToPublic converts an internal message location into the
// public API representation, selecting either the absolute or relative file
// path per the given style. A nil location stays nil.
func convertLocationToPublic(loc *logger.MsgLocation, pathStyle logger.PathStyle) *Location {
	if loc == nil {
		return nil
	}
	return &Location{
		File:       loc.File.Select(pathStyle),
		Namespace:  loc.Namespace,
		Line:       loc.Line,
		Column:     loc.Column,
		Length:     loc.Length,
		LineText:   loc.LineText,
		Suggestion: loc.Suggestion,
	}
}
// convertMessagesToPublic filters the internal messages down to the given
// kind (errors or warnings) and converts them, including their notes, into
// the public API representation.
func convertMessagesToPublic(kind logger.MsgKind, msgs []logger.Msg, pathStyle logger.PathStyle) []Message {
	var filtered []Message
	for _, msg := range msgs {
		if msg.Kind == kind {
			var notes []Note
			for _, note := range msg.Notes {
				notes = append(notes, Note{
					Text:     note.Text,
					Location: convertLocationToPublic(note.Location, pathStyle),
				})
			}
			filtered = append(filtered, Message{
				ID:         logger.MsgIDToString(msg.ID),
				PluginName: msg.PluginName,
				Text:       msg.Data.Text,
				Location:   convertLocationToPublic(msg.Data.Location, pathStyle),
				Notes:      notes,
				Detail:     msg.Data.UserDetail,
			})
		}
	}
	return filtered
}
// convertLocationToInternal converts a public API location into the internal
// representation. The namespace defaults to "file" when unspecified, and a
// nil location stays nil.
func convertLocationToInternal(loc *Location) *logger.MsgLocation {
	if loc == nil {
		return nil
	}
	namespace := loc.Namespace
	if namespace == "" {
		namespace = "file"
	}
	return &logger.MsgLocation{
		File:       logger.PrettyPaths{Abs: loc.File, Rel: loc.File},
		Namespace:  namespace,
		Line:       loc.Line,
		Column:     loc.Column,
		Length:     loc.Length,
		LineText:   loc.LineText,
		Suggestion: loc.Suggestion,
	}
}
// convertMessagesToInternal converts public API messages of the given kind,
// including their notes, into internal messages appended to msgs.
func convertMessagesToInternal(msgs []logger.Msg, kind logger.MsgKind, messages []Message) []logger.Msg {
	for _, message := range messages {
		var notes []logger.MsgData
		for _, note := range message.Notes {
			notes = append(notes, logger.MsgData{
				Text:     note.Text,
				Location: convertLocationToInternal(note.Location),
			})
		}
		msgs = append(msgs, logger.Msg{
			ID:         logger.StringToMaximumMsgID(message.ID),
			PluginName: message.PluginName,
			Kind:       kind,
			Data: logger.MsgData{
				Text:       message.Text,
				Location:   convertLocationToInternal(message.Location),
				UserDetail: message.Detail,
			},
			Notes: notes,
		})
	}
	return msgs
}
// convertErrorsAndWarningsToInternal merges public error and warning
// messages into one stably-sorted internal message slice. Returns nil when
// there are no messages at all.
func convertErrorsAndWarningsToInternal(errors []Message, warnings []Message) []logger.Msg {
	total := len(errors) + len(warnings)
	if total == 0 {
		return nil
	}
	msgs := make(logger.SortableMsgs, 0, total)
	msgs = convertMessagesToInternal(msgs, logger.Error, errors)
	msgs = convertMessagesToInternal(msgs, logger.Warning, warnings)
	sort.Stable(msgs)
	return msgs
}
// cloneMangleCache makes a defensive copy of the user-provided mangle cache,
// validating that each entry maps to either a string or "false". Returns nil
// when no cache was provided. Invalid entries are logged and omitted from
// the clone.
func cloneMangleCache(log logger.Log, mangleCache map[string]interface{}) map[string]interface{} {
	if mangleCache == nil {
		return nil
	}
	clone := make(map[string]interface{}, len(mangleCache))
	for k, v := range mangleCache {
		// Fix: this previously compared the VALUE against "__proto__", but the
		// identifier name being validated is the KEY (which is what the error
		// message prints)
		if k == "__proto__" {
			// This could cause problems for our binary serialization protocol. It's
			// also unnecessary because we already avoid mangling this property name.
			log.AddError(nil, logger.Range{},
				fmt.Sprintf("Invalid identifier name %q in mangle cache", k))
		} else if _, ok := v.(string); ok || v == false {
			clone[k] = v
		} else {
			log.AddError(nil, logger.Range{},
				fmt.Sprintf("Expected %q in mangle cache to map to either a string or false", k))
		}
	}
	return clone
}
////////////////////////////////////////////////////////////////////////////////
// Build API
// contextImpl validates the build options once up front and packages
// everything needed to run (and re-run) builds into an internalContext.
// When the options are invalid, the validation errors are returned instead
// of a context.
func contextImpl(buildOpts BuildOptions) (*internalContext, []Message) {
	logOptions := logger.OutputOptions{
		IncludeSource: true,
		MessageLimit:  buildOpts.LogLimit,
		Color:         validateColor(buildOpts.Color),
		LogLevel:      validateLogLevel(buildOpts.LogLevel),
		PathStyle:     extractPathStyle(buildOpts.AbsPaths, LogAbsPath),
		Overrides:     validateLogOverrides(buildOpts.LogOverride),
	}

	// Validate that the current working directory is an absolute path
	absWorkingDir := buildOpts.AbsWorkingDir
	realFS, err := fs.RealFS(fs.RealFSOptions{
		AbsWorkingDir: absWorkingDir,

		// This is a long-lived file system object so do not cache calls to
		// ReadDirectory() (they are normally cached for the duration of a build
		// for performance).
		DoNotCache: true,
	})
	if err != nil {
		log := logger.NewStderrLog(logOptions)
		log.AddError(nil, logger.Range{}, err.Error())
		return nil, convertMessagesToPublic(logger.Error, log.Done(), logOptions.PathStyle)
	}

	// Do not re-evaluate plugins when rebuilding. Also make sure the working
	// directory doesn't change, since breaking that invariant would break the
	// validation that we just did above.
	caches := cache.MakeCacheSet()
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, logOptions.Overrides)
	onEndCallbacks, onDisposeCallbacks, finalizeBuildOptions := loadPlugins(&buildOpts, realFS, log, caches)
	options, entryPoints := validateBuildOptions(buildOpts, log, realFS)
	finalizeBuildOptions(&options)
	if buildOpts.AbsWorkingDir != absWorkingDir {
		panic("Mutating \"AbsWorkingDir\" is not allowed")
	}

	// If we have errors already, then refuse to build any further. This only
	// happens when the build options themselves contain validation errors.
	msgs := log.Done()
	if log.HasErrors() {
		if logOptions.LogLevel < logger.LevelSilent {
			// Print all deferred validation log messages to stderr. We defer all log
			// messages that are generated above because warnings are re-printed for
			// every rebuild and we don't want to double-print these warnings for the
			// first build.
			stderr := logger.NewStderrLog(logOptions)
			for _, msg := range msgs {
				stderr.AddMsg(msg)
			}
			stderr.Done()
		}
		return nil, convertMessagesToPublic(logger.Error, msgs, options.LogPathStyle)
	}

	// Everything a rebuild needs is captured here once so rebuilds don't
	// re-run validation or plugin setup
	args := rebuildArgs{
		caches:             caches,
		onEndCallbacks:     onEndCallbacks,
		onDisposeCallbacks: onDisposeCallbacks,
		logOptions:         logOptions,
		logWarnings:        msgs,
		entryPoints:        entryPoints,
		options:            options,
		mangleCache:        buildOpts.MangleCache,
		absWorkingDir:      absWorkingDir,
		write:              buildOpts.Write,
	}

	return &internalContext{
		args:          args,
		realFS:        realFS,
		absWorkingDir: absWorkingDir,
	}, nil
}
// buildInProgress tracks a single in-flight build so concurrent callers can
// wait for its completion (via waitGroup) and so it can be canceled.
type buildInProgress struct {
	state     rebuildState
	waitGroup sync.WaitGroup
	cancel    config.CancelFlag
}
// internalContext is the state behind a build context: the captured build
// arguments plus the currently-active and most-recent build results. The
// build-state fields are accessed under "mutex" (see rebuild).
type internalContext struct {
	mutex         sync.Mutex
	args          rebuildArgs
	activeBuild   *buildInProgress
	recentBuild   *BuildResult
	realFS        fs.FS
	absWorkingDir string
	watcher       *watcher
	handler       *apiHandler
	didDispose    bool

	// This saves just enough information to be able to compute a useful diff
	// between two sets of output files. That way we don't need to hold both
	// sets of output files in memory at once to compute a diff.
	latestHashes map[string]string
}
// rebuild runs a single build and returns its state. If a build is already
// in progress, it waits for that build and returns its state instead of
// starting another one. Disposed contexts return the zero state. The actual
// build work happens without holding the mutex.
func (ctx *internalContext) rebuild() rebuildState {
	ctx.mutex.Lock()

	// Ignore disposed contexts
	if ctx.didDispose {
		ctx.mutex.Unlock()
		return rebuildState{}
	}

	// If there's already an active build, just return that build's result
	if build := ctx.activeBuild; build != nil {
		ctx.mutex.Unlock()
		build.waitGroup.Wait()
		return build.state
	}

	// Otherwise, start a new build. Snapshot everything the build needs while
	// still holding the mutex.
	build := &buildInProgress{}
	build.waitGroup.Add(1)
	ctx.activeBuild = build
	args := ctx.args
	watcher := ctx.watcher
	handler := ctx.handler
	oldHashes := ctx.latestHashes
	args.options.CancelFlag = &build.cancel
	ctx.mutex.Unlock()

	// Do the build without holding the mutex
	var newHashes map[string]string
	build.state, newHashes = rebuildImpl(args, oldHashes)
	if handler != nil {
		handler.broadcastBuildResult(build.state.result, newHashes)
	}
	if watcher != nil {
		watcher.setWatchData(build.state.watchData)
	}

	// Store the recent build for the dev server
	recentBuild := &build.state.result
	ctx.mutex.Lock()
	ctx.activeBuild = nil
	ctx.recentBuild = recentBuild
	ctx.latestHashes = newHashes
	ctx.mutex.Unlock()

	// Clear the recent build after it goes stale
	go func() {
		time.Sleep(250 * time.Millisecond)
		ctx.mutex.Lock()
		// Only clear it if no newer build has replaced it in the meantime
		if ctx.recentBuild == recentBuild {
			ctx.recentBuild = nil
		}
		ctx.mutex.Unlock()
	}()

	// Release anyone waiting on this build only after all state is published
	build.waitGroup.Done()
	return build.state
}
// This is used by the dev server. The dev server does a rebuild on each
// incoming request since a) we want incoming requests to always be up to
// date and b) we don't necessarily know what output paths to even serve
// without running another build (e.g. the hashes may have changed).
//
// However, there is a small period of time where we reuse old build results
// instead of generating new ones. This is because page loads likely involve
// multiple requests, and we don't want to rebuild separately for each of
// those requests.
func (ctx *internalContext) activeBuildOrRecentBuildOrRebuild() BuildResult {
	ctx.mutex.Lock()

	// If there's already an active build, wait for it and return that
	if build := ctx.activeBuild; build != nil {
		ctx.mutex.Unlock()
		build.waitGroup.Wait()
		return build.state.result
	}

	// Then try to return a recent already-completed build
	if build := ctx.recentBuild; build != nil {
		ctx.mutex.Unlock()
		return *build
	}

	// Otherwise, fall back to rebuilding
	ctx.mutex.Unlock()
	return ctx.Rebuild()
}
// Rebuild runs a build (or joins an in-flight one) and returns its result.
func (ctx *internalContext) Rebuild() BuildResult {
	return ctx.rebuild().result
}
func (ctx *internalContext) Watch(options WatchOptions) error {
ctx.mutex.Lock()
defer ctx.mutex.Unlock()
// Ignore disposed contexts
if ctx.didDispose {
return errors.New("Cannot watch a disposed context")
}
// Don't allow starting watch mode multiple times
if ctx.watcher != nil {
return errors.New("Watch mode has already been enabled")
}
logLevel := ctx.args.logOptions.LogLevel
ctx.watcher = &watcher{
fs: ctx.realFS,
shouldLog: logLevel == logger.LevelInfo || logLevel == logger.LevelDebug || logLevel == logger.LevelVerbose,
useColor: ctx.args.logOptions.Color,
pathStyle: ctx.args.logOptions.PathStyle,
rebuild: func() fs.WatchData {
return ctx.rebuild().watchData
},
delayInMS: time.Duration(options.Delay),
}
// All subsequent builds will be watch mode builds
ctx.args.options.WatchMode = true
// Start the file watcher goroutine
ctx.watcher.start()
// Do the first watch mode build on another goroutine
go func() {
ctx.mutex.Lock()
build := ctx.activeBuild
ctx.mutex.Unlock()
// If there's an active build, then it's not a watch build. Wait for it to
// finish first so we don't just get this build when we call "Rebuild()".
if build != nil {
build.waitGroup.Wait()
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/cli/cli_js_table.go | pkg/cli/cli_js_table.go | // This file was automatically generated by "js_table.ts"
package cli
import "github.com/evanw/esbuild/pkg/api"
// validEngines maps the lowercase engine names accepted on the command line
// to their corresponding public API enum values.
var validEngines = map[string]api.EngineName{
	"chrome":  api.EngineChrome,
	"deno":    api.EngineDeno,
	"edge":    api.EngineEdge,
	"firefox": api.EngineFirefox,
	"hermes":  api.EngineHermes,
	"ie":      api.EngineIE,
	"ios":     api.EngineIOS,
	"node":    api.EngineNode,
	"opera":   api.EngineOpera,
	"rhino":   api.EngineRhino,
	"safari":  api.EngineSafari,
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/cli/cli_impl.go | pkg/cli/cli_impl.go | package cli
// This file implements the public CLI. It's deliberately implemented using
// esbuild's public "Build", "Transform", and "AnalyzeMetafile" APIs instead of
// using internal APIs so that any tests that cover the CLI also implicitly
// cover the public API as well.
import (
"fmt"
"io/ioutil"
"net"
"os"
"sort"
"strconv"
"strings"
"github.com/evanw/esbuild/internal/cli_helpers"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/pkg/api"
)
// newBuildOptions returns a BuildOptions value whose map-typed fields are all
// pre-initialized, so flag parsing can assign into them without nil checks.
func newBuildOptions() api.BuildOptions {
	var opts api.BuildOptions
	opts.Banner = make(map[string]string)
	opts.Define = make(map[string]string)
	opts.Footer = make(map[string]string)
	opts.Loader = make(map[string]api.Loader)
	opts.LogOverride = make(map[string]api.LogLevel)
	opts.Supported = make(map[string]bool)
	return opts
}
// newTransformOptions returns a TransformOptions value whose map-typed fields
// are all pre-initialized, so flag parsing can assign into them without nil
// checks.
func newTransformOptions() api.TransformOptions {
	var opts api.TransformOptions
	opts.Define = make(map[string]string)
	opts.LogOverride = make(map[string]api.LogLevel)
	opts.Supported = make(map[string]bool)
	return opts
}
// parseOptionsKind records who consumes the parsed options. A few flags (e.g.
// "--metafile=..." and "--mangle-cache=...") are only accepted when parsing
// for internal CLI use, while the bare "--metafile" form is external-only.
type parseOptionsKind uint8

const (
	// kindInternal means we're parsing the flags for our own internal use
	kindInternal parseOptionsKind = iota

	// kindExternal means the result is returned through a public API
	kindExternal
)
// parseOptionsExtras carries flag-parsing results that are not part of the
// public build/transform option structs.
type parseOptionsExtras struct {
	watch      bool // true if "--watch" was enabled
	watchDelay int  // value of "--watch-delay=" (presumably milliseconds — confirm with the watcher implementation)

	// Internal-only (kindInternal) flag values: the paths given by
	// "--metafile=..." and "--mangle-cache=...". Nil when not specified.
	metafile    *string
	mangleCache *string
}
// isBoolFlag reports whether arg is the boolean flag "flag", either in its
// bare form ("--bundle") or with an explicit value ("--bundle=false"). It
// rejects longer flags that merely share the prefix (e.g. "--bundler").
func isBoolFlag(arg string, flag string) bool {
	if !strings.HasPrefix(arg, flag) {
		return false
	}
	rest := arg[len(flag):]
	return rest == "" || rest[0] == '='
}
// parseBoolFlag extracts the boolean value from a flag such as
// "--minify=true". A bare flag with no "=" yields defaultValue; any value
// other than "true" or "false" is reported as an error.
func parseBoolFlag(arg string, defaultValue bool) (bool, *cli_helpers.ErrorWithNote) {
	equals := strings.IndexByte(arg, '=')
	if equals == -1 {
		// Bare form, e.g. "--minify"
		return defaultValue, nil
	}
	value := arg[equals+1:]
	switch value {
	case "true":
		return true, nil
	case "false":
		return false, nil
	default:
		return false, cli_helpers.MakeErrorWithNote(
			fmt.Sprintf("Invalid value %q in %q", value, arg),
			"Valid values are \"true\" or \"false\".",
		)
	}
}
func parseOptionsImpl(
osArgs []string,
buildOpts *api.BuildOptions,
transformOpts *api.TransformOptions,
kind parseOptionsKind,
) (extras parseOptionsExtras, err *cli_helpers.ErrorWithNote) {
hasBareSourceMapFlag := false
// Parse the arguments now that we know what we're parsing
for _, arg := range osArgs {
switch {
case isBoolFlag(arg, "--bundle") && buildOpts != nil:
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
buildOpts.Bundle = value
}
case isBoolFlag(arg, "--preserve-symlinks") && buildOpts != nil:
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
buildOpts.PreserveSymlinks = value
}
case isBoolFlag(arg, "--splitting") && buildOpts != nil:
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
buildOpts.Splitting = value
}
case isBoolFlag(arg, "--allow-overwrite") && buildOpts != nil:
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
buildOpts.AllowOverwrite = value
}
case isBoolFlag(arg, "--watch") && buildOpts != nil:
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
extras.watch = value
}
case strings.HasPrefix(arg, "--watch-delay=") && buildOpts != nil:
value := arg[len("--watch-delay="):]
delay, err := strconv.Atoi(value)
if err != nil {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"The watch delay must be an integer.",
)
}
extras.watchDelay = delay
case isBoolFlag(arg, "--minify"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.MinifySyntax = value
buildOpts.MinifyWhitespace = value
buildOpts.MinifyIdentifiers = value
} else {
transformOpts.MinifySyntax = value
transformOpts.MinifyWhitespace = value
transformOpts.MinifyIdentifiers = value
}
case isBoolFlag(arg, "--minify-syntax"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.MinifySyntax = value
} else {
transformOpts.MinifySyntax = value
}
case isBoolFlag(arg, "--minify-whitespace"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.MinifyWhitespace = value
} else {
transformOpts.MinifyWhitespace = value
}
case isBoolFlag(arg, "--minify-identifiers"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.MinifyIdentifiers = value
} else {
transformOpts.MinifyIdentifiers = value
}
case isBoolFlag(arg, "--mangle-quoted"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
var mangleQuoted *api.MangleQuoted
if buildOpts != nil {
mangleQuoted = &buildOpts.MangleQuoted
} else {
mangleQuoted = &transformOpts.MangleQuoted
}
if value {
*mangleQuoted = api.MangleQuotedTrue
} else {
*mangleQuoted = api.MangleQuotedFalse
}
}
case strings.HasPrefix(arg, "--mangle-props="):
value := arg[len("--mangle-props="):]
if buildOpts != nil {
buildOpts.MangleProps = value
} else {
transformOpts.MangleProps = value
}
case strings.HasPrefix(arg, "--reserve-props="):
value := arg[len("--reserve-props="):]
if buildOpts != nil {
buildOpts.ReserveProps = value
} else {
transformOpts.ReserveProps = value
}
case strings.HasPrefix(arg, "--mangle-cache=") && buildOpts != nil && kind == kindInternal:
value := arg[len("--mangle-cache="):]
extras.mangleCache = &value
case strings.HasPrefix(arg, "--drop:"):
value := arg[len("--drop:"):]
switch value {
case "console":
if buildOpts != nil {
buildOpts.Drop |= api.DropConsole
} else {
transformOpts.Drop |= api.DropConsole
}
case "debugger":
if buildOpts != nil {
buildOpts.Drop |= api.DropDebugger
} else {
transformOpts.Drop |= api.DropDebugger
}
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"console\" or \"debugger\".",
)
}
case strings.HasPrefix(arg, "--drop-labels="):
if buildOpts != nil {
buildOpts.DropLabels = splitWithEmptyCheck(arg[len("--drop-labels="):], ",")
} else {
transformOpts.DropLabels = splitWithEmptyCheck(arg[len("--drop-labels="):], ",")
}
case strings.HasPrefix(arg, "--legal-comments="):
value := arg[len("--legal-comments="):]
var legalComments api.LegalComments
switch value {
case "none":
legalComments = api.LegalCommentsNone
case "inline":
legalComments = api.LegalCommentsInline
case "eof":
legalComments = api.LegalCommentsEndOfFile
case "linked":
legalComments = api.LegalCommentsLinked
case "external":
legalComments = api.LegalCommentsExternal
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"none\", \"inline\", \"eof\", \"linked\", or \"external\".",
)
}
if buildOpts != nil {
buildOpts.LegalComments = legalComments
} else {
transformOpts.LegalComments = legalComments
}
case strings.HasPrefix(arg, "--charset="):
var value *api.Charset
if buildOpts != nil {
value = &buildOpts.Charset
} else {
value = &transformOpts.Charset
}
name := arg[len("--charset="):]
switch name {
case "ascii":
*value = api.CharsetASCII
case "utf8":
*value = api.CharsetUTF8
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", name, arg),
"Valid values are \"ascii\" or \"utf8\".",
)
}
case isBoolFlag(arg, "--tree-shaking"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
var treeShaking *api.TreeShaking
if buildOpts != nil {
treeShaking = &buildOpts.TreeShaking
} else {
treeShaking = &transformOpts.TreeShaking
}
if value {
*treeShaking = api.TreeShakingTrue
} else {
*treeShaking = api.TreeShakingFalse
}
}
case isBoolFlag(arg, "--ignore-annotations"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.IgnoreAnnotations = value
} else {
transformOpts.IgnoreAnnotations = value
}
case isBoolFlag(arg, "--keep-names"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.KeepNames = value
} else {
transformOpts.KeepNames = value
}
case arg == "--sourcemap":
if buildOpts != nil {
buildOpts.Sourcemap = api.SourceMapLinked
} else {
transformOpts.Sourcemap = api.SourceMapInline
}
hasBareSourceMapFlag = true
case strings.HasPrefix(arg, "--sourcemap="):
value := arg[len("--sourcemap="):]
var sourcemap api.SourceMap
switch value {
case "linked":
sourcemap = api.SourceMapLinked
case "inline":
sourcemap = api.SourceMapInline
case "external":
sourcemap = api.SourceMapExternal
case "both":
sourcemap = api.SourceMapInlineAndExternal
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"linked\", \"inline\", \"external\", or \"both\".",
)
}
if buildOpts != nil {
buildOpts.Sourcemap = sourcemap
} else {
transformOpts.Sourcemap = sourcemap
}
hasBareSourceMapFlag = false
case strings.HasPrefix(arg, "--source-root="):
sourceRoot := arg[len("--source-root="):]
if buildOpts != nil {
buildOpts.SourceRoot = sourceRoot
} else {
transformOpts.SourceRoot = sourceRoot
}
case isBoolFlag(arg, "--sources-content"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
var sourcesContent *api.SourcesContent
if buildOpts != nil {
sourcesContent = &buildOpts.SourcesContent
} else {
sourcesContent = &transformOpts.SourcesContent
}
if value {
*sourcesContent = api.SourcesContentInclude
} else {
*sourcesContent = api.SourcesContentExclude
}
}
case strings.HasPrefix(arg, "--sourcefile="):
if buildOpts != nil {
if buildOpts.Stdin == nil {
buildOpts.Stdin = &api.StdinOptions{}
}
buildOpts.Stdin.Sourcefile = arg[len("--sourcefile="):]
} else {
transformOpts.Sourcefile = arg[len("--sourcefile="):]
}
case strings.HasPrefix(arg, "--resolve-extensions=") && buildOpts != nil:
buildOpts.ResolveExtensions = splitWithEmptyCheck(arg[len("--resolve-extensions="):], ",")
case strings.HasPrefix(arg, "--main-fields=") && buildOpts != nil:
buildOpts.MainFields = splitWithEmptyCheck(arg[len("--main-fields="):], ",")
case strings.HasPrefix(arg, "--conditions=") && buildOpts != nil:
buildOpts.Conditions = splitWithEmptyCheck(arg[len("--conditions="):], ",")
case strings.HasPrefix(arg, "--public-path=") && buildOpts != nil:
buildOpts.PublicPath = arg[len("--public-path="):]
case strings.HasPrefix(arg, "--global-name="):
if buildOpts != nil {
buildOpts.GlobalName = arg[len("--global-name="):]
} else {
transformOpts.GlobalName = arg[len("--global-name="):]
}
case arg == "--metafile" && buildOpts != nil && kind == kindExternal:
buildOpts.Metafile = true
case strings.HasPrefix(arg, "--metafile=") && buildOpts != nil && kind == kindInternal:
value := arg[len("--metafile="):]
buildOpts.Metafile = true
extras.metafile = &value
case strings.HasPrefix(arg, "--outfile=") && buildOpts != nil:
buildOpts.Outfile = arg[len("--outfile="):]
case strings.HasPrefix(arg, "--outdir=") && buildOpts != nil:
buildOpts.Outdir = arg[len("--outdir="):]
case strings.HasPrefix(arg, "--outbase=") && buildOpts != nil:
buildOpts.Outbase = arg[len("--outbase="):]
case strings.HasPrefix(arg, "--tsconfig=") && buildOpts != nil:
buildOpts.Tsconfig = arg[len("--tsconfig="):]
case strings.HasPrefix(arg, "--tsconfig-raw="):
if buildOpts != nil {
buildOpts.TsconfigRaw = arg[len("--tsconfig-raw="):]
} else {
transformOpts.TsconfigRaw = arg[len("--tsconfig-raw="):]
}
case strings.HasPrefix(arg, "--entry-names=") && buildOpts != nil:
buildOpts.EntryNames = arg[len("--entry-names="):]
case strings.HasPrefix(arg, "--chunk-names=") && buildOpts != nil:
buildOpts.ChunkNames = arg[len("--chunk-names="):]
case strings.HasPrefix(arg, "--asset-names=") && buildOpts != nil:
buildOpts.AssetNames = arg[len("--asset-names="):]
case strings.HasPrefix(arg, "--define:"):
value := arg[len("--define:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use \"=\" to specify both the original value and the replacement value. "+
"For example, \"--define:DEBUG=true\" replaces \"DEBUG\" with \"true\".",
)
}
if buildOpts != nil {
buildOpts.Define[value[:equals]] = value[equals+1:]
} else {
transformOpts.Define[value[:equals]] = value[equals+1:]
}
case strings.HasPrefix(arg, "--log-override:"):
value := arg[len("--log-override:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use \"=\" to specify both the message name and the log level. "+
"For example, \"--log-override:css-syntax-error=error\" turns all \"css-syntax-error\" log messages into errors.",
)
}
logLevel, err := parseLogLevel(value[equals+1:], arg)
if err != nil {
return parseOptionsExtras{}, err
}
if buildOpts != nil {
buildOpts.LogOverride[value[:equals]] = logLevel
} else {
transformOpts.LogOverride[value[:equals]] = logLevel
}
case strings.HasPrefix(arg, "--abs-paths="):
values := splitWithEmptyCheck(arg[len("--abs-paths="):], ",")
var absPaths api.AbsPaths
for _, value := range values {
switch value {
case "code":
absPaths |= api.CodeAbsPath
case "log":
absPaths |= api.LogAbsPath
case "metafile":
absPaths |= api.MetafileAbsPath
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"code\", \"log\", or \"metafile\".",
)
}
}
if buildOpts != nil {
buildOpts.AbsPaths = absPaths
} else {
transformOpts.AbsPaths = absPaths
}
case strings.HasPrefix(arg, "--supported:"):
value := arg[len("--supported:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use \"=\" to specify both the name of the feature and whether it is supported or not. "+
"For example, \"--supported:arrow=false\" marks arrow functions as unsupported.",
)
}
if isSupported, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.Supported[value[:equals]] = isSupported
} else {
transformOpts.Supported[value[:equals]] = isSupported
}
case strings.HasPrefix(arg, "--pure:"):
value := arg[len("--pure:"):]
if buildOpts != nil {
buildOpts.Pure = append(buildOpts.Pure, value)
} else {
transformOpts.Pure = append(transformOpts.Pure, value)
}
case strings.HasPrefix(arg, "--loader:") && buildOpts != nil:
value := arg[len("--loader:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to specify the file extension that the loader applies to. "+
"For example, \"--loader:.js=jsx\" applies the \"jsx\" loader to files with the \".js\" extension.",
)
}
ext, text := value[:equals], value[equals+1:]
loader, err := cli_helpers.ParseLoader(text)
if err != nil {
return parseOptionsExtras{}, err
}
buildOpts.Loader[ext] = loader
case strings.HasPrefix(arg, "--loader="):
value := arg[len("--loader="):]
loader, err := cli_helpers.ParseLoader(value)
if err != nil {
return parseOptionsExtras{}, err
}
if loader == api.LoaderFile || loader == api.LoaderCopy {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("%q is not supported when transforming stdin", arg),
fmt.Sprintf("Using esbuild to transform stdin only generates one output file, so you cannot use the %q loader "+
"since that needs to generate two output files.", value),
)
}
if buildOpts != nil {
if buildOpts.Stdin == nil {
buildOpts.Stdin = &api.StdinOptions{}
}
buildOpts.Stdin.Loader = loader
} else {
transformOpts.Loader = loader
}
case strings.HasPrefix(arg, "--target="):
target, engines, err := parseTargets(splitWithEmptyCheck(arg[len("--target="):], ","), arg)
if err != nil {
return parseOptionsExtras{}, err
}
if buildOpts != nil {
buildOpts.Target = target
buildOpts.Engines = engines
} else {
transformOpts.Target = target
transformOpts.Engines = engines
}
case strings.HasPrefix(arg, "--out-extension:") && buildOpts != nil:
value := arg[len("--out-extension:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use either \"--out-extension:.js=...\" or \"--out-extension:.css=...\" "+
"to specify the file type that the output extension applies to .",
)
}
if buildOpts.OutExtension == nil {
buildOpts.OutExtension = make(map[string]string)
}
buildOpts.OutExtension[value[:equals]] = value[equals+1:]
case strings.HasPrefix(arg, "--platform="):
value := arg[len("--platform="):]
var platform api.Platform
switch value {
case "browser":
platform = api.PlatformBrowser
case "node":
platform = api.PlatformNode
case "neutral":
platform = api.PlatformNeutral
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"browser\", \"node\", or \"neutral\".",
)
}
if buildOpts != nil {
buildOpts.Platform = platform
} else {
transformOpts.Platform = platform
}
case strings.HasPrefix(arg, "--format="):
value := arg[len("--format="):]
var format api.Format
switch value {
case "iife":
format = api.FormatIIFE
case "cjs":
format = api.FormatCommonJS
case "esm":
format = api.FormatESModule
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"iife\", \"cjs\", or \"esm\".",
)
}
if buildOpts != nil {
buildOpts.Format = format
} else {
transformOpts.Format = format
}
case strings.HasPrefix(arg, "--packages=") && buildOpts != nil:
value := arg[len("--packages="):]
var packages api.Packages
switch value {
case "bundle":
packages = api.PackagesBundle
case "external":
packages = api.PackagesExternal
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"bundle\" or \"external\".",
)
}
buildOpts.Packages = packages
case strings.HasPrefix(arg, "--external:") && buildOpts != nil:
buildOpts.External = append(buildOpts.External, arg[len("--external:"):])
case strings.HasPrefix(arg, "--inject:") && buildOpts != nil:
buildOpts.Inject = append(buildOpts.Inject, arg[len("--inject:"):])
case strings.HasPrefix(arg, "--alias:") && buildOpts != nil:
value := arg[len("--alias:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use \"=\" to specify both the original package name and the replacement package name. "+
"For example, \"--alias:old=new\" replaces package \"old\" with package \"new\".",
)
}
if buildOpts.Alias == nil {
buildOpts.Alias = make(map[string]string)
}
buildOpts.Alias[value[:equals]] = value[equals+1:]
case strings.HasPrefix(arg, "--jsx="):
value := arg[len("--jsx="):]
var mode api.JSX
switch value {
case "transform":
mode = api.JSXTransform
case "preserve":
mode = api.JSXPreserve
case "automatic":
mode = api.JSXAutomatic
default:
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"Valid values are \"transform\", \"automatic\", or \"preserve\".",
)
}
if buildOpts != nil {
buildOpts.JSX = mode
} else {
transformOpts.JSX = mode
}
case strings.HasPrefix(arg, "--jsx-factory="):
value := arg[len("--jsx-factory="):]
if buildOpts != nil {
buildOpts.JSXFactory = value
} else {
transformOpts.JSXFactory = value
}
case strings.HasPrefix(arg, "--jsx-fragment="):
value := arg[len("--jsx-fragment="):]
if buildOpts != nil {
buildOpts.JSXFragment = value
} else {
transformOpts.JSXFragment = value
}
case strings.HasPrefix(arg, "--jsx-import-source="):
value := arg[len("--jsx-import-source="):]
if buildOpts != nil {
buildOpts.JSXImportSource = value
} else {
transformOpts.JSXImportSource = value
}
case isBoolFlag(arg, "--jsx-dev"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.JSXDev = value
} else {
transformOpts.JSXDev = value
}
case isBoolFlag(arg, "--jsx-side-effects"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else if buildOpts != nil {
buildOpts.JSXSideEffects = value
} else {
transformOpts.JSXSideEffects = value
}
case strings.HasPrefix(arg, "--banner=") && transformOpts != nil:
transformOpts.Banner = arg[len("--banner="):]
case strings.HasPrefix(arg, "--footer=") && transformOpts != nil:
transformOpts.Footer = arg[len("--footer="):]
case strings.HasPrefix(arg, "--banner:") && buildOpts != nil:
value := arg[len("--banner:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use either \"--banner:js=...\" or \"--banner:css=...\" to specify the language that the banner applies to.",
)
}
buildOpts.Banner[value[:equals]] = value[equals+1:]
case strings.HasPrefix(arg, "--footer:") && buildOpts != nil:
value := arg[len("--footer:"):]
equals := strings.IndexByte(value, '=')
if equals == -1 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Missing \"=\" in %q", arg),
"You need to use either \"--footer:js=...\" or \"--footer:css=...\" to specify the language that the footer applies to.",
)
}
buildOpts.Footer[value[:equals]] = value[equals+1:]
case strings.HasPrefix(arg, "--log-limit="):
value := arg[len("--log-limit="):]
limit, err := strconv.Atoi(value)
if err != nil || limit < 0 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"The log limit must be a non-negative integer.",
)
}
if buildOpts != nil {
buildOpts.LogLimit = limit
} else {
transformOpts.LogLimit = limit
}
case strings.HasPrefix(arg, "--line-limit="):
value := arg[len("--line-limit="):]
limit, err := strconv.Atoi(value)
if err != nil || limit < 0 {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid value %q in %q", value, arg),
"The line limit must be a non-negative integer.",
)
}
if buildOpts != nil {
buildOpts.LineLimit = limit
} else {
transformOpts.LineLimit = limit
}
// Make sure this stays in sync with "PrintErrorToStderr"
case isBoolFlag(arg, "--color"):
if value, err := parseBoolFlag(arg, true); err != nil {
return parseOptionsExtras{}, err
} else {
var color *api.StderrColor
if buildOpts != nil {
color = &buildOpts.Color
} else {
color = &transformOpts.Color
}
if value {
*color = api.ColorAlways
} else {
*color = api.ColorNever
}
}
// Make sure this stays in sync with "PrintErrorToStderr"
case strings.HasPrefix(arg, "--log-level="):
value := arg[len("--log-level="):]
logLevel, err := parseLogLevel(value, arg)
if err != nil {
return parseOptionsExtras{}, err
}
if buildOpts != nil {
buildOpts.LogLevel = logLevel
} else {
transformOpts.LogLevel = logLevel
}
case strings.HasPrefix(arg, "'--"):
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Unexpected single quote character before flag: %s", arg),
"This typically happens when attempting to use single quotes to quote arguments with a shell that doesn't recognize single quotes. "+
"Try using double quote characters to quote arguments instead.",
)
case !strings.HasPrefix(arg, "-") && buildOpts != nil:
if equals := strings.IndexByte(arg, '='); equals != -1 {
buildOpts.EntryPointsAdvanced = append(buildOpts.EntryPointsAdvanced, api.EntryPoint{
OutputPath: arg[:equals],
InputPath: arg[equals+1:],
})
} else {
buildOpts.EntryPoints = append(buildOpts.EntryPoints, arg)
}
default:
bare := map[string]bool{
"allow-overwrite": true,
"bundle": true,
"ignore-annotations": true,
"jsx-dev": true,
"jsx-side-effects": true,
"keep-names": true,
"minify-identifiers": true,
"minify-syntax": true,
"minify-whitespace": true,
"minify": true,
"preserve-symlinks": true,
"sourcemap": true,
"splitting": true,
"watch": true,
}
equals := map[string]bool{
"abs-paths": true,
"allow-overwrite": true,
"asset-names": true,
"banner": true,
"bundle": true,
"certfile": true,
"charset": true,
"chunk-names": true,
"color": true,
"conditions": true,
"cors-origin": true,
"drop-labels": true,
"entry-names": true,
"footer": true,
"format": true,
"global-name": true,
"ignore-annotations": true,
"jsx-factory": true,
"jsx-fragment": true,
"jsx-import-source": true,
"jsx": true,
"keep-names": true,
"keyfile": true,
"legal-comments": true,
"loader": true,
"log-level": true,
"log-limit": true,
"main-fields": true,
"mangle-cache": true,
"mangle-props": true,
"mangle-quoted": true,
"metafile": true,
"minify-identifiers": true,
"minify-syntax": true,
"minify-whitespace": true,
"minify": true,
"outbase": true,
"outdir": true,
"outfile": true,
"packages": true,
"platform": true,
"preserve-symlinks": true,
"public-path": true,
"reserve-props": true,
"resolve-extensions": true,
"serve-fallback": true,
"serve": true,
"servedir": true,
"source-root": true,
"sourcefile": true,
"sourcemap": true,
"sources-content": true,
"splitting": true,
"target": true,
"tree-shaking": true,
"tsconfig-raw": true,
"tsconfig": true,
"watch": true,
"watch-delay": true,
}
colon := map[string]bool{
"alias": true,
"banner": true,
"define": true,
"drop": true,
"external": true,
"footer": true,
"inject": true,
"loader": true,
"log-override": true,
"out-extension": true,
"pure": true,
"supported": true,
}
note := ""
// Try to provide helpful hints when we can recognize the mistake
switch {
case arg == "-o":
note = "Use \"--outfile=\" to configure the output file instead of \"-o\"."
case arg == "-v":
note = "Use \"--log-level=verbose\" to generate verbose logs instead of \"-v\"."
case strings.HasPrefix(arg, "--"):
if i := strings.IndexByte(arg, '='); i != -1 && colon[arg[2:i]] {
note = fmt.Sprintf("Use %q instead of %q. Flags that can be re-specified multiple times use \":\" instead of \"=\".",
arg[:i]+":"+arg[i+1:], arg)
}
if i := strings.IndexByte(arg, ':'); i != -1 && equals[arg[2:i]] {
note = fmt.Sprintf("Use %q instead of %q. Flags that can only be specified once use \"=\" instead of \":\".",
arg[:i]+"="+arg[i+1:], arg)
}
case strings.HasPrefix(arg, "-"):
isValid := bare[arg[1:]]
fix := "-" + arg
if i := strings.IndexByte(arg, '='); i != -1 && equals[arg[1:i]] {
isValid = true
} else if i != -1 && colon[arg[1:i]] {
isValid = true
fix = fmt.Sprintf("-%s:%s", arg[:i], arg[i+1:])
} else if i := strings.IndexByte(arg, ':'); i != -1 && colon[arg[1:i]] {
isValid = true
} else if i != -1 && equals[arg[1:i]] {
isValid = true
fix = fmt.Sprintf("-%s=%s", arg[:i], arg[i+1:])
}
if isValid {
note = fmt.Sprintf("Use %q instead of %q. Flags are always specified with two dashes instead of one dash.",
fix, arg)
}
}
if buildOpts != nil {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(fmt.Sprintf("Invalid build flag: %q", arg), note)
} else {
return parseOptionsExtras{}, cli_helpers.MakeErrorWithNote(fmt.Sprintf("Invalid transform flag: %q", arg), note)
}
}
}
// If we're building, the last source map flag is "--sourcemap", and there
// is no output path, change the source map option to "inline" because we're
// going to be writing to stdout which can only represent a single file.
if buildOpts != nil && hasBareSourceMapFlag && buildOpts.Outfile == "" && buildOpts.Outdir == "" {
buildOpts.Sourcemap = api.SourceMapInline
}
return
}
func parseTargets(targets []string, arg string) (target api.Target, engines []api.Engine, err *cli_helpers.ErrorWithNote) {
validTargets := map[string]api.Target{
"esnext": api.ESNext,
"es5": api.ES5,
"es6": api.ES2015,
"es2015": api.ES2015,
"es2016": api.ES2016,
"es2017": api.ES2017,
"es2018": api.ES2018,
"es2019": api.ES2019,
"es2020": api.ES2020,
"es2021": api.ES2021,
"es2022": api.ES2022,
"es2023": api.ES2023,
"es2024": api.ES2024,
}
outer:
for _, value := range targets {
if valid, ok := validTargets[strings.ToLower(value)]; ok {
target = valid
continue
}
for engine, name := range validEngines {
if strings.HasPrefix(value, engine) {
version := value[len(engine):]
if version == "" {
return 0, nil, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Target %q is missing a version number in %q", value, arg),
"",
)
}
engines = append(engines, api.Engine{Name: name, Version: version})
continue outer
}
}
engines := make([]string, 0, len(validEngines))
engines = append(engines, "\"esN\"")
for key := range validEngines {
engines = append(engines, fmt.Sprintf("%q", key+"N"))
}
sort.Strings(engines)
return 0, nil, cli_helpers.MakeErrorWithNote(
fmt.Sprintf("Invalid target %q in %q", value, arg),
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/cli/cli.go | pkg/cli/cli.go | // This API exposes the command-line interface for esbuild. It can be used to
// run esbuild from Go without the overhead of creating a child process.
//
// Example usage:
//
// package main
//
// import (
// "os"
//
// "github.com/evanw/esbuild/pkg/cli"
// )
//
// func main() {
// os.Exit(cli.Run(os.Args[1:]))
// }
package cli
import (
"errors"
"github.com/evanw/esbuild/pkg/api"
)
// Run invokes the esbuild CLI. It takes an array of command-line arguments
// (excluding the executable argument itself) and returns an exit code. There
// are some minor differences between this CLI and the actual "esbuild"
// executable such as the lack of auxiliary flags (e.g. "--help" and
// "--version") but it is otherwise exactly the same code.
func Run(osArgs []string) int {
	// Run with an empty plugin list; see RunWithPlugins to supply plugins.
	return runImpl(osArgs, []api.Plugin{})
}
// RunWithPlugins is similar to Run but also takes an array of plugins to be
// used during the build process.
func RunWithPlugins(osArgs []string, plugins []api.Plugin) int {
	return runImpl(osArgs, plugins)
}
// ParseBuildOptions parses an array of strings into an options object
// suitable for passing to "api.Build()". Use this if you need to reuse the
// same argument parsing logic as the esbuild CLI.
//
// Example usage:
//
//	options, err := cli.ParseBuildOptions([]string{
//		"input.js",
//		"--bundle",
//		"--minify",
//	})
//
//	result := api.Build(options)
func ParseBuildOptions(osArgs []string) (options api.BuildOptions, err error) {
	options = newBuildOptions()
	if _, errWithNote := parseOptionsImpl(osArgs, &options, nil, kindExternal); errWithNote != nil {
		// Flatten the rich CLI error into a plain error for the public API
		err = errors.New(errWithNote.Text)
	}
	return
}
// ParseTransformOptions parses an array of strings into an options object
// suitable for passing to "api.Transform()". Use this if you need to reuse
// the same argument parsing logic as the esbuild CLI.
//
// Example usage:
//
//	options, err := cli.ParseTransformOptions([]string{
//		"--minify",
//		"--loader=tsx",
//		"--define:DEBUG=false",
//	})
//
//	result := api.Transform(input, options)
func ParseTransformOptions(osArgs []string) (options api.TransformOptions, err error) {
	options = newTransformOptions()
	if _, errWithNote := parseOptionsImpl(osArgs, nil, &options, kindExternal); errWithNote != nil {
		// Flatten the rich CLI error into a plain error for the public API
		err = errors.New(errWithNote.Text)
	}
	return
}
// ParseServeOptions parses an array of strings into an options object
// suitable for passing to "api.Serve()". The remaining non-serve arguments
// are returned in another array to then be passed to "ParseBuildOptions()".
// Use this if you need to reuse the same argument parsing logic as the
// esbuild CLI.
//
// Example usage:
//
//	serveOptions, args, err := cli.ParseServeOptions([]string{
//		"--serve=8000",
//	})
//
//	buildOptions, err := cli.ParseBuildOptions(args)
//
//	result := api.Serve(serveOptions, buildOptions)
func ParseServeOptions(osArgs []string) (options api.ServeOptions, remainingArgs []string, err error) {
	return parseServeOptionsImpl(osArgs)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/pkg/cli/mangle_cache.go | pkg/cli/mangle_cache.go | package cli
// The mangle cache is a JSON file that remembers esbuild's property renaming
// decisions. It's a flat map where the keys are strings and the values are
// either strings or the boolean value "false". This is the case both in JSON
// and in Go (so the "interface{}" values are also either strings or "false").
import (
"fmt"
"sort"
"strings"
"syscall"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_lexer"
"github.com/evanw/esbuild/internal/js_parser"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/resolver"
)
// parseMangleCache reads and validates the mangle cache JSON file at absPath.
// On success it returns the cache as a flat map (values are strings or the
// bool "false") plus the keys in their original file order so the file can be
// rewritten preserving that order. A missing file is not an error and yields
// an empty cache. Any other problem is logged to stderr and (nil, nil) is
// returned so the caller can avoid overwriting a file it couldn't parse.
func parseMangleCache(osArgs []string, fs fs.FS, absPath string) (map[string]interface{}, []string) {
	// Log problems with the mangle cache to stderr
	log := logger.NewStderrLog(logger.OutputOptionsForArgs(osArgs))
	defer log.Done()

	// Try to read the existing file
	prettyPath := absPath
	if rel, ok := fs.Rel(fs.Cwd(), absPath); ok {
		prettyPath = rel
	}
	// Normalize to forward slashes for display purposes
	prettyPath = strings.ReplaceAll(prettyPath, "\\", "/")
	bytes, err, originalError := fs.ReadFile(absPath)
	if err != nil {
		// It's ok if it's just missing
		if err == syscall.ENOENT {
			return make(map[string]interface{}), []string{}
		}

		// Otherwise, report the error
		log.AddError(nil, logger.Range{},
			fmt.Sprintf("Failed to read from mangle cache file %q: %s", prettyPath, originalError.Error()))
		return nil, nil
	}

	// Use our JSON parser so we get pretty-printed error messages
	keyPath := logger.Path{Text: absPath, Namespace: "file"}
	source := logger.Source{
		KeyPath:     keyPath,
		PrettyPaths: resolver.MakePrettyPaths(fs, keyPath),
		Contents:    string(bytes),
	}
	result, ok := js_parser.ParseJSON(log, source, js_parser.JSONOptions{})
	if !ok || log.HasErrors() {
		// Stop if there were any errors so we don't continue and then overwrite this file
		return nil, nil
	}
	tracker := logger.MakeLineColumnTracker(&source)

	// Validate the top-level object
	root, ok := result.Data.(*js_ast.EObject)
	if !ok {
		log.AddError(&tracker, logger.Range{Loc: result.Loc},
			"Expected a top-level object in mangle cache file")
		return nil, nil
	}

	mangleCache := make(map[string]interface{}, len(root.Properties))
	order := make([]string, 0, len(root.Properties))

	// Each property must map to either a string (the rename) or "false"
	// (meaning "do not rename this property")
	for _, property := range root.Properties {
		key := helpers.UTF16ToString(property.Key.Data.(*js_ast.EString).Value)
		order = append(order, key)

		switch v := property.ValueOrNil.Data.(type) {
		case *js_ast.EBoolean:
			if v.Value {
				// "true" is invalid; only "false" carries meaning here
				log.AddError(&tracker, js_lexer.RangeOfIdentifier(source, property.ValueOrNil.Loc),
					fmt.Sprintf("Expected %q in mangle cache file to map to either a string or false", key))
			} else {
				mangleCache[key] = false
			}

		case *js_ast.EString:
			mangleCache[key] = helpers.UTF16ToString(v.Value)

		default:
			log.AddError(&tracker, logger.Range{Loc: property.ValueOrNil.Loc},
				fmt.Sprintf("Expected %q in mangle cache file to map to either a string or false", key))
		}
	}

	// Invalid values were logged above; refuse to return a partial cache
	if log.HasErrors() {
		return nil, nil
	}
	return mangleCache, order
}
// printMangleCache serializes the mangle cache to pretty-printed JSON bytes.
// Keys that were present when the file was read keep their positions. Keys
// added since then are appended at the end in sorted order, unless the file
// was already fully sorted, in which case the whole output is re-sorted.
func printMangleCache(mangleCache map[string]interface{}, originalOrder []string, asciiOnly bool) []byte {
	// First decide what order to print the keys in
	keys := originalOrder
	if len(mangleCache) > len(keys) {
		keys = make([]string, 0, len(mangleCache))
		if sort.StringsAreSorted(originalOrder) {
			// The file was sorted before, so keep the whole thing sorted
			for key := range mangleCache {
				keys = append(keys, key)
			}
			sort.Strings(keys)
		} else {
			// Otherwise preserve the original positions and append new keys
			// at the end, sorting only the appended tail
			known := make(map[string]bool, len(originalOrder))
			for _, key := range originalOrder {
				known[key] = true
			}
			keys = append(keys, originalOrder...)
			for key := range mangleCache {
				if !known[key] {
					keys = append(keys, key)
				}
			}
			sort.Strings(keys[len(originalOrder):])
		}
	}

	// Then emit the JSON in that order
	j := helpers.Joiner{}
	j.AddString("{")
	for i, key := range keys {
		if i == 0 {
			j.AddString("\n  ")
		} else {
			j.AddString(",\n  ")
		}
		j.AddBytes(helpers.QuoteForJSON(key, asciiOnly))
		if value := mangleCache[key]; value != false {
			j.AddString(": ")
			j.AddBytes(helpers.QuoteForJSON(value.(string), asciiOnly))
		} else {
			j.AddString(": false")
		}
	}
	if len(keys) > 0 {
		j.AddString("\n")
	}
	j.AddString("}\n")
	return j.Done()
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/cmd/esbuild/service.go | cmd/esbuild/service.go | // This implements a simple long-running service over stdin/stdout. Each
// incoming request is an array of strings, and each outgoing response is a map
// of strings to byte slices. All values are length-prefixed using 32-bit
// little endian integers.
package main
import (
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"regexp"
"sync"
"time"
"github.com/evanw/esbuild/internal/cli_helpers"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/pkg/api"
"github.com/evanw/esbuild/pkg/cli"
)
// responseCallback consumes the decoded payload of a response packet that
// matches a previously-sent request.
type responseCallback func(interface{})

// pluginResolveCallback services a "resolve" request for one build: it takes
// the request id and the decoded request map and returns an encoded response
// packet ready to be written to stdout.
type pluginResolveCallback func(uint32, map[string]interface{}) []byte
// activeBuild holds the per-build state the service tracks between packets:
// the build context plus the bookkeeping needed to coordinate "rebuild",
// "cancel", and "dispose" requests that may race with each other.
type activeBuild struct {
	ctx              api.BuildContext      // Set to nil when "dispose" starts, which fails future calls
	pluginResolve    pluginResolveCallback // Handles "resolve" requests for this build's plugins
	mutex            sync.Mutex
	disposeWaitGroup sync.WaitGroup // Allows "dispose" to wait for all active tasks

	// These are guarded by the mutex
	rebuildWaitGroup   *sync.WaitGroup // Allows "cancel" to wait for all active rebuilds (within mutex because "sync.WaitGroup" isn't thread-safe)
	withinRebuildCount int             // Number of "rebuild" requests currently in flight
	didGetCancel       bool            // Set when "cancel" may have raced ahead of a pending rebuild
}
// serviceType is the state of the long-running service: callbacks awaiting
// responses, the set of active builds, and the channel that serializes all
// writes to stdout.
type serviceType struct {
	callbacks          map[uint32]responseCallback // Pending response handlers keyed by request id (guarded by mutex)
	activeBuilds       map[int]*activeBuild        // Build state keyed by the host-assigned build key (guarded by mutex)
	outgoingPackets    chan []byte                 // Always use "sendPacket" instead of sending on this channel
	keepAliveWaitGroup *helpers.ThreadSafeWaitGroup
	mutex              sync.Mutex
	nextRequestID      uint32 // Next outgoing request id to hand out (guarded by mutex)
}
// getActiveBuild returns the build state registered under "key", or nil if
// there is no such build. The map lookup is guarded by the service mutex.
func (service *serviceType) getActiveBuild(key int) *activeBuild {
	service.mutex.Lock()
	defer service.mutex.Unlock()
	return service.activeBuilds[key]
}
// createActiveBuild registers fresh build state under "key", which must not
// already be in use. It also bumps the keep-alive wait group so the service
// stays running until this build is destroyed.
func (service *serviceType) createActiveBuild(key int) *activeBuild {
	service.mutex.Lock()
	defer service.mutex.Unlock()
	if service.activeBuilds[key] != nil {
		panic("Internal error")
	}
	activeBuild := &activeBuild{}
	service.activeBuilds[key] = activeBuild

	// This pairs with "Done()" in "destroyActiveBuild"
	service.keepAliveWaitGroup.Add(1)
	return activeBuild
}
// destroyActiveBuild unregisters the build state for "key", which must exist,
// and releases the keep-alive reference taken by "createActiveBuild".
func (service *serviceType) destroyActiveBuild(key int) {
	service.mutex.Lock()
	if service.activeBuilds[key] == nil {
		panic("Internal error")
	}
	delete(service.activeBuilds, key)
	service.mutex.Unlock()

	// This pairs with "Add()" in "createActiveBuild"
	service.keepAliveWaitGroup.Done()
}
// runService runs the stdin/stdout service loop until stdin is closed. It
// reads length-prefixed packets from stdin, dispatches each one, and writes
// all responses through a single writer goroutine so packets never interleave.
// When "sendPings" is true it also periodically pings the host so that a
// vanished host is detected even while the service is otherwise idle.
func runService(sendPings bool) {
	logger.API = logger.JSAPI

	service := serviceType{
		callbacks:          make(map[uint32]responseCallback),
		activeBuilds:       make(map[int]*activeBuild),
		outgoingPackets:    make(chan []byte),
		keepAliveWaitGroup: helpers.MakeThreadSafeWaitGroup(),
	}
	buffer := make([]byte, 16*1024)
	stream := []byte{}

	// Write packets on a single goroutine so they aren't interleaved
	go func() {
		for packet := range service.outgoingPackets {
			if _, err := os.Stdout.Write(packet); err != nil {
				os.Exit(1) // I/O error
			}
			service.keepAliveWaitGroup.Done() // This pairs with the "Add()" when putting stuff into "outgoingPackets"
		}
	}()

	// The protocol always starts with the version
	os.Stdout.Write(append(writeUint32(nil, uint32(len(esbuildVersion))), esbuildVersion...))

	// Wait for the last response to be written to stdout before returning from
	// the enclosing function, which will return from "main()" and exit.
	service.keepAliveWaitGroup.Add(1)
	defer func() {
		service.keepAliveWaitGroup.Done()
		service.keepAliveWaitGroup.Wait()
	}()

	// Periodically ping the host even when we're idle. This will catch cases
	// where the host has disappeared and will never send us anything else but
	// we incorrectly think we are still needed. In that case we will now try
	// to write to stdout and fail, and then know that we should exit.
	if sendPings {
		go func() {
			for {
				time.Sleep(1 * time.Second)
				service.sendRequest(map[string]interface{}{
					"command": "ping",
				})
			}
		}()
	}

	for {
		// Read more data from stdin
		n, err := os.Stdin.Read(buffer)
		if n == 0 || err == io.EOF {
			break // End of stdin
		}
		if err != nil {
			panic(err)
		}
		stream = append(stream, buffer[:n]...)

		// Process all complete (i.e. not partial) packets
		bytes := stream
		for {
			packet, afterPacket, ok := readLengthPrefixedSlice(bytes)
			if !ok {
				break
			}
			bytes = afterPacket

			// Clone the input since slices into it may be used on another goroutine
			clone := append([]byte{}, packet...)
			service.handleIncomingPacket(clone)
		}

		// Move the remaining partial packet to the end to avoid reallocating
		stream = append(stream[:0], bytes...)
	}
}
// sendPacket queues an encoded packet for the writer goroutine. Each packet
// added to "outgoingPackets" must also add to the wait group so the service
// stays alive until the writer goroutine has flushed it to stdout.
func (service *serviceType) sendPacket(packet []byte) {
	service.keepAliveWaitGroup.Add(1) // The writer thread will call "Done()"
	service.outgoingPackets <- packet
}
// sendRequest sends a request to the host and blocks until the matching
// response has been received. It returns nil if the request could not be
// completed, which happens when stdin has been closed and the service is
// shutting down.
func (service *serviceType) sendRequest(request interface{}) interface{} {
	result := make(chan interface{})

	// Allocate a request id and register the handler that will receive the
	// response, all while holding the mutex
	service.mutex.Lock()
	id := service.nextRequestID
	service.nextRequestID++
	service.callbacks[id] = func(response interface{}) {
		result <- response
		close(result)
	}
	service.mutex.Unlock()

	service.sendPacket(encodePacket(packet{
		id:        id,
		isRequest: true,
		value:     request,
	}))
	return <-result
}
// This function deliberately processes incoming packets sequentially on the
// same goroutine as the caller. We want calling "dispose" on a context to take
// effect immediately and to fail all future calls on that context. We don't
// want "dispose" to accidentally be reordered after any future calls on that
// context, since those future calls are supposed to fail.
//
// If processing a packet could potentially take a while, then the remainder of
// the work should be run on another goroutine after decoding the command.
func (service *serviceType) handleIncomingPacket(bytes []byte) {
	p, ok := decodePacket(bytes)
	if !ok {
		// Silently drop packets that fail to decode
		return
	}

	// A response packet completes a request we sent earlier: look up its
	// registered callback by id and invoke it on a separate goroutine
	if !p.isRequest {
		service.mutex.Lock()
		callback := service.callbacks[p.id]
		delete(service.callbacks, p.id)
		service.mutex.Unlock()
		if callback == nil {
			panic(fmt.Sprintf("callback nil for id %d, value %v", p.id, p.value))
		}
		service.keepAliveWaitGroup.Add(1)
		go func() {
			defer service.keepAliveWaitGroup.Done()
			callback(p.value)
		}()
		return
	}

	// Handle the request
	request := p.value.(map[string]interface{})
	command := request["command"].(string)
	switch command {
	case "build":
		// Run a build (or create a build context) on another goroutine
		service.keepAliveWaitGroup.Add(1)
		go func() {
			defer service.keepAliveWaitGroup.Done()
			service.sendPacket(service.handleBuildRequest(p.id, request))
		}()

	case "transform":
		service.keepAliveWaitGroup.Add(1)
		go func() {
			defer service.keepAliveWaitGroup.Done()
			service.sendPacket(service.handleTransformRequest(p.id, request))
		}()

	case "resolve":
		// Run a plugin "resolve" query against an active build's context
		key := request["key"].(int)
		if build := service.getActiveBuild(key); build != nil {
			build.mutex.Lock()
			ctx := build.ctx
			pluginResolve := build.pluginResolve
			if ctx != nil && pluginResolve != nil {
				// Hold off "dispose" while this resolve is in flight
				build.disposeWaitGroup.Add(1)
			}
			build.mutex.Unlock()
			if pluginResolve != nil {
				service.keepAliveWaitGroup.Add(1)
				go func() {
					defer service.keepAliveWaitGroup.Done()
					if ctx != nil {
						defer build.disposeWaitGroup.Done()
					}
					service.sendPacket(pluginResolve(p.id, request))
				}()
				return
			}
		}
		service.sendPacket(encodePacket(packet{
			id: p.id,
			value: map[string]interface{}{
				"error": "Cannot call \"resolve\" on an inactive build",
			},
		}))

	case "rebuild":
		// Trigger a manual rebuild of an active build context
		key := request["key"].(int)
		if build := service.getActiveBuild(key); build != nil {
			build.mutex.Lock()
			ctx := build.ctx
			if ctx != nil {
				build.withinRebuildCount++
				if build.rebuildWaitGroup == nil {
					build.rebuildWaitGroup = &sync.WaitGroup{}
				}
				build.rebuildWaitGroup.Add(1)
				build.disposeWaitGroup.Add(1)
			}
			build.mutex.Unlock()
			if ctx != nil {
				service.keepAliveWaitGroup.Add(1)
				go func() {
					defer service.keepAliveWaitGroup.Done()
					defer build.disposeWaitGroup.Done()
					result := ctx.Rebuild()
					build.mutex.Lock()
					build.withinRebuildCount--
					build.rebuildWaitGroup.Done()
					if build.withinRebuildCount == 0 {
						// Clear the cancel flag now that the last rebuild has finished
						build.didGetCancel = false

						// Clear this to avoid confusion with the next group of rebuilds
						build.rebuildWaitGroup = nil
					}
					build.mutex.Unlock()
					service.sendPacket(encodePacket(packet{
						id: p.id,
						value: map[string]interface{}{
							"errors":   encodeMessages(result.Errors),
							"warnings": encodeMessages(result.Warnings),
						},
					}))
				}()
				return
			}
		}
		service.sendPacket(encodePacket(packet{
			id: p.id,
			value: map[string]interface{}{
				"error": "Cannot rebuild",
			},
		}))

	case "watch":
		// Enable watch mode on an active build context
		key := request["key"].(int)
		if build := service.getActiveBuild(key); build != nil {
			build.mutex.Lock()
			ctx := build.ctx
			if ctx != nil {
				build.disposeWaitGroup.Add(1)
			}
			build.mutex.Unlock()
			if ctx != nil {
				service.keepAliveWaitGroup.Add(1)
				go func() {
					defer service.keepAliveWaitGroup.Done()
					defer build.disposeWaitGroup.Done()
					var options api.WatchOptions
					if value, ok := request["delay"]; ok {
						options.Delay = value.(int)
					}
					if err := ctx.Watch(options); err != nil {
						service.sendPacket(encodeErrorPacket(p.id, err))
					} else {
						service.sendPacket(encodePacket(packet{
							id:    p.id,
							value: make(map[string]interface{}),
						}))
					}
				}()
				return
			}
		}
		service.sendPacket(encodePacket(packet{
			id: p.id,
			value: map[string]interface{}{
				"error": "Cannot watch",
			},
		}))

	case "serve":
		// Start the development server on an active build context
		key := request["key"].(int)
		if build := service.getActiveBuild(key); build != nil {
			build.mutex.Lock()
			ctx := build.ctx
			if ctx != nil {
				build.disposeWaitGroup.Add(1)
			}
			build.mutex.Unlock()
			if ctx != nil {
				service.keepAliveWaitGroup.Add(1)
				go func() {
					defer service.keepAliveWaitGroup.Done()
					defer build.disposeWaitGroup.Done()
					var options api.ServeOptions
					if value, ok := request["host"]; ok {
						options.Host = value.(string)
					}
					if value, ok := request["port"]; ok {
						if value == 0 {
							// 0 is the default value in Go, which we interpret as "try to
							// pick port 8000". So Go uses -1 as the sentinel value instead.
							options.Port = -1
						} else {
							options.Port = value.(int)
						}
					}
					if value, ok := request["servedir"]; ok {
						options.Servedir = value.(string)
					}
					if value, ok := request["keyfile"]; ok {
						options.Keyfile = value.(string)
					}
					if value, ok := request["certfile"]; ok {
						options.Certfile = value.(string)
					}
					if value, ok := request["fallback"]; ok {
						options.Fallback = value.(string)
					}
					if value, ok := request["corsOrigin"].([]interface{}); ok {
						for _, it := range value {
							options.CORS.Origin = append(options.CORS.Origin, it.(string))
						}
					}
					if request["onRequest"].(bool) {
						options.OnRequest = func(args api.ServeOnRequestArgs) {
							// This could potentially be called after we return from
							// "Dispose()". If it does, then make sure we don't call into
							// JavaScript because we'll get an error. Also make sure that
							// if we do call into JavaScript, we wait to call "Dispose()"
							// until JavaScript has returned back to us.
							build.mutex.Lock()
							ctx := build.ctx
							if ctx != nil {
								build.disposeWaitGroup.Add(1)
							}
							build.mutex.Unlock()
							if ctx != nil {
								service.sendRequest(map[string]interface{}{
									"command": "serve-request",
									"key":     key,
									"args": map[string]interface{}{
										"remoteAddress": args.RemoteAddress,
										"method":        args.Method,
										"path":          args.Path,
										"status":        args.Status,
										"timeInMS":      args.TimeInMS,
									},
								})
								build.disposeWaitGroup.Done()
							}
						}
					}
					if result, err := ctx.Serve(options); err != nil {
						service.sendPacket(encodeErrorPacket(p.id, err))
					} else {
						hosts := make([]interface{}, len(result.Hosts))
						for i, host := range result.Hosts {
							hosts[i] = host
						}
						service.sendPacket(encodePacket(packet{
							id: p.id,
							value: map[string]interface{}{
								"port":  int(result.Port),
								"hosts": hosts,
							},
						}))
					}
				}()
				return
			}
		}
		service.sendPacket(encodePacket(packet{
			id: p.id,
			value: map[string]interface{}{
				"error": "Cannot serve",
			},
		}))

	case "cancel":
		// Cancel any in-flight rebuild on an active build context
		key := request["key"].(int)
		if build := service.getActiveBuild(key); build != nil {
			build.mutex.Lock()
			ctx := build.ctx
			rebuildWaitGroup := build.rebuildWaitGroup
			if build.withinRebuildCount > 0 {
				// If Go got a "rebuild" message from JS before this, there's a chance
				// that Go hasn't run "ctx.Rebuild()" by the time our "ctx.Cancel()"
				// runs below because both of them are on separate goroutines. To
				// handle this, we set this flag to tell our "OnStart" plugin to cancel
				// the build in case things happen in that order.
				build.didGetCancel = true
			}
			build.mutex.Unlock()
			if ctx != nil {
				service.keepAliveWaitGroup.Add(1)
				go func() {
					defer service.keepAliveWaitGroup.Done()
					ctx.Cancel()

					// Block until all manual rebuilds that were active at the time the
					// "cancel" packet was originally processed have finished. That way
					// JS can wait for "cancel" to end and be assured that it can call
					// "rebuild" and have it not merge with any other ongoing rebuilds.
					if rebuildWaitGroup != nil {
						rebuildWaitGroup.Wait()
					}

					// Only return control to JavaScript once the cancel operation has succeeded
					service.sendPacket(encodePacket(packet{
						id:    p.id,
						value: make(map[string]interface{}),
					}))
				}()
				return
			}
		}
		service.sendPacket(encodePacket(packet{
			id:    p.id,
			value: make(map[string]interface{}),
		}))

	case "dispose":
		// Tear down an active build context, failing all future calls on it
		key := request["key"].(int)
		if build := service.getActiveBuild(key); build != nil {
			build.mutex.Lock()
			ctx := build.ctx
			build.ctx = nil
			build.mutex.Unlock()

			// Release this ref count if it was held
			if ctx != nil {
				service.keepAliveWaitGroup.Add(1)
				go func() {
					defer service.keepAliveWaitGroup.Done()

					// While "Dispose()" will wait for any existing operations on the
					// context to finish, we also don't want to start any new operations.
					// That can happen because operations (e.g. "Rebuild()") are started
					// from a separate goroutine without locking the build mutex. This
					// uses a WaitGroup to handle this case. If that happened, then we'll
					// wait for it here before disposing. Once the wait is over, no more
					// operations can happen on the context because we have already
					// zeroed out the shared context pointer above.
					build.disposeWaitGroup.Done()
					build.disposeWaitGroup.Wait()
					ctx.Dispose()
					service.destroyActiveBuild(key)

					// Only return control to JavaScript once everything relating to this
					// build has gracefully ended. Otherwise JavaScript will unregister
					// everything related to this build and any calls an ongoing build
					// makes into JavaScript will cause errors, which may be observable.
					service.sendPacket(encodePacket(packet{
						id:    p.id,
						value: make(map[string]interface{}),
					}))
				}()
				return
			}
		}
		service.sendPacket(encodePacket(packet{
			id:    p.id,
			value: make(map[string]interface{}),
		}))

	case "error":
		service.keepAliveWaitGroup.Add(1)
		go func() {
			defer service.keepAliveWaitGroup.Done()

			// This just exists so that errors during JavaScript API setup get printed
			// nicely to the console. This matters if the JavaScript API setup code
			// swallows thrown errors. We still want to be able to see the error.
			flags := decodeStringArray(request["flags"].([]interface{}))
			msg := decodeMessageToPrivate(request["error"].(map[string]interface{}))
			logger.PrintMessageToStderr(flags, msg)
			service.sendPacket(encodePacket(packet{
				id:    p.id,
				value: make(map[string]interface{}),
			}))
		}()

	case "format-msgs":
		service.keepAliveWaitGroup.Add(1)
		go func() {
			defer service.keepAliveWaitGroup.Done()
			service.sendPacket(service.handleFormatMessagesRequest(p.id, request))
		}()

	case "analyze-metafile":
		service.keepAliveWaitGroup.Add(1)
		go func() {
			defer service.keepAliveWaitGroup.Done()
			service.sendPacket(service.handleAnalyzeMetafileRequest(p.id, request))
		}()

	default:
		service.sendPacket(encodePacket(packet{
			id: p.id,
			value: map[string]interface{}{
				"error": fmt.Sprintf("Invalid command: %s", command),
			},
		}))
	}
}
// encodeErrorPacket builds an encoded response packet whose payload is a
// single "error" string for the given request id.
func encodeErrorPacket(id uint32, err error) []byte {
	value := map[string]interface{}{
		"error": err.Error(),
	}
	return encodePacket(packet{id: id, value: value})
}
// handleBuildRequest services a "build" command: it decodes the request into
// api.BuildOptions, registers active-build state, and either runs a one-shot
// build or (when "context" is true) creates a long-lived build context for
// later "rebuild"/"watch"/"serve"/"dispose" commands. It returns the encoded
// response packet to send back to the host.
func (service *serviceType) handleBuildRequest(id uint32, request map[string]interface{}) []byte {
	isContext := request["context"].(bool)
	key := request["key"].(int)
	write := request["write"].(bool)
	entries := request["entries"].([]interface{})
	flags := decodeStringArray(request["flags"].([]interface{}))

	options, err := cli.ParseBuildOptions(flags)
	options.AbsWorkingDir = request["absWorkingDir"].(string)
	options.NodePaths = decodeStringArray(request["nodePaths"].([]interface{}))
	options.MangleCache, _ = request["mangleCache"].(map[string]interface{})

	// Each entry is an [outputPath, inputPath] pair
	for _, entry := range entries {
		entry := entry.([]interface{})
		key := entry[0].(string)
		value := entry[1].(string)
		options.EntryPointsAdvanced = append(options.EntryPointsAdvanced, api.EntryPoint{
			OutputPath: key,
			InputPath:  value,
		})
	}

	// Normally when "write" is true and there is no output file/directory then
	// the output is written to stdout instead. However, we're currently using
	// stdout as a communication channel and writing the build output to stdout
	// would corrupt our protocol. Special-case this to channel this back to the
	// host process and write it to stdout there.
	writeToStdout := err == nil && write && options.Outfile == "" && options.Outdir == ""

	if err != nil {
		return encodeErrorPacket(id, err)
	}

	// Optionally allow input from the stdin channel
	if stdin, ok := request["stdinContents"].([]byte); ok {
		if options.Stdin == nil {
			options.Stdin = &api.StdinOptions{}
		}
		options.Stdin.Contents = string(stdin)
		if resolveDir, ok := request["stdinResolveDir"].(string); ok {
			options.Stdin.ResolveDir = resolveDir
		}
	}

	activeBuild := service.createActiveBuild(key)
	hasOnEndCallbacks := false

	// Wire up proxies for any JavaScript plugins registered by the host
	if plugins, ok := request["plugins"]; ok {
		if plugins, hasOnEnd, err := service.convertPlugins(key, plugins, activeBuild); err != nil {
			return encodeErrorPacket(id, err)
		} else {
			options.Plugins = plugins
			hasOnEndCallbacks = hasOnEnd
		}
	}

	// Converts a build result into the response map sent back to the host
	resultToResponse := func(result api.BuildResult) map[string]interface{} {
		response := map[string]interface{}{
			"errors":   encodeMessages(result.Errors),
			"warnings": encodeMessages(result.Warnings),
		}
		if !write {
			// Pass the output files back to the caller
			response["outputFiles"] = encodeOutputFiles(result.OutputFiles)
		}
		if options.Metafile {
			response["metafile"] = result.Metafile
		}
		if options.MangleCache != nil {
			response["mangleCache"] = result.MangleCache
		}
		if writeToStdout && len(result.OutputFiles) == 1 {
			response["writeToStdout"] = result.OutputFiles[0].Contents
		}
		return response
	}

	if !writeToStdout {
		options.Write = write
	}

	if isContext {
		options.Plugins = append(options.Plugins, api.Plugin{
			Name: "onEnd",
			Setup: func(build api.PluginBuild) {
				build.OnStart(func() (api.OnStartResult, error) {
					activeBuild.mutex.Lock()
					if currentWaitGroup := activeBuild.rebuildWaitGroup; currentWaitGroup != nil && activeBuild.didGetCancel {
						// Cancel the current build now that the current build is active.
						// This catches the case where JS does "rebuild()" then "cancel()"
						// but Go's scheduler runs the original "ctx.Cancel()" goroutine
						// before it runs the "ctx.Rebuild()" goroutine.
						//
						// This adds to the rebuild wait group that other cancel operations
						// are waiting on because we also want those other cancel operations
						// to wait on this cancel operation. Go might schedule this new
						// goroutine after all currently-active rebuilds end. We don't want
						// the user's cancel operation to return to the user and for them
						// to start another rebuild before our "ctx.Cancel" below runs
						// because our cancel is supposed to cancel the current build, not
						// some independent future build.
						activeBuild.rebuildWaitGroup.Add(1)
						go func() {
							activeBuild.ctx.Cancel()

							// Lock the mutex because "sync.WaitGroup" isn't thread-safe.
							// But use the wait group that was active at the time the
							// "OnStart" callback ran instead of the latest one on the
							// active build in case this goroutine is delayed.
							activeBuild.mutex.Lock()
							currentWaitGroup.Done()
							activeBuild.mutex.Unlock()
						}()
					}
					activeBuild.mutex.Unlock()
					return api.OnStartResult{}, nil
				})

				build.OnEnd(func(result *api.BuildResult) (api.OnEndResult, error) {
					// For performance, we only send JavaScript an "onEnd" message if
					// it's needed. It's only needed if one of the following is true:
					//
					//   - There are any "onEnd" callbacks registered
					//   - JavaScript has called our "rebuild()" function
					//   - We are writing build output to JavaScript's stdout
					//
					// This is especially important if "write" is false since otherwise
					// we'd unnecessarily send the entire contents of all output files!
					//
					//   "If a tree falls in a forest and no one is
					//    around to hear it, does it make a sound?"
					//
					activeBuild.mutex.Lock()
					isWithinRebuild := activeBuild.withinRebuildCount > 0
					activeBuild.mutex.Unlock()
					if !hasOnEndCallbacks && !isWithinRebuild && !writeToStdout {
						return api.OnEndResult{}, nil
					}

					request := resultToResponse(*result)
					request["command"] = "on-end"
					request["key"] = key
					response, ok := service.sendRequest(request).(map[string]interface{})
					if !ok {
						return api.OnEndResult{}, errors.New("The service was stopped")
					}

					var errors []api.Message
					var warnings []api.Message
					if value, ok := response["errors"].([]interface{}); ok {
						errors = decodeMessages(value)
					}
					if value, ok := response["warnings"].([]interface{}); ok {
						warnings = decodeMessages(value)
					}
					return api.OnEndResult{
						Errors:   errors,
						Warnings: warnings,
					}, nil
				})
			},
		})

		ctx, err := api.Context(options)
		if err != nil {
			return encodePacket(packet{
				id: id,
				value: map[string]interface{}{
					"errors":   encodeMessages(err.Errors),
					"warnings": []interface{}{},
				},
			})
		}

		// Keep the build alive until "dispose" has been called
		activeBuild.disposeWaitGroup.Add(1)
		activeBuild.ctx = ctx

		return encodePacket(packet{
			id: id,
			value: map[string]interface{}{
				"errors":   []interface{}{},
				"warnings": []interface{}{},
			},
		})
	}

	// Not a context: run a single build and tear down the build state
	result := api.Build(options)
	response := resultToResponse(result)
	service.destroyActiveBuild(key)

	return encodePacket(packet{
		id:    id,
		value: response,
	})
}
// resolveKindToString converts an api.ResolveKind into the string form used
// by the JavaScript API. An unknown kind means a bug, so it panics.
func resolveKindToString(kind api.ResolveKind) string {
	switch kind {
	case api.ResolveEntryPoint:
		return "entry-point"

	// JS
	case api.ResolveJSImportStatement:
		return "import-statement"
	case api.ResolveJSRequireCall:
		return "require-call"
	case api.ResolveJSDynamicImport:
		return "dynamic-import"
	case api.ResolveJSRequireResolve:
		return "require-resolve"

	// CSS
	case api.ResolveCSSImportRule:
		return "import-rule"
	case api.ResolveCSSComposesFrom:
		return "composes-from"
	case api.ResolveCSSURLToken:
		return "url-token"
	}
	panic("Internal error")
}
// stringToResolveKind is the inverse of "resolveKindToString". The second
// result is false when the string is not a recognized resolve kind.
func stringToResolveKind(kind string) (api.ResolveKind, bool) {
	switch kind {
	case "entry-point":
		return api.ResolveEntryPoint, true

	// JS
	case "import-statement":
		return api.ResolveJSImportStatement, true
	case "require-call":
		return api.ResolveJSRequireCall, true
	case "dynamic-import":
		return api.ResolveJSDynamicImport, true
	case "require-resolve":
		return api.ResolveJSRequireResolve, true

	// CSS
	case "import-rule":
		return api.ResolveCSSImportRule, true
	case "composes-from":
		return api.ResolveCSSComposesFrom, true
	case "url-token":
		return api.ResolveCSSURLToken, true

	default:
		return api.ResolveNone, false
	}
}
func (service *serviceType) convertPlugins(key int, jsPlugins interface{}, activeBuild *activeBuild) ([]api.Plugin, bool, error) {
type filteredCallback struct {
filter *regexp.Regexp
pluginName string
namespace string
id int
}
var onResolveCallbacks []filteredCallback
var onLoadCallbacks []filteredCallback
hasOnEnd := false
filteredCallbacks := func(pluginName string, kind string, items []interface{}) (result []filteredCallback, err error) {
for _, item := range items {
item := item.(map[string]interface{})
filter, err := config.CompileFilterForPlugin(pluginName, kind, item["filter"].(string))
if err != nil {
return nil, err
}
result = append(result, filteredCallback{
pluginName: pluginName,
id: item["id"].(int),
filter: filter,
namespace: item["namespace"].(string),
})
}
return
}
for _, p := range jsPlugins.([]interface{}) {
p := p.(map[string]interface{})
pluginName := p["name"].(string)
if p["onEnd"].(bool) {
hasOnEnd = true
}
if callbacks, err := filteredCallbacks(pluginName, "onResolve", p["onResolve"].([]interface{})); err != nil {
return nil, false, err
} else {
onResolveCallbacks = append(onResolveCallbacks, callbacks...)
}
if callbacks, err := filteredCallbacks(pluginName, "onLoad", p["onLoad"].([]interface{})); err != nil {
return nil, false, err
} else {
onLoadCallbacks = append(onLoadCallbacks, callbacks...)
}
}
// We want to minimize the amount of IPC traffic. Instead of adding one Go
// plugin for every JavaScript plugin, we just add a single Go plugin that
// proxies the plugin queries to the list of JavaScript plugins in the host.
return []api.Plugin{{
Name: "JavaScript plugins",
Setup: func(build api.PluginBuild) {
activeBuild.mutex.Lock()
activeBuild.pluginResolve = func(id uint32, request map[string]interface{}) []byte {
path := request["path"].(string)
var options api.ResolveOptions
if value, ok := request["pluginName"]; ok {
options.PluginName = value.(string)
}
if value, ok := request["importer"]; ok {
options.Importer = value.(string)
}
if value, ok := request["namespace"]; ok {
options.Namespace = value.(string)
}
if value, ok := request["resolveDir"]; ok {
options.ResolveDir = value.(string)
}
if value, ok := request["kind"]; ok {
str := value.(string)
kind, ok := stringToResolveKind(str)
if !ok {
return encodePacket(packet{
id: id,
value: map[string]interface{}{
"error": fmt.Sprintf("Invalid kind: %q", str),
},
})
}
options.Kind = kind
}
if value, ok := request["pluginData"]; ok {
options.PluginData = value.(int)
}
if value, ok := request["with"]; ok {
value := value.(map[string]interface{})
options.With = make(map[string]string, len(value))
for k, v := range value {
options.With[k] = v.(string)
}
}
result := build.Resolve(path, options)
return encodePacket(packet{
id: id,
value: map[string]interface{}{
"errors": encodeMessages(result.Errors),
"warnings": encodeMessages(result.Warnings),
"path": result.Path,
"external": result.External,
"sideEffects": result.SideEffects,
"namespace": result.Namespace,
"suffix": result.Suffix,
"pluginData": result.PluginData,
},
})
}
activeBuild.mutex.Unlock()
// Always register "OnStart" to clear "pluginData"
build.OnStart(func() (api.OnStartResult, error) {
response, ok := service.sendRequest(map[string]interface{}{
"command": "on-start",
"key": key,
}).(map[string]interface{})
if !ok {
return api.OnStartResult{}, errors.New("The service was stopped")
}
return api.OnStartResult{
Errors: decodeMessages(response["errors"].([]interface{})),
Warnings: decodeMessages(response["warnings"].([]interface{})),
}, nil
})
// Only register "OnResolve" if needed
if len(onResolveCallbacks) > 0 {
build.OnResolve(api.OnResolveOptions{Filter: ".*"}, func(args api.OnResolveArgs) (api.OnResolveResult, error) {
var ids []interface{}
applyPath := logger.Path{Text: args.Path, Namespace: args.Namespace}
for _, item := range onResolveCallbacks {
if config.PluginAppliesToPath(applyPath, item.filter, item.namespace) {
ids = append(ids, item.id)
}
}
result := api.OnResolveResult{}
if len(ids) == 0 {
return result, nil
}
with := make(map[string]interface{}, len(args.With))
for k, v := range args.With {
with[k] = v
}
response, ok := service.sendRequest(map[string]interface{}{
"command": "on-resolve",
"key": key,
"ids": ids,
"path": args.Path,
"importer": args.Importer,
"namespace": args.Namespace,
"resolveDir": args.ResolveDir,
"kind": resolveKindToString(args.Kind),
"pluginData": args.PluginData,
"with": with,
}).(map[string]interface{})
if !ok {
return result, errors.New("The service was stopped")
}
if value, ok := response["id"]; ok {
id := value.(int)
for _, item := range onResolveCallbacks {
if item.id == id {
result.PluginName = item.pluginName
break
}
}
}
if value, ok := response["error"]; ok {
return result, errors.New(value.(string))
}
if value, ok := response["pluginName"]; ok {
result.PluginName = value.(string)
}
if value, ok := response["path"]; ok {
result.Path = value.(string)
}
if value, ok := response["namespace"]; ok {
result.Namespace = value.(string)
}
if value, ok := response["suffix"]; ok {
result.Suffix = value.(string)
}
if value, ok := response["external"]; ok {
result.External = value.(bool)
}
if value, ok := response["sideEffects"]; ok {
if value.(bool) {
result.SideEffects = api.SideEffectsTrue
} else {
result.SideEffects = api.SideEffectsFalse
}
}
if value, ok := response["pluginData"]; ok {
result.PluginData = value.(int)
}
if value, ok := response["errors"]; ok {
result.Errors = decodeMessages(value.([]interface{}))
}
if value, ok := response["warnings"]; ok {
result.Warnings = decodeMessages(value.([]interface{}))
}
if value, ok := response["watchFiles"]; ok {
result.WatchFiles = decodeStringArray(value.([]interface{}))
}
if value, ok := response["watchDirs"]; ok {
result.WatchDirs = decodeStringArray(value.([]interface{}))
}
return result, nil
})
}
// Only register "OnLoad" if needed
if len(onLoadCallbacks) > 0 {
build.OnLoad(api.OnLoadOptions{Filter: ".*"}, func(args api.OnLoadArgs) (api.OnLoadResult, error) {
var ids []interface{}
applyPath := logger.Path{Text: args.Path, Namespace: args.Namespace}
for _, item := range onLoadCallbacks {
if config.PluginAppliesToPath(applyPath, item.filter, item.namespace) {
ids = append(ids, item.id)
}
}
result := api.OnLoadResult{}
if len(ids) == 0 {
return result, nil
}
with := make(map[string]interface{}, len(args.With))
for k, v := range args.With {
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/cmd/esbuild/stdio_protocol.go | cmd/esbuild/stdio_protocol.go | // The JavaScript API communicates with the Go child process over stdin/stdout
// using this protocol. It's a very simple binary protocol that uses primitives
// and nested arrays and maps. It's basically JSON with UTF-8 encoding and an
// additional byte array primitive. You must send a response after receiving a
// request because the other end is blocking on the response coming back.
package main
import (
"encoding/binary"
"sort"
)
// readUint32 decodes a little-endian uint32 from the front of bytes.
// On success it returns the value, the remaining bytes, and true; if
// fewer than 4 bytes are available it returns 0, the input unchanged,
// and false.
func readUint32(bytes []byte) (value uint32, leftOver []byte, ok bool) {
	if len(bytes) < 4 {
		return 0, bytes, false
	}
	return binary.LittleEndian.Uint32(bytes), bytes[4:], true
}
// writeUint32 appends the 4-byte little-endian encoding of value to
// bytes and returns the extended slice.
func writeUint32(bytes []byte, value uint32) []byte {
	return append(bytes, byte(value), byte(value>>8), byte(value>>16), byte(value>>24))
}
// readLengthPrefixedSlice decodes a uint32 length prefix followed by
// that many payload bytes. On success it returns the payload, the
// remaining bytes, and true; on truncated input it returns an empty
// slice, the original input, and false.
func readLengthPrefixedSlice(bytes []byte) (slice []byte, leftOver []byte, ok bool) {
	length, rest, ok := readUint32(bytes)
	if !ok || uint(len(rest)) < uint(length) {
		return []byte{}, bytes, false
	}
	return rest[:length], rest[length:], true
}
// packet is one message in the binary stdio protocol. On the wire the
// low bit of the id marks request (0) vs response (1); here the id is
// stored already shifted right by one bit (see encodePacket and
// decodePacket).
type packet struct {
	value     interface{} // nil, bool, int, string, []byte, []interface{}, or map[string]interface{}
	id        uint32      // packet id without the request/response bit
	isRequest bool        // true for requests, false for responses
}
// encodePacket serializes p into the wire format: a uint32 total-length
// prefix (excluding the prefix itself), a uint32 id whose low bit is 0
// for requests and 1 for responses, then one tagged value. It panics on
// value types that are not part of the protocol.
func encodePacket(p packet) []byte {
	var visit func(interface{})
	var bytes []byte

	// visit appends the tagged encoding of a single value: a one-byte
	// type tag (0=nil, 1=bool, 2=int, 3=string, 4=[]byte, 5=array,
	// 6=map) followed by the payload.
	visit = func(value interface{}) {
		switch v := value.(type) {
		case nil:
			bytes = append(bytes, 0)
		case bool:
			n := uint8(0)
			if v {
				n = 1
			}
			bytes = append(bytes, 1, n)
		case int:
			bytes = append(bytes, 2)
			bytes = writeUint32(bytes, uint32(v))
		case string:
			bytes = append(bytes, 3)
			bytes = writeUint32(bytes, uint32(len(v)))
			bytes = append(bytes, v...)
		case []byte:
			bytes = append(bytes, 4)
			bytes = writeUint32(bytes, uint32(len(v)))
			bytes = append(bytes, v...)
		case []interface{}:
			bytes = append(bytes, 5)
			bytes = writeUint32(bytes, uint32(len(v)))
			for _, item := range v {
				visit(item)
			}
		case map[string]interface{}:
			// Sort keys for determinism
			keys := make([]string, 0, len(v))
			for k := range v {
				keys = append(keys, k)
			}
			sort.Strings(keys)
			bytes = append(bytes, 6)
			bytes = writeUint32(bytes, uint32(len(keys)))
			for _, k := range keys {
				// Map entries are a length-prefixed key followed by a
				// tagged value
				bytes = writeUint32(bytes, uint32(len(k)))
				bytes = append(bytes, k...)
				visit(v[k])
			}
		default:
			panic("Invalid packet")
		}
	}

	bytes = writeUint32(bytes, 0) // Reserve space for the length
	if p.isRequest {
		bytes = writeUint32(bytes, p.id<<1)
	} else {
		bytes = writeUint32(bytes, (p.id<<1)|1)
	}
	visit(p.value)
	// Patch the length in: appending into bytes[:0] reuses the same
	// backing array, so this overwrites the 4 placeholder bytes at the
	// front in place without growing the slice.
	writeUint32(bytes[:0], uint32(len(bytes)-4))
	return bytes
}
// decodePacket parses one protocol message body (the bytes after the
// outer length prefix): a uint32 id whose low bit marks a response,
// followed by a single tagged value. It returns false when the input
// is truncated mid-value or has trailing bytes. The input is expected
// to be well-formed data from the host process: reading a type tag or
// bool byte from an empty slice will panic with an index error, and an
// unknown tag panics explicitly.
func decodePacket(bytes []byte) (packet, bool) {
	var visit func() (interface{}, bool)

	// visit decodes one tagged value from the front of "bytes",
	// consuming input by reassigning the captured slice.
	visit = func() (interface{}, bool) {
		kind := bytes[0]
		bytes = bytes[1:]
		switch kind {
		case 0: // nil
			return nil, true
		case 1: // bool
			value := bytes[0]
			bytes = bytes[1:]
			return value != 0, true
		case 2: // int
			value, next, ok := readUint32(bytes)
			if !ok {
				return nil, false
			}
			bytes = next
			return int(value), true
		case 3: // string
			value, next, ok := readLengthPrefixedSlice(bytes)
			if !ok {
				return nil, false
			}
			bytes = next
			return string(value), true
		case 4: // []byte
			value, next, ok := readLengthPrefixedSlice(bytes)
			if !ok {
				return nil, false
			}
			bytes = next
			return value, true
		case 5: // []interface{}
			count, next, ok := readUint32(bytes)
			if !ok {
				return nil, false
			}
			bytes = next
			value := make([]interface{}, count)
			for i := 0; i < int(count); i++ {
				item, ok := visit()
				if !ok {
					return nil, false
				}
				value[i] = item
			}
			return value, true
		case 6: // map[string]interface{}
			count, next, ok := readUint32(bytes)
			if !ok {
				return nil, false
			}
			bytes = next
			value := make(map[string]interface{}, count)
			for i := 0; i < int(count); i++ {
				// Each entry is a length-prefixed key then a tagged value
				key, next, ok := readLengthPrefixedSlice(bytes)
				if !ok {
					return nil, false
				}
				bytes = next
				item, ok := visit()
				if !ok {
					return nil, false
				}
				value[string(key)] = item
			}
			return value, true
		default:
			panic("Invalid packet")
		}
	}

	id, bytes, ok := readUint32(bytes)
	if !ok {
		return packet{}, false
	}

	// The low bit distinguishes requests (0) from responses (1); drop
	// it to recover the stored id.
	isRequest := (id & 1) == 0
	id >>= 1

	value, ok := visit()
	if !ok {
		return packet{}, false
	}

	// Trailing bytes mean the packet is malformed
	if len(bytes) != 0 {
		return packet{}, false
	}
	return packet{id: id, isRequest: isRequest, value: value}, true
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/cmd/esbuild/main_other.go | cmd/esbuild/main_other.go | //go:build !js || !wasm
// +build !js !wasm
package main
import (
"fmt"
"os"
"runtime/pprof"
"runtime/trace"
"github.com/evanw/esbuild/internal/logger"
)
// createTraceFile starts writing a runtime execution trace to traceFile.
// On success it returns a cleanup function that stops the trace and
// closes the file. On failure it prints an error to stderr and returns
// nil (the caller treats nil as "abort").
func createTraceFile(osArgs []string, traceFile string) func() {
	f, err := os.Create(traceFile)
	if err != nil {
		logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
			"Failed to create trace file: %s", err.Error()))
		return nil
	}

	// Check the error from trace.Start. Previously it was ignored, so
	// a failed start would silently produce an empty trace file and the
	// cleanup would call trace.Stop without an active trace.
	if err := trace.Start(f); err != nil {
		logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
			"Failed to start trace: %s", err.Error()))
		f.Close()
		return nil
	}

	return func() {
		trace.Stop()
		f.Close()
	}
}
// createHeapFile opens heapFile for writing and returns a cleanup
// function that writes a heap profile into it and closes it. If the
// file cannot be created it prints an error to stderr and returns nil.
func createHeapFile(osArgs []string, heapFile string) func() {
	file, err := os.Create(heapFile)
	if err != nil {
		logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
			"Failed to create heap file: %s", err.Error()))
		return nil
	}
	return func() {
		defer file.Close()
		if err := pprof.WriteHeapProfile(file); err != nil {
			logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
				"Failed to write heap profile: %s", err.Error()))
		}
	}
}
// createCpuprofileFile starts CPU profiling into cpuprofileFile. On
// success it returns a cleanup function that stops the profiler and
// closes the file. On failure it prints an error to stderr and returns
// nil (the caller treats nil as "abort").
func createCpuprofileFile(osArgs []string, cpuprofileFile string) func() {
	f, err := os.Create(cpuprofileFile)
	if err != nil {
		logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
			"Failed to create cpuprofile file: %s", err.Error()))
		return nil
	}

	// Check the error from StartCPUProfile (e.g. profiling already
	// enabled). Previously it was ignored, which would leave an empty
	// profile file and a cleanup that stops a profiler that never ran.
	if err := pprof.StartCPUProfile(f); err != nil {
		logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
			"Failed to start cpuprofile: %s", err.Error()))
		f.Close()
		return nil
	}

	return func() {
		pprof.StopCPUProfile()
		f.Close()
	}
}
// isServeUnsupported reports whether the development server is
// unavailable. It always returns false in native (non-WebAssembly)
// builds; the WebAssembly build provides a version that returns true.
func isServeUnsupported() bool {
	return false
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/cmd/esbuild/main_wasm.go | cmd/esbuild/main_wasm.go | //go:build js && wasm
// +build js,wasm
package main
import (
"github.com/evanw/esbuild/internal/logger"
)
// Remove this code from the WebAssembly binary to reduce size. This only removes 0.4mb of stuff.
// createTraceFile is the WebAssembly stub for "--trace": it prints an
// error to stderr and returns nil to abort.
func createTraceFile(osArgs []string, traceFile string) func() {
	logger.PrintErrorToStderr(osArgs, "The \"--trace\" flag is not supported when using WebAssembly")
	return nil
}
// createHeapFile is the WebAssembly stub for "--heap": it prints an
// error to stderr and returns nil to abort.
func createHeapFile(osArgs []string, heapFile string) func() {
	logger.PrintErrorToStderr(osArgs, "The \"--heap\" flag is not supported when using WebAssembly")
	return nil
}
// createCpuprofileFile is the WebAssembly stub for "--cpuprofile": it
// prints an error to stderr and returns nil to abort.
func createCpuprofileFile(osArgs []string, cpuprofileFile string) func() {
	logger.PrintErrorToStderr(osArgs, "The \"--cpuprofile\" flag is not supported when using WebAssembly")
	return nil
}
// isServeUnsupported reports that the development server is not
// available when running under WebAssembly.
func isServeUnsupported() bool {
	return true
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/cmd/esbuild/version.go | cmd/esbuild/version.go | package main
const esbuildVersion = "0.27.2"
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/cmd/esbuild/main.go | cmd/esbuild/main.go | package main
import (
"fmt"
"io"
"os"
"runtime/debug"
"strings"
"time"
"github.com/evanw/esbuild/internal/api_helpers"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/pkg/cli"
)
// helpText renders the CLI usage text with the given terminal colors.
// Colors are dropped entirely when the NO_COLOR environment variable is
// set, regardless of what the caller passed in.
var helpText = func(colors logger.Colors) string {
	// Read "NO_COLOR" from the environment. This is a convention that some
	// software follows. See https://no-color.org/ for more information.
	if _, ok := os.LookupEnv("NO_COLOR"); ok {
		colors = logger.Colors{}
	}

	return `
` + colors.Bold + `Usage:` + colors.Reset + `
esbuild [options] [entry points]
` + colors.Bold + `Documentation:` + colors.Reset + `
` + colors.Underline + `https://esbuild.github.io/` + colors.Reset + `
` + colors.Bold + `Repository:` + colors.Reset + `
` + colors.Underline + `https://github.com/evanw/esbuild` + colors.Reset + `
` + colors.Bold + `Simple options:` + colors.Reset + `
--bundle Bundle all dependencies into the output files
--define:K=V Substitute K with V while parsing
--external:M Exclude module M from the bundle (can use * wildcards)
--format=... Output format (iife | cjs | esm, no default when not
bundling, otherwise default is iife when platform
is browser and cjs when platform is node)
--loader:X=L Use loader L to load file extension X, where L is
one of: base64 | binary | copy | css | dataurl |
empty | file | global-css | js | json | jsx |
local-css | text | ts | tsx
--minify Minify the output (sets all --minify-* flags)
--outdir=... The output directory (for multiple entry points)
--outfile=... The output file (for one entry point)
--packages=... Set to "external" to avoid bundling any package
--platform=... Platform target (browser | node | neutral,
default browser)
--serve=... Start a local HTTP server on this host:port for outputs
--sourcemap Emit a source map
--splitting Enable code splitting (currently only for esm)
--target=... Environment target (e.g. es2017, chrome58, firefox57,
safari11, edge16, node10, ie9, opera45, default esnext)
--watch Watch mode: rebuild on file system changes (stops when
stdin is closed, use "--watch=forever" to ignore stdin)
` + colors.Bold + `Advanced options:` + colors.Reset + `
--abs-paths=... Emit absolute instead of relative paths in these
situations (code | log | metafile)
--allow-overwrite Allow output files to overwrite input files
--analyze Print a report about the contents of the bundle
(use "--analyze=verbose" for a detailed report)
--asset-names=... Path template to use for "file" loader files
(default "[name]-[hash]")
--banner:T=... Text to be prepended to each output file of type T
where T is one of: css | js
--certfile=... Certificate for serving HTTPS (see also "--keyfile")
--charset=utf8 Do not escape UTF-8 code points
--chunk-names=... Path template to use for code splitting chunks
(default "[name]-[hash]")
--color=... Force use of color terminal escapes (true | false)
--cors-origin=... Allow cross-origin requests from this origin
--drop:... Remove certain constructs (console | debugger)
--drop-labels=... Remove labeled statements with these label names
--entry-names=... Path template to use for entry point output paths
(default "[dir]/[name]", can also use "[hash]")
--footer:T=... Text to be appended to each output file of type T
where T is one of: css | js
--global-name=... The name of the global for the IIFE format
--ignore-annotations Enable this to work with packages that have
incorrect tree-shaking annotations
--inject:F Import the file F into all input files and
automatically replace matching globals with imports
--jsx-dev Use React's automatic runtime in development mode
--jsx-factory=... What to use for JSX instead of React.createElement
--jsx-fragment=... What to use for JSX instead of React.Fragment
--jsx-import-source=... Override the package name for the automatic runtime
(default "react")
--jsx-side-effects Do not remove unused JSX expressions
--jsx=... Set to "automatic" to use React's automatic runtime
or to "preserve" to disable transforming JSX to JS
--keep-names Preserve "name" on functions and classes
--keyfile=... Key for serving HTTPS (see also "--certfile")
--legal-comments=... Where to place legal comments (none | inline |
eof | linked | external, default eof when bundling
and inline otherwise)
--line-limit=... Lines longer than this will be wrap onto a new line
--log-level=... Disable logging (verbose | debug | info | warning |
error | silent, default info)
--log-limit=... Maximum message count or 0 to disable (default 6)
--log-override:X=Y Use log level Y for log messages with identifier X
--main-fields=... Override the main file order in package.json
(default "browser,module,main" when platform is
browser and "main,module" when platform is node)
--mangle-cache=... Save "mangle props" decisions to a JSON file
--mangle-props=... Rename all properties matching a regular expression
--mangle-quoted=... Enable renaming of quoted properties (true | false)
--metafile=... Write metadata about the build to a JSON file
(see also: ` + colors.Underline + `https://esbuild.github.io/analyze/` + colors.Reset + `)
--minify-whitespace Remove whitespace in output files
--minify-identifiers Shorten identifiers in output files
--minify-syntax Use equivalent but shorter syntax in output files
--out-extension:.js=.mjs Use a custom output extension instead of ".js"
--outbase=... The base path used to determine entry point output
paths (for multiple entry points)
--preserve-symlinks Disable symlink resolution for module lookup
--public-path=... Set the base URL for the "file" loader
--pure:N Mark the name N as a pure function for tree shaking
--reserve-props=... Do not mangle these properties
--resolve-extensions=... A comma-separated list of implicit extensions
(default ".tsx,.ts,.jsx,.js,.css,.json")
--serve-fallback=... Serve this HTML page when the request doesn't match
--servedir=... What to serve in addition to generated output files
--source-root=... Sets the "sourceRoot" field in generated source maps
--sourcefile=... Set the source file for the source map (for stdin)
--sourcemap=external Do not link to the source map with a comment
--sourcemap=inline Emit the source map with an inline data URL
--sources-content=false Omit "sourcesContent" in generated source maps
--supported:F=... Consider syntax F to be supported (true | false)
--tree-shaking=... Force tree shaking on or off (false | true)
--tsconfig=... Use this tsconfig.json file instead of other ones
--tsconfig-raw=... Override all tsconfig.json files with this string
--version Print the current version (` + esbuildVersion + `) and exit
--watch-delay=... Wait before watch mode rebuilds (in milliseconds)
` + colors.Bold + `Examples:` + colors.Reset + `
` + colors.Dim + `# Produces dist/entry_point.js and dist/entry_point.js.map` + colors.Reset + `
esbuild --bundle entry_point.js --outdir=dist --minify --sourcemap
` + colors.Dim + `# Allow JSX syntax in .js files` + colors.Reset + `
esbuild --bundle entry_point.js --outfile=out.js --loader:.js=jsx
` + colors.Dim + `# Substitute the identifier RELEASE for the literal true` + colors.Reset + `
esbuild example.js --outfile=out.js --define:RELEASE=true
` + colors.Dim + `# Provide input via stdin, get output via stdout` + colors.Reset + `
esbuild --minify --loader=ts < input.ts > output.js
` + colors.Dim + `# Automatically rebuild when input files are changed` + colors.Reset + `
esbuild app.ts --bundle --watch
` + colors.Dim + `# Start a local HTTP server for everything in "www"` + colors.Reset + `
esbuild app.ts --bundle --servedir=www --outdir=www/js
`
}
// main is the esbuild CLI entry point. It scans the argument list for
// meta flags (help, version, profiling, service mode, watch/serve),
// optionally runs as a long-lived service over stdin/stdout, sets up
// trace/heap/CPU profiling if requested, and otherwise delegates the
// build to cli.Run, exiting with its status code.
func main() {
	logger.API = logger.CLIAPI

	osArgs := os.Args[1:]
	heapFile := ""
	traceFile := ""
	cpuprofileFile := ""
	isRunningService := false
	sendPings := false
	isWatch := false
	isWatchForever := false
	isServe := false

	// Do an initial scan over the argument list
	argsEnd := 0
	for _, arg := range osArgs {
		switch {
		// Show help if a common help flag is provided
		case arg == "-h", arg == "-help", arg == "--help", arg == "/?":
			// NOTE(review): this passes os.Args (including the program
			// name) while the no-argument help path below passes osArgs
			// — confirm whether the difference is intentional.
			logger.PrintText(os.Stdout, logger.LevelSilent, os.Args, helpText)
			os.Exit(0)

		// Special-case the version flag here
		case arg == "--version":
			fmt.Printf("%s\n", esbuildVersion)
			os.Exit(0)

		case strings.HasPrefix(arg, "--heap="):
			heapFile = arg[len("--heap="):]

		case strings.HasPrefix(arg, "--trace="):
			traceFile = arg[len("--trace="):]

		case strings.HasPrefix(arg, "--timing"):
			// This is a hidden flag because it's only intended for debugging esbuild
			// itself. The output is not documented and not stable.
			api_helpers.UseTimer = true

		case strings.HasPrefix(arg, "--cpuprofile="):
			cpuprofileFile = arg[len("--cpuprofile="):]

		// This flag turns the process into a long-running service that uses
		// message passing with the host process over stdin/stdout
		case strings.HasPrefix(arg, "--service="):
			hostVersion := arg[len("--service="):]
			isRunningService = true

			// Validate the host's version number to make sure esbuild was installed
			// correctly. This check was added because some people have reported
			// errors that appear to indicate an incorrect installation.
			if hostVersion != esbuildVersion {
				logger.PrintErrorToStderr(osArgs,
					fmt.Sprintf("Cannot start service: Host version %q does not match binary version %q",
						hostVersion, esbuildVersion))
				os.Exit(1)
			}

		case strings.HasPrefix(arg, "--ping"):
			sendPings = true

		default:
			// Some people want to be able to run esbuild's watch mode such that it
			// never exits. However, esbuild ends watch mode when stdin is closed
			// because stdin is always closed when the parent process terminates, so
			// ending watch mode when stdin is closed is a good way to avoid
			// accidentally creating esbuild processes that live forever.
			//
			// Explicitly allow processes that live forever with "--watch=forever".
			// This may be a reasonable thing to do in a short-lived VM where all
			// processes in the VM are only started once and then the VM is killed
			// when the processes are no longer needed.
			if arg == "--watch" || arg == "--watch=true" {
				isWatch = true
			} else if arg == "--watch=forever" {
				arg = "--watch"
				isWatch = true
				isWatchForever = true
			} else if arg == "--serve" ||
				strings.HasPrefix(arg, "--serve=") ||
				strings.HasPrefix(arg, "--servedir=") ||
				strings.HasPrefix(arg, "--serve-fallback=") {
				isServe = true
			}

			// Strip any arguments that were handled above
			osArgs[argsEnd] = arg
			argsEnd++
		}
	}
	osArgs = osArgs[:argsEnd]

	// Run in service mode if requested
	if isRunningService {
		runService(sendPings)
		return
	}

	// Print help text when there are no arguments
	isStdinTTY := logger.GetTerminalInfo(os.Stdin).IsTTY
	if len(osArgs) == 0 && isStdinTTY {
		logger.PrintText(os.Stdout, logger.LevelSilent, osArgs, helpText)
		os.Exit(0)
	}

	// Capture the defer statements below so the "done" message comes last.
	// exitCode defaults to 1 so any early return reports failure.
	exitCode := 1
	func() {
		// To view a CPU trace, use "go tool trace [file]". Note that the trace
		// viewer doesn't work under Windows Subsystem for Linux for some reason.
		if traceFile != "" {
			if done := createTraceFile(osArgs, traceFile); done == nil {
				return
			} else {
				defer done()
			}
		}

		// To view a heap trace, use "go tool pprof [file]" and type "top". You can
		// also drop it into https://speedscope.app and use the "left heavy" or
		// "sandwich" view modes.
		if heapFile != "" {
			if done := createHeapFile(osArgs, heapFile); done == nil {
				return
			} else {
				defer done()
			}
		}

		// To view a CPU profile, drop the file into https://speedscope.app.
		// Note: Running the CPU profiler doesn't work under Windows subsystem for
		// Linux. The profiler has to be built for native Windows and run using the
		// command prompt instead.
		if cpuprofileFile != "" {
			if done := createCpuprofileFile(osArgs, cpuprofileFile); done == nil {
				return
			} else {
				defer done()
			}
		}

		if cpuprofileFile != "" {
			// The CPU profiler in Go only runs at 100 Hz, which is far too slow to
			// return useful information for esbuild, since it's so fast. Let's keep
			// running for 30 seconds straight, which should give us 3,000 samples.
			seconds := 30.0
			start := time.Now()
			for time.Since(start).Seconds() < seconds {
				exitCode = cli.Run(osArgs)
			}
		} else {
			if !isWatch && !isServe {
				// If this is not a long-running process and there is at most a single
				// entry point, then disable the GC since we're just going to allocate
				// a bunch of memory and then exit anyway. This speedup is not
				// insignificant. We don't do this when there are multiple entry points
				// since otherwise esbuild could unnecessarily use much more memory
				// than it might otherwise need to process many entry points.
				nonFlagCount := 0
				for _, arg := range osArgs {
					if !strings.HasPrefix(arg, "-") {
						nonFlagCount++
					}
				}
				if nonFlagCount <= 1 {
					debug.SetGCPercent(-1)
				}
			} else if isServe && isServeUnsupported() {
				// The development server isn't supported on WebAssembly, so we will
				// immediately call "os.Exit(1)" below, which will call "process.exit(1)"
				// in node. However, node has a bug/feature where any pending calls to
				// "fs.read(process.stdin.fd)" hold up "process.exit()" without seemingly
				// any way to stop this from happening. So to avoid this bug/feature,
				// we explicitly avoid listening to stdin in this case (when we know
				// that we are about to exit due to an invalid flag).
			} else if !isStdinTTY && !isWatchForever {
				// If stdin isn't a TTY, watch stdin and abort in case it is closed.
				// This is necessary when the esbuild binary executable is invoked via
				// the Erlang VM, which doesn't provide a way to exit a child process.
				// See: https://github.com/brunch/brunch/issues/920.
				//
				// We don't do this when stdin is a TTY because that interferes with
				// the Unix background job system. If we read from stdin then Ctrl+Z
				// to move the process to the background will incorrectly cause the
				// job to stop. See: https://github.com/brunch/brunch/issues/998.
				go func() {
					// This just discards information from stdin because we don't use
					// it and we can avoid unnecessarily allocating space for it
					buffer := make([]byte, 512)
					for {
						_, err := os.Stdin.Read(buffer)
						if err != nil {
							if options := logger.OutputOptionsForArgs(osArgs); options.LogLevel <= logger.LevelInfo {
								if isWatch {
									// Mention why watch mode was stopped to reduce confusion, and
									// call out "--watch=forever" to get the alternative behavior
									logger.PrintTextWithColor(os.Stderr, options.Color, func(colors logger.Colors) string {
										return fmt.Sprintf("%s[watch] stopped automatically because stdin was closed (use \"--watch=forever\" to keep watching even after stdin is closed)%s\n", colors.Dim, colors.Reset)
									})
								} else if isServe {
									logger.PrintTextWithColor(os.Stderr, options.Color, func(colors logger.Colors) string {
										return fmt.Sprintf("%s[serve] stopped automatically because stdin was closed (keep stdin open to continue serving)%s\n", colors.Dim, colors.Reset)
									})
								}
							}

							// Only exit cleanly if stdin was closed cleanly
							if err == io.EOF {
								os.Exit(0)
							} else {
								os.Exit(1)
							}
						}

						// Some people attempt to keep esbuild's watch mode open by piping
						// an infinite stream of data to stdin such as with "< /dev/zero".
						// This will make esbuild spin at 100% CPU. To avoid this, put a
						// small delay after we read some data from stdin.
						time.Sleep(4 * time.Millisecond)
					}
				}()
			}
			exitCode = cli.Run(osArgs)
		}
	}()
	os.Exit(exitCode)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_glob_test.go | internal/bundler_tests/bundler_glob_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// glob_suite groups the snapshot tests for glob-style dynamic
// require/import paths.
var glob_suite = suite{
	name: "glob",
}
// TestGlobBasicNoBundle exercises dynamic require/import paths (string
// concatenation and template literals) with ModeConvertFormat, i.e.
// format conversion only, no bundling.
func TestGlobBasicNoBundle(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/' + ab),
import: import('./src/' + ab),
},
template: {
require: require(` + "`./src/${ab}`" + `),
import: import(` + "`./src/${ab}`" + `),
},
})
`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeConvertFormat,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestGlobBasicNoSplitting bundles dynamic require/import of "./src/"
// plus a variable, with two matching JS files and code splitting off.
func TestGlobBasicNoSplitting(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/' + ab),
import: import('./src/' + ab),
},
template: {
require: require(` + "`./src/${ab}`" + `),
import: import(` + "`./src/${ab}`" + `),
},
})
`,
			"/src/a.js": `module.exports = 'a'`,
			"/src/b.js": `module.exports = 'b'`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestTSGlobBasicNoSplitting is the TypeScript variant of
// TestGlobBasicNoSplitting (.ts entry and matched files).
func TestTSGlobBasicNoSplitting(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
const ab = Math.random() < 0.5 ? 'a.ts' : 'b.ts'
console.log({
concat: {
require: require('./src/' + ab),
import: import('./src/' + ab),
},
template: {
require: require(` + "`./src/${ab}`" + `),
import: import(` + "`./src/${ab}`" + `),
},
})
`,
			"/src/a.ts": `module.exports = 'a'`,
			"/src/b.ts": `module.exports = 'b'`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestGlobBasicSplitting bundles dynamic require/import with code
// splitting enabled and an output directory instead of a single file.
func TestGlobBasicSplitting(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/' + ab),
import: import('./src/' + ab),
},
template: {
require: require(` + "`./src/${ab}`" + `),
import: import(` + "`./src/${ab}`" + `),
},
})
`,
			"/src/a.js": `module.exports = 'a'`,
			"/src/b.js": `module.exports = 'b'`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputDir:  "/out",
			CodeSplitting: true,
		},
	})
}
// TestTSGlobBasicSplitting is the TypeScript variant of
// TestGlobBasicSplitting.
func TestTSGlobBasicSplitting(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
const ab = Math.random() < 0.5 ? 'a.ts' : 'b.ts'
console.log({
concat: {
require: require('./src/' + ab),
import: import('./src/' + ab),
},
template: {
require: require(` + "`./src/${ab}`" + `),
import: import(` + "`./src/${ab}`" + `),
},
})
`,
			"/src/a.ts": `module.exports = 'a'`,
			"/src/b.ts": `module.exports = 'b'`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputDir:  "/out",
			CodeSplitting: true,
		},
	})
}
// TestGlobDirDoesNotExist expects resolve errors when the directory
// referenced by the dynamic import pattern does not exist.
func TestGlobDirDoesNotExist(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/' + ab),
import: import('./src/' + ab),
},
template: {
require: require(` + "`./src/${ab}`" + `),
import: import(` + "`./src/${ab}`" + `),
},
})
`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputDir:  "/out",
			CodeSplitting: true,
		},
		expectedScanLog: `entry.js: ERROR: Could not resolve require("./src/**/*")
entry.js: ERROR: Could not resolve import("./src/**/*")
`,
	})
}
// TestGlobNoMatches expects warnings (not errors) when the directory
// exists but the glob pattern matches no files.
func TestGlobNoMatches(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/' + ab + '.json'),
import: import('./src/' + ab + '.json'),
},
template: {
require: require(` + "`./src/${ab}.json`" + `),
import: import(` + "`./src/${ab}.json`" + `),
},
})
`,
			"/src/dummy.js": ``,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputDir:  "/out",
			CodeSplitting: true,
		},
		expectedScanLog: `entry.js: WARNING: The glob pattern require("./src/**/*.json") did not match any files
entry.js: WARNING: The glob pattern import("./src/**/*.json") did not match any files
`,
	})
}
// TestGlobEntryPointAbsPath uses a glob pattern as an absolute-path
// entry point.
func TestGlobEntryPointAbsPath(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
works = true
`,
		},
		entryPaths: []string{"/Users/user/project/**/*.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
	})
}
// TestGlobWildcardSlash covers a pattern whose wildcard can span
// directory separators ("./src/" + ab + ".js"): nested files match,
// while ".js.map" files must be excluded.
func TestGlobWildcardSlash(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/' + ab + '.js'),
import: import('./src/' + ab + '.js'),
},
template: {
require: require(` + "`./src/${ab}.js`" + `),
import: import(` + "`./src/${ab}.js`" + `),
},
})
`,
			"/src/file-a.js": `module.exports = 'a'`,
			"/src/file-b.js": `module.exports = 'b'`,
			"/src/file-a.js.map": `DO NOT BUNDLE`,
			"/src/file-b.js.map": `DO NOT BUNDLE`,
			"/src/nested/dir/file-a.js": `module.exports = 'a'`,
			"/src/nested/dir/file-b.js": `module.exports = 'b'`,
			"/src/nested/dir/file-a.js.map": `DO NOT BUNDLE`,
			"/src/nested/dir/file-b.js.map": `DO NOT BUNDLE`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestGlobWildcardNoSlash covers a pattern with a file-name prefix
// ("./src/file-" + ab + ".js"): only the top-level matching files are
// bundled; nested files and ".js.map" files must be excluded.
func TestGlobWildcardNoSlash(t *testing.T) {
	glob_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
const ab = Math.random() < 0.5 ? 'a.js' : 'b.js'
console.log({
concat: {
require: require('./src/file-' + ab + '.js'),
import: import('./src/file-' + ab + '.js'),
},
template: {
require: require(` + "`./src/file-${ab}.js`" + `),
import: import(` + "`./src/file-${ab}.js`" + `),
},
})
`,
			"/src/file-a.js": `module.exports = 'a'`,
			"/src/file-b.js": `module.exports = 'b'`,
			"/src/file-a.js.map": `DO NOT BUNDLE`,
			"/src/file-b.js.map": `DO NOT BUNDLE`,
			"/src/nested/dir/file-a.js": `DO NOT BUNDLE`,
			"/src/nested/dir/file-b.js": `DO NOT BUNDLE`,
			"/src/nested/dir/file-a.js.map": `DO NOT BUNDLE`,
			"/src/nested/dir/file-b.js.map": `DO NOT BUNDLE`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_importstar_ts_test.go | internal/bundler_tests/bundler_importstar_ts_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// importstar_ts_suite groups the snapshot tests for "import * as ns"
// handling in TypeScript files.
var importstar_ts_suite = suite{
	name: "importstar_ts",
}
// TestTSImportStarUnused: the namespace import is never referenced.
func TestTSImportStarUnused(t *testing.T) {
	importstar_ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
			"/foo.ts": `
export const foo = 123
`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestTSImportStarCapture: the namespace object itself is referenced
// ("ns"), so it must be materialized.
func TestTSImportStarCapture(t *testing.T) {
	importstar_ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
			"/foo.ts": `
export const foo = 123
`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestTSImportStarNoCapture: only properties of the namespace are
// accessed ("ns.foo"), never the object itself.
func TestTSImportStarNoCapture(t *testing.T) {
	importstar_ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
			"/foo.ts": `
export const foo = 123
`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestTSImportStarExportImportStarUnused: a namespace re-exported via
// "import * as ns ... export {ns}" that the entry never uses.
func TestTSImportStarExportImportStarUnused(t *testing.T) {
	importstar_ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
import {ns} from './bar'
let foo = 234
console.log(foo)
`,
			"/foo.ts": `
export const foo = 123
`,
			"/bar.ts": `
import * as ns from './foo'
export {ns}
`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestTSImportStarExportImportStarNoCapture: the re-exported namespace
// is only accessed via its properties ("ns.foo").
func TestTSImportStarExportImportStarNoCapture(t *testing.T) {
	importstar_ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
import {ns} from './bar'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
			"/foo.ts": `
export const foo = 123
`,
			"/bar.ts": `
import * as ns from './foo'
export {ns}
`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestTSImportStarExportImportStarCapture: namespace re-exported via an
// intermediate file; entry uses the namespace object itself.
func TestTSImportStarExportImportStarCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import {ns} from './bar'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
import * as ns from './foo'
export {ns}
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarExportStarAsUnused: namespace created via "export * as ns"
// in the intermediate file; entry never uses it.
func TestTSImportStarExportStarAsUnused(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import {ns} from './bar'
let foo = 234
console.log(foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarExportStarAsNoCapture: "export * as ns" intermediate; entry
// only accesses "ns.foo".
func TestTSImportStarExportStarAsNoCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import {ns} from './bar'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarExportStarAsCapture: "export * as ns" intermediate; entry
// uses the namespace object itself.
func TestTSImportStarExportStarAsCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import {ns} from './bar'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarExportStarUnused: entry does "import * as ns" of a file that
// itself does "export *"; the namespace is never used.
func TestTSImportStarExportStarUnused(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './bar'
let foo = 234
console.log(foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
export * from './foo'
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarExportStarNoCapture: "import * as ns" over an "export *"
// file; entry only accesses "ns.foo".
func TestTSImportStarExportStarNoCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './bar'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
export * from './foo'
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarExportStarCapture: "import * as ns" over an "export *" file;
// entry uses the namespace object itself.
func TestTSImportStarExportStarCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './bar'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.ts": `
export const foo = 123
`,
"/bar.ts": `
export * from './foo'
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarCommonJSUnused: the imported file is CommonJS-style
// ("exports.foo = ..."); the namespace import is never used.
func TestTSImportStarCommonJSUnused(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
"/foo.ts": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarCommonJSCapture: CommonJS-style target; entry uses the
// namespace object itself plus a property access.
func TestTSImportStarCommonJSCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.ts": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarCommonJSNoCapture: CommonJS-style target; entry only reads
// "ns.foo" and never uses the namespace object directly.
func TestTSImportStarCommonJSNoCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.ts": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarAndCommonJS: the same TS module is consumed both through an
// ESM namespace import and through require() from a JS entry.
func TestTSImportStarAndCommonJS(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
const ns2 = require('./foo')
console.log(ns.foo, ns2.foo)
`,
"/foo.ts": `
export const foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarNoBundleUnused: same unused-namespace fixture but without
// Mode set (no bundling) — './foo' is intentionally not provided.
func TestTSImportStarNoBundleUnused(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarNoBundleCapture: namespace object used directly, no
// bundling mode set.
func TestTSImportStarNoBundleCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarNoBundleNoCapture: only property accesses on the namespace,
// no bundling mode set.
func TestTSImportStarNoBundleNoCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarMangleNoBundleUnused: like the NoBundle variant above but
// with syntax minification enabled.
func TestTSImportStarMangleNoBundleUnused(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
MinifySyntax: true,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarMangleNoBundleCapture: namespace object used directly, with
// syntax minification and no bundling mode.
func TestTSImportStarMangleNoBundleCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
MinifySyntax: true,
AbsOutputFile: "/out.js",
},
})
}
// TestTSImportStarMangleNoBundleNoCapture: only property accesses, with syntax
// minification and no bundling mode.
func TestTSImportStarMangleNoBundleNoCapture(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
MinifySyntax: true,
AbsOutputFile: "/out.js",
},
})
}
// TestTSReExportTypeOnlyFileES6: a re-export barrel mixes files that export
// only TypeScript types (plus side effects) with one that exports a value;
// the entry accesses only the value export through the namespace.
func TestTSReExportTypeOnlyFileES6(t *testing.T) {
importstar_ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import * as ns from './re-export'
console.log(ns.foo)
`,
"/re-export.ts": `
export * from './types1'
export * from './types2'
export * from './types3'
export * from './values'
`,
"/types1.ts": `
export interface Foo {}
export type Bar = number
console.log('some code')
`,
"/types2.ts": `
import {Foo} from "./type"
export {Foo}
console.log('some code')
`,
"/types3.ts": `
export {Foo} from "./type"
console.log('some code')
`,
"/values.ts": `
export let foo = 123
`,
"/type.ts": `
export type Foo = number
`,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// tsconfig_suite groups the snapshot tests below, which cover tsconfig.json
// handling (paths, baseUrl, jsx settings, extends, overrides).
var tsconfig_suite = suite{
name: "tsconfig",
}
// TestTsconfigPaths exercises tsconfig "paths" resolution in two setups:
// one config with "baseUrl": "." and one with "baseUrl": "nested". Each setup
// covers exact mappings, wildcard mappings, wildcards in the key, multiple
// candidate targets, and absolute-looking virtual paths mapped both to
// relative and to absolute targets.
//
// Fix: the nested "absolute-in-star.ts" previously imported '/virtual-in/test'
// (a copy-paste of the non-star case), so the nested "/virtual-in-star/*"
// mapping was never exercised. It now imports '/virtual-in-star/test' to
// mirror the baseurl_dot twin; both specifiers resolve to the same file, so
// the bundled output is unchanged.
func TestTsconfigPaths(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.ts": `
import baseurl_dot from './baseurl_dot'
import baseurl_nested from './baseurl_nested'
console.log(baseurl_dot, baseurl_nested)
`,
// Tests with "baseUrl": "."
"/Users/user/project/baseurl_dot/index.ts": `
import test0 from 'test0'
import test1 from 'test1/foo'
import test2 from 'test2/foo'
import test3 from 'test3/foo'
import test4 from 'test4/foo'
import test5 from 'test5/foo'
import absoluteIn from './absolute-in'
import absoluteInStar from './absolute-in-star'
import absoluteOut from './absolute-out'
import absoluteOutStar from './absolute-out-star'
export default {
test0,
test1,
test2,
test3,
test4,
test5,
absoluteIn,
absoluteInStar,
absoluteOut,
absoluteOutStar,
}
`,
"/Users/user/project/baseurl_dot/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"test0": ["./test0-success.ts"],
"test1/*": ["./test1-success.ts"],
"test2/*": ["./test2-success/*"],
"t*t3/foo": ["./test3-succ*s.ts"],
"test4/*": ["./test4-first/*", "./test4-second/*"],
"test5/*": ["./test5-first/*", "./test5-second/*"],
"/virtual-in/test": ["./actual/test"],
"/virtual-in-star/*": ["./actual/*"],
"/virtual-out/test": ["/Users/user/project/baseurl_dot/actual/test"],
"/virtual-out-star/*": ["/Users/user/project/baseurl_dot/actual/*"],
}
}
}
`,
"/Users/user/project/baseurl_dot/test0-success.ts": `
export default 'test0-success'
`,
"/Users/user/project/baseurl_dot/test1-success.ts": `
export default 'test1-success'
`,
"/Users/user/project/baseurl_dot/test2-success/foo.ts": `
export default 'test2-success'
`,
"/Users/user/project/baseurl_dot/test3-success.ts": `
export default 'test3-success'
`,
"/Users/user/project/baseurl_dot/test4-first/foo.ts": `
export default 'test4-success'
`,
"/Users/user/project/baseurl_dot/test5-second/foo.ts": `
export default 'test5-success'
`,
"/Users/user/project/baseurl_dot/absolute-in.ts": `
export {default} from '/virtual-in/test'
`,
"/Users/user/project/baseurl_dot/absolute-in-star.ts": `
export {default} from '/virtual-in-star/test'
`,
"/Users/user/project/baseurl_dot/absolute-out.ts": `
export {default} from '/virtual-out/test'
`,
"/Users/user/project/baseurl_dot/absolute-out-star.ts": `
export {default} from '/virtual-out-star/test'
`,
"/Users/user/project/baseurl_dot/actual/test.ts": `
export default 'absolute-success'
`,
// Tests with "baseUrl": "nested"
"/Users/user/project/baseurl_nested/index.ts": `
import test0 from 'test0'
import test1 from 'test1/foo'
import test2 from 'test2/foo'
import test3 from 'test3/foo'
import test4 from 'test4/foo'
import test5 from 'test5/foo'
import absoluteIn from './absolute-in'
import absoluteInStar from './absolute-in-star'
import absoluteOut from './absolute-out'
import absoluteOutStar from './absolute-out-star'
export default {
test0,
test1,
test2,
test3,
test4,
test5,
absoluteIn,
absoluteInStar,
absoluteOut,
absoluteOutStar,
}
`,
"/Users/user/project/baseurl_nested/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": "nested",
"paths": {
"test0": ["./test0-success.ts"],
"test1/*": ["./test1-success.ts"],
"test2/*": ["./test2-success/*"],
"t*t3/foo": ["./test3-succ*s.ts"],
"test4/*": ["./test4-first/*", "./test4-second/*"],
"test5/*": ["./test5-first/*", "./test5-second/*"],
"/virtual-in/test": ["./actual/test"],
"/virtual-in-star/*": ["./actual/*"],
"/virtual-out/test": ["/Users/user/project/baseurl_nested/nested/actual/test"],
"/virtual-out-star/*": ["/Users/user/project/baseurl_nested/nested/actual/*"],
}
}
}
`,
"/Users/user/project/baseurl_nested/nested/test0-success.ts": `
export default 'test0-success'
`,
"/Users/user/project/baseurl_nested/nested/test1-success.ts": `
export default 'test1-success'
`,
"/Users/user/project/baseurl_nested/nested/test2-success/foo.ts": `
export default 'test2-success'
`,
"/Users/user/project/baseurl_nested/nested/test3-success.ts": `
export default 'test3-success'
`,
"/Users/user/project/baseurl_nested/nested/test4-first/foo.ts": `
export default 'test4-success'
`,
"/Users/user/project/baseurl_nested/nested/test5-second/foo.ts": `
export default 'test5-success'
`,
"/Users/user/project/baseurl_nested/absolute-in.ts": `
export {default} from '/virtual-in/test'
`,
"/Users/user/project/baseurl_nested/absolute-in-star.ts": `
export {default} from '/virtual-in-star/test'
`,
"/Users/user/project/baseurl_nested/absolute-out.ts": `
export {default} from '/virtual-out/test'
`,
"/Users/user/project/baseurl_nested/absolute-out-star.ts": `
export {default} from '/virtual-out-star/test'
`,
"/Users/user/project/baseurl_nested/nested/actual/test.ts": `
export default 'absolute-success'
`,
},
entryPaths: []string{"/Users/user/project/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigPathsNoBaseURL: same "paths" matrix as TestTsconfigPaths but
// with no "baseUrl" at all, in a plain config and in one reached via
// "extends" from a nested directory.
func TestTsconfigPathsNoBaseURL(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.ts": `
import simple from './simple'
import extended from './extended'
console.log(simple, extended)
`,
// Tests with a plain tsconfig.json ("paths" only, no "baseUrl")
"/Users/user/project/simple/index.ts": `
import test0 from 'test0'
import test1 from 'test1/foo'
import test2 from 'test2/foo'
import test3 from 'test3/foo'
import test4 from 'test4/foo'
import test5 from 'test5/foo'
import absolute from './absolute'
export default {
test0,
test1,
test2,
test3,
test4,
test5,
absolute,
}
`,
"/Users/user/project/simple/tsconfig.json": `
{
"compilerOptions": {
"paths": {
"test0": ["./test0-success.ts"],
"test1/*": ["./test1-success.ts"],
"test2/*": ["./test2-success/*"],
"t*t3/foo": ["./test3-succ*s.ts"],
"test4/*": ["./test4-first/*", "./test4-second/*"],
"test5/*": ["./test5-first/*", "./test5-second/*"],
"/virtual/*": ["./actual/*"],
}
}
}
`,
"/Users/user/project/simple/test0-success.ts": `
export default 'test0-success'
`,
"/Users/user/project/simple/test1-success.ts": `
export default 'test1-success'
`,
"/Users/user/project/simple/test2-success/foo.ts": `
export default 'test2-success'
`,
"/Users/user/project/simple/test3-success.ts": `
export default 'test3-success'
`,
"/Users/user/project/simple/test4-first/foo.ts": `
export default 'test4-success'
`,
"/Users/user/project/simple/test5-second/foo.ts": `
export default 'test5-success'
`,
"/Users/user/project/simple/absolute.ts": `
export {default} from '/virtual/test'
`,
"/Users/user/project/simple/actual/test.ts": `
export default 'absolute-success'
`,
// Tests with a tsconfig.json that "extends" a nested config
"/Users/user/project/extended/index.ts": `
import test0 from 'test0'
import test1 from 'test1/foo'
import test2 from 'test2/foo'
import test3 from 'test3/foo'
import test4 from 'test4/foo'
import test5 from 'test5/foo'
import absolute from './absolute'
export default {
test0,
test1,
test2,
test3,
test4,
test5,
absolute,
}
`,
"/Users/user/project/extended/tsconfig.json": `
{
"extends": "./nested/tsconfig.json"
}
`,
"/Users/user/project/extended/nested/tsconfig.json": `
{
"compilerOptions": {
"paths": {
"test0": ["./test0-success.ts"],
"test1/*": ["./test1-success.ts"],
"test2/*": ["./test2-success/*"],
"t*t3/foo": ["./test3-succ*s.ts"],
"test4/*": ["./test4-first/*", "./test4-second/*"],
"test5/*": ["./test5-first/*", "./test5-second/*"],
"/virtual/*": ["./actual/*"],
}
}
}
`,
"/Users/user/project/extended/nested/test0-success.ts": `
export default 'test0-success'
`,
"/Users/user/project/extended/nested/test1-success.ts": `
export default 'test1-success'
`,
"/Users/user/project/extended/nested/test2-success/foo.ts": `
export default 'test2-success'
`,
"/Users/user/project/extended/nested/test3-success.ts": `
export default 'test3-success'
`,
"/Users/user/project/extended/nested/test4-first/foo.ts": `
export default 'test4-success'
`,
"/Users/user/project/extended/nested/test5-second/foo.ts": `
export default 'test5-success'
`,
"/Users/user/project/extended/absolute.ts": `
export {default} from '/virtual/test'
`,
"/Users/user/project/extended/nested/actual/test.ts": `
export default 'absolute-success'
`,
},
entryPaths: []string{"/Users/user/project/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigBadPathsNoBaseURL: with no "baseUrl", non-relative "paths"
// targets are invalid; the expected log lists one warning per bad target and
// the unresolved import becomes an error.
func TestTsconfigBadPathsNoBaseURL(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.ts": `
import "should-not-be-imported"
`,
"/Users/user/project/should-not-be-imported.ts": `
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"paths": {
"test": [
".",
"..",
"./good",
".\\good",
"../good",
"..\\good",
"/good",
"\\good",
"c:/good",
"c:\\good",
"C:/good",
"C:\\good",
"bad",
"@bad/core",
".*/bad",
"..*/bad",
"c*:\\bad",
"c:*\\bad",
"http://bad"
]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
expectedScanLog: `Users/user/project/entry.ts: ERROR: Could not resolve "should-not-be-imported"
NOTE: Use the relative path "./should-not-be-imported" to reference the file "Users/user/project/should-not-be-imported.ts". Without the leading "./", the path "should-not-be-imported" is being interpreted as a package path instead.
Users/user/project/tsconfig.json: WARNING: Non-relative path "bad" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
Users/user/project/tsconfig.json: WARNING: Non-relative path "@bad/core" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
Users/user/project/tsconfig.json: WARNING: Non-relative path ".*/bad" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
Users/user/project/tsconfig.json: WARNING: Non-relative path "..*/bad" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
Users/user/project/tsconfig.json: WARNING: Non-relative path "c*:\\bad" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
Users/user/project/tsconfig.json: WARNING: Non-relative path "c:*\\bad" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
Users/user/project/tsconfig.json: WARNING: Non-relative path "http://bad" is not allowed when "baseUrl" is not set (did you forget a leading "./"?)
`,
})
}
// https://github.com/evanw/esbuild/issues/913
// TestTsconfigPathsOverriddenBaseURL: "paths" comes from an extended config
// while the child config supplies the "baseUrl" it is resolved against.
func TestTsconfigPathsOverriddenBaseURL(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.ts": `
import test from '#/test'
console.log(test)
`,
"/Users/user/project/src/test.ts": `
export default 123
`,
"/Users/user/project/tsconfig.json": `
{
"extends": "./tsconfig.paths.json",
"compilerOptions": {
"baseUrl": "./src"
}
}
`,
"/Users/user/project/tsconfig.paths.json": `
{
"compilerOptions": {
"paths": {
"#/*": ["./*"]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigPathsOverriddenBaseURLDifferentDir: like the test above, but the
// extending config lives in a different directory than the extended one.
func TestTsconfigPathsOverriddenBaseURLDifferentDir(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.ts": `
import test from '#/test'
console.log(test)
`,
"/Users/user/project/src/test.ts": `
export default 123
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "../tsconfig.paths.json",
"compilerOptions": {
"baseUrl": "./"
}
}
`,
"/Users/user/project/tsconfig.paths.json": `
{
"compilerOptions": {
"paths": {
"#/*": ["./*"]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigPathsMissingBaseURL: the inherited non-relative "paths" mapping
// has no "baseUrl" anywhere in the chain, so the import fails to resolve and
// the expected error is produced.
func TestTsconfigPathsMissingBaseURL(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.ts": `
import test from '#/test'
console.log(test)
`,
"/Users/user/project/src/test.ts": `
export default 123
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "../tsconfig.paths.json",
"compilerOptions": {
}
}
`,
"/Users/user/project/tsconfig.paths.json": `
{
"compilerOptions": {
"paths": {
"#/*": ["./*"]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
expectedScanLog: `Users/user/project/src/entry.ts: ERROR: Could not resolve "#/test"
NOTE: You can mark the path "#/test" as external to exclude it from the bundle, which will remove this error and leave the unresolved path in the bundle.
`,
})
}
// TestTsconfigPathsTypeOnly: a "paths" mapping points "fib" at a ".d.ts"
// declaration file while the runtime implementation lives in node_modules.
func TestTsconfigPathsTypeOnly(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.ts": `
import { fib } from "fib";
console.log(fib(10));
`,
"/Users/user/project/node_modules/fib/index.js": `
export function fib(input) {
if (input < 2) {
return input;
}
return fib(input - 1) + fib(input - 2);
}
`,
"/Users/user/project/fib-local.d.ts": `
export function fib(input: number): number;
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"fib": ["fib-local.d.ts"]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJSX: custom "jsxFactory" and "jsxFragmentFactory" from
// tsconfig.json are applied to a .tsx entry.
func TestTsconfigJSX(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.tsx": `
console.log(<><div/><div/></>)
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"jsxFactory": "R.c",
"jsxFragmentFactory": "R.F"
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigNestedJSX: three sibling directories each have their own
// tsconfig.json setting the factory, the fragment factory, or both; each
// directory's settings apply only to its own files.
func TestTsconfigNestedJSX(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.ts": `
import factory from './factory'
import fragment from './fragment'
import both from './both'
console.log(factory, fragment, both)
`,
"/Users/user/project/factory/index.tsx": `
export default <><div/><div/></>
`,
"/Users/user/project/factory/tsconfig.json": `
{
"compilerOptions": {
"jsxFactory": "h"
}
}
`,
"/Users/user/project/fragment/index.tsx": `
export default <><div/><div/></>
`,
"/Users/user/project/fragment/tsconfig.json": `
{
"compilerOptions": {
"jsxFragmentFactory": "a.b"
}
}
`,
"/Users/user/project/both/index.tsx": `
export default <><div/><div/></>
`,
"/Users/user/project/both/tsconfig.json": `
{
"compilerOptions": {
"jsxFactory": "R.c",
"jsxFragmentFactory": "R.F"
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigPreserveJSX: "jsx": "preserve" in tsconfig.json should be
// ignored (per the fixture's own comment) when bundling the .tsx entry.
func TestTsconfigPreserveJSX(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.tsx": `
console.log(<><div/><div/></>)
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"jsx": "preserve" // This should be ignored
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigPreserveJSXAutomatic: same "jsx": "preserve" fixture, but the
// build options request the automatic JSX runtime, with "react/jsx-runtime"
// marked external so it stays unresolved in the bundle.
func TestTsconfigPreserveJSXAutomatic(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.tsx": `
console.log(<><div/><div/></>)
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"jsx": "preserve" // This should be ignored
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
JSX: config.JSXOptions{
AutomaticRuntime: true,
},
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"react/jsx-runtime": true,
}},
},
},
})
}
// TestTsconfigReactJSX: "jsx": "react-jsx" with a custom "jsxImportSource"
// ("notreact"); the corresponding runtime module is marked external.
func TestTsconfigReactJSX(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.tsx": `
console.log(<><div/><div/></>)
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"jsx": "react-jsx",
"jsxImportSource": "notreact"
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"notreact/jsx-runtime": true,
}},
},
},
})
}
// TestTsconfigReactJSXDev: "jsx": "react-jsxdev" should pull in the dev
// runtime ("react/jsx-dev-runtime"), which is marked external here.
func TestTsconfigReactJSXDev(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.tsx": `
console.log(<><div/><div/></>)
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"jsx": "react-jsxdev"
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"react/jsx-dev-runtime": true,
}},
},
},
})
}
// TestTsconfigReactJSXWithDevInMainConfig: tsconfig says "react-jsx" but the
// build options set Development: true; the dev runtime is marked external.
func TestTsconfigReactJSXWithDevInMainConfig(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.tsx": `
console.log(<><div/><div/></>)
`,
"/Users/user/project/tsconfig.json": `
{
"compilerOptions": {
"jsx": "react-jsx"
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
JSX: config.JSXOptions{
Development: true,
},
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"react/jsx-dev-runtime": true,
}},
},
},
})
}
// TestTsconfigJsonBaseUrl: a bare package-style import ('lib/util') resolves
// relative to the tsconfig.json "baseUrl" of ".".
func TestTsconfigJsonBaseUrl(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.js": `
import fn from 'lib/util'
console.log(fn())
`,
"/Users/user/project/src/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": "."
}
}
`,
"/Users/user/project/src/lib/util.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestJsconfigJsonBaseUrl: identical to TestTsconfigJsonBaseUrl but the config
// file is named jsconfig.json instead of tsconfig.json.
func TestJsconfigJsonBaseUrl(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.js": `
import fn from 'lib/util'
console.log(fn())
`,
"/Users/user/project/src/jsconfig.json": `
{
"compilerOptions": {
"baseUrl": "."
}
}
`,
"/Users/user/project/src/lib/util.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonAbsoluteBaseUrl: "baseUrl" given as an absolute path rather
// than a relative one.
func TestTsconfigJsonAbsoluteBaseUrl(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.js": `
import fn from 'lib/util'
console.log(fn())
`,
"/Users/user/project/src/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": "/Users/user/project/src"
}
}
`,
"/Users/user/project/src/lib/util.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonCommentAllowed: tsconfig.json may contain // comments
// (JSON5-ish parsing) without breaking option handling.
func TestTsconfigJsonCommentAllowed(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.js": `
import fn from 'lib/util'
console.log(fn())
`,
"/Users/user/project/src/tsconfig.json": `
{
// Single-line comment
"compilerOptions": {
"baseUrl": "."
}
}
`,
"/Users/user/project/src/lib/util.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonTrailingCommaAllowed: trailing commas in tsconfig.json are
// tolerated.
func TestTsconfigJsonTrailingCommaAllowed(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.js": `
import fn from 'lib/util'
console.log(fn())
`,
"/Users/user/project/src/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": ".",
},
}
`,
"/Users/user/project/src/lib/util.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonExtends: a child config overrides only the fragment factory
// inherited from its "extends" base (which sets both factories).
func TestTsconfigJsonExtends(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
console.log(<div/>, <></>)
`,
"/tsconfig.json": `
{
"extends": "./base",
"compilerOptions": {
"jsxFragmentFactory": "derivedFragment"
}
}
`,
"/base.json": `
{
"compilerOptions": {
"jsxFactory": "baseFactory",
"jsxFragmentFactory": "baseFragment"
}
}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTsconfigJsonExtendsAbsolute: "extends" with an absolute path, run once
// with Unix-style paths and once with Windows-style paths.
func TestTsconfigJsonExtendsAbsolute(t *testing.T) {
tsconfig_suite.expectBundledUnix(t, bundled{
files: map[string]string{
"/Users/user/project/entry.jsx": `
console.log(<div/>, <></>)
`,
"/Users/user/project/tsconfig.json": `
{
"extends": "/Users/user/project/base.json",
"compilerOptions": {
"jsxFragmentFactory": "derivedFragment"
}
}
`,
"/Users/user/project/base.json": `
{
"compilerOptions": {
"jsxFactory": "baseFactory",
"jsxFragmentFactory": "baseFragment"
}
}
`,
},
entryPaths: []string{"/Users/user/project/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
tsconfig_suite.expectBundledWindows(t, bundled{
files: map[string]string{
"C:\\Users\\user\\project\\entry.jsx": `
console.log(<div/>, <></>)
`,
"C:\\Users\\user\\project\\tsconfig.json": `
{
"extends": "C:\\Users\\user\\project\\base.json",
"compilerOptions": {
"jsxFragmentFactory": "derivedFragment"
}
}
`,
"C:\\Users\\user\\project\\base.json": `
{
"compilerOptions": {
"jsxFactory": "baseFactory",
"jsxFragmentFactory": "baseFragment"
}
}
`,
},
entryPaths: []string{"C:\\Users\\user\\project\\entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "C:\\out.js",
},
})
}
// TestTsconfigJsonExtendsThreeLevels: a three-deep "extends" chain where the
// innermost config supplies "baseUrl"/"paths" (resolved relative to it) and
// the outermost overrides the fragment factory.
func TestTsconfigJsonExtendsThreeLevels(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.jsx": `
import "test/import.js"
console.log(<div/>, <></>)
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "./path1/base",
"compilerOptions": {
"jsxFragmentFactory": "derivedFragment"
}
}
`,
"/Users/user/project/src/path1/base.json": `
{
"extends": "../path2/base2"
}
`,
"/Users/user/project/src/path2/base2.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"test/*": ["./works/*"]
},
"jsxFactory": "baseFactory",
"jsxFragmentFactory": "baseFragment"
}
}
`,
"/Users/user/project/src/path2/works/import.js": `
console.log('works')
`,
},
entryPaths: []string{"/Users/user/project/src/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestTsconfigJsonExtendsLoop: two configs that "extends" each other; the
// cycle is reported as a warning rather than hanging or failing the build.
func TestTsconfigJsonExtendsLoop(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(123)
`,
"/tsconfig.json": `
{
"extends": "./base.json"
}
`,
"/base.json": `
{
"extends": "./tsconfig"
}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedScanLog: `base.json: WARNING: Base config file "./tsconfig" forms cycle
`,
})
}
// TestTsconfigJsonExtendsPackage: "extends" referencing a config shipped
// inside a node_modules package.
func TestTsconfigJsonExtendsPackage(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.jsx": `
console.log(<div/>)
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "@package/foo/tsconfig.json"
}
`,
"/Users/user/project/node_modules/@package/foo/tsconfig.json": `
{
"compilerOptions": {
"jsxFactory": "worked"
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonOverrideMissing: an explicit TSConfigPath option overrides
// the tsconfig.json discovered next to the sources, so "foo" resolves through
// the override config ('good') rather than the local one ('bad').
func TestTsconfigJsonOverrideMissing(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.ts": `
import 'foo'
`,
"/Users/user/project/src/foo-bad.ts": `
console.log('bad')
`,
"/Users/user/project/src/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"foo": ["./foo-bad.ts"]
}
}
}
`,
"/Users/user/project/other/foo-good.ts": `
console.log('good')
`,
"/Users/user/project/other/config-for-ts.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"foo": ["./foo-good.ts"]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
TSConfigPath: "/Users/user/project/other/config-for-ts.json",
},
})
}
func TestTsconfigJsonOverrideNodeModules(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.ts": `
import 'foo'
`,
"/Users/user/project/src/node_modules/foo/index.js": `
console.log('default')
`,
"/Users/user/project/src/foo-bad.ts": `
console.log('bad')
`,
"/Users/user/project/src/tsconfig.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"foo": ["./foo-bad.ts"]
}
}
}
`,
"/Users/user/project/other/foo-good.ts": `
console.log('good')
`,
"/Users/user/project/other/config-for-ts.json": `
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"foo": ["./foo-good.ts"]
}
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
TSConfigPath: "/Users/user/project/other/config-for-ts.json",
},
})
}
func TestTsconfigJsonOverrideInvalid(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": ``,
},
entryPaths: []string{"/entry.ts"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
TSConfigPath: "/this/file/doesn't/exist/tsconfig.json",
},
expectedScanLog: `ERROR: Cannot find tsconfig file "this/file/doesn't/exist/tsconfig.json"
`,
})
}
// TestTsconfigJsonNodeModulesImplicitFile checks that `"extends": "foo"` with a
// bare package name implicitly resolves to "node_modules/foo/tsconfig.json".
func TestTsconfigJsonNodeModulesImplicitFile(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.tsx": `
console.log(<div/>)
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "foo"
}
`,
"/Users/user/project/src/node_modules/foo/tsconfig.json": `
{
"compilerOptions": {
"jsx": "react",
"jsxFactory": "worked"
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonNodeModulesTsconfigPathExact checks that the "tsconfig" field
// in a package's package.json redirects "extends" resolution to that exact
// relative path within the package.
func TestTsconfigJsonNodeModulesTsconfigPathExact(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.tsx": `
console.log(<div/>)
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "foo"
}
`,
"/Users/user/project/src/node_modules/foo/package.json": `
{
"tsconfig": "over/here.json"
}
`,
"/Users/user/project/src/node_modules/foo/over/here.json": `
{
"compilerOptions": {
"jsx": "react",
"jsxFactory": "worked"
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestTsconfigJsonNodeModulesTsconfigPathImplicitJson is the same as the exact
// case above but the "tsconfig" field omits the ".json" extension, which must
// be added implicitly during resolution.
func TestTsconfigJsonNodeModulesTsconfigPathImplicitJson(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/app/entry.tsx": `
console.log(<div/>)
`,
"/Users/user/project/src/tsconfig.json": `
{
"extends": "foo"
}
`,
"/Users/user/project/src/node_modules/foo/package.json": `
{
"tsconfig": "over/here"
}
`,
"/Users/user/project/src/node_modules/foo/over/here.json": `
{
"compilerOptions": {
"jsx": "react",
"jsxFactory": "worked"
}
}
`,
},
entryPaths: []string{"/Users/user/project/src/app/entry.tsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
func TestTsconfigJsonNodeModulesTsconfigPathDirectory(t *testing.T) {
tsconfig_suite.expectBundled(t, bundled{
files: map[string]string{
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_yarnpnp_test.go | internal/bundler_tests/bundler_yarnpnp_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
var yarnpnp_suite = suite{
name: "yarnpnp",
}
// https://github.com/evanw/esbuild/issues/3698
// TestTsconfigPackageJsonExportsYarnPnP checks that a tsconfig "extends" target
// resolved through a package's "exports" map still works when package
// resolution goes through a Yarn Plug'n'Play manifest (.pnp.data.json) instead
// of node_modules.
func TestTsconfigPackageJsonExportsYarnPnP(t *testing.T) {
yarnpnp_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/packages/app/index.tsx": `
console.log(<div/>)
`,
"/Users/user/project/packages/app/tsconfig.json": `
{
"extends": "tsconfigs/config"
}
`,
"/Users/user/project/packages/tsconfigs/package.json": `
{
"exports": {
"./config": "./configs/tsconfig.json"
}
}
`,
"/Users/user/project/packages/tsconfigs/configs/tsconfig.json": `
{
"compilerOptions": {
"jsxFactory": "success"
}
}
`,
"/Users/user/project/.pnp.data.json": `
{
"packageRegistryData": [
[
"app",
[
[
"workspace:packages/app",
{
"packageLocation": "./packages/app/",
"packageDependencies": [
[
"tsconfigs",
"workspace:packages/tsconfigs"
]
],
"linkType": "SOFT"
}
]
]
],
[
"tsconfigs",
[
[
"workspace:packages/tsconfigs",
{
"packageLocation": "./packages/tsconfigs/",
"packageDependencies": [],
"linkType": "SOFT"
}
]
]
]
]
}
`,
},
entryPaths: []string{"/Users/user/project/packages/app/index.tsx"},
absWorkingDir: "/Users/user/project",
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// https://github.com/evanw/esbuild/issues/3915
// TestTsconfigStackOverflowYarnPnP checks that a Yarn PnP registry containing a
// "virtual:" package entry that depends on itself does not send tsconfig
// resolution into unbounded recursion (the original bug was a stack overflow).
func TestTsconfigStackOverflowYarnPnP(t *testing.T) {
yarnpnp_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/entry.jsx": `
console.log(<div />)
`,
"/Users/user/project/tsconfig.json": `
{
"extends": "tsconfigs/config"
}
`,
"/Users/user/project/packages/tsconfigs/package.json": `
{
"exports": {
"./config": "./configs/tsconfig.json"
}
}
`,
"/Users/user/project/packages/tsconfigs/configs/tsconfig.json": `
{
"compilerOptions": {
"jsxFactory": "success"
}
}
`,
"/Users/user/project/.pnp.data.json": `
{
"packageRegistryData": [
[null, [
[null, {
"packageLocation": "./",
"packageDependencies": [
["tsconfigs", "virtual:some-path"]
],
"linkType": "SOFT"
}]
]],
["tsconfigs", [
["virtual:some-path", {
"packageLocation": "./packages/tsconfigs/",
"packageDependencies": [
["tsconfigs", "virtual:some-path"]
],
"packagePeers": [],
"linkType": "SOFT"
}],
["workspace:packages/tsconfigs", {
"packageLocation": "./packages/tsconfigs/",
"packageDependencies": [
["tsconfigs", "workspace:packages/tsconfigs"]
],
"linkType": "SOFT"
}]
]]
]
}
`,
},
entryPaths: []string{"/Users/user/project/entry.jsx"},
absWorkingDir: "/Users/user/project",
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
// TestWindowsCrossVolumeReferenceYarnPnP checks Yarn PnP resolution on Windows
// when a package's "packageLocation" escapes the project via "../../" onto a
// different drive letter (D: project referencing a cached package on C:).
// Note this uses expectBundledWindows, so it only runs with Windows-style paths.
func TestWindowsCrossVolumeReferenceYarnPnP(t *testing.T) {
yarnpnp_suite.expectBundledWindows(t, bundled{
files: map[string]string{
"D:\\project\\entry.jsx": `
import * as React from 'react'
console.log(<div />)
`,
"C:\\Users\\user\\AppData\\Local\\Yarn\\Berry\\cache\\react.zip\\node_modules\\react\\index.js": `
export function createElement() {}
`,
"D:\\project\\.pnp.data.json": `
{
"packageRegistryData": [
[null, [
[null, {
"packageLocation": "./",
"packageDependencies": [
["react", "npm:19.1.1"],
["project", "workspace:."]
],
"linkType": "SOFT"
}]
]],
["react", [
["npm:19.1.1", {
"packageLocation": "../../C:/Users/user/AppData/Local/Yarn/Berry/cache/react.zip/node_modules/react/",
"packageDependencies": [
["react", "npm:19.1.1"]
],
"linkType": "HARD"
}]
]],
["project", [
["workspace:.", {
"packageLocation": "./",
"packageDependencies": [
["react", "npm:19.1.1"],
["project", "workspace:."]
],
"linkType": "SOFT"
}]
]]
]
}
`,
},
entryPaths: []string{"D:\\project\\entry.jsx"},
absWorkingDir: "D:\\project",
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "D:\\project\\out.js",
},
})
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_loader_test.go | internal/bundler_tests/bundler_loader_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/bundler"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
)
var loader_suite = suite{
name: "loader",
}
// TestLoaderFile checks the basic "file" loader: a required .svg asset is
// copied to the output directory and the import resolves to its path.
func TestLoaderFile(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(require('./test.svg'))
`,
"/test.svg": "<svg></svg>",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out/",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".svg": config.LoaderFile,
},
},
})
}
// TestLoaderFileMultipleNoCollision checks that two assets with identical
// contents but different source paths do not collide on output file names.
func TestLoaderFileMultipleNoCollision(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(
require('./a/test.txt'),
require('./b/test.txt'),
)
`,
// Two files with the same contents but different paths
"/a/test.txt": "test",
"/b/test.txt": "test",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/dist/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".txt": config.LoaderFile,
},
},
})
}
// TestJSXSyntaxInJSWithJSXLoader checks that JSX syntax inside a ".js" file
// parses when the ".js" extension is explicitly mapped to the JSX loader.
func TestJSXSyntaxInJSWithJSXLoader(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(<div/>)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
},
},
})
}
// TestJSXPreserveCapitalLetter checks that with JSX "preserve" mode, a
// component imported under a lowercase name but aliased to an uppercase one
// keeps an uppercase tag name in the output (lowercase would re-parse as an
// HTML element, not a component reference).
func TestJSXPreserveCapitalLetter(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
import { mustStartWithUpperCaseLetter as Test } from './foo'
console.log(<Test/>)
`,
"/foo.js": `
export class mustStartWithUpperCaseLetter {}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
JSX: config.JSXOptions{
Parse: true,
Preserve: true,
},
},
})
}
// TestJSXPreserveCapitalLetterMinify is the preserve-mode case combined with
// identifier minification: the minified name for the component must still
// begin with a capital letter.
func TestJSXPreserveCapitalLetterMinify(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
import { mustStartWithUpperCaseLetter as XYYYY } from './foo'
console.log(<XYYYY tag-must-start-with-capital-letter />)
`,
"/foo.js": `
export class mustStartWithUpperCaseLetter {}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
MinifyIdentifiers: true,
JSX: config.JSXOptions{
Parse: true,
Preserve: true,
},
},
})
}
// TestJSXPreserveCapitalLetterMinifyNested is the same constraint for a
// component declared in a nested scope, where frequency analysis picks the
// minified name — it must still be capitalized.
func TestJSXPreserveCapitalLetterMinifyNested(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
x = () => {
class XYYYYY {} // This should be named "Y" due to frequency analysis
return <XYYYYY tag-must-start-with-capital-letter />
}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
MinifyIdentifiers: true,
JSX: config.JSXOptions{
Parse: true,
Preserve: true,
},
},
})
}
// TestRequireCustomExtensionString checks that a custom extension mapped to
// the "text" loader turns the file's contents into a plain string export.
func TestRequireCustomExtensionString(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(require('./test.custom'))
`,
"/test.custom": `#include <stdio.h>`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".custom": config.LoaderText,
},
},
})
}
// TestRequireCustomExtensionBase64 checks the "base64" loader on binary
// (non-UTF-8) content mapped from a custom extension.
func TestRequireCustomExtensionBase64(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(require('./test.custom'))
`,
"/test.custom": "a\x00b\x80c\xFFd",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".custom": config.LoaderBase64,
},
},
})
}
// TestRequireCustomExtensionDataURL checks the "dataurl" loader on binary
// content with a custom extension (no known MIME type for ".custom").
func TestRequireCustomExtensionDataURL(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(require('./test.custom'))
`,
"/test.custom": "a\x00b\x80c\xFFd",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".custom": config.LoaderDataURL,
},
},
})
}
// TestRequireCustomExtensionPreferLongest checks that when multiple configured
// extensions match a file name (".txt" vs ".base64.txt"), the longest match
// wins when picking the loader.
func TestRequireCustomExtensionPreferLongest(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(require('./test.txt'), require('./test.base64.txt'))
`,
"/test.txt": `test.txt`,
"/test.base64.txt": `test.base64.txt`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".txt": config.LoaderText,
".base64.txt": config.LoaderBase64,
},
},
})
}
// TestAutoDetectMimeTypeFromExtension checks that the "dataurl" loader infers
// the MIME type from a well-known extension (".svg") for binary content.
func TestAutoDetectMimeTypeFromExtension(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(require('./test.svg'))
`,
"/test.svg": "a\x00b\x80c\xFFd",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".svg": config.LoaderDataURL,
},
},
})
}
// TestLoaderJSONCommonJSAndES6 checks JSON imports via both require() and ES
// import syntax, including named imports of individual keys (with keyword key
// names like "if") and tree-shaking of unused keys.
func TestLoaderJSONCommonJSAndES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
const x_json = require('./x.json')
import y_json from './y.json'
import {small, if as fi} from './z.json'
console.log(x_json, y_json, small, fi)
`,
"/x.json": `{"x": true}`,
"/y.json": `{"y1": true, "y2": false}`,
"/z.json": `{
"big": "this is a big long line of text that should be discarded",
"small": "some small text",
"if": "test keyword imports"
}`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONInvalidIdentifierES6 checks namespace imports of JSON whose
// keys are not valid JS identifiers, accessed via bracket syntax.
func TestLoaderJSONInvalidIdentifierES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './test.json'
import * as ns2 from './test2.json'
console.log(ns['invalid-identifier'], ns2)
`,
"/test.json": `{"invalid-identifier": true}`,
"/test2.json": `{"invalid-identifier": true}`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONMissingES6 checks that a named import of a key absent from the
// JSON file is a compile-time "no matching export" error.
func TestLoaderJSONMissingES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {missing} from './test.json'
`,
"/test.json": `{"present": true}`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: ERROR: No matching export in "test.json" for import "missing"
`,
})
}
// TestLoaderTextCommonJSAndES6 checks the "text" loader (implied for ".txt")
// via both require() and ES import syntax.
func TestLoaderTextCommonJSAndES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
const x_txt = require('./x.txt')
import y_txt from './y.txt'
console.log(x_txt, y_txt)
`,
"/x.txt": "x",
"/y.txt": "y",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderBase64CommonJSAndES6 checks the "base64" loader via both require()
// and ES import syntax.
func TestLoaderBase64CommonJSAndES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
const x_b64 = require('./x.b64')
import y_b64 from './y.b64'
console.log(x_b64, y_b64)
`,
"/x.b64": "x",
"/y.b64": "y",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".b64": config.LoaderBase64,
},
},
})
}
// TestLoaderDataURLCommonJSAndES6 checks the "dataurl" loader via both
// require() and ES import syntax.
func TestLoaderDataURLCommonJSAndES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
const x_url = require('./x.txt')
import y_url from './y.txt'
console.log(x_url, y_url)
`,
"/x.txt": "x",
"/y.txt": "y",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".txt": config.LoaderDataURL,
},
},
})
}
// TestLoaderFileCommonJSAndES6 checks the "file" loader via both require() and
// ES import syntax.
func TestLoaderFileCommonJSAndES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
const x_url = require('./x.txt')
import y_url from './y.txt'
console.log(x_url, y_url)
`,
"/x.txt": "x",
"/y.txt": "y",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".txt": config.LoaderFile,
},
},
})
}
// TestLoaderFileRelativePathJS checks the "file" loader when the output base
// directory is set, for an asset imported from JS.
func TestLoaderFileRelativePathJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.js": `
import x from '../images/image.png'
console.log(x)
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFileRelativePathCSS is the same scenario for an asset referenced
// from CSS via url().
func TestLoaderFileRelativePathCSS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.css": `
div {
background: url(../images/image.png);
}
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".css": config.LoaderCSS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFileRelativePathAssetNamesJS checks a custom asset path template
// using the [dir]/[name]-[hash] placeholders for a JS-imported asset.
func TestLoaderFileRelativePathAssetNamesJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.js": `
import x from '../images/image.png'
console.log(x)
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
AssetPathTemplate: []config.PathTemplate{
{Data: "", Placeholder: config.DirPlaceholder},
{Data: "/", Placeholder: config.NamePlaceholder},
{Data: "-", Placeholder: config.HashPlaceholder},
},
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFileExtPathAssetNamesJS checks an asset path template keyed on the
// [ext] placeholder, grouping output assets by file extension.
func TestLoaderFileExtPathAssetNamesJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.js": `
import x from '../images/image.png'
import y from '../uploads/file.txt'
console.log(x, y)
`,
"/src/images/image.png": "x",
"/src/uploads/file.txt": "y",
},
entryPaths: []string{"/src/entries/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
AssetPathTemplate: []config.PathTemplate{
{Data: "", Placeholder: config.ExtPlaceholder},
{Data: "/", Placeholder: config.NamePlaceholder},
{Data: "-", Placeholder: config.HashPlaceholder},
},
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".png": config.LoaderFile,
".txt": config.LoaderFile,
},
},
})
}
// TestLoaderFileRelativePathAssetNamesCSS checks the [dir]/[name]-[hash]
// asset path template for an asset referenced from CSS.
func TestLoaderFileRelativePathAssetNamesCSS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.css": `
div {
background: url(../images/image.png);
}
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
AssetPathTemplate: []config.PathTemplate{
{Data: "", Placeholder: config.DirPlaceholder},
{Data: "/", Placeholder: config.NamePlaceholder},
{Data: "-", Placeholder: config.HashPlaceholder},
},
ExtensionToLoader: map[string]config.Loader{
".css": config.LoaderCSS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFilePublicPathJS checks that a configured PublicPath prefixes the
// URL the "file" loader exports for a JS-imported asset.
func TestLoaderFilePublicPathJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.js": `
import x from '../images/image.png'
console.log(x)
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
PublicPath: "https://example.com",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFilePublicPathCSS is the PublicPath scenario for an asset
// referenced from CSS via url().
func TestLoaderFilePublicPathCSS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.css": `
div {
background: url(../images/image.png);
}
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
PublicPath: "https://example.com",
ExtensionToLoader: map[string]config.Loader{
".css": config.LoaderCSS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFilePublicPathAssetNamesJS combines PublicPath with a custom
// [dir]/[name]-[hash] asset path template for a JS-imported asset.
func TestLoaderFilePublicPathAssetNamesJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.js": `
import x from '../images/image.png'
console.log(x)
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
PublicPath: "https://example.com",
AssetPathTemplate: []config.PathTemplate{
{Data: "", Placeholder: config.DirPlaceholder},
{Data: "/", Placeholder: config.NamePlaceholder},
{Data: "-", Placeholder: config.HashPlaceholder},
},
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFilePublicPathAssetNamesCSS combines PublicPath with a custom
// asset path template for an asset referenced from CSS.
func TestLoaderFilePublicPathAssetNamesCSS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.css": `
div {
background: url(../images/image.png);
}
`,
"/src/images/image.png": "x",
},
entryPaths: []string{"/src/entries/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
PublicPath: "https://example.com",
AssetPathTemplate: []config.PathTemplate{
{Data: "", Placeholder: config.DirPlaceholder},
{Data: "/", Placeholder: config.NamePlaceholder},
{Data: "-", Placeholder: config.HashPlaceholder},
},
ExtensionToLoader: map[string]config.Loader{
".css": config.LoaderCSS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFileOneSourceTwoDifferentOutputPathsJS checks that one asset
// shared by two JS entry points (at different output depths) is emitted once
// and referenced correctly from both bundles.
func TestLoaderFileOneSourceTwoDifferentOutputPathsJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.js": `
import '../shared/common.js'
`,
"/src/entries/other/entry.js": `
import '../../shared/common.js'
`,
"/src/shared/common.js": `
import x from './common.png'
console.log(x)
`,
"/src/shared/common.png": "x",
},
entryPaths: []string{
"/src/entries/entry.js",
"/src/entries/other/entry.js",
},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderFileOneSourceTwoDifferentOutputPathsCSS is the same shared-asset
// scenario for two CSS entry points sharing an @import'ed stylesheet.
func TestLoaderFileOneSourceTwoDifferentOutputPathsCSS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/src/entries/entry.css": `
@import "../shared/common.css";
`,
"/src/entries/other/entry.css": `
@import "../../shared/common.css";
`,
"/src/shared/common.css": `
div {
background: url(common.png);
}
`,
"/src/shared/common.png": "x",
},
entryPaths: []string{
"/src/entries/entry.css",
"/src/entries/other/entry.css",
},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputBase: "/src",
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".css": config.LoaderCSS,
".png": config.LoaderFile,
},
},
})
}
// TestLoaderJSONNoBundle checks converting a JSON file on its own (no
// bundling, default pass-through mode).
func TestLoaderJSONNoBundle(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/test.json": `{"test": 123, "invalid-identifier": true}`,
},
entryPaths: []string{"/test.json"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONNoBundleES6 converts a JSON file to ESM when arbitrary module
// namespace names are unsupported, so the non-identifier key cannot be a
// direct named export.
func TestLoaderJSONNoBundleES6(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/test.json": `{"test": 123, "invalid-identifier": true}`,
},
entryPaths: []string{"/test.json"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatESModule,
UnsupportedJSFeatures: compat.ArbitraryModuleNamespaceNames,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONNoBundleES6ArbitraryModuleNamespaceNames is the ESM conversion
// when string-named exports ARE supported, so "invalid-identifier" can be
// exported directly.
func TestLoaderJSONNoBundleES6ArbitraryModuleNamespaceNames(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/test.json": `{"test": 123, "invalid-identifier": true}`,
},
entryPaths: []string{"/test.json"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONNoBundleCommonJS converts the same JSON file to CommonJS.
func TestLoaderJSONNoBundleCommonJS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/test.json": `{"test": 123, "invalid-identifier": true}`,
},
entryPaths: []string{"/test.json"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONNoBundleIIFE converts the same JSON file to an IIFE.
func TestLoaderJSONNoBundleIIFE(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/test.json": `{"test": 123, "invalid-identifier": true}`,
},
entryPaths: []string{"/test.json"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatIIFE,
AbsOutputFile: "/out.js",
},
})
}
// TestLoaderJSONSharedWithMultipleEntriesIssue413 checks that one JSON module
// imported by two entry points is handled correctly when splitting into
// separate ESM outputs (regression test for issue #413).
func TestLoaderJSONSharedWithMultipleEntriesIssue413(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/a.js": `
import data from './data.json'
console.log('a:', data)
`,
"/b.js": `
import data from './data.json'
console.log('b:', data)
`,
"/data.json": `{"test": 123}`,
},
entryPaths: []string{"/a.js", "/b.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputDir: "/out",
},
})
}
// TestLoaderFileWithQueryParameter checks that the same asset imported with
// two different query strings is treated as two distinct modules.
func TestLoaderFileWithQueryParameter(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
// Each of these should have a separate identity (i.e. end up in the output file twice)
import foo from './file.txt?foo'
import bar from './file.txt?bar'
console.log(foo, bar)
`,
"/file.txt": `This is some text`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".txt": config.LoaderFile,
},
},
})
}
// TestLoaderFromExtensionWithQueryParameter checks that loader selection uses
// the real file extension (".abc"), not an extension-like suffix inside the
// query string (".xyz").
func TestLoaderFromExtensionWithQueryParameter(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import foo from './file.abc?query.xyz'
console.log(foo)
`,
"/file.abc": `This should not be base64 encoded`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
".abc": config.LoaderText,
".xyz": config.LoaderBase64,
},
},
})
}
// TestLoaderDataURLTextCSS checks @import of "data:text/css" URLs in all four
// forms: plain, base64, with charset, and charset+base64.
func TestLoaderDataURLTextCSS(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.css": `
@import "data:text/css,body{color:%72%65%64}";
@import "data:text/css;base64,Ym9keXtiYWNrZ3JvdW5kOmJsdWV9";
@import "data:text/css;charset=UTF-8,body{color:%72%65%64}";
@import "data:text/css;charset=UTF-8;base64,Ym9keXtiYWNrZ3JvdW5kOmJsdWV9";
`,
},
entryPaths: []string{"/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
})
}
// TestLoaderDataURLTextCSSCannotImport checks that CSS inside a data URL has
// no directory to resolve against, so a nested relative @import is an error.
func TestLoaderDataURLTextCSSCannotImport(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.css": `
@import "data:text/css,@import './other.css';";
`,
"/other.css": `
div { should-not-be-imported: true }
`,
},
entryPaths: []string{"/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
expectedScanLog: `<data:text/css,@import './other.css';>: ERROR: Could not resolve "./other.css"
`,
})
}
// TestLoaderDataURLTextJavaScript checks importing "data:text/javascript" URLs
// in all four forms: plain, base64, with charset, and charset+base64.
func TestLoaderDataURLTextJavaScript(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import "data:text/javascript,console.log('%31%32%33')";
import "data:text/javascript;base64,Y29uc29sZS5sb2coMjM0KQ==";
import "data:text/javascript;charset=UTF-8,console.log(%31%32%33)";
import "data:text/javascript;charset=UTF-8;base64,Y29uc29sZS5sb2coMjM0KQ==";
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
})
}
// TestLoaderDataURLTextJavaScriptCannotImport checks that JS inside a data URL
// cannot resolve a nested relative import (no base directory).
func TestLoaderDataURLTextJavaScriptCannotImport(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import "data:text/javascript,import './other.js'"
`,
"/other.js": `
shouldNotBeImported = true
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
expectedScanLog: `<data:text/javascript,import './other.js'>: ERROR: Could not resolve "./other.js"
`,
})
}
// The "+" character must not be interpreted as a " " character
// (data URLs are not form-encoded, so "+" is literal).
func TestLoaderDataURLTextJavaScriptPlusCharacter(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import "data:text/javascript,console.log(1+2)";
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
})
}
// TestLoaderDataURLApplicationJSON checks importing "data:application/json"
// URLs in all four forms: plain, base64, with charset, and charset+base64.
func TestLoaderDataURLApplicationJSON(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import a from 'data:application/json,"%31%32%33"';
import b from 'data:application/json;base64,eyJ3b3JrcyI6dHJ1ZX0=';
import c from 'data:application/json;charset=UTF-8,%31%32%33';
import d from 'data:application/json;charset=UTF-8;base64,eyJ3b3JrcyI6dHJ1ZX0=';
console.log([
a, b, c, d,
])
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
})
}
// TestLoaderDataURLUnknownMIME checks the behavior when a data URL has a MIME
// type that esbuild does not recognize (plain and base64 variants).
func TestLoaderDataURLUnknownMIME(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import a from 'data:some/thing;what,someData%31%32%33';
import b from 'data:other/thing;stuff;base64,c29tZURhdGEyMzQ=';
console.log(a, b)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
})
}
// TestLoaderDataURLExtensionBasedMIME covers the extension-to-MIME-type table
// used by the "dataurl" loader: one import per known extension, all mapped to
// LoaderDataURL so the emitted MIME type for each can be snapshotted.
func TestLoaderDataURLExtensionBasedMIME(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.foo": `
export { default as css } from "./example.css"
export { default as eot } from "./example.eot"
export { default as gif } from "./example.gif"
export { default as htm } from "./example.htm"
export { default as html } from "./example.html"
export { default as jpeg } from "./example.jpeg"
export { default as jpg } from "./example.jpg"
export { default as js } from "./example.js"
export { default as json } from "./example.json"
export { default as mjs } from "./example.mjs"
export { default as otf } from "./example.otf"
export { default as pdf } from "./example.pdf"
export { default as png } from "./example.png"
export { default as sfnt } from "./example.sfnt"
export { default as svg } from "./example.svg"
export { default as ttf } from "./example.ttf"
export { default as wasm } from "./example.wasm"
export { default as webp } from "./example.webp"
export { default as woff } from "./example.woff"
export { default as woff2 } from "./example.woff2"
export { default as xml } from "./example.xml"
`,
"/example.css": `css`,
"/example.eot": `eot`,
"/example.gif": `gif`,
"/example.htm": `htm`,
"/example.html": `html`,
"/example.jpeg": `jpeg`,
"/example.jpg": `jpg`,
"/example.js": `js`,
"/example.json": `json`,
"/example.mjs": `mjs`,
"/example.otf": `otf`,
"/example.pdf": `pdf`,
"/example.png": `png`,
"/example.sfnt": `sfnt`,
"/example.svg": `svg`,
"/example.ttf": `ttf`,
"/example.wasm": `wasm`,
"/example.webp": `webp`,
"/example.woff": `woff`,
"/example.woff2": `woff2`,
"/example.xml": `xml`,
},
entryPaths: []string{"/entry.foo"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".foo": config.LoaderJS,
".css": config.LoaderDataURL,
".eot": config.LoaderDataURL,
".gif": config.LoaderDataURL,
".htm": config.LoaderDataURL,
".html": config.LoaderDataURL,
".jpeg": config.LoaderDataURL,
".jpg": config.LoaderDataURL,
".js": config.LoaderDataURL,
".json": config.LoaderDataURL,
".mjs": config.LoaderDataURL,
".otf": config.LoaderDataURL,
".pdf": config.LoaderDataURL,
".png": config.LoaderDataURL,
".sfnt": config.LoaderDataURL,
".svg": config.LoaderDataURL,
".ttf": config.LoaderDataURL,
".wasm": config.LoaderDataURL,
".webp": config.LoaderDataURL,
".woff": config.LoaderDataURL,
".woff2": config.LoaderDataURL,
".xml": config.LoaderDataURL,
},
},
})
}
// Percent-encoded data URLs should switch over to base64
// data URLs if it would result in a smaller size
func TestLoaderDataURLBase64VsPercentEncoding(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import a from './shouldUsePercent_1.txt'
				import b from './shouldUsePercent_2.txt'
				import c from './shouldUseBase64_1.txt'
				import d from './shouldUseBase64_2.txt'
				console.log(
					a,
					b,
					c,
					d,
				)
			`,
			// Each newline percent-encodes to 3 bytes ("%0A") while base64 has
			// a fixed overhead plus ~4/3 bytes per input byte, so the crossover
			// point between the two encodings falls between 4 and 5 newlines.
			"/shouldUsePercent_1.txt": "\n\n\n",
			"/shouldUsePercent_2.txt": "\n\n\n\n",
			"/shouldUseBase64_1.txt":  "\n\n\n\n\n",
			"/shouldUseBase64_2.txt":  "\n\n\n\n\n\n",
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".txt": config.LoaderDataURL,
			},
		},
	})
}
// Content that is not valid UTF-8 cannot be percent-encoded as text, so the
// data URL loader must fall back to base64 encoding for it.
func TestLoaderDataURLBase64InvalidUTF8(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import a from './binary.txt'
				console.log(a)
			`,
			// "\xFF" is an invalid UTF-8 byte sequence
			"/binary.txt": "\xFF",
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".txt": config.LoaderDataURL,
			},
		},
	})
}
// Literal "%" characters in the source must be escaped when generating a
// percent-encoded data URL, including "%" sequences that happen to look like
// (or almost look like) valid percent escapes.
func TestLoaderDataURLEscapePercents(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import a from './percents.txt'
				console.log(a)
			`,
			"/percents.txt": `
%, %3, %33, %333
%, %e, %ee, %eee
%, %E, %EE, %EEE
`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".txt": config.LoaderDataURL,
			},
		},
	})
}
// The "copy" loader should copy the referenced asset to the output directory
// when it is imported from a JavaScript file during bundling.
func TestLoaderCopyWithBundleFromJS(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import x from "../assets/some.file"
				console.log(x)
			`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".file": config.LoaderCopy,
			},
		},
	})
}
// The "copy" loader should also work for assets referenced via "url()" from
// a CSS file during bundling.
func TestLoaderCopyWithBundleFromCSS(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.css": `
				body {
					background: url(../assets/some.file);
				}
			`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.css"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".css":  config.LoaderCSS,
				".file": config.LoaderCopy,
			},
		},
	})
}
// A file using the "copy" loader can itself be an entry point. Here the same
// asset is simultaneously an entry point and referenced from both a JS and a
// CSS entry point; the metafile is enabled to capture how that is reported.
func TestLoaderCopyWithBundleEntryPoint(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import x from "../assets/some.file"
				console.log(x)
			`,
			"/Users/user/project/src/entry.css": `
				body {
					background: url(../assets/some.file);
				}
			`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{
			"/Users/user/project/src/entry.js",
			"/Users/user/project/src/entry.css",
			"/Users/user/project/assets/some.file",
		},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".css":  config.LoaderCSS,
				".file": config.LoaderCopy,
			},
			NeedsMetafile: true,
		},
	})
}
// The "copy" loader should still copy entry-point assets in pass-through
// (transform) mode, i.e. without bundling.
func TestLoaderCopyWithTransform(t *testing.T) {
	loader_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js":     `console.log('entry')`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{
			"/Users/user/project/src/entry.js",
			"/Users/user/project/assets/some.file",
		},
		options: config.Options{
			Mode:          config.ModePassThrough,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".file": config.LoaderCopy,
			},
		},
	})
}
func TestLoaderCopyWithFormat(t *testing.T) {
loader_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.js": `console.log('entry')`,
"/Users/user/project/assets/some.file": `stuff`,
},
entryPaths: []string{
"/Users/user/project/src/entry.js",
"/Users/user/project/assets/some.file",
},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatIIFE,
AbsOutputBase: "/Users/user/project",
AbsOutputDir: "/out",
ExtensionToLoader: map[string]config.Loader{
".js": config.LoaderJS,
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_ts_test.go | internal/bundler_tests/bundler_ts_test.go | package bundler_tests
import (
"regexp"
"testing"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
)
// ts_suite groups the snapshot tests in this file under the "ts" name.
var ts_suite = suite{
	name: "ts",
}
// TypeScript "declare const" statements are type-only and must be removed,
// even when they collide with special names ("require", "exports", "module")
// or with a real value declaration in the same file.
func TestTSDeclareConst(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare const require: any
				declare const exports: any;
				declare const module: any

				declare const foo: any
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSDeclareConst but for "declare let".
func TestTSDeclareLet(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare let require: any
				declare let exports: any;
				declare let module: any

				declare let foo: any
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSDeclareConst but for "declare var".
func TestTSDeclareVar(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare var require: any
				declare var exports: any;
				declare var module: any

				declare var foo: any
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSDeclareConst but for "declare class".
func TestTSDeclareClass(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare class require {}
				declare class exports {};
				declare class module {}

				declare class foo {}
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Class fields marked "declare" are type-only and must be dropped entirely,
// both with "useDefineForClassFields" disabled (the /define-false directory,
// via its tsconfig.json) and enabled (the /define-true directory, which has
// no tsconfig.json and so gets the default behavior). Computed keys on
// "declare" fields must be dropped too, including any side effects.
func TestTSDeclareClassFields(t *testing.T) {
	// Note: this test uses arrow functions to validate that
	// scopes inside "declare" fields are correctly discarded
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import './define-false'
				import './define-true'
			`,
			"/define-false/index.ts": `
				class Foo {
					a
					declare b
					[(() => null, c)]
					declare [(() => null, d)]

					static A
					static declare B
					static [(() => null, C)]
					static declare [(() => null, D)]
				}
				(() => new Foo())()
			`,
			"/define-true/index.ts": `
				class Bar {
					a
					declare b
					[(() => null, c)]
					declare [(() => null, d)]

					static A
					static declare B
					static [(() => null, C)]
					static declare [(() => null, D)]
				}
				(() => new Bar())()
			`,
			"/define-false/tsconfig.json": `{
				"compilerOptions": {
					"useDefineForClassFields": false
				}
			}`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			// Force the class-field lowering path
			UnsupportedJSFeatures: compat.ClassField,
		},
	})
}
// Same as TestTSDeclareConst but for "declare function". Note that the last
// declaration deliberately has a body, which is part of the tested input.
func TestTSDeclareFunction(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare function require(): void
				declare function exports(): void;
				declare function module(): void

				declare function foo() {}
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSDeclareConst but for "declare namespace".
func TestTSDeclareNamespace(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare namespace require {}
				declare namespace exports {};
				declare namespace module {}

				declare namespace foo {}
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSDeclareConst but for "declare enum".
func TestTSDeclareEnum(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare enum require {}
				declare enum exports {};
				declare enum module {}

				declare enum foo {}
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSDeclareConst but for "declare const enum".
func TestTSDeclareConstEnum(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare const enum require {}
				declare const enum exports {};
				declare const enum module {}

				declare const enum foo {}
				let foo = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// When const enum members are inlined, esbuild annotates the inlined value
// with a comment containing the original member name. Member names containing
// "*/" cannot be safely placed inside a block comment, so those must be
// emitted without the annotation. This covers both same-file and cross-file
// inlining.
func TestTSConstEnumComments(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/bar.ts": `
				export const enum Foo {
					"%/*" = 1,
					"*/%" = 2,
				}
			`,
			"/foo.ts": `
				import { Foo } from "./bar";
				const enum Bar {
					"%/*" = 1,
					"*/%" = 2,
				}
				console.log({
					'should have comments': [
						Foo["%/*"],
						Bar["%/*"],
					],
					'should not have comments': [
						Foo["*/%"],
						Bar["*/%"],
					],
				});
			`,
		},
		entryPaths: []string{"/foo.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Importing a namespace that contains only types should not be an error:
// the namespace is used here purely in a type position.
func TestTSImportEmptyNamespace(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import {ns} from './ns.ts'
				function foo(): ns.type {}
				foo();
			`,
			"/ns.ts": `
				export namespace ns {}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Importing names that are missing from an ES module is a compile error when
// those names are actually used as values ("default" and "y" here; "x" does
// exist). The expected errors are asserted via expectedCompileLog.
func TestTSImportMissingES6(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import fn, {x as a, y as b} from './foo'
				console.log(fn(a, b))
			`,
			"/foo.js": `
				export const x = 123
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedCompileLog: `entry.ts: ERROR: No matching export in "foo.js" for import "default"
entry.ts: ERROR: No matching export in "foo.js" for import "y"
`,
	})
}
// Unlike TestTSImportMissingES6, missing imports that are never used as
// values are assumed to be type-only in TypeScript and must not be errors.
func TestTSImportMissingUnusedES6(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import fn, {x as a, y as b} from './foo'
			`,
			"/foo.js": `
				export const x = 123
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Re-exporting a name that does not exist from a TypeScript file is not an
// error because it could be a type-only export (the importer here only uses
// a namespace import, so nothing forces "nope" to be a value).
func TestTSExportMissingES6(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import * as ns from './foo'
				console.log(ns)
			`,
			"/foo.ts": `
				export {nope} from './bar'
			`,
			"/bar.js": `
				export const yep = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// It's an error to import from a file that does not exist
// (the import here is used as a value, so it cannot be type-only).
func TestTSImportMissingFile(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import {Something} from './doesNotExist.ts'
				let foo = new Something
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.ts: ERROR: Could not resolve "./doesNotExist.ts"
`,
	})
}
// It's not an error to import a type from a file that does not exist
// (imports only referenced in type positions, or never referenced, are
// removed before path resolution happens).
func TestTSImportTypeOnlyFile(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import {SomeType1} from './doesNotExist1.ts'
				import {SomeType2} from './doesNotExist2.ts'
				let foo: SomeType1 = bar()
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// The TypeScript-specific "export =" syntax should be converted into a
// CommonJS-style export that a default import can consume.
func TestTSExportEquals(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				import b from './b.ts'
				console.log(b)
			`,
			"/b.ts": `
				export = [123, foo]
				function foo() {}
			`,
		},
		entryPaths: []string{"/a.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// A class merged with multiple namespaces of the same name must export a
// single combined value.
func TestTSExportNamespace(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				import {Foo} from './b.ts'
				console.log(new Foo)
			`,
			"/b.ts": `
				export class Foo {}
				export namespace Foo {
					export let foo = 1
				}
				export namespace Foo {
					export let bar = 2
				}
			`,
		},
		entryPaths: []string{"/a.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// With "KeepNames" enabled, functions and classes exported from a TypeScript
// namespace must retain their original ".name" values after the namespace is
// lowered to plain JavaScript.
func TestTSNamespaceKeepNames(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				namespace ns {
					export let foo = () => {}
					export function bar() {}
					export class Baz {}
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			KeepNames:     true,
		},
	})
}
// Same as TestTSNamespaceKeepNames but with an ES2015 feature cutoff, which
// exercises the name-preservation helpers in the lowered output.
func TestTSNamespaceKeepNamesTargetES2015(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				namespace ns {
					export let foo = () => {}
					export function bar() {}
					export class Baz {}
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			AbsOutputFile:         "/out.js",
			KeepNames:             true,
			UnsupportedJSFeatures: es(2015),
		},
	})
}
// Minification of lowered TypeScript enums, both unexported and exported.
// The self-reference "Foo" in the last member ensures the enum's own binding
// survives identifier renaming.
func TestTSMinifyEnum(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				enum Foo { A, B, C = Foo }
			`,
			"/b.ts": `
				export enum Foo { X, Y, Z = Foo }
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:      true,
			MinifyWhitespace:  true,
			MinifyIdentifiers: true,
			AbsOutputDir:      "/",
		},
	})
}
// Same as TestTSMinifyEnum but with the enums nested inside functions, so
// the lowered enum bindings are function-scoped rather than top-level.
func TestTSMinifyNestedEnum(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				function foo() { enum Foo { A, B, C = Foo } return Foo }
			`,
			"/b.ts": `
				export function foo() { enum Foo { X, Y, Z = Foo } return Foo }
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:      true,
			MinifyWhitespace:  true,
			MinifyIdentifiers: true,
			AbsOutputDir:      "/",
		},
	})
}
// Same as TestTSMinifyNestedEnum but with logical assignment operators
// marked unsupported, which changes the minified enum-initialization pattern.
func TestTSMinifyNestedEnumNoLogicalAssignment(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				function foo() { enum Foo { A, B, C = Foo } return Foo }
			`,
			"/b.ts": `
				export function foo() { enum Foo { X, Y, Z = Foo } return Foo }
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:          true,
			MinifyWhitespace:      true,
			MinifyIdentifiers:     true,
			AbsOutputDir:          "/",
			UnsupportedJSFeatures: compat.LogicalAssignment,
		},
	})
}
// Same as TestTSMinifyNestedEnum but with arrow functions marked
// unsupported, so the lowered enum wrapper must use a function expression.
func TestTSMinifyNestedEnumNoArrow(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				function foo() { enum Foo { A, B, C = Foo } return Foo }
			`,
			"/b.ts": `
				export function foo() { enum Foo { X, Y, Z = Foo } return Foo }
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:          true,
			MinifyWhitespace:      true,
			MinifyIdentifiers:     true,
			AbsOutputDir:          "/",
			UnsupportedJSFeatures: compat.Arrow,
		},
	})
}
// Minification of lowered TypeScript namespaces, including nested namespaces
// that reference both their own binding and the enclosing namespace binding.
func TestTSMinifyNamespace(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				namespace Foo {
					export namespace Bar {
						foo(Foo, Bar)
					}
				}
			`,
			"/b.ts": `
				export namespace Foo {
					export namespace Bar {
						foo(Foo, Bar)
					}
				}
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:      true,
			MinifyWhitespace:  true,
			MinifyIdentifiers: true,
			AbsOutputDir:      "/",
		},
	})
}
// Same as TestTSMinifyNamespace but with logical assignment operators marked
// unsupported.
func TestTSMinifyNamespaceNoLogicalAssignment(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				namespace Foo {
					export namespace Bar {
						foo(Foo, Bar)
					}
				}
			`,
			"/b.ts": `
				export namespace Foo {
					export namespace Bar {
						foo(Foo, Bar)
					}
				}
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:          true,
			MinifyWhitespace:      true,
			MinifyIdentifiers:     true,
			AbsOutputDir:          "/",
			UnsupportedJSFeatures: compat.LogicalAssignment,
		},
	})
}
// Same as TestTSMinifyNamespace but with arrow functions marked unsupported.
func TestTSMinifyNamespaceNoArrow(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.ts": `
				namespace Foo {
					export namespace Bar {
						foo(Foo, Bar)
					}
				}
			`,
			"/b.ts": `
				export namespace Foo {
					export namespace Bar {
						foo(Foo, Bar)
					}
				}
			`,
		},
		entryPaths: []string{"/a.ts", "/b.ts"},
		options: config.Options{
			MinifySyntax:          true,
			MinifyWhitespace:      true,
			MinifyIdentifiers:     true,
			AbsOutputDir:          "/",
			UnsupportedJSFeatures: compat.Arrow,
		},
	})
}
// Minified lowering of class fields in a derived class with an ES2015
// cutoff: with "useDefineForClassFields" disabled, field initializers must
// be moved into the constructor after the "super()" call.
func TestTSMinifyDerivedClass(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				class Foo extends Bar {
					foo = 1;
					bar = 2;
					constructor() {
						super();
						foo();
						bar();
					}
				}
			`,
			"/tsconfig.json": `{
				"compilerOptions": {
					"useDefineForClassFields": false
				}
			}`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			MinifySyntax:          true,
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
	})
}
// When minifying, computed property keys whose values are known enum
// constants (or plain literals) can be turned into non-computed keys — but
// only when the resulting name is safe. Names like "__proto__",
// "constructor", and "prototype" change semantics as non-computed keys and
// must stay computed. Covers both same-file and cross-file const enums.
func TestTSMinifyEnumPropertyNames(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import { CrossFileGood, CrossFileBad } from './cross-file'
				const enum SameFileGood {
					STR = 'str 1',
					NUM = 123,
				}
				const enum SameFileBad {
					PROTO = '__proto__',
					CONSTRUCTOR = 'constructor',
					PROTOTYPE = 'prototype',
				}
				class Foo {
					[100] = 100;
					'200' = 200;
					['300'] = 300;
					[SameFileGood.STR] = SameFileGood.STR;
					[SameFileGood.NUM] = SameFileGood.NUM;
					[CrossFileGood.STR] = CrossFileGood.STR;
					[CrossFileGood.NUM] = CrossFileGood.NUM;
				}
				shouldNotBeComputed(
					class {
						[100] = 100;
						'200' = 200;
						['300'] = 300;
						[SameFileGood.STR] = SameFileGood.STR;
						[SameFileGood.NUM] = SameFileGood.NUM;
						[CrossFileGood.STR] = CrossFileGood.STR;
						[CrossFileGood.NUM] = CrossFileGood.NUM;
					},
					{
						[100]: 100,
						'200': 200,
						['300']: 300,
						[SameFileGood.STR]: SameFileGood.STR,
						[SameFileGood.NUM]: SameFileGood.NUM,
						[CrossFileGood.STR]: CrossFileGood.STR,
						[CrossFileGood.NUM]: CrossFileGood.NUM,
					},
				)
				mustBeComputed(
					{ [SameFileBad.PROTO]: null },
					{ [CrossFileBad.PROTO]: null },
					class { [SameFileBad.CONSTRUCTOR]() {} },
					class { [CrossFileBad.CONSTRUCTOR]() {} },
					class { static [SameFileBad.PROTOTYPE]() {} },
					class { static [CrossFileBad.PROTOTYPE]() {} },
				)
			`,
			"/cross-file.ts": `
				export const enum CrossFileGood {
					STR = 'str 2',
					NUM = 321,
				}
				export const enum CrossFileBad {
					PROTO = '__proto__',
					CONSTRUCTOR = 'constructor',
					PROTOTYPE = 'prototype',
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			MinifySyntax:  true,
			AbsOutputFile: "/out.js",
		},
	})
}
// String-valued enum members inlined into template literal placeholders
// should be folded directly into the template text when minifying, for both
// same-file and cross-file enums. (The Go string concatenation below embeds
// literal backticks inside the raw-string test fixture.)
func TestTSMinifyEnumCrossFileInlineStringsIntoTemplates(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import { CrossFile } from './cross-file'
				enum SameFile {
					STR = 'str 1',
					NUM = 123,
				}
				console.log(` + "`" + `
					SameFile.STR = ${SameFile.STR}
					SameFile.NUM = ${SameFile.NUM}
					CrossFile.STR = ${CrossFile.STR}
					CrossFile.NUM = ${CrossFile.NUM}
				` + "`" + `)
			`,
			"/cross-file.ts": `
				export enum CrossFile {
					STR = 'str 2',
					NUM = 321,
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			MinifySyntax:  true,
			AbsOutputFile: "/out.js",
		},
	})
}
// In TypeScript, an import that collides with a local declaration of the
// same name is treated as type-only and discarded. Here every imported name
// collides with a local (let/const/var/function/class), and the imported
// module exports nothing.
func TestTSImportVsLocalCollisionAllTypes(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import {a, b, c, d, e} from './other.ts'
				let a
				const b = 0
				var c
				function d() {}
				class e {}
				console.log(a, b, c, d, e)
			`,
			"/other.ts": `
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as TestTSImportVsLocalCollisionAllTypes, but one imported name
// ("real") has no local collision and does exist in the other module, so it
// must be kept as a genuine value import alongside the discarded ones.
func TestTSImportVsLocalCollisionMixed(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import {a, b, c, d, e, real} from './other.ts'
				let a
				const b = 0
				var c
				function d() {}
				class e {}
				console.log(a, b, c, d, e, real)
			`,
			"/other.ts": `
				export let real = 123
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Chains of "import x = a.b" aliases: only the chain that is actually used
// ("c", reached through "a" and "b") should be kept; the unused chain
// ("x", "y", "z") should be eliminated.
func TestTSImportEqualsEliminationTest(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import a = foo.a
				import b = a.b
				import c = b.c

				import x = foo.x
				import y = x.y
				import z = y.z

				export let bar = c
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// "import x = ..." handling in pass-through mode with tree shaking disabled:
// the unused alias must be preserved.
func TestTSImportEqualsTreeShakingFalse(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import { foo } from 'pkg'
				import used = foo.used
				import unused = foo.unused
				export { used }
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModePassThrough,
			AbsOutputFile: "/out.js",
			TreeShaking:   false,
		},
	})
}
// Same input as TestTSImportEqualsTreeShakingFalse but with tree shaking
// enabled: the unused alias can be removed.
func TestTSImportEqualsTreeShakingTrue(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import { foo } from 'pkg'
				import used = foo.used
				import unused = foo.unused
				export { used }
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModePassThrough,
			AbsOutputFile: "/out.js",
			TreeShaking:   true,
		},
	})
}
// Same input again but in bundle mode, with "pkg" marked external so the
// import statement survives bundling.
func TestTSImportEqualsBundle(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import { foo } from 'pkg'
				import used = foo.used
				import unused = foo.unused
				export { used }
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Exact: map[string]bool{
						"pkg": true,
					},
				},
			},
		},
	})
}
// "import x = ns.member" where the namespace comes from a real module:
// the value alias must resolve to the exported value, and the type alias
// must be erased even though "Type" is not a runtime export.
func TestTSImportEqualsUndefinedImport(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import * as ns from './import.ts'
				import value_copy = ns.value
				import Type_copy = ns.Type
				let foo: Type_copy = value_copy
				console.log(foo)
			`,
			"/import.ts": `
				export let value = 123
				export type Type = number
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			// NOTE(review): nothing in this test imports "pkg", so this external
			// setting looks like a copy-paste leftover from TestTSImportEqualsBundle
			// — confirm before removing, since the snapshot may encode it.
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Exact: map[string]bool{
						"pkg": true,
					},
				},
			},
		},
	})
}
// A small fully-minified ES-module bundle written in TypeScript.
func TestTSMinifiedBundleES6(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import {foo} from './a'
				console.log(foo())
			`,
			"/a.ts": `
				export function foo() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:              config.ModeBundle,
			MinifySyntax:      true,
			MinifyWhitespace:  true,
			MinifyIdentifiers: true,
			AbsOutputFile:     "/out.js",
		},
	})
}
// A small fully-minified CommonJS bundle written in TypeScript, including a
// require() of a JSON file.
func TestTSMinifiedBundleCommonJS(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				const {foo} = require('./a')
				console.log(foo(), require('./j.json'))
			`,
			"/a.ts": `
				exports.foo = function() {
					return 123
				}
			`,
			"/j.json": `
				{"test": true}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:              config.ModeBundle,
			MinifySyntax:      true,
			MinifyWhitespace:  true,
			MinifyIdentifiers: true,
			AbsOutputFile:     "/out.js",
		},
	})
}
// With "experimentalDecorators" explicitly disabled, decorators are treated
// as standard JavaScript decorators rather than TypeScript's legacy ones.
// The class exercises every decorator position: class, instance/static
// fields and methods, "accessor" fields, and private members of each kind.
func TestTSExperimentalDecoratorsNoConfig(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				declare let x: any, y: any
				@x.y()
				@(new y.x)
				export default class Foo {
					@x @y mUndef: any
					@x @y mDef = 1
					@x @y method() { return new Foo }
					@x @y accessor aUndef: any
					@x @y accessor aDef = 1

					@x @y static sUndef: any
					@x @y static sDef = new Foo
					@x @y static sMethod() { return new Foo }
					@x @y static accessor asUndef: any
					@x @y static accessor asDef = 1

					@x @y #mUndef: any
					@x @y #mDef = 1
					@x @y #method() { return new Foo }
					@x @y accessor #aUndef: any
					@x @y accessor #aDef = 1

					@x @y static #sUndef: any
					@x @y static #sDef = 1
					@x @y static #sMethod() { return new Foo }
					@x @y static accessor #asUndef: any
					@x @y static accessor #asDef = 1
				}
			`,
			"/tsconfig.json": `{
				"compilerOptions": {
					"experimentalDecorators": false
				}
			}`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Broad coverage of TypeScript's legacy ("experimental") decorators with
// "experimentalDecorators" enabled: class/member/parameter decorators,
// computed keys with side effects, abstract and declared members, default
// and named exports, anonymous classes, and decorator expressions that
// reference "arguments" from an enclosing function.
func TestTSExperimentalDecorators(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import all from './all'
				import all_computed from './all_computed'
				import {a} from './a'
				import {b} from './b'
				import {c} from './c'
				import {d} from './d'
				import e from './e'
				import f from './f'
				import g from './g'
				import h from './h'
				import {i} from './i'
				import {j} from './j'
				import k from './k'
				import {fn} from './arguments'
				console.log(all, all_computed, a, b, c, d, e, f, g, h, i, j, k, fn)
			`,
			"/all.ts": `
				@x.y()
				@new y.x()
				export default class Foo {
					@x @y mUndef
					@x @y mDef = 1
					@x @y method(@x0 @y0 arg0, @x1 @y1 arg1) { return new Foo }
					@x @y declare mDecl
					@x @y abstract mAbst
					constructor(@x0 @y0 arg0, @x1 @y1 arg1) {}

					@x @y static sUndef
					@x @y static sDef = new Foo
					@x @y static sMethod(@x0 @y0 arg0, @x1 @y1 arg1) { return new Foo }
					@x @y static declare mDecl
				}
			`,
			"/all_computed.ts": `
				@x?.[_ + 'y']()
				@new y?.[_ + 'x']()
				export default class Foo {
					@x @y [mUndef()]
					@x @y [mDef()] = 1
					@x @y [method()](@x0 @y0 arg0, @x1 @y1 arg1) { return new Foo }
					@x @y declare [mDecl()]
					@x @y abstract [mAbst()]

					// Side effect order must be preserved even for fields without decorators
					[xUndef()]
					[xDef()] = 2
					static [yUndef()]
					static [yDef()] = 3

					@x @y static [sUndef()]
					@x @y static [sDef()] = new Foo
					@x @y static [sMethod()](@x0 @y0 arg0, @x1 @y1 arg1) { return new Foo }
					@x @y static declare [mDecl()]
				}
			`,
			"/a.ts": `
				@x(() => 0) @y(() => 1)
				class a_class {
					fn() { return new a_class }
					static z = new a_class
				}
				export let a = a_class
			`,
			"/b.ts": `
				@x(() => 0) @y(() => 1)
				abstract class b_class {
					fn() { return new b_class }
					static z = new b_class
				}
				export let b = b_class
			`,
			"/c.ts": `
				@x(() => 0) @y(() => 1)
				export class c {
					fn() { return new c }
					static z = new c
				}
			`,
			"/d.ts": `
				@x(() => 0) @y(() => 1)
				export abstract class d {
					fn() { return new d }
					static z = new d
				}
			`,
			"/e.ts": `
				@x(() => 0) @y(() => 1)
				export default class {}
			`,
			"/f.ts": `
				@x(() => 0) @y(() => 1)
				export default class f {
					fn() { return new f }
					static z = new f
				}
			`,
			"/g.ts": `
				@x(() => 0) @y(() => 1)
				export default abstract class {}
			`,
			"/h.ts": `
				@x(() => 0) @y(() => 1)
				export default abstract class h {
					fn() { return new h }
					static z = new h
				}
			`,
			"/i.ts": `
				class i_class {
					@x(() => 0) @y(() => 1)
					foo
				}
				export let i = i_class
			`,
			"/j.ts": `
				export class j {
					@x(() => 0) @y(() => 1)
					foo() {}
				}
			`,
			"/k.ts": `
				export default class {
					foo(@x(() => 0) @y(() => 1) x) {}
				}
			`,
			"/arguments.ts": `
				function dec(x: any): any {}
				export function fn(x: string): any {
					class Foo {
						@dec(arguments[0])
						[arguments[0]]() {}
					}
					return Foo;
				}
			`,
			"/tsconfig.json": `{
				"compilerOptions": {
					"useDefineForClassFields": false,
					"experimentalDecorators": true
				}
			}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// With both legacy decorators and "KeepNames" enabled, the generated
// name-preservation call must come before the decorator is applied (the
// decorator may replace the class).
func TestTSExperimentalDecoratorsKeepNames(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				@decoratorMustComeAfterName
				export class Foo {}
			`,
			"/tsconfig.json": `{
				"compilerOptions": {
					"experimentalDecorators": true
				}
			}`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			KeepNames:     true,
		},
	})
}
// See: https://github.com/evanw/esbuild/issues/2147
// Regression test: decorator expressions must be evaluated in the scope
// enclosing the class, not in the scope of the decorated method's
// parameters, even when a parameter shadows an outer binding and when the
// class is deeply nested inside other class expressions.
func TestTSExperimentalDecoratorScopeIssue2147(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				let foo = 1
				class Foo {
					method1(@dec(foo) foo = 2) {}
					method2(@dec(() => foo) foo = 3) {}
				}

				class Bar {
					static x = class {
						static y = () => {
							let bar = 1
							@dec(bar)
							@dec(() => bar)
							class Baz {
								@dec(bar) method1() {}
								@dec(() => bar) method2() {}
								method3(@dec(() => bar) bar) {}
								method4(@dec(() => bar) bar) {}
							}
							return Baz
						}
					}
				}
			`,
			"/tsconfig.json": `{
				"compilerOptions": {
					"useDefineForClassFields": false,
					"experimentalDecorators": true
				}
			}`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModePassThrough,
			AbsOutputFile: "/out.js",
		},
	})
}
// Regression test for issue 316: "export default foo" where "foo" might be a
// type or a value. The "/keep/" modules export a name that resolves to a
// runtime value (or could at least be one, e.g. a nested interface/type that
// does not shadow an outer value), so the default export must be kept. The
// "/remove/" modules export a name that is definitely type-only, so the
// default export must be erased.
func TestTSExportDefaultTypeIssue316(t *testing.T) {
	ts_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				import dc_def, { bar as dc } from './keep/declare-class'
				import dl_def, { bar as dl } from './keep/declare-let'
				import im_def, { bar as im } from './keep/interface-merged'
				import in_def, { bar as _in } from './keep/interface-nested'
				import tn_def, { bar as tn } from './keep/type-nested'
				import vn_def, { bar as vn } from './keep/value-namespace'
				import vnm_def, { bar as vnm } from './keep/value-namespace-merged'

				import i_def, { bar as i } from './remove/interface'
				import ie_def, { bar as ie } from './remove/interface-exported'
				import t_def, { bar as t } from './remove/type'
				import te_def, { bar as te } from './remove/type-exported'
				import ton_def, { bar as ton } from './remove/type-only-namespace'
				import tone_def, { bar as tone } from './remove/type-only-namespace-exported'

				export default [
					dc_def, dc,
					dl_def, dl,
					im_def, im,
					in_def, _in,
					tn_def, tn,
					vn_def, vn,
					vnm_def, vnm,

					i,
					ie,
					t,
					te,
					ton,
					tone,
				]
			`,
			"/keep/declare-class.ts": `
				declare class foo {}
				export default foo
				export let bar = 123
			`,
			"/keep/declare-let.ts": `
				declare let foo: number
				export default foo
				export let bar = 123
			`,
			"/keep/interface-merged.ts": `
				class foo {
					static x = new foo
				}
				interface foo {}
				export default foo
				export let bar = 123
			`,
			"/keep/interface-nested.ts": `
				if (true) {
					interface foo {}
				}
				export default foo
				export let bar = 123
			`,
			"/keep/type-nested.ts": `
				if (true) {
					type foo = number
				}
				export default foo
				export let bar = 123
			`,
			"/keep/value-namespace.ts": `
				namespace foo {
					export let num = 0
				}
				export default foo
				export let bar = 123
			`,
			"/keep/value-namespace-merged.ts": `
				namespace foo {
					export type num = number
				}
				namespace foo {
					export let num = 0
				}
				export default foo
				export let bar = 123
			`,
			"/remove/interface.ts": `
				interface foo { }
				export default foo
				export let bar = 123
			`,
			"/remove/interface-exported.ts": `
				export interface foo { }
				export default foo
				export let bar = 123
			`,
			"/remove/type.ts": `
				type foo = number
				export default foo
				export let bar = 123
			`,
			"/remove/type-exported.ts": `
				export type foo = number
				export default foo
				export let bar = 123
			`,
			"/remove/type-only-namespace.ts": `
				namespace foo {
					export type num = number
				}
				export default foo
				export let bar = 123
			`,
			"/remove/type-only-namespace-exported.ts": `
				export namespace foo {
					export type num = number
				}
				export default foo
				export let bar = 123
			`,
			"/tsconfig.json": `{
				"compilerOptions": {
					"useDefineForClassFields": false
				}
			}`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
func TestTSImplicitExtensions(t *testing.T) {
ts_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.ts": `
import './pick-js.js'
import './pick-ts.js'
import './pick-jsx.jsx'
import './pick-tsx.jsx'
import './order-js.js'
import './order-jsx.jsx'
import 'pkg/foo-js.js'
import 'pkg/foo-jsx.jsx'
import 'pkg-exports/xyz-js'
import 'pkg-exports/xyz-jsx'
import 'pkg-exports/foo-js.js'
import 'pkg-exports/foo-jsx.jsx'
import 'pkg-imports'
`,
"/pick-js.js": `console.log("correct")`,
"/pick-js.ts": `console.log("wrong")`,
"/pick-ts.jsx": `console.log("wrong")`,
"/pick-ts.ts": `console.log("correct")`,
"/pick-jsx.jsx": `console.log("correct")`,
"/pick-jsx.tsx": `console.log("wrong")`,
"/pick-tsx.js": `console.log("wrong")`,
"/pick-tsx.tsx": `console.log("correct")`,
"/order-js.ts": `console.log("correct")`,
"/order-js.tsx": `console.log("wrong")`,
"/order-jsx.ts": `console.log("correct")`,
"/order-jsx.tsx": `console.log("wrong")`,
"/node_modules/pkg/foo-js.ts": `console.log("correct")`,
"/node_modules/pkg/foo-jsx.tsx": `console.log("correct")`,
"/node_modules/pkg-exports/package.json": `{
"exports": {
"./xyz-js": "./abc-js.js",
"./xyz-jsx": "./abc-jsx.jsx",
"./*": "./lib/*"
}
}`,
"/node_modules/pkg-exports/abc-js.ts": `console.log("correct")`,
"/node_modules/pkg-exports/abc-jsx.tsx": `console.log("correct")`,
"/node_modules/pkg-exports/lib/foo-js.ts": `console.log("correct")`,
"/node_modules/pkg-exports/lib/foo-jsx.tsx": `console.log("correct")`,
"/node_modules/pkg-imports/package.json": `{
"imports": {
"#xyz-js": "./abc-js.js",
"#xyz-jsx": "./abc-jsx.jsx",
"#bar/*": "./lib/*"
}
}`,
"/node_modules/pkg-imports/index.js": `
import "#xyz-js"
import "#xyz-jsx"
import "#bar/foo-js.js"
import "#bar/foo-jsx.jsx"
`,
"/node_modules/pkg-imports/abc-js.ts": `console.log("correct")`,
"/node_modules/pkg-imports/abc-jsx.tsx": `console.log("correct")`,
"/node_modules/pkg-imports/lib/foo-js.ts": `console.log("correct")`,
"/node_modules/pkg-imports/lib/foo-jsx.tsx": `console.log("correct")`,
},
entryPaths: []string{"/entry.ts"},
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_importstar_test.go | internal/bundler_tests/bundler_importstar_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// importstar_suite is the snapshot-test suite for `import * as ns` bundling
// behavior; each expectBundled call records its output under this suite name.
var importstar_suite = suite{
name: "importstar",
}
func TestImportStarUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
"/foo.js": `
export const foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestImportStarCapture bundles an entry that uses the namespace object
// itself (ns) in addition to a property access, so the whole namespace
// must be materialized ("captured") in the output.
func TestImportStarCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestImportStarNoCapture bundles an entry that only accesses properties off
// the namespace import (never the namespace object itself), so the accesses
// can be rewritten to direct references without building a namespace object.
func TestImportStarNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestImportStarExportImportStarUnused bundles an entry that imports a
// re-exported namespace binding ({ns} from './bar', where bar does
// `import * as ns` and re-exports it) but never uses it, snapshotting the
// tree-shaking of the unused indirection.
func TestImportStarExportImportStarUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './bar'
let foo = 234
console.log(foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
import * as ns from './foo'
export {ns}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportImportStarNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './bar'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
import * as ns from './foo'
export {ns}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportImportStarCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './bar'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
import * as ns from './foo'
export {ns}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarAsUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './bar'
let foo = 234
console.log(foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarAsNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './bar'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarAsCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './bar'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './bar'
let foo = 234
console.log(foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
export * from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './bar'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
export * from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './bar'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.js": `
export const foo = 123
`,
"/bar.js": `
export * from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestImportStarCommonJSUnused bundles an entry whose namespace import of a
// CommonJS module (detected via its `exports.foo` assignment) is never used.
func TestImportStarCommonJSUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarCommonJSCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarCommonJSNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarAndCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
const ns2 = require('./foo')
console.log(ns.foo, ns2.foo)
`,
"/foo.js": `
export const foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarNoBundleUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarNoBundleCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarNoBundleNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarMangleNoBundleUnused(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
MinifySyntax: true,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarMangleNoBundleCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
MinifySyntax: true,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarMangleNoBundleNoCapture(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
let foo = 234
console.log(ns.foo, ns.foo, foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
MinifySyntax: true,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarExportStarOmitAmbiguous(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './common'
console.log(ns)
`,
"/common.js": `
export * from './foo'
export * from './bar'
`,
"/foo.js": `
export const x = 1
export const y = 2
`,
"/bar.js": `
export const y = 3
export const z = 4
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestImportExportStarAmbiguousError checks that a *named* import of a symbol
// exported by two different `export *` sources ("y" comes from both foo.js
// and bar.js) is a hard error, with notes pointing at both matching exports.
// Contrast with TestImportExportStarAmbiguousWarning, where the same
// ambiguity through a namespace access is only a warning.
func TestImportExportStarAmbiguousError(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {x, y, z} from './common'
console.log(x, y, z)
`,
"/common.js": `
export * from './foo'
export * from './bar'
`,
"/foo.js": `
export const x = 1
export const y = 2
`,
"/bar.js": `
export const y = 3
export const z = 4
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
// expectedCompileLog asserts the exact diagnostics emitted during linking.
expectedCompileLog: `entry.js: ERROR: Ambiguous import "y" has multiple matching exports
foo.js: NOTE: One matching export is here:
bar.js: NOTE: Another matching export is here:
`,
})
}
func TestImportExportStarAmbiguousWarning(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './common'
console.log(ns.x, ns.y, ns.z)
`,
"/common.js": `
export * from './foo'
export * from './bar'
`,
"/foo.js": `
export const x = 1
export const y = 2
`,
"/bar.js": `
export const y = 3
export const z = 4
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: WARNING: Import "y" will always be undefined because there are multiple matching exports
foo.js: NOTE: One matching export is here:
bar.js: NOTE: Another matching export is here:
`,
})
}
func TestReExportStarNameCollisionNotAmbiguousImport(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {x, y} from './common'
console.log(x, y)
`,
"/common.js": `
export * from './a'
export * from './b'
`,
"/a.js": `
export * from './c'
`,
"/b.js": `
export {x} from './c'
`,
"/c.js": `
export let x = 1, y = 2
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportStarNameCollisionNotAmbiguousExport(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from './a'
export * from './b'
`,
"/a.js": `
export * from './c'
`,
"/b.js": `
export {x} from './c'
`,
"/c.js": `
export let x = 1, y = 2
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportStarNameShadowingNotAmbiguous(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {x} from './a'
console.log(x)
`,
"/a.js": `
export * from './b'
export let x = 1
`,
"/b.js": `
export let x = 2
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportStarNameShadowingNotAmbiguousReExport(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {x} from './a'
console.log(x)
`,
"/a.js": `
export * from './b'
`,
"/b.js": `
export * from './c'
export let x = 1
`,
"/c.js": `
export let x = 2
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestImportStarOfExportStarAs(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as foo_ns from './foo'
console.log(foo_ns)
`,
"/foo.js": `
export * as bar_ns from './bar'
`,
"/bar.js": `
export const bar = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportOfExportStar(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {bar} from './foo'
console.log(bar)
`,
"/foo.js": `
export * from './bar'
`,
"/bar.js": `
// Add some statements to increase the part index (this reproduced a crash)
statement()
statement()
statement()
statement()
export const bar = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportOfExportStarOfImport(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {bar} from './foo'
console.log(bar)
`,
"/foo.js": `
// Add some statements to increase the part index (this reproduced a crash)
statement()
statement()
statement()
statement()
export * from './bar'
`,
"/bar.js": `
export {value as bar} from './baz'
`,
"/baz.js": `
export const value = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// TestExportSelfIIFE bundles a module that re-exports itself
// (`export * from './entry'`) into IIFE output format, snapshotting how the
// self-referential export cycle is handled.
func TestExportSelfIIFE(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
export * from './entry'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatIIFE,
AbsOutputFile: "/out.js",
},
})
}
func TestExportSelfIIFEWithName(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
export * from './entry'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatIIFE,
AbsOutputFile: "/out.js",
GlobalName: []string{"someName"},
},
})
}
func TestExportSelfES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
export * from './entry'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestExportSelfCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
export * from './entry'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
// TestExportSelfCommonJSMinified bundles a CommonJS module that requires
// itself, with identifier minification enabled, to verify the self-require
// still resolves after renaming.
func TestExportSelfCommonJSMinified(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
module.exports = {foo: 123}
console.log(require('./entry'))
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
MinifyIdentifiers: true,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestImportSelfCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
exports.foo = 123
import {foo} from './entry'
console.log(foo)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestExportSelfAsNamespaceES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
export * as ns from './entry'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestImportExportSelfAsNamespaceES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
import * as ns from './entry'
export {ns}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportOtherFileExportSelfAsNamespaceES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from './foo'
`,
"/foo.js": `
export const foo = 123
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportOtherFileImportExportSelfAsNamespaceES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from './foo'
`,
"/foo.js": `
export const foo = 123
import * as ns from './foo'
export {ns}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestOtherFileExportSelfAsNamespaceUnusedES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export {foo} from './foo'
`,
"/foo.js": `
export const foo = 123
export * as ns from './foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestOtherFileImportExportSelfAsNamespaceUnusedES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export {foo} from './foo'
`,
"/foo.js": `
export const foo = 123
import * as ns from './foo'
export {ns}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestExportSelfAsNamespaceCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
export * as ns from './entry'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestExportSelfAndRequireSelfCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export const foo = 123
console.log(require('./entry'))
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestExportSelfAndImportSelfCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as x from './entry'
export const foo = 123
console.log(x)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestExportOtherAsNamespaceCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * as ns from './foo'
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestImportExportOtherAsNamespaceCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
export {ns}
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
// TestNamespaceImportMissingES6 accesses a property ("foo") on a namespace
// import that the ESM target never exports. Because the namespace object
// itself is also used, this is only a warning (the property is undefined at
// runtime), not an error.
func TestNamespaceImportMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns, ns.foo)
`,
"/foo.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
// The bundler warns rather than errors for missing namespace properties.
expectedCompileLog: `entry.js: WARNING: Import "foo" will always be undefined because there is no matching export in "foo.js"
`,
})
}
func TestExportOtherCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export {bar} from './foo'
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestExportOtherNestedCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export {y} from './bar'
`,
"/bar.js": `
export {x as y} from './foo'
`,
"/foo.js": `
exports.foo = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestNamespaceImportUnusedMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns.foo)
`,
"/foo.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: WARNING: Import "foo" will always be undefined because there is no matching export in "foo.js"
`,
})
}
func TestNamespaceImportMissingCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns, ns.foo)
`,
"/foo.js": `
exports.x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestNamespaceImportUnusedMissingCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns.foo)
`,
"/foo.js": `
exports.x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportNamespaceImportMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './foo'
console.log(ns, ns.foo)
`,
"/foo.js": `
export * as ns from './bar'
`,
"/bar.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportNamespaceImportUnusedMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {ns} from './foo'
console.log(ns.foo)
`,
"/foo.js": `
export * as ns from './bar'
`,
"/bar.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestNamespaceImportReExportMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns, ns.foo)
`,
"/foo.js": `
export {foo} from './bar'
`,
"/bar.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `foo.js: ERROR: No matching export in "bar.js" for import "foo"
foo.js: ERROR: No matching export in "bar.js" for import "foo"
`,
})
}
func TestNamespaceImportReExportUnusedMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns.foo)
`,
"/foo.js": `
export {foo} from './bar'
`,
"/bar.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `foo.js: ERROR: No matching export in "bar.js" for import "foo"
foo.js: ERROR: No matching export in "bar.js" for import "foo"
`,
})
}
func TestNamespaceImportReExportStarMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns, ns.foo)
`,
"/foo.js": `
export * from './bar'
`,
"/bar.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: WARNING: Import "foo" will always be undefined because there is no matching export in "foo.js"
`,
})
}
func TestNamespaceImportReExportStarUnusedMissingES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns.foo)
`,
"/foo.js": `
export * from './bar'
`,
"/bar.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: WARNING: Import "foo" will always be undefined because there is no matching export in "foo.js"
`,
})
}
func TestExportStarDefaultExportCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from './foo'
`,
"/foo.js": `
export default 'default' // This should not be picked up
export let foo = 'foo'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestIssue176(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as things from './folders'
console.log(JSON.stringify(things))
`,
"/folders/index.js": `
export * from "./child"
`,
"/folders/child/index.js": `
export { foo } from './foo'
`,
"/folders/child/foo.js": `
export const foo = () => 'hi there'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportStarExternalIIFE(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatIIFE,
AbsOutputFile: "/out.js",
GlobalName: []string{"mod"},
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"foo": true,
}},
},
},
})
}
// TestReExportStarExternalES6 bundles `export * from "foo"` where "foo" is
// marked external (matched before path resolution), targeting ESM output, so
// the re-export must be preserved rather than inlined.
func TestReExportStarExternalES6(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
// Mark the bare specifier "foo" as external before resolution runs.
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"foo": true,
}},
},
},
})
}
func TestReExportStarExternalCommonJS(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"foo": true,
}},
},
},
})
}
func TestReExportStarIIFENoBundle(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatIIFE,
AbsOutputFile: "/out.js",
GlobalName: []string{"mod"},
},
})
}
func TestReExportStarES6NoBundle(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportStarCommonJSNoBundle(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeConvertFormat,
OutputFormat: config.FormatCommonJS,
AbsOutputFile: "/out.js",
},
})
}
func TestReExportStarAsExternalIIFE(t *testing.T) {
importstar_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * as out from "foo"
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_css_test.go | internal/bundler_tests/bundler_css_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
)
// css_suite is the snapshot-test suite for CSS bundling; each expectBundled
// call records its output under this suite name.
var css_suite = suite{
name: "css",
}
// TestCSSEntryPoint bundles a single CSS file used directly as an entry
// point and snapshots the generated stylesheet.
func TestCSSEntryPoint(t *testing.T) {
	entry := `
body {
background: white;
color: black }
`
	css_suite.expectBundled(t, bundled{
		files:      map[string]string{"/entry.css": entry},
		entryPaths: []string{"/entry.css"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.css",
		},
	})
}
// TestCSSAtImportMissing checks that a CSS @import of a nonexistent relative
// path produces a resolution error during the scan phase.
func TestCSSAtImportMissing(t *testing.T) {
css_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.css": `
@import "./missing.css";
`,
},
entryPaths: []string{"/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.css",
},
// expectedScanLog asserts diagnostics from the scan/resolve phase.
expectedScanLog: `entry.css: ERROR: Could not resolve "./missing.css"
`,
})
}
// TestCSSAtImportExternal exercises @import handling when several imports
// are marked external (matched after path resolution): externals, @charset
// directives, media-query-conditioned imports, and absolute https URLs must
// all be hoisted/preserved correctly while internal files are inlined.
func TestCSSAtImportExternal(t *testing.T) {
css_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.css": `
@import "./internal.css";
@import "./external1.css";
@import "./external2.css";
@import "./charset1.css";
@import "./charset2.css";
@import "./external5.css" screen;
`,
"/internal.css": `
@import "./external5.css" print;
.before { color: red }
`,
"/charset1.css": `
@charset "UTF-8";
@import "./external3.css";
@import "./external4.css";
@import "./external5.css";
@import "https://www.example.com/style1.css";
@import "https://www.example.com/style2.css";
@import "https://www.example.com/style3.css" print;
.middle { color: green }
`,
"/charset2.css": `
@charset "UTF-8";
@import "./external3.css";
@import "./external5.css" screen;
@import "https://www.example.com/style1.css";
@import "https://www.example.com/style3.css";
.after { color: blue }
`,
},
entryPaths: []string{"/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
// Mark these resolved absolute paths as external (post-resolution match).
ExternalSettings: config.ExternalSettings{
PostResolve: config.ExternalMatchers{Exact: map[string]bool{
"/external1.css": true,
"/external2.css": true,
"/external3.css": true,
"/external4.css": true,
"/external5.css": true,
}},
},
},
})
}
// TestCSSAtImport bundles a diamond-shaped @import graph (entry -> a, b ->
// shared), snapshotting how the shared stylesheet is deduplicated/ordered.
func TestCSSAtImport(t *testing.T) {
css_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.css": `
@import "./a.css";
@import "./b.css";
.entry { color: red }
`,
"/a.css": `
@import "./shared.css";
.a { color: green }
`,
"/b.css": `
@import "./shared.css";
.b { color: blue }
`,
"/shared.css": `
.shared { color: black }
`,
},
entryPaths: []string{"/entry.css"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.css",
},
})
}
// TestCSSFromJSMissingImport checks that a named import from a CSS file
// that has no matching export is a hard error at compile time.
func TestCSSFromJSMissingImport(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import {missing} from "./a.css"
				console.log(missing)
			`,
			"/a.css": `
				.a { color: red }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedCompileLog: `entry.js: ERROR: No matching export in "a.css" for import "missing"
`,
	})
}
// TestCSSFromJSMissingStarImport checks that accessing a missing property
// through a namespace import of a CSS file is only a warning (the value
// will be undefined), unlike a named import which is an error.
func TestCSSFromJSMissingStarImport(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import * as ns from "./a.css"
				console.log(ns.missing)
			`,
			"/a.css": `
				.a { color: red }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedCompileLog: `entry.js: WARNING: Import "missing" will always be undefined because there is no matching export in "a.css"
`,
	})
}
// TestImportGlobalCSSFromJS imports plain (global) CSS files from JS via
// namespace imports. Since global CSS exports no local names, accessing
// class names on the namespace warns that they will be undefined.
func TestImportGlobalCSSFromJS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import "./a.js"
				import "./b.js"
			`,
			"/a.js": `
				import * as stylesA from "./a.css"
				console.log('a', stylesA.a, stylesA.default.a)
			`,
			"/a.css": `
				.a { color: red }
			`,
			"/b.js": `
				import * as stylesB from "./b.css"
				console.log('b', stylesB.b, stylesB.default.b)
			`,
			"/b.css": `
				.b { color: blue }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedCompileLog: `a.js: WARNING: Import "a" will always be undefined because there is no matching export in "a.css"
b.js: WARNING: Import "b" will always be undefined because there is no matching export in "b.css"
`,
	})
}
// TestImportLocalCSSFromJS imports two CSS files loaded with the
// "local-css" loader from JS. Both files declare a ".button" class;
// local names are scoped per-file so the two must not collide.
func TestImportLocalCSSFromJS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import "./a.js"
				import "./b.js"
			`,
			"/a.js": `
				import * as stylesA from "./dir1/style.css"
				console.log('file 1', stylesA.button, stylesA.default.a)
			`,
			"/dir1/style.css": `
				.a { color: red }
				.button { display: none }
			`,
			"/b.js": `
				import * as stylesB from "./dir2/style.css"
				console.log('file 2', stylesB.button, stylesB.default.b)
			`,
			"/dir2/style.css": `
				.b { color: blue }
				.button { display: none }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
		},
	})
}
// TestImportLocalCSSFromJSMinifyIdentifiers is the same scenario as
// TestImportLocalCSSFromJS but with MinifyIdentifiers enabled, so the
// per-file local class names are additionally renamed/minified.
func TestImportLocalCSSFromJSMinifyIdentifiers(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import "./a.js"
				import "./b.js"
			`,
			"/a.js": `
				import * as stylesA from "./dir1/style.css"
				console.log('file 1', stylesA.button, stylesA.default.a)
			`,
			"/dir1/style.css": `
				.a { color: red }
				.button { display: none }
			`,
			"/b.js": `
				import * as stylesB from "./dir2/style.css"
				console.log('file 2', stylesB.button, stylesB.default.b)
			`,
			"/dir2/style.css": `
				.b { color: blue }
				.button { display: none }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
			MinifyIdentifiers: true,
		},
	})
}
// TestImportLocalCSSFromJSMinifyIdentifiersAvoidGlobalNames checks that
// minified local names do not collide with global class names: the global
// CSS file claims every single-letter name (a-z, A-Z) plus "_", so the
// minified rename of ".rename-this" must pick something else.
func TestImportLocalCSSFromJSMinifyIdentifiersAvoidGlobalNames(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import "./global.css"
				import "./local.module.css"
			`,
			"/global.css": `
				:is(.a, .b, .c, .d, .e, .f, .g, .h, .i, .j, .k, .l, .m, .n, .o, .p, .q, .r, .s, .t, .u, .v, .w, .x, .y, .z),
				:is(.A, .B, .C, .D, .E, .F, .G, .H, .I, .J, .K, .L, .M, .N, .O, .P, .Q, .R, .S, .T, .U, .V, .W, .X, .Y, .Z),
				._ { color: red }
			`,
			"/local.module.css": `
				.rename-this { color: blue }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":         config.LoaderJS,
				".css":        config.LoaderCSS,
				".module.css": config.LoaderLocalCSS,
			},
			MinifyIdentifiers: true,
		},
	})
}
// See: https://github.com/evanw/esbuild/issues/3295
// Two entry points each import a CSS module that declares the same local
// class names ("foo"/"bar"); with MinifyIdentifiers on, the renames must
// stay consistent and distinct across the separate output files.
func TestImportLocalCSSFromJSMinifyIdentifiersMultipleEntryPoints(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import { foo, bar } from "./a.module.css";
				console.log(foo, bar);
			`,
			"/a.module.css": `
				.foo { color: #001; }
				.bar { color: #002; }
			`,
			"/b.js": `
				import { foo, bar } from "./b.module.css";
				console.log(foo, bar);
			`,
			"/b.module.css": `
				.foo { color: #003; }
				.bar { color: #004; }
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:              config.ModeBundle,
			AbsOutputDir:      "/out",
			MinifyIdentifiers: true,
		},
	})
}
// TestImportCSSFromJSLocalVsGlobal runs the same stylesheet — covering
// ":global(...)"/":local(...)" in many positions, nesting, and bare
// ":global"/":local" switches — through three loaders ("css",
// "global-css", "local-css") to compare which names become local exports
// under each default scoping mode.
func TestImportCSSFromJSLocalVsGlobal(t *testing.T) {
	// One shared fixture so the only variable between the three imported
	// files is the loader used for each.
	css := `
		.top_level { color: #000 }

		:global(.GLOBAL) { color: #001 }
		:local(.local) { color: #002 }

		div:global(.GLOBAL) { color: #003 }
		div:local(.local) { color: #004 }

		.top_level:global(div) { color: #005 }
		.top_level:local(div) { color: #006 }

		:global(div.GLOBAL) { color: #007 }
		:local(div.local) { color: #008 }

		div:global(span.GLOBAL) { color: #009 }
		div:local(span.local) { color: #00A }

		div:global(#GLOBAL_A.GLOBAL_B.GLOBAL_C):local(.local_a.local_b#local_c) { color: #00B }
		div:global(#GLOBAL_A .GLOBAL_B .GLOBAL_C):local(.local_a .local_b #local_c) { color: #00C }

		.nested {
			:global(&.GLOBAL) { color: #00D }
			:local(&.local) { color: #00E }
			&:global(.GLOBAL) { color: #00F }
			&:local(.local) { color: #010 }
		}

		:global(.GLOBAL_A .GLOBAL_B) { color: #011 }
		:local(.local_a .local_b) { color: #012 }

		div:global(.GLOBAL_A .GLOBAL_B):hover { color: #013 }
		div:local(.local_a .local_b):hover { color: #014 }

		div :global(.GLOBAL_A .GLOBAL_B) span { color: #015 }
		div :local(.local_a .local_b) span { color: #016 }

		div > :global(.GLOBAL_A ~ .GLOBAL_B) + span { color: #017 }
		div > :local(.local_a ~ .local_b) + span { color: #018 }

		div:global(+ .GLOBAL_A):hover { color: #019 }
		div:local(+ .local_a):hover { color: #01A }

		:global.GLOBAL:local.local { color: #01B }
		:global .GLOBAL :local .local { color: #01C }
		:global {
			.GLOBAL {
				before: outer;
				:local {
					before: inner;
					.local {
						color: #01D;
					}
					after: inner;
				}
				after: outer;
			}
		}
	`
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import normalStyles from "./normal.css"
				import globalStyles from "./LOCAL.global-css"
				import localStyles from "./LOCAL.local-css"
				console.log('should be empty:', normalStyles)
				console.log('fewer local names:', globalStyles)
				console.log('more local names:', localStyles)
			`,
			"/normal.css":       css,
			"/LOCAL.global-css": css,
			"/LOCAL.local-css":  css,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":         config.LoaderJS,
				".css":        config.LoaderCSS,
				".global-css": config.LoaderGlobalCSS,
				".local-css":  config.LoaderLocalCSS,
			},
		},
	})
}
// TestImportCSSFromJSLowerBareLocalAndGlobal exercises bare ":local"/
// ":global" blocks (and nested/degenerate forms like ":local(:global)")
// when CSS nesting is unsupported and must be lowered.
func TestImportCSSFromJSLowerBareLocalAndGlobal(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				.before { color: #000 }
				:local { .button { color: #000 } }
				.after { color: #000 }

				.before { color: #001 }
				:global { .button { color: #001 } }
				.after { color: #001 }

				div { :local { .button { color: #002 } } }
				div { :global { .button { color: #003 } } }

				:local(:global) { color: #004 }
				:global(:local) { color: #005 }
				:local(:global) { .button { color: #006 } }
				:global(:local) { .button { color: #007 } }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
			// Force nesting to be lowered instead of passed through.
			UnsupportedCSSFeatures: compat.Nesting,
		},
	})
}
// TestImportCSSFromJSLocalAtKeyframes checks local-name scoping for
// "@keyframes" names referenced via "animation-name"/"animation",
// including CSS-wide keywords (none/inherit/initial/revert/unset) that
// must not be treated as names, and a quoted "none" keyframes name.
func TestImportCSSFromJSLocalAtKeyframes(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				@keyframes local_name { to { color: red } }
				div :global { animation-name: none }
				div :local { animation-name: none }
				div :global { animation-name: global_name }
				div :local { animation-name: local_name }
				div :global { animation-name: global_name1, none, global_name2, Inherit, INITIAL, revert, revert-layer, unset }
				div :local { animation-name: local_name1, none, local_name2, Inherit, INITIAL, revert, revert-layer, unset }
				div :global { animation: 2s infinite global_name }
				div :local { animation: 2s infinite local_name }

				/* Someone wanted to be able to name their animations "none" */
				@keyframes "none" { to { color: red } }
				div :global { animation-name: "none" }
				div :local { animation-name: "none" }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
			UnsupportedCSSFeatures: compat.Nesting,
		},
	})
}
// TestImportCSSFromJSLocalAtCounterStyle checks local-name scoping for
// "@counter-style" names referenced via "list-style-type" and the
// "list-style" shorthand, including which positions in the shorthand can
// hold a counter-style name and which keyword values must be excluded.
func TestImportCSSFromJSLocalAtCounterStyle(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import list_style_type from "./list_style_type.css"
				import list_style from "./list_style.css"
				console.log(list_style_type, list_style)
			`,
			"/list_style_type.css": `
				@counter-style local { symbols: A B C }
				div :global { list-style-type: GLOBAL }
				div :local { list-style-type: local }

				/* Must not accept invalid type values */
				div :local { list-style-type: none }
				div :local { list-style-type: INITIAL }
				div :local { list-style-type: decimal }
				div :local { list-style-type: disc }
				div :local { list-style-type: SQUARE }
				div :local { list-style-type: circle }
				div :local { list-style-type: disclosure-OPEN }
				div :local { list-style-type: DISCLOSURE-closed }
				div :local { list-style-type: LAO }
				div :local { list-style-type: "\1F44D" }
			`,
			"/list_style.css": `
				@counter-style local { symbols: A B C }
				div :global { list-style: GLOBAL }
				div :local { list-style: local }

				/* The first one is the type */
				div :local { list-style: local none }
				div :local { list-style: local url(http://) }
				div :local { list-style: local linear-gradient(red, green) }
				div :local { list-style: local inside }
				div :local { list-style: local outside }

				/* The second one is the type */
				div :local { list-style: none local }
				div :local { list-style: url(http://) local }
				div :local { list-style: linear-gradient(red, green) local }
				div :local { list-style: local inside }
				div :local { list-style: local outside }
				div :local { list-style: inside inside }
				div :local { list-style: inside outside }
				div :local { list-style: outside inside }
				div :local { list-style: outside outside }

				/* The type is set to "none" here */
				div :local { list-style: url(http://) none invalid }
				div :local { list-style: linear-gradient(red, green) none invalid }

				/* Must not accept invalid type values */
				div :local { list-style: INITIAL }
				div :local { list-style: decimal }
				div :local { list-style: disc }
				div :local { list-style: SQUARE }
				div :local { list-style: circle }
				div :local { list-style: disclosure-OPEN }
				div :local { list-style: DISCLOSURE-closed }
				div :local { list-style: LAO }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
			UnsupportedCSSFeatures: compat.Nesting,
		},
	})
}
// TestImportCSSFromJSLocalAtContainer checks local-name scoping for
// "@container" names, both in "@container" prelude positions (with
// not/and/or conditions and general-enclosed syntax) and in the
// "container-name"/"container" properties, including keyword exclusions.
func TestImportCSSFromJSLocalAtContainer(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				@container not (max-width: 100px) { div { color: red } }
				@container local (max-width: 100px) { div { color: red } }
				@container local not (max-width: 100px) { div { color: red } }
				@container local (max-width: 100px) or (min-height: 100px) { div { color: red } }
				@container local (max-width: 100px) and (min-height: 100px) { div { color: red } }
				@container general_enclosed(max-width: 100px) { div { color: red } }
				@container local general_enclosed(max-width: 100px) { div { color: red } }

				div :global { container-name: NONE initial }
				div :local { container-name: none INITIAL }
				div :global { container-name: GLOBAL1 GLOBAL2 }
				div :local { container-name: local1 local2 }

				div :global { container: none }
				div :local { container: NONE }
				div :global { container: NONE / size }
				div :local { container: none / size }
				div :global { container: GLOBAL1 GLOBAL2 }
				div :local { container: local1 local2 }
				div :global { container: GLOBAL1 GLOBAL2 / size }
				div :local { container: local1 local2 / size }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
			UnsupportedCSSFeatures: compat.Nesting,
		},
	})
}
// TestImportCSSFromJSNthIndexLocal checks that selectors inside the
// "of S" clause of ":nth-child()"/":nth-last-child()" respect
// local/global scoping, including mixed :local/:global switches inside
// the clause and in surrounding selector lists.
func TestImportCSSFromJSNthIndexLocal(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				:nth-child(2n of .local) { color: #000 }
				:nth-child(2n of :local(#local), :global(.GLOBAL)) { color: #001 }
				:nth-child(2n of .local1 :global .GLOBAL1, .GLOBAL2 :local .local2) { color: #002 }
				.local1, :nth-child(2n of :global .GLOBAL), .local2 { color: #003 }

				:nth-last-child(2n of .local) { color: #000 }
				:nth-last-child(2n of :local(#local), :global(.GLOBAL)) { color: #001 }
				:nth-last-child(2n of .local1 :global .GLOBAL1, .GLOBAL2 :local .local2) { color: #002 }
				.local1, :nth-last-child(2n of :global .GLOBAL), .local2 { color: #003 }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
			UnsupportedCSSFeatures: compat.Nesting,
		},
	})
}
// TestImportCSSFromJSComposes exercises the CSS modules "composes"
// property: composing local names, global names (via ":global" and
// "from global"), chained/transitive composition, and composing names
// from other files (which themselves compose from a shared base file).
func TestImportCSSFromJSComposes(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.module.css"
				console.log(styles)
			`,
			"/global.css": `
				.GLOBAL1 {
					color: black;
				}
			`,
			"/styles.module.css": `
				@import "global.css";
				.local0 {
					composes: local1;
					:global {
						composes: GLOBAL1 GLOBAL2;
					}
				}
				.local0 {
					composes: GLOBAL2 GLOBAL3 from global;
					composes: local1 local2;
					background: green;
				}
				.local0 :global {
					composes: GLOBAL4;
				}
				.local3 {
					border: 1px solid black;
					composes: local4;
				}
				.local4 {
					opacity: 0.5;
				}
				.local1 {
					color: red;
					composes: local3;
				}
				.fromOtherFile {
					composes: local0 from "other1.module.css";
					composes: local0 from "other2.module.css";
				}
			`,
			"/other1.module.css": `
				.local0 {
					composes: base1 base2 from "base.module.css";
					color: blue;
				}
			`,
			"/other2.module.css": `
				.local0 {
					composes: base1 base3 from "base.module.css";
					background: purple;
				}
			`,
			"/base.module.css": `
				.base1 {
					cursor: pointer;
				}
				.base2 {
					display: inline;
				}
				.base3 {
					float: left;
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":         config.LoaderJS,
				".css":        config.LoaderCSS,
				".module.css": config.LoaderLocalCSS,
			},
		},
	})
}
// TestImportCSSFromJSComposesFromMissingImport checks the error cases of
// "composes: x from file": composing a global name, composing a name
// that does not exist in the target file, and composing from a file
// loaded with the plain "css" loader (which has no local names).
func TestImportCSSFromJSComposesFromMissingImport(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.module.css"
				console.log(styles)
			`,
			"/styles.module.css": `
				.foo {
					composes: x from "file.module.css";
					composes: y from "file.module.css";
					composes: z from "file.module.css";
					composes: x from "file.css";
				}
			`,
			"/file.module.css": `
				.x {
					color: red;
				}
				:global(.y) {
					color: blue;
				}
			`,
			"/file.css": `
				.x {
					color: red;
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":         config.LoaderJS,
				".module.css": config.LoaderLocalCSS,
				".css":        config.LoaderCSS,
			},
		},
		expectedCompileLog: `styles.module.css: ERROR: Cannot use global name "y" with "composes"
file.module.css: NOTE: The global name "y" is defined here:
NOTE: Use the ":local" selector to change "y" into a local name.
styles.module.css: ERROR: The name "z" never appears in "file.module.css"
styles.module.css: ERROR: Cannot use global name "x" with "composes"
file.css: NOTE: The global name "x" is defined here:
NOTE: Use the "local-css" loader for "file.css" to enable local names.
`,
	})
}
// TestImportCSSFromJSComposesFromNotCSS checks that "composes ... from"
// targeting a non-CSS file (here loaded with the "text" loader) is a
// scan-time error.
func TestImportCSSFromJSComposesFromNotCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				.foo {
					composes: bar from "file.txt";
				}
			`,
			"/file.txt": `
				.bar {
					color: red;
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
				".txt": config.LoaderText,
			},
		},
		expectedScanLog: `styles.css: ERROR: Cannot use "composes" with "file.txt"
NOTE: You can only use "composes" with CSS files and "file.txt" is not a CSS file (it was loaded with the "text" loader).
`,
	})
}
// TestImportCSSFromJSComposesCircular checks that mutually-composing
// classes within one file (foo <-> bar) and a self-composing class (baz)
// are handled without looping.
func TestImportCSSFromJSComposesCircular(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				.foo {
					composes: bar;
				}
				.bar {
					composes: foo;
				}
				.baz {
					composes: baz;
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
		},
	})
}
// TestImportCSSFromJSComposesFromCircular checks cross-file "composes
// ... from" cycles: styles.css composes from other.css which composes
// back from styles.css, plus a file composing from itself by name.
func TestImportCSSFromJSComposesFromCircular(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				.foo {
					composes: bar from "other.css";
				}
				.bar {
					composes: bar from "styles.css";
				}
			`,
			"/other.css": `
				.bar {
					composes: foo from "styles.css";
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
		},
	})
}
// TestImportCSSFromJSComposesFromUndefined checks the warning for
// "composes" when a property ("zoom") ends up defined in more than one
// composed file, making its final value order-dependent and therefore
// undefined per the CSS modules spec. "well-defined.css" covers the
// allowed same-file cases; each "undefined/caseN.css" triggers one
// warning, all sharing the same trailing NOTE text.
func TestImportCSSFromJSComposesFromUndefined(t *testing.T) {
	// Shared NOTE appended to each of the five expected warnings below.
	note := "NOTE: The specification of \"composes\" does not define an order when class declarations from separate files are composed together. " +
		"The value of the \"zoom\" property for \"foo\" may change unpredictably as the code is edited. " +
		"Make sure that all definitions of \"zoom\" for \"foo\" are in a single file."
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import styles from "./styles.css"
				console.log(styles)
			`,
			"/styles.css": `
				@import "well-defined.css";
				@import "undefined/case1.css";
				@import "undefined/case2.css";
				@import "undefined/case3.css";
				@import "undefined/case4.css";
				@import "undefined/case5.css";
			`,
			"/well-defined.css": `
				.z1 { composes: z2; zoom: 1; }
				.z2 { zoom: 2; }
				.z4 { zoom: 4; }
				.z3 { composes: z4; zoom: 3; }
				.z5 { composes: foo bar from "file-1.css"; }
			`,
			"/undefined/case1.css": `
				.foo {
					composes: foo from "../file-1.css";
					zoom: 2;
				}
			`,
			"/undefined/case2.css": `
				.foo {
					composes: foo from "../file-1.css";
					composes: foo from "../file-2.css";
				}
			`,
			"/undefined/case3.css": `
				.foo { composes: nested1 nested2; }
				.nested1 { zoom: 3; }
				.nested2 { composes: foo from "../file-2.css"; }
			`,
			"/undefined/case4.css": `
				.foo { composes: nested1 nested2; }
				.nested1 { composes: foo from "../file-1.css"; }
				.nested2 { zoom: 3; }
			`,
			"/undefined/case5.css": `
				.foo { composes: nested1 nested2; }
				.nested1 { composes: foo from "../file-1.css"; }
				.nested2 { composes: foo from "../file-2.css"; }
			`,
			"/file-1.css": `
				.foo { zoom: 1; }
				.bar { zoom: 2; }
			`,
			"/file-2.css": `
				.foo { zoom: 2; }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":  config.LoaderJS,
				".css": config.LoaderLocalCSS,
			},
		},
		expectedCompileLog: `undefined/case1.css: WARNING: The value of "zoom" in the "foo" class is undefined
file-1.css: NOTE: The first definition of "zoom" is here:
undefined/case1.css: NOTE: The second definition of "zoom" is here:
` + note + `
undefined/case2.css: WARNING: The value of "zoom" in the "foo" class is undefined
file-1.css: NOTE: The first definition of "zoom" is here:
file-2.css: NOTE: The second definition of "zoom" is here:
` + note + `
undefined/case3.css: WARNING: The value of "zoom" in the "foo" class is undefined
undefined/case3.css: NOTE: The first definition of "zoom" is here:
file-2.css: NOTE: The second definition of "zoom" is here:
` + note + `
undefined/case4.css: WARNING: The value of "zoom" in the "foo" class is undefined
file-1.css: NOTE: The first definition of "zoom" is here:
undefined/case4.css: NOTE: The second definition of "zoom" is here:
` + note + `
undefined/case5.css: WARNING: The value of "zoom" in the "foo" class is undefined
file-1.css: NOTE: The first definition of "zoom" is here:
file-2.css: NOTE: The second definition of "zoom" is here:
` + note + `
`,
	})
}
// TestImportCSSFromJSWriteToStdout checks that importing CSS from JS is
// an error when writing to stdout, since there is no output path to
// derive a CSS file name from.
func TestImportCSSFromJSWriteToStdout(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import "./entry.css"
			`,
			"/entry.css": `
				.entry { color: red }
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			WriteToStdout: true,
		},
		expectedScanLog: `entry.js: ERROR: Cannot import "entry.css" into a JavaScript file without an output path configured
`,
	})
}
// TestImportJSFromCSS checks that a CSS "@import" of a JS file is a
// scan-time error with a note explaining "@import" only accepts CSS.
func TestImportJSFromCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export default 123
			`,
			"/entry.css": `
				@import "./entry.js";
			`,
		},
		entryPaths: []string{"/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedScanLog: `entry.css: ERROR: Cannot import "entry.js" into a CSS file
NOTE: An "@import" rule can only be used to import another CSS file and "entry.js" is not a CSS file (it was loaded with the "js" loader).
`,
	})
}
// TestImportJSONFromCSS checks that a CSS "@import" of a JSON file is a
// scan-time error, mirroring TestImportJSFromCSS for the "json" loader.
func TestImportJSONFromCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.json": `
				{}
			`,
			"/entry.css": `
				@import "./entry.json";
			`,
		},
		entryPaths: []string{"/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedScanLog: `entry.css: ERROR: Cannot import "entry.json" into a CSS file
NOTE: An "@import" rule can only be used to import another CSS file and "entry.json" is not a CSS file (it was loaded with the "json" loader).
`,
	})
}
// TestMissingImportURLInCSS checks that unresolvable "url()" tokens in
// CSS (both unquoted and quoted forms) each produce a resolve error.
func TestMissingImportURLInCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/src/entry.css": `
				a { background: url(./one.png); }
				b { background: url("./two.png"); }
			`,
		},
		entryPaths: []string{"/src/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedScanLog: `src/entry.css: ERROR: Could not resolve "./one.png"
src/entry.css: ERROR: Could not resolve "./two.png"
`,
	})
}
// TestExternalImportURLInCSS checks "url()" handling for external
// references: a path marked external via PostResolve (whose path should
// be rewritten relative to the output dir), plus URL forms that are
// treated as external automatically (http/https, protocol-relative,
// data: URLs, and fragment-only references).
func TestExternalImportURLInCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/src/entry.css": `
				div:after {
					content: 'If this is recognized, the path should become "../src/external.png"';
					background: url(./external.png);
				}

				/* These URLs should be external automatically */
				a { background: url(http://example.com/images/image.png) }
				b { background: url(https://example.com/images/image.png) }
				c { background: url(//example.com/images/image.png) }
				d { background: url(data:image/png;base64,iVBORw0KGgo=) }
				path { fill: url(#filter) }
			`,
		},
		entryPaths: []string{"/src/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExternalSettings: config.ExternalSettings{
				PostResolve: config.ExternalMatchers{Exact: map[string]bool{
					"/src/external.png": true,
				}},
			},
		},
	})
}
// TestInvalidImportURLInCSS checks that "url()" tokens referencing files
// whose loaders produce code instead of a URL (js/jsx/ts/tsx/json) or
// another CSS file are all scan-time errors with loader-specific notes.
func TestInvalidImportURLInCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.css": `
				a {
					background: url(./js.js);
					background: url("./jsx.jsx");
					background: url(./ts.ts);
					background: url('./tsx.tsx');
					background: url(./json.json);
					background: url(./css.css);
				}
			`,
			"/js.js":     `export default 123`,
			"/jsx.jsx":   `export default 123`,
			"/ts.ts":     `export default 123`,
			"/tsx.tsx":   `export default 123`,
			"/json.json": `{ "test": true }`,
			"/css.css":   `a { color: red }`,
		},
		entryPaths: []string{"/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
		expectedScanLog: `entry.css: ERROR: Cannot use "js.js" as a URL
NOTE: You can't use a "url()" token to reference the file "js.js" because it was loaded with the "js" loader, which doesn't provide a URL to embed in the resulting CSS.
entry.css: ERROR: Cannot use "jsx.jsx" as a URL
NOTE: You can't use a "url()" token to reference the file "jsx.jsx" because it was loaded with the "jsx" loader, which doesn't provide a URL to embed in the resulting CSS.
entry.css: ERROR: Cannot use "ts.ts" as a URL
NOTE: You can't use a "url()" token to reference the file "ts.ts" because it was loaded with the "ts" loader, which doesn't provide a URL to embed in the resulting CSS.
entry.css: ERROR: Cannot use "tsx.tsx" as a URL
NOTE: You can't use a "url()" token to reference the file "tsx.tsx" because it was loaded with the "tsx" loader, which doesn't provide a URL to embed in the resulting CSS.
entry.css: ERROR: Cannot use "json.json" as a URL
NOTE: You can't use a "url()" token to reference the file "json.json" because it was loaded with the "json" loader, which doesn't provide a URL to embed in the resulting CSS.
entry.css: ERROR: Cannot use "css.css" as a URL
NOTE: You can't use a "url()" token to reference a CSS file, and "css.css" is a CSS file (it was loaded with the "css" loader).
`,
	})
}
// TestTextImportURLInCSSText bundles a CSS "url()" reference to a file
// handled by the default loader for ".txt" (snapshot records the result).
func TestTextImportURLInCSSText(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.css": `
				a {
					background: url(./example.txt);
				}
			`,
			"/example.txt": `This is some text.`,
		},
		entryPaths: []string{"/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
		},
	})
}
// TestDataURLImportURLInCSS checks a CSS "url()" reference to a PNG
// loaded with the "dataurl" loader (the file content is the 8-byte PNG
// signature), which should be inlined as a data: URL.
func TestDataURLImportURLInCSS(t *testing.T) {
	css_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.css": `
				a {
					background: url(./example.png);
				}
			`,
			"/example.png": "\x89\x50\x4E\x47\x0D\x0A\x1A\x0A",
		},
		entryPaths: []string{"/entry.css"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExtensionToLoader: map[string]config.Loader{
				".css": config.LoaderCSS,
				".png": config.LoaderDataURL,
			},
		},
	})
}
func TestBinaryImportURLInCSS(t *testing.T) {
css_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.css": `
a {
background: url(./example.png);
}
`,
"/example.png": "\x89\x50\x4E\x47\x0D\x0A\x1A\x0A",
},
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// splitting_suite groups the code-splitting snapshot tests below under
// the "splitting" snapshot name.
var splitting_suite = suite{
	name: "splitting",
}
// TestSplittingSharedES6IntoES6 bundles two ESM entry points that both
// import the same ESM file, with code splitting enabled and ESM output,
// so the shared module should land in a common chunk.
func TestSplittingSharedES6IntoES6(t *testing.T) {
	sources := map[string]string{
		"/a.js": `
			import {foo} from "./shared.js"
			console.log(foo)
		`,
		"/b.js": `
			import {foo} from "./shared.js"
			console.log(foo)
		`,
		"/shared.js": `export let foo = 123`,
	}
	splitting_suite.expectBundled(t, bundled{
		files:      sources,
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingSharedCommonJSIntoES6 is the CommonJS variant of
// TestSplittingSharedES6IntoES6: two entry points require() the same
// CommonJS file with code splitting and ESM output.
func TestSplittingSharedCommonJSIntoES6(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				const {foo} = require("./shared.js")
				console.log(foo)
			`,
			"/b.js": `
				const {foo} = require("./shared.js")
				console.log(foo)
			`,
			"/shared.js": `exports.foo = 123`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingDynamicES6IntoES6 checks that a dynamic import() of an
// ESM file becomes a separate chunk when code splitting is enabled.
func TestSplittingDynamicES6IntoES6(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import("./foo.js").then(({bar}) => console.log(bar))
			`,
			"/foo.js": `
				export let bar = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingDynamicCommonJSIntoES6 checks a dynamic import() of a
// CommonJS file with code splitting; the CommonJS exports surface as the
// "default" export of the imported namespace.
func TestSplittingDynamicCommonJSIntoES6(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import("./foo.js").then(({default: {bar}}) => console.log(bar))
			`,
			"/foo.js": `
				exports.bar = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingDynamicAndNotDynamicES6IntoES6 checks the case where the
// same ESM file is both statically imported and dynamically import()ed
// from one entry point, with code splitting enabled.
func TestSplittingDynamicAndNotDynamicES6IntoES6(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import {bar as a} from "./foo.js"
				import("./foo.js").then(({bar: b}) => console.log(a, b))
			`,
			"/foo.js": `
				export let bar = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingDynamicAndNotDynamicCommonJSIntoES6 is the CommonJS
// variant of the mixed static + dynamic import case: the same CommonJS
// file is both imported and import()ed from one entry point.
func TestSplittingDynamicAndNotDynamicCommonJSIntoES6(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import {bar as a} from "./foo.js"
				import("./foo.js").then(({default: {bar: b}}) => console.log(a, b))
			`,
			"/foo.js": `
				exports.bar = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingAssignToLocal checks that a mutable export ("foo", set
// through an exported setter) in a shared chunk keeps live-binding
// semantics for both entry points under code splitting.
func TestSplittingAssignToLocal(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import {foo, setFoo} from "./shared.js"
				setFoo(123)
				console.log(foo)
			`,
			"/b.js": `
				import {foo} from "./shared.js"
				console.log(foo)
			`,
			"/shared.js": `
				export let foo
				export function setFoo(value) {
					foo = value
				}
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingSideEffectsWithoutDependencies checks code splitting of a
// shared file with a top-level side effect (console.log) where each
// entry point imports a different export; the side effect must run once
// regardless of which parts of the shared file each entry uses.
func TestSplittingSideEffectsWithoutDependencies(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import {a} from "./shared.js"
				console.log(a)
			`,
			"/b.js": `
				import {b} from "./shared.js"
				console.log(b)
			`,
			"/shared.js": `
				export let a = 1
				export let b = 2
				console.log('side effect')
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// TestSplittingNestedDirectories checks chunk path generation when the
// entry points live in sibling subdirectories and share a file from the
// parent directory, with an output directory outside the source tree.
func TestSplittingNestedDirectories(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/pages/pageA/page.js": `
				import x from "../shared.js"
				console.log(x)
			`,
			"/Users/user/project/src/pages/pageB/page.js": `
				import x from "../shared.js"
				console.log(-x)
			`,
			"/Users/user/project/src/pages/shared.js": `
				export default 123
			`,
		},
		entryPaths: []string{
			"/Users/user/project/src/pages/pageA/page.js",
			"/Users/user/project/src/pages/pageB/page.js",
		},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/Users/user/project/out",
		},
	})
}
// TestSplittingCircularReferenceIssue251 checks code splitting with two
// entry points that mutually re-export each other via "export *"
// (regression test for https://github.com/evanw/esbuild/issues/251).
func TestSplittingCircularReferenceIssue251(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				export * from './b.js';
				export var p = 5;
			`,
			"/b.js": `
				export * from './a.js';
				export var q = 6;
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Snapshot test: a shared module accesses a missing property on a namespace
// import from a module with no exports. Expects the compile-time warning in
// expectedCompileLog rather than an error, with code splitting enabled.
func TestSplittingMissingLazyExport(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import {foo} from './common.js'
				console.log(foo())
			`,
			"/b.js": `
				import {bar} from './common.js'
				console.log(bar())
			`,
			"/common.js": `
				import * as ns from './empty.js'
				export function foo() { return [ns, ns.missing] }
				export function bar() { return [ns.missing] }
			`,
			"/empty.js": `
				// This forces the module into ES6 mode without importing or exporting anything
				import.meta
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
		expectedCompileLog: `common.js: WARNING: Import "missing" will always be undefined because the file "empty.js" has no exports
`,
	})
}
// Regression test for issue #273: one entry point re-exports a symbol from
// the other entry point; both are bundled with code splitting enabled.
func TestSplittingReExportIssue273(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				export const a = 1
			`,
			"/b.js": `
				export { a } from './a'
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Regression test for issue #272: one entry point dynamically imports the
// other entry point while code splitting is enabled.
func TestSplittingDynamicImportIssue272(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import('./b')
			`,
			"/b.js": `
				export default 1
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Regression test for issue #264: both entry points dynamically import the
// same package from node_modules (outside the source tree), so a shared
// dynamic chunk must be emitted into the output directory.
func TestSplittingDynamicImportOutsideSourceTreeIssue264(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry1.js": `
				import('package')
			`,
			"/Users/user/project/src/entry2.js": `
				import('package')
			`,
			"/Users/user/project/node_modules/package/index.js": `
				console.log('imported')
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry1.js", "/Users/user/project/src/entry2.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Snapshot test: the shared module's exported setter assigns to a
// module-local variable that another exported function reads; checks that
// cross-chunk assignment dependencies are handled when splitting.
func TestSplittingCrossChunkAssignmentDependencies(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import {setValue} from './shared'
				setValue(123)
			`,
			"/b.js": `
				import './shared'
			`,
			"/shared.js": `
				var observer;
				var value;
				export function setObserver(cb) {
					observer = cb;
				}
				export function getValue() {
					return value;
				}
				export function setValue(next) {
					value = next;
					if (observer) observer();
				}
				sideEffects(getValue);
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Snapshot test: like the previous test but with a chain of modules
// (x <- y <- z) whose setters call each other, imported by three entries;
// exercises recursive cross-chunk assignment dependency tracking.
func TestSplittingCrossChunkAssignmentDependenciesRecursive(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import { setX } from './x'
				setX()
			`,
			"/b.js": `
				import { setZ } from './z'
				setZ()
			`,
			"/c.js": `
				import { setX2 } from './x'
				import { setY2 } from './y'
				import { setZ2 } from './z'
				setX2();
				setY2();
				setZ2();
			`,
			"/x.js": `
				let _x
				export function setX(v) { _x = v }
				export function setX2(v) { _x = v }
			`,
			"/y.js": `
				import { setX } from './x'
				let _y
				export function setY(v) { _y = v }
				export function setY2(v) { setX(v); _y = v }
			`,
			"/z.js": `
				import { setY } from './y'
				let _z
				export function setZ(v) { _z = v }
				export function setZ2(v) { setY(v); _z = v }
			`,
		},
		entryPaths: []string{"/a.js", "/b.js", "/c.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Snapshot test: two pairs of entry points each share a module with identical
// contents ("/ab.js" and "/cd.js"); with minified whitespace, the two shared
// chunks must not collide on their output names.
func TestSplittingDuplicateChunkCollision(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import "./ab"
			`,
			"/b.js": `
				import "./ab"
			`,
			"/c.js": `
				import "./cd"
			`,
			"/d.js": `
				import "./cd"
			`,
			"/ab.js": `
				console.log(123)
			`,
			"/cd.js": `
				console.log(123)
			`,
		},
		entryPaths: []string{"/a.js", "/b.js", "/c.js", "/d.js"},
		options: config.Options{
			Mode:             config.ModeBundle,
			CodeSplitting:    true,
			MinifyWhitespace: true,
			OutputFormat:     config.FormatESModule,
			AbsOutputDir:     "/out",
		},
	})
}
// Regression test for issue #437: code splitting combined with identifier
// minification must not crash when one entry imports the shared module only
// for its side effects.
func TestSplittingMinifyIdentifiersCrashIssue437(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import {foo} from "./shared"
				console.log(foo)
			`,
			"/b.js": `
				import {foo} from "./shared"
				console.log(foo)
			`,
			"/c.js": `
				import "./shared"
			`,
			"/shared.js": `
				export function foo(bar) {}
			`,
		},
		entryPaths: []string{"/a.js", "/b.js", "/c.js"},
		options: config.Options{
			Mode:              config.ModeBundle,
			CodeSplitting:     true,
			MinifyIdentifiers: true,
			OutputFormat:      config.FormatESModule,
			AbsOutputDir:      "/out",
		},
	})
}
// Regression test for issue #617: one ESM entry point is also require()'d by
// the other entry point, mixing ESM and CommonJS usage under code splitting.
func TestSplittingHybridESMAndCJSIssue617(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				export let foo
			`,
			"/b.js": `
				export let bar = require('./a')
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			AbsOutputDir:  "/out",
		},
	})
}
// Snapshot test: a dynamic import with code splitting when the PublicPath
// option is set; the generated import path should be prefixed accordingly.
func TestSplittingPublicPathEntryName(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import("./b")
			`,
			"/b.js": `
				console.log('b')
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			CodeSplitting: true,
			OutputFormat:  config.FormatESModule,
			PublicPath:    "/www/",
			AbsOutputDir:  "/out",
		},
	})
}
// Snapshot test: a custom ChunkPathTemplate using the [dir]/[name]-[hash]
// placeholders with an implicitly-derived outbase; the chunk path should
// include the dynamically-imported file's directory structure.
func TestSplittingChunkPathDirPlaceholderImplicitOutbase(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/project/entry.js": `
				console.log(import('./output-path/should-contain/this-text/file'))
			`,
			"/project/output-path/should-contain/this-text/file.js": `
				console.log('file.js')
			`,
		},
		entryPaths: []string{"/project/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatESModule,
			CodeSplitting: true,
			AbsOutputDir:  "/out",
			ChunkPathTemplate: []config.PathTemplate{
				{Data: "./", Placeholder: config.DirPlaceholder},
				{Data: "/", Placeholder: config.NamePlaceholder},
				{Data: "-", Placeholder: config.HashPlaceholder},
			},
		},
	})
}
// Regression test for issue #2793 (splitting variant): an index file
// re-exports two modules, one of which dynamically imports the index itself
// (".") to read a sibling export.
func TestEdgeCaseIssue2793WithSplitting(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/src/a.js": `
				export const A = 42;
			`,
			"/src/b.js": `
				export const B = async () => (await import(".")).A
			`,
			"/src/index.js": `
				export * from "./a"
				export * from "./b"
			`,
		},
		entryPaths: []string{"/src/index.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatESModule,
			CodeSplitting: true,
			AbsOutputDir:  "/out",
		},
	})
}
// Regression test for issue #2793 (non-splitting variant): same input as the
// test above but with CodeSplitting left disabled, for output comparison.
func TestEdgeCaseIssue2793WithoutSplitting(t *testing.T) {
	splitting_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/src/a.js": `
				export const A = 42;
			`,
			"/src/b.js": `
				export const B = async () => (await import(".")).A
			`,
			"/src/index.js": `
				export * from "./a"
				export * from "./b"
			`,
		},
		entryPaths: []string{"/src/index.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			OutputFormat: config.FormatESModule,
			AbsOutputDir: "/out",
		},
	})
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_lower_test.go | internal/bundler_tests/bundler_lower_test.go | package bundler_tests
// This file contains tests for "lowering" syntax, which means converting it to
// older JavaScript. For example, "a ** b" becomes a call to "Math.pow(a, b)"
// when lowered. Which syntax is lowered is determined by the language target.
import (
"testing"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
)
// lower_suite groups the snapshot tests in this file under the "lower"
// snapshot directory.
var lower_suite = suite{
	name: "lower",
}
// Snapshot test: lowering an optional catch binding (ES2019) to ES2018 must
// introduce a catch variable that doesn't collide with existing "e"/"e2"/"e3".
func TestLowerOptionalCatchNameCollisionNoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				try {}
				catch { var e, e2 }
				var e3
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2018),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: lowering object spread (ES2018) to ES2017, both in object
// literals and in JSX spread attributes.
func TestLowerObjectSpreadNoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.jsx": `
				let tests = [
					{...a, ...b},
					{a, b, ...c},
					{...a, b, c},
					{a, ...b, c},
					{a, b, ...c, ...d, e, f, ...g, ...h, i, j},
				]
				let jsx = [
					<div {...a} {...b}/>,
					<div a b {...c}/>,
					<div {...a} b c/>,
					<div a {...b} c/>,
					<div a b {...c} {...d} e f {...g} {...h} i j/>,
				]
			`,
		},
		entryPaths: []string{"/entry.jsx"},
		options: config.Options{
			UnsupportedJSFeatures: es(2017),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: lowering the "**" operator and "**=" assignment (ES2016) to
// ES2015, including which assignment targets require capturing a temporary.
// Also expects the BigInt-literal warning listed in expectedScanLog.
func TestLowerExponentiationOperatorNoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				let tests = {
					// Exponentiation operator
					0: a ** b ** c,
					1: (a ** b) ** c,

					// Exponentiation assignment operator
					2: a **= b,
					3: a.b **= c,
					4: a[b] **= c,
					5: a().b **= c,
					6: a()[b] **= c,
					7: a[b()] **= c,
					8: a()[b()] **= c,

					// These all should not need capturing (no object identity)
					9: a[0] **= b,
					10: a[false] **= b,
					11: a[null] **= b,
					12: a[void 0] **= b,
					13: a[123n] **= b,
					14: a[this] **= b,

					// These should need capturing (have object identitiy)
					15: a[/x/] **= b,
					16: a[{}] **= b,
					17: a[[]] **= b,
					18: a[() => {}] **= b,
					19: a[function() {}] **= b,
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
		expectedScanLog: `entry.js: WARNING: Big integer literals are not available in the configured target environment and may crash at run-time
`,
	})
}
// Snapshot test: lowering every unary/compound assignment to a private class
// field when targeting ES2015 (private fields must be fully lowered).
func TestLowerPrivateFieldAssignments2015NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					unary() {
						this.#x++
						this.#x--
						++this.#x
						--this.#x
					}
					binary() {
						this.#x = 1
						this.#x += 1
						this.#x -= 1
						this.#x *= 1
						this.#x /= 1
						this.#x %= 1
						this.#x **= 1
						this.#x <<= 1
						this.#x >>= 1
						this.#x >>>= 1
						this.#x &= 1
						this.#x |= 1
						this.#x ^= 1
						this.#x &&= 1
						this.#x ||= 1
						this.#x ??= 1
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private-field assignment matrix as the 2015 variant,
// but targeting ES2019.
func TestLowerPrivateFieldAssignments2019NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					unary() {
						this.#x++
						this.#x--
						++this.#x
						--this.#x
					}
					binary() {
						this.#x = 1
						this.#x += 1
						this.#x -= 1
						this.#x *= 1
						this.#x /= 1
						this.#x %= 1
						this.#x **= 1
						this.#x <<= 1
						this.#x >>= 1
						this.#x >>>= 1
						this.#x &= 1
						this.#x |= 1
						this.#x ^= 1
						this.#x &&= 1
						this.#x ||= 1
						this.#x ??= 1
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2019),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private-field assignment matrix, targeting ES2020.
func TestLowerPrivateFieldAssignments2020NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					unary() {
						this.#x++
						this.#x--
						++this.#x
						--this.#x
					}
					binary() {
						this.#x = 1
						this.#x += 1
						this.#x -= 1
						this.#x *= 1
						this.#x /= 1
						this.#x %= 1
						this.#x **= 1
						this.#x <<= 1
						this.#x >>= 1
						this.#x >>>= 1
						this.#x &= 1
						this.#x |= 1
						this.#x ^= 1
						this.#x &&= 1
						this.#x ||= 1
						this.#x ??= 1
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2020),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private-field assignment matrix with no target
// restrictions ("esnext"), so nothing should be lowered.
func TestLowerPrivateFieldAssignmentsNextNoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					unary() {
						this.#x++
						this.#x--
						++this.#x
						--this.#x
					}
					binary() {
						this.#x = 1
						this.#x += 1
						this.#x -= 1
						this.#x *= 1
						this.#x /= 1
						this.#x %= 1
						this.#x **= 1
						this.#x <<= 1
						this.#x >>= 1
						this.#x >>>= 1
						this.#x &= 1
						this.#x |= 1
						this.#x ^= 1
						this.#x &&= 1
						this.#x ||= 1
						this.#x ??= 1
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			AbsOutputFile: "/out.js",
		},
	})
}
// Snapshot test: private field access combined with optional chaining,
// targeting ES2019 (both features must be lowered).
func TestLowerPrivateFieldOptionalChain2019NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					foo() {
						this?.#x.y
						this?.y.#x
						this.#x?.y
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2019),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: private field access with optional chaining, targeting
// ES2020 (optional chaining is native; private fields must be lowered).
func TestLowerPrivateFieldOptionalChain2020NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					foo() {
						this?.#x.y
						this?.y.#x
						this.#x?.y
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2020),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: private field access with optional chaining and no target
// restrictions, so the syntax should pass through unchanged.
func TestLowerPrivateFieldOptionalChainNextNoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Foo {
					#x
					foo() {
						this?.#x.y
						this?.y.#x
						this.#x?.y
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			AbsOutputFile: "/out.js",
		},
	})
}
// Snapshot test: TypeScript input with private fields plus optional chaining,
// lowered all the way down to ES2015.
func TestTSLowerPrivateFieldOptionalChain2015NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				class Foo {
					#x
					foo() {
						this?.#x.y
						this?.y.#x
						this.#x?.y
					}
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: TypeScript static private field, getter/setter pair, and
// method are all lowered when targeting ES2015.
func TestTSLowerPrivateStaticMembers2015NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				class Foo {
					static #x
					static get #y() {}
					static set #y(x) {}
					static #z() {}
					foo() {
						Foo.#x += 1
						Foo.#y += 1
						Foo.#z()
					}
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: classes named "WeakMap"/"WeakSet" contain private members;
// the ES2015 lowering uses WeakMap/WeakSet helpers, so generated references
// must not collide with these user-defined class names.
func TestTSLowerPrivateFieldAndMethodAvoidNameCollision2015(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.ts": `
				export class WeakMap {
					#x
				}
				export class WeakSet {
					#y() {}
				}
			`,
		},
		entryPaths: []string{"/entry.ts"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: private getters/setters with every read, write, unary, and
// compound-assignment form (on a side-effecting receiver fn()), targeting
// ES2015.
func TestLowerPrivateGetterSetter2015(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					get #foo() { return this.foo }
					set #bar(val) { this.bar = val }
					get #prop() { return this.prop }
					set #prop(val) { this.prop = val }
					foo(fn) {
						fn().#foo
						fn().#bar = 1
						fn().#prop
						fn().#prop = 2
					}
					unary(fn) {
						fn().#prop++;
						fn().#prop--;
						++fn().#prop;
						--fn().#prop;
					}
					binary(fn) {
						fn().#prop = 1;
						fn().#prop += 1;
						fn().#prop -= 1;
						fn().#prop *= 1;
						fn().#prop /= 1;
						fn().#prop %= 1;
						fn().#prop **= 1;
						fn().#prop <<= 1;
						fn().#prop >>= 1;
						fn().#prop >>>= 1;
						fn().#prop &= 1;
						fn().#prop |= 1;
						fn().#prop ^= 1;
						fn().#prop &&= 1;
						fn().#prop ||= 1;
						fn().#prop ??= 1;
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2015),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private getter/setter matrix as the 2015 variant,
// targeting ES2019.
func TestLowerPrivateGetterSetter2019(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					get #foo() { return this.foo }
					set #bar(val) { this.bar = val }
					get #prop() { return this.prop }
					set #prop(val) { this.prop = val }
					foo(fn) {
						fn().#foo
						fn().#bar = 1
						fn().#prop
						fn().#prop = 2
					}
					unary(fn) {
						fn().#prop++;
						fn().#prop--;
						++fn().#prop;
						--fn().#prop;
					}
					binary(fn) {
						fn().#prop = 1;
						fn().#prop += 1;
						fn().#prop -= 1;
						fn().#prop *= 1;
						fn().#prop /= 1;
						fn().#prop %= 1;
						fn().#prop **= 1;
						fn().#prop <<= 1;
						fn().#prop >>= 1;
						fn().#prop >>>= 1;
						fn().#prop &= 1;
						fn().#prop |= 1;
						fn().#prop ^= 1;
						fn().#prop &&= 1;
						fn().#prop ||= 1;
						fn().#prop ??= 1;
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2019),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private getter/setter matrix, targeting ES2020.
func TestLowerPrivateGetterSetter2020(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					get #foo() { return this.foo }
					set #bar(val) { this.bar = val }
					get #prop() { return this.prop }
					set #prop(val) { this.prop = val }
					foo(fn) {
						fn().#foo
						fn().#bar = 1
						fn().#prop
						fn().#prop = 2
					}
					unary(fn) {
						fn().#prop++;
						fn().#prop--;
						++fn().#prop;
						--fn().#prop;
					}
					binary(fn) {
						fn().#prop = 1;
						fn().#prop += 1;
						fn().#prop -= 1;
						fn().#prop *= 1;
						fn().#prop /= 1;
						fn().#prop %= 1;
						fn().#prop **= 1;
						fn().#prop <<= 1;
						fn().#prop >>= 1;
						fn().#prop >>>= 1;
						fn().#prop &= 1;
						fn().#prop |= 1;
						fn().#prop ^= 1;
						fn().#prop &&= 1;
						fn().#prop ||= 1;
						fn().#prop ??= 1;
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2020),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private getter/setter matrix with no target
// restrictions, so no lowering should occur.
func TestLowerPrivateGetterSetterNext(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					get #foo() { return this.foo }
					set #bar(val) { this.bar = val }
					get #prop() { return this.prop }
					set #prop(val) { this.prop = val }
					foo(fn) {
						fn().#foo
						fn().#bar = 1
						fn().#prop
						fn().#prop = 2
					}
					unary(fn) {
						fn().#prop++;
						fn().#prop--;
						++fn().#prop;
						--fn().#prop;
					}
					binary(fn) {
						fn().#prop = 1;
						fn().#prop += 1;
						fn().#prop -= 1;
						fn().#prop *= 1;
						fn().#prop /= 1;
						fn().#prop %= 1;
						fn().#prop **= 1;
						fn().#prop <<= 1;
						fn().#prop >>= 1;
						fn().#prop >>>= 1;
						fn().#prop &= 1;
						fn().#prop |= 1;
						fn().#prop ^= 1;
						fn().#prop &&= 1;
						fn().#prop ||= 1;
						fn().#prop ??= 1;
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Snapshot test: calls through private fields and private methods combined
// with optional chaining and optional calls, targeting ES2019.
func TestLowerPrivateMethod2019(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					#field
					#method() {}
					baseline() {
						a().foo
						b().foo(x)
						c()?.foo(x)
						d().foo?.(x)
						e()?.foo?.(x)
					}
					privateField() {
						a().#field
						b().#field(x)
						c()?.#field(x)
						d().#field?.(x)
						e()?.#field?.(x)
						f()?.foo.#field(x).bar()
					}
					privateMethod() {
						a().#method
						b().#method(x)
						c()?.#method(x)
						d().#method?.(x)
						e()?.#method?.(x)
						f()?.foo.#method(x).bar()
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2019),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private field/method call matrix as the 2019 variant,
// targeting ES2020.
func TestLowerPrivateMethod2020(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					#field
					#method() {}
					baseline() {
						a().foo
						b().foo(x)
						c()?.foo(x)
						d().foo?.(x)
						e()?.foo?.(x)
					}
					privateField() {
						a().#field
						b().#field(x)
						c()?.#field(x)
						d().#field?.(x)
						e()?.#field?.(x)
						f()?.foo.#field(x).bar()
					}
					privateMethod() {
						a().#method
						b().#method(x)
						c()?.#method(x)
						d().#method?.(x)
						e()?.#method?.(x)
						f()?.foo.#method(x).bar()
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2020),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same private field/method call matrix with no target
// restrictions, so the syntax should be preserved.
func TestLowerPrivateMethodNext(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					#field
					#method() {}
					baseline() {
						a().foo
						b().foo(x)
						c()?.foo(x)
						d().foo?.(x)
						e()?.foo?.(x)
					}
					privateField() {
						a().#field
						b().#field(x)
						c()?.#field(x)
						d().#field?.(x)
						e()?.#field?.(x)
						f()?.foo.#field(x).bar()
					}
					privateMethod() {
						a().#method
						b().#method(x)
						c()?.#method(x)
						d().#method?.(x)
						e()?.#method?.(x)
						f()?.foo.#method(x).bar()
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Snapshot test: lowering instance and static private members of a class
// *expression* (not a declaration) when targeting ES2020.
func TestLowerPrivateClassExpr2020NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export let Foo = class {
					#field
					#method() {}
					static #staticField
					static #staticMethod() {}
					foo() {
						this.#field = this.#method()
						Foo.#staticField = Foo.#staticMethod()
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2020),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: private methods with generator/async/async-generator and
// static modifiers, lowered when targeting ES2020.
func TestLowerPrivateMethodWithModifiers2020(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export class Foo {
					*#g() {}
					async #a() {}
					async *#ag() {}
					static *#sg() {}
					static async #sa() {}
					static async *#sag() {}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2020),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: lowering async functions/arrows/methods to ES2016,
// including "this"/"arguments" capture and an async arrow started before
// "super()" in a derived-class constructor.
func TestLowerAsync2016NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				async function foo(bar) {
					await bar
					return [this, arguments]
				}
				class Foo {async foo() {}}
				new (class Bar extends class { } {
					constructor() {
						let x = 1;
						(async () => {
							console.log("before super", x); // (1) Sync phase
							await 1;
							console.log("after super", x); // (2) Async phase
						})();
						super();
						x = 2;
					}
				})();
				export default [
					foo,
					Foo,
					async function() {},
					async () => {},
					{async foo() {}},
					class {async foo() {}},
					function() {
						return async (bar) => {
							await bar
							return [this, arguments]
						}
					},
				]
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: the same async constructs targeting ES2017, where async
// functions are natively supported and should pass through.
func TestLowerAsync2017NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				async function foo(bar) {
					await bar
					return arguments
				}
				class Foo {async foo() {}}
				export default [
					foo,
					Foo,
					async function() {},
					async () => {},
					{async foo() {}},
					class {async foo() {}},
					function() {
						return async (bar) => {
							await bar
							return [this, arguments]
						}
					},
				]
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2017),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: top-level "this" captured by a lowered async arrow in a
// CommonJS-style file (uses "exports") when targeting ES2016.
func TestLowerAsyncThis2016CommonJS(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				exports.foo = async () => this
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: top-level "this" in an ESM file becomes undefined; expects
// the DEBUG-level note in expectedScanLog (debugLogs enabled), with async
// lowering to ES2016.
func TestLowerAsyncThis2016ES6(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export {bar} from "./other"
				export let foo = async () => this
			`,
			"/other.js": `
				export let bar = async () => {}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
		debugLogs: true,
		expectedScanLog: `entry.js: DEBUG: Top-level "this" will be replaced with undefined since this file is an ECMAScript module
entry.js: NOTE: This file is considered to be an ECMAScript module because of the "export" keyword here:
`,
	})
}
// Snapshot test: every async-function form targeting ES5, which is
// unsupported; expects one "not supported yet" error per file in
// expectedScanLog.
func TestLowerAsyncES5(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import './fn-stmt'
				import './fn-expr'
				import './arrow-1'
				import './arrow-2'
				import './export-def-1'
				import './export-def-2'
				import './obj-method'
			`,
			"/fn-stmt.js":      `async function foo() {}`,
			"/fn-expr.js":      `(async function() {})`,
			"/arrow-1.js":      `(async () => {})`,
			"/arrow-2.js":      `(async x => {})`,
			"/export-def-1.js": `export default async function foo() {}`,
			"/export-def-2.js": `export default async function() {}`,
			"/obj-method.js":   `({async foo() {}})`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(5),
			AbsOutputFile:         "/out.js",
		},
		expectedScanLog: `arrow-1.js: ERROR: Transforming async functions to the configured target environment is not supported yet
arrow-2.js: ERROR: Transforming async functions to the configured target environment is not supported yet
export-def-1.js: ERROR: Transforming async functions to the configured target environment is not supported yet
export-def-2.js: ERROR: Transforming async functions to the configured target environment is not supported yet
fn-expr.js: ERROR: Transforming async functions to the configured target environment is not supported yet
fn-stmt.js: ERROR: Transforming async functions to the configured target environment is not supported yet
obj-method.js: ERROR: Transforming async functions to the configured target environment is not supported yet
`,
	})
}
// Snapshot test: every "super" access form inside an async method of a
// derived class when targeting ES2017, plus regressions for a compiler crash
// (async arrow returning a class), bad codegen (computed key using super in
// a nested class), and temporary-variable placement inside a loop.
func TestLowerAsyncSuperES2017NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Derived extends Base {
					async test(key) {
						return [
							await super.foo,
							await super[key],
							await ([super.foo] = [0]),
							await ([super[key]] = [0]),

							await (super.foo = 1),
							await (super[key] = 1),
							await (super.foo += 2),
							await (super[key] += 2),

							await ++super.foo,
							await ++super[key],
							await super.foo++,
							await super[key]++,

							await super.foo.name,
							await super[key].name,
							await super.foo?.name,
							await super[key]?.name,

							await super.foo(1, 2),
							await super[key](1, 2),
							await super.foo?.(1, 2),
							await super[key]?.(1, 2),

							await (() => super.foo)(),
							await (() => super[key])(),
							await (() => super.foo())(),
							await (() => super[key]())(),

							await super.foo` + "``" + `,
							await super[key]` + "``" + `,
						]
					}
				}

				// This covers a bug that caused a compiler crash
				let fn = async () => class extends Base {
					a = super.a
					b = () => super.b
					c() { return super.c }
					d() { return () => super.d }
				}

				// This covers a bug that generated bad code
				class Derived2 extends Base {
					async a() { return class { [super.foo] = 123 } }
					b = async () => class { [super.foo] = 123 }
				}

				// This covers putting the generated temporary variable inside the loop
				for (let i = 0; i < 3; i++) {
					objs.push({
						__proto__: {
							foo() { return i },
						},
						async bar() { return super.foo() },
					})
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2017),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same "super"-in-async matrix as the ES2017 variant but
// targeting ES2016, where the async functions themselves must also be
// lowered.
func TestLowerAsyncSuperES2016NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Derived extends Base {
					async test(key) {
						return [
							await super.foo,
							await super[key],
							await ([super.foo] = [0]),
							await ([super[key]] = [0]),

							await (super.foo = 1),
							await (super[key] = 1),
							await (super.foo += 2),
							await (super[key] += 2),

							await ++super.foo,
							await ++super[key],
							await super.foo++,
							await super[key]++,

							await super.foo.name,
							await super[key].name,
							await super.foo?.name,
							await super[key]?.name,

							await super.foo(1, 2),
							await super[key](1, 2),
							await super.foo?.(1, 2),
							await super[key]?.(1, 2),

							await (() => super.foo)(),
							await (() => super[key])(),
							await (() => super.foo())(),
							await (() => super[key]())(),

							await super.foo` + "``" + `,
							await super[key]` + "``" + `,
						]
					}
				}

				// This covers a bug that caused a compiler crash
				let fn = async () => class extends Base {
					a = super.a
					b = () => super.b
					c() { return super.c }
					d() { return () => super.d }
				}

				// This covers a bug that generated bad code
				class Derived2 extends Base {
					async a() { return class { [super.foo] = 123 } }
					b = async () => class { [super.foo] = 123 }
				}

				// This covers putting the generated temporary variable inside the loop
				for (let i = 0; i < 3; i++) {
					objs.push({
						__proto__: {
							foo() { return i },
						},
						async bar() { return super.foo() },
					})
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: every "super" access form inside a *static* async arrow
// field of a derived class, targeting ES2021, plus regressions for a
// compiler crash and bad codegen with static members.
func TestLowerStaticAsyncSuperES2021NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Derived extends Base {
					static test = async (key) => {
						return [
							await super.foo,
							await super[key],
							await ([super.foo] = [0]),
							await ([super[key]] = [0]),

							await (super.foo = 1),
							await (super[key] = 1),
							await (super.foo += 2),
							await (super[key] += 2),

							await ++super.foo,
							await ++super[key],
							await super.foo++,
							await super[key]++,

							await super.foo.name,
							await super[key].name,
							await super.foo?.name,
							await super[key]?.name,

							await super.foo(1, 2),
							await super[key](1, 2),
							await super.foo?.(1, 2),
							await super[key]?.(1, 2),

							await (() => super.foo)(),
							await (() => super[key])(),
							await (() => super.foo())(),
							await (() => super[key]())(),

							await super.foo` + "``" + `,
							await super[key]` + "``" + `,
						]
					}
				}

				// This covers a bug that caused a compiler crash
				let fn = async () => class extends Base {
					static a = super.a
					static b = () => super.b
					static c() { return super.c }
					static d() { return () => super.d }
				}

				// This covers a bug that generated bad code
				class Derived2 extends Base {
					static async a() { return class { [super.foo] = 123 } }
					static b = async () => class { [super.foo] = 123 }
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2021),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same static-async "super" matrix as the ES2021 variant but
// targeting ES2016, so async, class fields, and "super" capture must all be
// lowered together.
func TestLowerStaticAsyncSuperES2016NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Derived extends Base {
					static test = async (key) => {
						return [
							await super.foo,
							await super[key],
							await ([super.foo] = [0]),
							await ([super[key]] = [0]),

							await (super.foo = 1),
							await (super[key] = 1),
							await (super.foo += 2),
							await (super[key] += 2),

							await ++super.foo,
							await ++super[key],
							await super.foo++,
							await super[key]++,

							await super.foo.name,
							await super[key].name,
							await super.foo?.name,
							await super[key]?.name,

							await super.foo(1, 2),
							await super[key](1, 2),
							await super.foo?.(1, 2),
							await super[key]?.(1, 2),

							await (() => super.foo)(),
							await (() => super[key])(),
							await (() => super.foo())(),
							await (() => super[key]())(),

							await super.foo` + "``" + `,
							await super[key]` + "``" + `,
						]
					}
				}

				// This covers a bug that caused a compiler crash
				let fn = async () => class extends Base {
					static a = super.a
					static b = () => super.b
					static c() { return super.c }
					static d() { return () => super.d }
				}

				// This covers a bug that generated bad code
				class Derived2 extends Base {
					static async a() { return class { [super.foo] = 123 } }
					static b = async () => class { [super.foo] = 123 }
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: every "super" access form in a *synchronous* static arrow
// field of a derived class, targeting ES2021.
func TestLowerStaticSuperES2021NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Derived extends Base {
					static test = key => {
						return [
							super.foo,
							super[key],
							([super.foo] = [0]),
							([super[key]] = [0]),

							(super.foo = 1),
							(super[key] = 1),
							(super.foo += 2),
							(super[key] += 2),

							++super.foo,
							++super[key],
							super.foo++,
							super[key]++,

							super.foo.name,
							super[key].name,
							super.foo?.name,
							super[key]?.name,

							super.foo(1, 2),
							super[key](1, 2),
							super.foo?.(1, 2),
							super[key]?.(1, 2),

							(() => super.foo)(),
							(() => super[key])(),
							(() => super.foo())(),
							(() => super[key]())(),

							super.foo` + "``" + `,
							super[key]` + "``" + `,
						]
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2021),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: same synchronous static-arrow "super" matrix as the ES2021
// variant but targeting ES2016.
func TestLowerStaticSuperES2016NoBundle(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				class Derived extends Base {
					static test = key => {
						return [
							super.foo,
							super[key],
							([super.foo] = [0]),
							([super[key]] = [0]),

							(super.foo = 1),
							(super[key] = 1),
							(super.foo += 2),
							(super[key] += 2),

							++super.foo,
							++super[key],
							super.foo++,
							super[key]++,

							super.foo.name,
							super[key].name,
							super.foo?.name,
							super[key]?.name,

							super.foo(1, 2),
							super[key](1, 2),
							super.foo?.(1, 2),
							super[key]?.(1, 2),

							(() => super.foo)(),
							(() => super[key])(),
							(() => super.foo())(),
							(() => super[key]())(),

							super.foo` + "``" + `,
							super[key]` + "``" + `,
						]
					}
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
	})
}
// Snapshot test: "super" property *reads* through async arrows nested at
// various depths inside methods and class fields, lowered to ES2016. The
// "outer" module checks that super helpers are not hoisted into an
// enclosing async IIFE.
func TestLowerAsyncArrowSuperES2016(t *testing.T) {
	lower_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export { default as foo1 } from "./foo1"
				export { default as foo2 } from "./foo2"
				export { default as foo3 } from "./foo3"
				export { default as foo4 } from "./foo4"
				export { default as bar1 } from "./bar1"
				export { default as bar2 } from "./bar2"
				export { default as bar3 } from "./bar3"
				export { default as bar4 } from "./bar4"
				export { default as baz1 } from "./baz1"
				export { default as baz2 } from "./baz2"
				import "./outer"
			`,
			"/foo1.js": `export default class extends x { foo1() { return async () => super.foo('foo1') } }`,
			"/foo2.js": `export default class extends x { foo2() { return async () => () => super.foo('foo2') } }`,
			"/foo3.js": `export default class extends x { foo3() { return () => async () => super.foo('foo3') } }`,
			"/foo4.js": `export default class extends x { foo4() { return async () => async () => super.foo('foo4') } }`,
			"/bar1.js": `export default class extends x { bar1 = async () => super.foo('bar1') }`,
			"/bar2.js": `export default class extends x { bar2 = async () => () => super.foo('bar2') }`,
			"/bar3.js": `export default class extends x { bar3 = () => async () => super.foo('bar3') }`,
			"/bar4.js": `export default class extends x { bar4 = async () => async () => super.foo('bar4') }`,
			"/baz1.js": `export default class extends x { async baz1() { return () => super.foo('baz1') } }`,
			"/baz2.js": `export default class extends x { async baz2() { return () => () => super.foo('baz2') } }`,
			"/outer.js": `
				// Helper functions for "super" shouldn't be inserted into this outer function
				export default (async function () {
					class y extends z {
						foo = async () => super.foo()
					}
					await new y().foo()()
				})()
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:                  config.ModeBundle,
			UnsupportedJSFeatures: es(2016),
			AbsOutputFile:         "/out.js",
		},
	})
}
func TestLowerAsyncArrowSuperSetterES2016(t *testing.T) {
lower_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export { default as foo1 } from "./foo1"
export { default as foo2 } from "./foo2"
export { default as foo3 } from "./foo3"
export { default as foo4 } from "./foo4"
export { default as bar1 } from "./bar1"
export { default as bar2 } from "./bar2"
export { default as bar3 } from "./bar3"
export { default as bar4 } from "./bar4"
export { default as baz1 } from "./baz1"
export { default as baz2 } from "./baz2"
import "./outer"
`,
"/foo1.js": `export default class extends x { foo1() { return async () => super.foo = 'foo1' } }`,
"/foo2.js": `export default class extends x { foo2() { return async () => () => super.foo = 'foo2' } }`,
"/foo3.js": `export default class extends x { foo3() { return () => async () => super.foo = 'foo3' } }`,
"/foo4.js": `export default class extends x { foo4() { return async () => async () => super.foo = 'foo4' } }`,
"/bar1.js": `export default class extends x { bar1 = async () => super.foo = 'bar1' }`,
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_test.go | internal/bundler_tests/bundler_test.go | package bundler_tests
// Bundling test results are stored in snapshot files, located in the
// "snapshots" directory. This allows test results to be updated easily without
// manually rewriting all of the expected values. To update the tests run
// "UPDATE_SNAPSHOTS=1 make test" and commit the updated values. Make sure to
// inspect the diff to ensure the expected values are valid.
import (
"fmt"
"io/ioutil"
"os"
"path"
"sort"
"strings"
"sync"
"testing"
"github.com/evanw/esbuild/internal/bundler"
"github.com/evanw/esbuild/internal/cache"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/linker"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/test"
)
// es returns the set of JS features that are unsupported when targeting the
// given ECMAScript version (e.g. es(2016) for a "--target=es2016" build).
// Tests pass the result to config.Options.UnsupportedJSFeatures to exercise
// syntax lowering.
func es(version int) compat.JSFeature {
	return compat.UnsupportedJSFeatures(map[compat.Engine]compat.Semver{
		compat.ES: {Parts: []int{version}},
	})
}
// assertLog renders every log message to text (with default output options
// and no terminal colors) and checks that the concatenation matches the
// expected string, reporting a diff on mismatch.
func assertLog(t *testing.T, msgs []logger.Msg, expected string) {
	t.Helper()
	var rendered strings.Builder
	for i := range msgs {
		rendered.WriteString(msgs[i].String(logger.OutputOptions{}, logger.TerminalInfo{}))
	}
	test.AssertEqualWithDiff(t, rendered.String(), expected)
}
// hasErrors reports whether at least one message in the log is an error
// (warnings and notes don't count).
func hasErrors(msgs []logger.Msg) bool {
	for i := range msgs {
		if msgs[i].Kind == logger.Error {
			return true
		}
	}
	return false
}
// bundled describes a single bundler test case: an in-memory file system, the
// entry points to bundle, the bundler options, and the log output that the
// scan and compile phases are expected to produce.
type bundled struct {
	files              map[string]string    // virtual file system: absolute path -> file contents
	entryPaths         []string             // simple entry points (input path only)
	entryPathsAdvanced []bundler.EntryPoint // entry points that need extra per-entry settings
	expectedScanLog    string               // expected log text from the scan phase ("" means no messages)
	expectedCompileLog string               // expected log text from the compile phase ("" means no messages)
	options            config.Options       // bundler configuration for this test
	debugLogs          bool                 // if true, also keep verbose/debug log messages
	absWorkingDir      string               // working directory ("" defaults to "/" or "C:\")
}
// suite groups the snapshot state for one test file. Generated output is
// compared against (or, with UPDATE_SNAPSHOTS set, written to) the file
// "snapshots/snapshots_<name>.txt".
type suite struct {
	expectedSnapshots  map[string]string // snapshots loaded from disk, keyed by test name
	generatedSnapshots map[string]string // snapshots produced by this run, keyed by test name
	name               string            // suite name, used to derive the snapshot file path
	path               string            // snapshot file path; "" doubles as the "not yet initialized" flag
	mutex              sync.Mutex        // guards all fields; tests can run concurrently
}
// expectBundled runs the test case twice: once against a mock Unix file
// system, then once against a mock Windows file system with every Unix-style
// path in the test case converted to a Windows-style path. Slices and maps
// inside args are copied before mutation so the Unix run's inputs are not
// disturbed.
func (s *suite) expectBundled(t *testing.T, args bundled) {
	t.Helper()
	s.__expectBundledImpl(t, args, fs.MockUnix)
	// Handle conversion to Windows-style paths
	{
		// Convert the keys of the virtual file system
		files := make(map[string]string)
		for k, v := range args.files {
			files[unix2win(k)] = v
		}
		args.files = files
		// Copy the slice before mutating it in place
		args.entryPaths = append([]string{}, args.entryPaths...)
		for i, entry := range args.entryPaths {
			args.entryPaths[i] = unix2win(entry)
		}
		args.absWorkingDir = unix2win(args.absWorkingDir)
		args.options.InjectPaths = append([]string{}, args.options.InjectPaths...)
		for i, absPath := range args.options.InjectPaths {
			args.options.InjectPaths[i] = unix2win(absPath)
		}
		// Aliases may map to package names or absolute paths; only
		// absolute paths (leading "/") need converting
		aliases := make(map[string]string)
		for k, v := range args.options.PackageAliases {
			if strings.HasPrefix(v, "/") {
				v = unix2win(v)
			}
			aliases[k] = v
		}
		args.options.PackageAliases = aliases
		replace := make(map[string]bool)
		for k, v := range args.options.ExternalSettings.PostResolve.Exact {
			replace[unix2win(k)] = v
		}
		args.options.ExternalSettings.PostResolve.Exact = replace
		// Convert the remaining absolute path options
		args.options.AbsOutputFile = unix2win(args.options.AbsOutputFile)
		args.options.AbsOutputBase = unix2win(args.options.AbsOutputBase)
		args.options.AbsOutputDir = unix2win(args.options.AbsOutputDir)
		args.options.TSConfigPath = unix2win(args.options.TSConfigPath)
	}
	s.__expectBundledImpl(t, args, fs.MockWindows)
}
// expectBundledUnix is like expectBundled but only runs the test case against
// the mock Unix file system (for cases that are not meaningful on Windows).
func (s *suite) expectBundledUnix(t *testing.T, args bundled) {
	t.Helper()
	s.__expectBundledImpl(t, args, fs.MockUnix)
}
// expectBundledWindows is like expectBundled but only runs the test case
// against the mock Windows file system. Note that args must already use
// Windows-style paths; no conversion is applied here.
func (s *suite) expectBundledWindows(t *testing.T, args bundled) {
	t.Helper()
	s.__expectBundledImpl(t, args, fs.MockWindows)
}
// Don't call this directly. Call the helpers above instead.
//
// Runs one bundler test case against the given mock file system kind inside a
// "Unix" or "Windows" subtest, then compares the concatenated output files
// (and the metafile, if any) against the stored snapshot for the parent
// test's name.
func (s *suite) __expectBundledImpl(t *testing.T, args bundled, fsKind fs.MockKind) {
	t.Helper()
	testName := t.Name()
	subName := "Unix"
	if fsKind == fs.MockWindows {
		subName = "Windows"
	}
	t.Run(subName, func(t *testing.T) {
		t.Helper()
		// Prepare the options
		if args.options.ExtensionOrder == nil {
			args.options.ExtensionOrder = []string{".tsx", ".ts", ".jsx", ".js", ".css", ".json"}
		}
		if args.options.AbsOutputFile != "" {
			// Derive the output directory from the output file
			if fsKind == fs.MockWindows {
				args.options.AbsOutputDir = unix2win(path.Dir(win2unix(args.options.AbsOutputFile)))
			} else {
				args.options.AbsOutputDir = path.Dir(args.options.AbsOutputFile)
			}
		}
		if args.options.Mode == config.ModeBundle || (args.options.Mode == config.ModeConvertFormat && args.options.OutputFormat == config.FormatIIFE) {
			// Apply this default to all tests since it was not configurable when the tests were written
			args.options.TreeShaking = true
		}
		if args.options.Mode == config.ModeBundle && args.options.OutputFormat == config.FormatPreserve {
			// The format can't be "preserve" while bundling
			args.options.OutputFormat = config.FormatESModule
		}
		logKind := logger.DeferLogNoVerboseOrDebug
		if args.debugLogs {
			logKind = logger.DeferLogAll
		}
		// Build the entry point list from both the simple and advanced forms
		entryPoints := make([]bundler.EntryPoint, 0, len(args.entryPaths)+len(args.entryPathsAdvanced))
		for _, path := range args.entryPaths {
			entryPoints = append(entryPoints, bundler.EntryPoint{InputPath: path})
		}
		entryPoints = append(entryPoints, args.entryPathsAdvanced...)
		if args.absWorkingDir == "" {
			if fsKind == fs.MockWindows {
				args.absWorkingDir = "C:\\"
			} else {
				args.absWorkingDir = "/"
			}
		}
		if args.options.AbsOutputDir == "" {
			args.options.AbsOutputDir = args.absWorkingDir // Match the behavior of the API in this case
		}
		// Run the bundler
		log := logger.NewDeferLog(logKind, nil)
		caches := cache.MakeCacheSet()
		mockFS := fs.MockFS(args.files, fsKind, args.absWorkingDir)
		args.options.OmitRuntimeForTests = true
		bundle := bundler.ScanBundle(config.BuildCall, log, mockFS, caches, entryPoints, args.options, nil)
		msgs := log.Done()
		assertLog(t, msgs, args.expectedScanLog)
		// Stop now if there were any errors during the scan
		if hasErrors(msgs) {
			return
		}
		// Compile with a fresh log so scan and compile messages are checked separately
		log = logger.NewDeferLog(logKind, nil)
		results, metafileJSON := bundle.Compile(log, nil, nil, linker.Link)
		msgs = log.Done()
		assertLog(t, msgs, args.expectedCompileLog)
		// Stop now if there were any errors during the compile
		if hasErrors(msgs) {
			return
		}
		// Don't include source maps in results since they are just noise. Source
		// map validity is tested separately in a test that uses Mozilla's source
		// map parsing library.
		generated := ""
		for _, result := range results {
			if generated != "" {
				generated += "\n"
			}
			if fsKind == fs.MockWindows {
				// Normalize output paths so both platforms share one snapshot
				result.AbsPath = win2unix(result.AbsPath)
			}
			generated += fmt.Sprintf("---------- %s ----------\n%s", result.AbsPath, string(result.Contents))
		}
		if metafileJSON != "" {
			generated += fmt.Sprintf("---------- metafile.json ----------\n%s", metafileJSON)
		}
		s.compareSnapshot(t, testName, generated)
	})
}
// Snapshot files live in this directory, one file per suite. Entries within a
// file are separated by snapshotSplitter, with the test name on the first
// line of each entry.
const snapshotsDir = "snapshots"
const snapshotSplitter = "\n================================================================================\n"

// Global registry of all suites that ran, consulted by TestMain after the
// tests finish to either rewrite or validate the snapshot files.
var globalTestMutex sync.Mutex
var globalSuites map[*suite]bool
var globalUpdateSnapshots bool
// compareSnapshot records the generated output for testName and, unless the
// UPDATE_SNAPSHOTS environment variable is set, fails the test if the output
// doesn't match the snapshot stored on disk. The first call for a suite
// lazily loads its snapshot file and registers the suite globally so TestMain
// can post-process it.
func (s *suite) compareSnapshot(t *testing.T, testName string, generated string) {
	t.Helper()
	// Initialize the test suite during the first test
	s.mutex.Lock()
	defer s.mutex.Unlock()
	if s.path == "" { // an empty path doubles as the "not yet initialized" flag
		s.path = snapshotsDir + "/snapshots_" + s.name + ".txt"
		s.generatedSnapshots = make(map[string]string)
		s.expectedSnapshots = make(map[string]string)
		if contents, err := ioutil.ReadFile(s.path); err == nil {
			// Replacing CRLF with LF is necessary to fix tests in GitHub actions,
			// which for some reason check out the source code in CLRF mode
			for _, part := range strings.Split(strings.ReplaceAll(string(contents), "\r\n", "\n"), snapshotSplitter) {
				// Each entry is "<test name>\n<snapshot contents>"
				if newline := strings.IndexByte(part, '\n'); newline != -1 {
					key := part[:newline]
					value := part[newline+1:]
					s.expectedSnapshots[key] = value
				} else {
					s.expectedSnapshots[part] = ""
				}
			}
		}
		// Register this suite for post-processing in TestMain
		globalTestMutex.Lock()
		defer globalTestMutex.Unlock()
		if globalSuites == nil {
			globalSuites = make(map[*suite]bool)
		}
		globalSuites[s] = true
		_, globalUpdateSnapshots = os.LookupEnv("UPDATE_SNAPSHOTS")
	}
	// Check against the stored snapshot if present
	s.generatedSnapshots[testName] = generated
	if !globalUpdateSnapshots {
		if expected, ok := s.expectedSnapshots[testName]; ok {
			test.AssertEqualWithDiff(t, generated, expected)
		} else {
			// Print the generated output (in green) so it can be inspected
			t.Fatalf("No snapshot saved for %s\n%s%s%s",
				testName,
				logger.TerminalColors.Green,
				generated,
				logger.TerminalColors.Reset,
			)
		}
	}
}
// updateSnapshots rewrites this suite's snapshot file from the output that
// was generated during this run, with entries sorted by test name for a
// stable file layout. It panics if the file can't be written.
func (s *suite) updateSnapshots() {
	// Make sure the snapshot directory exists (the error is ignored since
	// the directory may already be there)
	os.Mkdir(snapshotsDir, 0755)
	names := make([]string, 0, len(s.generatedSnapshots))
	for name := range s.generatedSnapshots {
		names = append(names, name)
	}
	sort.Strings(names)
	var out strings.Builder
	for i, name := range names {
		if i != 0 {
			out.WriteString(snapshotSplitter)
		}
		out.WriteString(name)
		out.WriteByte('\n')
		out.WriteString(s.generatedSnapshots[name])
	}
	if err := ioutil.WriteFile(s.path, []byte(out.String()), 0644); err != nil {
		panic(err)
	}
}
// validateSnapshots reports whether every snapshot loaded from disk was
// produced by some test in this run, printing the suite path followed by one
// line per orphaned snapshot when any are found.
func (s *suite) validateSnapshots() bool {
	ok := true
	for name := range s.expectedSnapshots {
		if _, wasGenerated := s.generatedSnapshots[name]; wasGenerated {
			continue
		}
		if ok {
			// Print the file path once, before the first orphan
			fmt.Printf("%s\n", s.path)
		}
		fmt.Printf("  No test found for snapshot %s\n", name)
		ok = false
	}
	return ok
}
// TestMain runs all tests and then post-processes the registered snapshot
// suites: with UPDATE_SNAPSHOTS set it rewrites each suite's snapshot file,
// otherwise it fails the run (exit code 1) if any stored snapshot has no
// corresponding test.
func TestMain(m *testing.M) {
	code := m.Run()
	if globalSuites != nil {
		if globalUpdateSnapshots {
			for s := range globalSuites {
				s.updateSnapshots()
			}
		} else {
			for s := range globalSuites {
				if !s.validateSnapshots() {
					code = 1
				}
			}
		}
	}
	os.Exit(code)
}
// win2unix converts a Windows-style path into the Unix-style form used by
// snapshots: a leading "C:" drive prefix is dropped and every backslash
// becomes a forward slash.
func win2unix(p string) string {
	if strings.HasPrefix(p, `C:\`) {
		p = strings.TrimPrefix(p, "C:")
	}
	return strings.ReplaceAll(p, `\`, "/")
}
// unix2win converts a Unix-style path into the equivalent Windows-style path:
// every forward slash becomes a backslash, and absolute paths (now starting
// with a backslash) gain a "C:" drive prefix.
func unix2win(p string) string {
	converted := strings.ReplaceAll(p, "/", `\`)
	if strings.HasPrefix(converted, `\`) {
		return "C:" + converted
	}
	return converted
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_default_test.go | internal/bundler_tests/bundler_default_test.go | package bundler_tests
import (
"regexp"
"strings"
"testing"
"github.com/evanw/esbuild/internal/bundler"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/logger"
)
// default_suite collects the snapshot output for every test in this file. The
// expected output lives in "snapshots/snapshots_default.txt".
var default_suite = suite{
	name: "default",
}
// Bundle an ES module entry point that imports a named export from a second
// ES module. This is the simplest ESM-to-ESM bundling case.
func TestSimpleES6(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import {fn} from './foo'
				console.log(fn())
			`,
			"/foo.js": `
				export function fn() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Bundle an entry point that loads a CommonJS module (one that assigns to
// "module.exports") via a top-level require() call.
func TestSimpleCommonJS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				const fn = require('./foo')
				console.log(fn())
			`,
			"/foo.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// This test makes sure that require() calls are still recognized in nested
// scopes. It guards against bugs where require() calls are only recognized in
// the top-level module scope.
func TestNestedCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
function nestedScope() {
const fn = require('./foo')
console.log(fn())
}
nestedScope()
`,
"/foo.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// This test makes sure that NewExpressions containing require() calls aren't
// broken.
func TestNewExpressionCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
new (require("./foo.js")).Foo();
`,
"/foo.js": `
class Foo {}
module.exports = {Foo};
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestCommonJSFromES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
const {foo} = require('./foo')
console.log(foo(), bar())
const {bar} = require('./bar') // This should not be hoisted
`,
"/foo.js": `
export function foo() {
return 'foo'
}
`,
"/bar.js": `
export function bar() {
return 'bar'
}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestES6FromCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {foo} from './foo'
console.log(foo(), bar())
import {bar} from './bar' // This should be hoisted
`,
"/foo.js": `
exports.foo = function() {
return 'foo'
}
`,
"/bar.js": `
exports.bar = function() {
return 'bar'
}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// This test makes sure that ES6 imports are still recognized in nested
// scopes. It guards against bugs where require() calls are only recognized in
// the top-level module scope.
func TestNestedES6FromCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import {fn} from './foo'
(() => {
console.log(fn())
})()
`,
"/foo.js": `
exports.fn = function() {
return 123
}
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestExportFormsES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export default 123
export var v = 234
export let l = 234
export const c = 234
export {Class as C}
export function Fn() {}
export class Class {}
export * from './a'
export * as b from './b'
`,
"/a.js": "export const abc = undefined",
"/b.js": "export const xyz = null",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
},
})
}
func TestExportFormsIIFE(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export default 123
export var v = 234
export let l = 234
export const c = 234
export {Class as C}
export function Fn() {}
export class Class {}
export * from './a'
export * as b from './b'
`,
"/a.js": "export const abc = undefined",
"/b.js": "export const xyz = null",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatIIFE,
GlobalName: []string{"globalName"},
AbsOutputFile: "/out.js",
},
})
}
func TestExportFormsWithMinifyIdentifiersAndNoBundle(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/a.js": `
export default 123
export var varName = 234
export let letName = 234
export const constName = 234
function Func2() {}
class Class2 {}
export {Class as Cls, Func2 as Fn2, Class2 as Cls2}
export function Func() {}
export class Class {}
export * from './a'
export * as fromB from './b'
`,
"/b.js": "export default function() {}",
"/c.js": "export default function foo() {}",
"/d.js": "export default class {}",
"/e.js": "export default class Foo {}",
},
entryPaths: []string{
"/a.js",
"/b.js",
"/c.js",
"/d.js",
"/e.js",
},
options: config.Options{
MinifyIdentifiers: true,
AbsOutputDir: "/out",
},
})
}
func TestImportFormsWithNoBundle(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import 'foo'
import {} from 'foo'
import * as ns from 'foo'
import {a, b as c} from 'foo'
import def from 'foo'
import def2, * as ns2 from 'foo'
import def3, {a2, b as c3} from 'foo'
const imp = [
import('foo'),
function nested() { return import('foo') },
]
console.log(ns, a, c, def, def2, ns2, def3, a2, c3, imp)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
AbsOutputFile: "/out.js",
},
})
}
func TestImportFormsWithMinifyIdentifiersAndNoBundle(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import 'foo'
import {} from 'foo'
import * as ns from 'foo'
import {a, b as c} from 'foo'
import def from 'foo'
import def2, * as ns2 from 'foo'
import def3, {a2, b as c3} from 'foo'
const imp = [
import('foo'),
function() { return import('foo') },
]
console.log(ns, a, c, def, def2, ns2, def3, a2, c3, imp)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
MinifyIdentifiers: true,
AbsOutputFile: "/out.js",
},
})
}
func TestExportFormsCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
require('./commonjs')
require('./c')
require('./d')
require('./e')
require('./f')
require('./g')
require('./h')
`,
"/commonjs.js": `
export default 123
export var v = 234
export let l = 234
export const c = 234
export {Class as C}
export function Fn() {}
export class Class {}
export * from './a'
export * as b from './b'
`,
"/a.js": "export const abc = undefined",
"/b.js": "export const xyz = null",
"/c.js": "export default class {}",
"/d.js": "export default class Foo {} Foo.prop = 123",
"/e.js": "export default function() {}",
"/f.js": "export default function foo() {} foo.prop = 123",
"/g.js": "export default async function() {}",
"/h.js": "export default async function foo() {} foo.prop = 123",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
// A chain of re-exports (entry -> foo -> bar, renaming at each hop) should
// resolve all the way down to the original symbol declared in "bar.js".
func TestExportChain(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export {b as a} from './foo'
			`,
			"/foo.js": `
				export {c as b} from './bar'
			`,
			"/bar.js": `
				export const c = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Re-exports that form a cycle within a single file (a -> b -> c -> d -> a)
// must be detected and reported as errors instead of looping forever during
// import resolution.
func TestExportInfiniteCycle1(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export {a as b} from './entry'
				export {b as c} from './entry'
				export {c as d} from './entry'
				export {d as a} from './entry'
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedCompileLog: `entry.js: ERROR: Detected cycle while resolving import "a"
entry.js: ERROR: Detected cycle while resolving import "b"
entry.js: ERROR: Detected cycle while resolving import "c"
entry.js: ERROR: Detected cycle while resolving import "d"
`,
	})
}
// Same idea as the single-file cycle test above, but with the re-export cycle
// split across two files. The cycle must still be detected, with one error
// reported per unresolvable import in each file.
func TestExportInfiniteCycle2(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				export {a as b} from './foo'
				export {c as d} from './foo'
			`,
			"/foo.js": `
				export {b as c} from './entry'
				export {d as a} from './entry'
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedCompileLog: `entry.js: ERROR: Detected cycle while resolving import "a"
entry.js: ERROR: Detected cycle while resolving import "c"
foo.js: ERROR: Detected cycle while resolving import "b"
foo.js: ERROR: Detected cycle while resolving import "d"
`,
	})
}
func TestJSXImportsCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
import {elem, frag} from './custom-react'
console.log(<div/>, <>fragment</>)
`,
"/custom-react.js": `
module.exports = {}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
JSX: config.JSXOptions{
Factory: config.DefineExpr{Parts: []string{"elem"}},
Fragment: config.DefineExpr{Parts: []string{"frag"}},
},
AbsOutputFile: "/out.js",
},
})
}
func TestJSXImportsES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
import {elem, frag} from './custom-react'
console.log(<div/>, <>fragment</>)
`,
"/custom-react.js": `
export function elem() {}
export function frag() {}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
JSX: config.JSXOptions{
Factory: config.DefineExpr{Parts: []string{"elem"}},
Fragment: config.DefineExpr{Parts: []string{"frag"}},
},
AbsOutputFile: "/out.js",
},
})
}
func TestJSXSyntaxInJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(<div/>)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedScanLog: `entry.js: ERROR: The JSX syntax extension is not currently enabled
NOTE: The esbuild loader for this file is currently set to "js" but it must be set to "jsx" to be able to parse JSX syntax. ` +
`You can use 'Loader: map[string]api.Loader{".js": api.LoaderJSX}' to do that.
`,
})
}
func TestJSXConstantFragments(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import './default'
import './null'
import './boolean'
import './number'
import './string-single-empty'
import './string-double-empty'
import './string-single-punctuation'
import './string-double-punctuation'
import './string-template'
`,
"/default.jsx": `console.log(<></>)`,
"/null.jsx": `console.log(<></>) // @jsxFrag null`,
"/boolean.jsx": `console.log(<></>) // @jsxFrag true`,
"/number.jsx": `console.log(<></>) // @jsxFrag 123`,
"/string-single-empty.jsx": `console.log(<></>) // @jsxFrag ''`,
"/string-double-empty.jsx": `console.log(<></>) // @jsxFrag ""`,
"/string-single-punctuation.jsx": `console.log(<></>) // @jsxFrag '['`,
"/string-double-punctuation.jsx": `console.log(<></>) // @jsxFrag "["`,
"/string-template.jsx": `console.log(<></>) // @jsxFrag ` + "``",
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
JSX: config.JSXOptions{
Fragment: config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("]")},
},
},
},
expectedScanLog: `string-template.jsx: WARNING: Invalid JSX fragment: ` + "``" + `
`,
})
}
func TestJSXAutomaticImportsCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
import {jsx, Fragment} from './custom-react'
console.log(<div jsx={jsx}/>, <><Fragment/></>)
`,
"/custom-react.js": `
module.exports = {}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
JSX: config.JSXOptions{
AutomaticRuntime: true,
},
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"react/jsx-runtime": true,
}},
},
AbsOutputFile: "/out.js",
},
})
}
func TestJSXAutomaticImportsES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.jsx": `
import {jsx, Fragment} from './custom-react'
console.log(<div jsx={jsx}/>, <><Fragment/></>)
`,
"/custom-react.js": `
export function jsx() {}
export function Fragment() {}
`,
},
entryPaths: []string{"/entry.jsx"},
options: config.Options{
Mode: config.ModeBundle,
JSX: config.JSXOptions{
AutomaticRuntime: true,
},
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"react/jsx-runtime": true,
}},
},
AbsOutputFile: "/out.js",
},
})
}
func TestJSXAutomaticSyntaxInJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log(<div/>)
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
JSX: config.JSXOptions{
AutomaticRuntime: true,
},
ExternalSettings: config.ExternalSettings{
PreResolve: config.ExternalMatchers{Exact: map[string]bool{
"react/jsx-runtime": true,
}},
},
AbsOutputFile: "/out.js",
},
expectedScanLog: `entry.js: ERROR: The JSX syntax extension is not currently enabled
NOTE: The esbuild loader for this file is currently set to "js" but it must be set to "jsx" to be able to parse JSX syntax. ` +
`You can use 'Loader: map[string]api.Loader{".js": api.LoaderJSX}' to do that.
`,
})
}
func TestNodeModules(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.js": `
import fn from 'demo-pkg'
console.log(fn())
`,
"/Users/user/project/node_modules/demo-pkg/index.js": `
module.exports = function() {
return 123
}
`,
},
entryPaths: []string{"/Users/user/project/src/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/Users/user/project/out.js",
},
})
}
func TestRequireChildDirCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.js": `
console.log(require('./dir'))
`,
"/Users/user/project/src/dir/index.js": `
module.exports = 123
`,
},
entryPaths: []string{"/Users/user/project/src/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestRequireChildDirES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.js": `
import value from './dir'
console.log(value)
`,
"/Users/user/project/src/dir/index.js": `
export default 123
`,
},
entryPaths: []string{"/Users/user/project/src/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestRequireParentDirCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/dir/entry.js": `
console.log(require('..'))
`,
"/Users/user/project/src/index.js": `
module.exports = 123
`,
},
entryPaths: []string{"/Users/user/project/src/dir/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestRequireParentDirES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/dir/entry.js": `
import value from '..'
console.log(value)
`,
"/Users/user/project/src/index.js": `
export default 123
`,
},
entryPaths: []string{"/Users/user/project/src/dir/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportMissingES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import fn, {x as a, y as b} from './foo'
console.log(fn(a, b))
`,
"/foo.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: ERROR: No matching export in "foo.js" for import "default"
entry.js: ERROR: No matching export in "foo.js" for import "y"
`,
})
}
func TestImportMissingUnusedES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import fn, {x as a, y as b} from './foo'
`,
"/foo.js": `
export const x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: ERROR: No matching export in "foo.js" for import "default"
entry.js: ERROR: No matching export in "foo.js" for import "y"
`,
})
}
func TestImportMissingCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import fn, {x as a, y as b} from './foo'
console.log(fn(a, b))
`,
"/foo.js": `
exports.x = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
})
}
func TestImportMissingNeitherES6NorCommonJS(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/named.js": `
import fn, {x as a, y as b} from './foo'
console.log(fn(a, b))
`,
"/star.js": `
import * as ns from './foo'
console.log(ns.default(ns.x, ns.y))
`,
"/star-capture.js": `
import * as ns from './foo'
console.log(ns)
`,
"/bare.js": `
import './foo'
`,
"/require.js": `
console.log(require('./foo'))
`,
"/import.js": `
console.log(import('./foo'))
`,
"/foo.js": `
console.log('no exports here')
`,
},
entryPaths: []string{
"/named.js",
"/star.js",
"/star-capture.js",
"/bare.js",
"/require.js",
"/import.js",
},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
},
expectedCompileLog: `named.js: WARNING: Import "x" will always be undefined because the file "foo.js" has no exports
named.js: WARNING: Import "y" will always be undefined because the file "foo.js" has no exports
star.js: WARNING: Import "x" will always be undefined because the file "foo.js" has no exports
star.js: WARNING: Import "y" will always be undefined because the file "foo.js" has no exports
`,
})
}
func TestExportMissingES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
import * as ns from './foo'
console.log(ns)
`,
"/foo.js": `
export {buton} from './bar'
`,
"/bar.js": `
export const button = 123
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
AbsOutputFile: "/out.js",
},
expectedCompileLog: `foo.js: ERROR: No matching export in "bar.js" for import "buton"
bar.js: NOTE: Did you mean to import "button" instead?
`,
})
}
// Importing the bare path '.' should resolve to the index file of the
// importing file's own directory.
func TestDotImport(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import {x} from '.'
				console.log(x)
			`,
			"/index.js": `
				exports.x = 123
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// A require() whose argument is a template literal without substitutions
// should be bundled exactly like a require() with a plain string literal.
func TestRequireWithTemplate(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				console.log(require('./b'))
				console.log(require(` + "`./b`" + `))
			`,
			"/b.js": `
				exports.x = 123
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestDynamicImportWithTemplateIIFE is the dynamic-import analog of
// TestRequireWithTemplate: import() with a string literal and with a
// substitution-free template literal, bundled to the IIFE output format.
func TestDynamicImportWithTemplateIIFE(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import('./b').then(ns => console.log(ns))
				import(` + "`./b`" + `).then(ns => console.log(ns))
			`,
			"/b.js": `
				exports.x = 123
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatIIFE,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireAndDynamicImportInvalidTemplate feeds require() and import()
// template literals that cannot be treated as static paths (tagged templates
// and templates with substitutions), both inside and outside try/catch, to
// check how such non-analyzable arguments are handled during bundling.
func TestRequireAndDynamicImportInvalidTemplate(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				require(tag` + "`./b`" + `)
				require(` + "`./${b}`" + `)
				try {
					require(tag` + "`./b`" + `)
					require(` + "`./${b}`" + `)
				} catch {
				}
				(async () => {
					import(tag` + "`./b`" + `)
					import(` + "`./${b}`" + `)
					await import(tag` + "`./b`" + `)
					await import(` + "`./${b}`" + `)
					try {
						import(tag` + "`./b`" + `)
						import(` + "`./${b}`" + `)
						await import(tag` + "`./b`" + `)
						await import(` + "`./${b}`" + `)
					} catch {
					}
				})()
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestDynamicImportWithExpressionCJS converts (not bundles) a file containing
// import() of a string literal and of an arbitrary expression into the
// CommonJS output format.
func TestDynamicImportWithExpressionCJS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import('foo')
				import(foo())
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:          config.ModeConvertFormat,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestMinifiedDynamicImportWithExpressionCJS is the same scenario as
// TestDynamicImportWithExpressionCJS but with whitespace minification enabled.
func TestMinifiedDynamicImportWithExpressionCJS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import('foo')
				import(foo())
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:             config.ModeConvertFormat,
			OutputFormat:     config.FormatCommonJS,
			AbsOutputFile:    "/out.js",
			MinifyWhitespace: true,
		},
	})
}
// TestConditionalRequireResolve passes nested conditional expressions to
// require.resolve() on the node platform, with "a", "b", and "c" marked as
// external, so each conditional branch must be handled individually.
func TestConditionalRequireResolve(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				require.resolve(x ? 'a' : y ? 'b' : 'c')
				require.resolve(x ? y ? 'a' : 'b' : c)
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			Platform:      config.PlatformNode,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{Exact: map[string]bool{
					"a": true,
					"b": true,
					"c": true,
				}},
			},
		},
	})
}
// TestConditionalRequire passes conditional expressions to require() where
// some branches are external packages ("a", "c") and one branch is a local
// bundled file ("./b"); each string branch must resolve independently.
func TestConditionalRequire(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				require(x ? 'a' : y ? './b' : 'c')
				require(x ? y ? 'a' : './b' : c)
			`,
			"/b.js": `
				exports.foo = 213
			`,
		},
		entryPaths: []string{"/a.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{Exact: map[string]bool{
					"a": true,
					"c": true,
				}},
			},
		},
	})
}
// TestConditionalImport is the import() analog of TestConditionalRequire:
// conditional path expressions mixing external packages ("a", "c") with a
// bundled local file ("./import"), across two entry points.
func TestConditionalImport(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/a.js": `
				import(x ? 'a' : y ? './import' : 'c')
			`,
			"/b.js": `
				import(x ? y ? 'a' : './import' : c)
			`,
			"/import.js": `
				exports.foo = 213
			`,
		},
		entryPaths: []string{"/a.js", "/b.js"},
		options: config.Options{
			Mode:         config.ModeBundle,
			AbsOutputDir: "/out",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{Exact: map[string]bool{
					"a": true,
					"c": true,
				}},
			},
		},
	})
}
// TestRequireBadArgumentCount calls require() with zero and with two
// arguments (both inside and outside try/catch); neither form is a valid
// static require that can be bundled.
func TestRequireBadArgumentCount(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				require()
				require("a", "b")
				try {
					require()
					require("a", "b")
				} catch {
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireJson requires a ".json" file containing booleans, numbers, and
// a nested array, exercising the JSON loader through require().
func TestRequireJson(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				console.log(require('./test.json'))
			`,
			"/test.json": `
				{
					"a": true,
					"b": 123,
					"c": [null]
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireTxt requires a ".txt" file, exercising the text loader through
// require().
func TestRequireTxt(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				console.log(require('./test.txt'))
			`,
			"/test.txt": `This is a test.`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireBadExtension requires a file with an extension (".bad") that has
// no configured loader and expects the corresponding scan-time error.
func TestRequireBadExtension(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				console.log(require('./test.bad'))
			`,
			"/test.bad": `This is a test.`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: No loader is configured for ".bad" files: test.bad
`,
	})
}
// TestFalseRequire shadows "require" with a function parameter of the same
// name; the call through the shadowed name is not a real require, so the
// referenced file must not be pulled into the bundle as a dependency.
func TestFalseRequire(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				(require => require('/test.txt'))()
			`,
			"/test.txt": `This is a test.`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireWithoutCall assigns "require" to a variable at the top level and
// then calls through that variable, an indirect use that cannot be statically
// bundled.
func TestRequireWithoutCall(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				const req = require
				req('./entry')
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestNestedRequireWithoutCall is TestRequireWithoutCall with the indirect
// require performed inside a nested scope (an IIFE) instead of at top level.
func TestNestedRequireWithoutCall(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				(() => {
					const req = require
					req('./entry')
				})()
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireWithCallInsideTry covers a workaround used by the "debug"
// library: require() of an optional dependency wrapped in try/catch so that
// a resolution failure can be caught instead of failing the build.
func TestRequireWithCallInsideTry(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				try {
					const supportsColor = require('supports-color');
					if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
						exports.colors = [];
					}
				} catch (error) {
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequireWithoutCallInsideTry covers a workaround used by the "moment"
// library: "require" is aliased to a local variable inside try/catch and the
// call goes through the alias, so it is not a statically bundleable require.
func TestRequireWithoutCallInsideTry(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				try {
					oldLocale = globalLocale._abbr;
					var aliasedRequire = require;
					aliasedRequire('./locale/' + name);
					getSetGlobalLocale(oldLocale);
				} catch (e) {}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestRequirePropertyAccessCommonJS accesses and deletes entries on
// require.cache and require.extensions while targeting node with CommonJS
// output; as the embedded comment notes, this should not produce warnings
// for that format.
func TestRequirePropertyAccessCommonJS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				// These shouldn't warn since the format is CommonJS
				console.log(Object.keys(require.cache))
				console.log(Object.keys(require.extensions))
				delete require.cache['fs']
				delete require.extensions['.json']
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			Platform:      config.PlatformNode,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestAwaitImportInsideTry covers the "await import(name)" inside try/catch
// pattern, where the import path is a runtime value rather than a static
// string.
func TestAwaitImportInsideTry(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				async function main(name) {
					try {
						return await import(name)
					} catch {
					}
				}
				main('fs')
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestImportInsideTry shows that, unlike require(), an unresolvable static
// import() inside try/catch is still a bundle-time error; the expected error
// message suggests marking the path external or using ".catch()" instead.
func TestImportInsideTry(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				let x
				try {
					x = import('nope1')
					x = await import('nope2')
				} catch {
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Could not resolve "nope1"
NOTE: You can mark the path "nope1" as external to exclude it from the bundle, which will remove this error and leave the unresolved path in the bundle. You can also add ".catch()" here to handle this failure at run-time instead of bundle-time.
`,
	})
}
// TestImportThenCatch covers the "import(name).catch()" workaround pattern:
// dynamic imports of a runtime value whose promise rejection is handled via
// .then(pass, fail), .then().catch(), or .catch().
func TestImportThenCatch(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import(name).then(pass, fail)
				import(name).then(pass).catch(fail)
				import(name).catch(fail)
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// TestSourceMap bundles an entry with a JS import and a text-loader import
// using the linked-with-comment source map mode. The text file is included
// because issue #2041 asked for text-loader data to appear in the source map.
func TestSourceMap(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {bar} from './bar'
				import data from './data.txt'
				function foo() { bar() }
				foo()
				console.log(data)
			`,
			"/Users/user/project/src/bar.js": `
				export function bar() { throw new Error('test') }
			`,
			// Someone wanted data from the text loader to show up in the source map: https://github.com/evanw/esbuild/issues/2041
			"/Users/user/project/src/data.txt": `#2041`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			SourceMap:     config.SourceMapLinkedWithComment,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// This test covers a bug where a "var" in a nested scope did not correctly
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_packagejson_test.go | internal/bundler_tests/bundler_packagejson_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// packagejson_suite is the shared suite that every test in this file runs
// its bundling expectations through (named "packagejson").
var packagejson_suite = suite{
	name: "packagejson",
}
// TestPackageJsonMain imports a package whose package.json points "main" at
// a non-default file name ("./custom-main.js").
func TestPackageJsonMain(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./custom-main.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/custom-main.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBadMain imports a package whose "main" field points at a
// file that does not exist, while an "index.js" is present as a fallback
// candidate.
func TestPackageJsonBadMain(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./does-not-exist.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonSyntaxErrorComment expects an error when a package.json
// contains a single-line comment, which plain JSON does not allow.
func TestPackageJsonSyntaxErrorComment(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					// Single-line comment
					"a": 1
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
		expectedScanLog: `Users/user/project/node_modules/demo-pkg/package.json: ERROR: JSON does not support comments
`,
	})
}
// TestPackageJsonSyntaxErrorTrailingComma expects an error when a
// package.json contains a trailing comma, which plain JSON does not allow.
func TestPackageJsonSyntaxErrorTrailingComma(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"a": 1,
					"b": 2,
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
		expectedScanLog: `Users/user/project/node_modules/demo-pkg/package.json: ERROR: JSON does not support trailing commas
`,
	})
}
// TestPackageJsonModule imports a package that declares both "main" (CJS)
// and "module" (ESM) entry points, checking which one an ESM import selects.
func TestPackageJsonModule(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./main.esm.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.esm.js": `
				export default function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserString uses the "browser" field in its string form
// ("./browser", extension-less) as the package entry point.
func TestPackageJsonBrowserString(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": "./browser"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/browser.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapRelativeToRelative uses the "browser" field in
// map form to substitute relative files with other relative files — both the
// package's "main" file and a nested "./lib" helper it requires.
func TestPackageJsonBrowserMapRelativeToRelative(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main",
					"browser": {
						"./main.js": "./main-browser",
						"./lib/util.js": "./lib/util-browser"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				const util = require('./lib/util')
				module.exports = function() {
					return ['main', util]
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main-browser.js": `
				const util = require('./lib/util')
				module.exports = function() {
					return ['main-browser', util]
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/lib/util.js": `
				module.exports = 'util'
			`,
			"/Users/user/project/node_modules/demo-pkg/lib/util-browser.js": `
				module.exports = 'util-browser'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapRelativeToModule uses the "browser" map to
// substitute a relative file ("./util.js") with an entirely different
// package ("util-browser") from node_modules.
func TestPackageJsonBrowserMapRelativeToModule(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main",
					"browser": {
						"./util.js": "util-browser"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				const util = require('./util')
				module.exports = function() {
					return ['main', util]
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/util.js": `
				module.exports = 'util'
			`,
			"/Users/user/project/node_modules/util-browser/index.js": `
				module.exports = 'util-browser'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapRelativeDisabled uses the "browser" map to disable
// a relative file ("./util-node.js": false) that would otherwise pull in the
// node-only "util" package.
func TestPackageJsonBrowserMapRelativeDisabled(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main",
					"browser": {
						"./util-node.js": false
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				const util = require('./util-node')
				module.exports = function(obj) {
					return util.inspect(obj)
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/util-node.js": `
				module.exports = require('util')
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapModuleToRelative uses the "browser" map to
// substitute a bare package specifier ("node-pkg") with a relative file
// inside the mapping package.
func TestPackageJsonBrowserMapModuleToRelative(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": {
						"node-pkg": "./node-pkg-browser"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/node-pkg-browser.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				const fn = require('node-pkg')
				module.exports = function() {
					return fn()
				}
			`,
			"/Users/user/project/node_modules/node-pkg/index.js": `
				module.exports = function() {
					return 234
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapModuleToModule uses the "browser" map to
// substitute one bare package specifier ("node-pkg") with another package
// ("node-pkg-browser").
func TestPackageJsonBrowserMapModuleToModule(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": {
						"node-pkg": "node-pkg-browser"
					}
				}
			`,
			"/Users/user/project/node_modules/node-pkg-browser/index.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				const fn = require('node-pkg')
				module.exports = function() {
					return fn()
				}
			`,
			"/Users/user/project/node_modules/node-pkg/index.js": `
				module.exports = function() {
					return 234
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapModuleDisabled uses the "browser" map to disable
// a bare package specifier ("node-pkg": false) that the package requires.
func TestPackageJsonBrowserMapModuleDisabled(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": {
						"node-pkg": false
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				const fn = require('node-pkg')
				module.exports = function() {
					return fn()
				}
			`,
			"/Users/user/project/node_modules/node-pkg/index.js": `
				module.exports = function() {
					return 234
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapNativeModuleDisabled uses the "browser" map to
// disable a node built-in module ("fs": false) that the package requires.
func TestPackageJsonBrowserMapNativeModuleDisabled(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": {
						"fs": false
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				const fs = require('fs')
				module.exports = function() {
					return fs.readFile()
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserMapAvoidMissing models the "component-classes"
// pattern: the code requires "indexof" inside a try/catch with a fallback,
// and the "browser" map redirects "indexof" to the present
// "component-indexof" package so the missing one need not exist.
func TestPackageJsonBrowserMapAvoidMissing(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import 'component-classes'
			`,
			"/Users/user/project/node_modules/component-classes/package.json": `
				{
					"browser": {
						"indexof": "component-indexof"
					}
				}
			`,
			"/Users/user/project/node_modules/component-classes/index.js": `
				try {
					var index = require('indexof');
				} catch (err) {
					var index = require('component-indexof');
				}
			`,
			"/Users/user/project/node_modules/component-indexof/index.js": `
				module.exports = function() {
					return 234
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserOverModuleBrowser imports a package declaring all of
// "main", "module", and a string "browser" field, bundled for the browser
// platform, to pin which field wins in that configuration.
func TestPackageJsonBrowserOverModuleBrowser(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./main.esm.js",
					"browser": "./main.browser.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.esm.js": `
				export default function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			Platform:      config.PlatformBrowser,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserOverMainNode is the node-platform counterpart of
// TestPackageJsonBrowserOverModuleBrowser: the same main/module/browser
// package bundled with Platform set to node instead of browser.
func TestPackageJsonBrowserOverMainNode(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./main.esm.js",
					"browser": "./main.browser.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.esm.js": `
				export default function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			Platform:      config.PlatformNode,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserWithModuleBrowser combines "main", "module", and a
// map-form "browser" field that remaps both entry files to browser variants,
// bundled for the browser platform.
func TestPackageJsonBrowserWithModuleBrowser(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./main.esm.js",
					"browser": {
						"./main.js": "./main.browser.js",
						"./main.esm.js": "./main.browser.esm.js"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.esm.js": `
				export default function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.esm.js": `
				export default function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			Platform:      config.PlatformBrowser,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserWithMainNode is the node-platform counterpart of
// TestPackageJsonBrowserWithModuleBrowser: same map-form "browser" field,
// but bundled with Platform set to node.
func TestPackageJsonBrowserWithMainNode(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import fn from 'demo-pkg'
				console.log(fn())
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./main.esm.js",
					"browser": {
						"./main.js": "./main.browser.js",
						"./main.esm.js": "./main.browser.esm.js"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.esm.js": `
				export default function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.js": `
				module.exports = function() {
					return 123
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.esm.js": `
				export default function() {
					return 123
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			Platform:      config.PlatformNode,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserNodeModulesNoExt checks extension handling in
// "browser" map keys for node_modules subpath imports: one mapping omits the
// ".js" extension ("./no-ext") and one includes it ("./ext.js"), and the
// entry imports each target both with and without the extension.
func TestPackageJsonBrowserNodeModulesNoExt(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {browser as a} from 'demo-pkg/no-ext'
				import {node as b} from 'demo-pkg/no-ext.js'
				import {browser as c} from 'demo-pkg/ext'
				import {browser as d} from 'demo-pkg/ext.js'
				console.log(a)
				console.log(b)
				console.log(c)
				console.log(d)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": {
						"./no-ext": "./no-ext-browser.js",
						"./ext.js": "./ext-browser.js"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/no-ext.js": `
				export let node = 'node'
			`,
			"/Users/user/project/node_modules/demo-pkg/no-ext-browser.js": `
				export let browser = 'browser'
			`,
			"/Users/user/project/node_modules/demo-pkg/ext.js": `
				export let node = 'node'
			`,
			"/Users/user/project/node_modules/demo-pkg/ext-browser.js": `
				export let browser = 'browser'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserNodeModulesIndexNoExt is the directory-index variant
// of TestPackageJsonBrowserNodeModulesNoExt: the mapped targets are
// directories containing "index.js", imported with and without the explicit
// "/index.js" suffix.
func TestPackageJsonBrowserNodeModulesIndexNoExt(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {browser as a} from 'demo-pkg/no-ext'
				import {node as b} from 'demo-pkg/no-ext/index.js'
				import {browser as c} from 'demo-pkg/ext'
				import {browser as d} from 'demo-pkg/ext/index.js'
				console.log(a)
				console.log(b)
				console.log(c)
				console.log(d)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"browser": {
						"./no-ext": "./no-ext-browser/index.js",
						"./ext/index.js": "./ext-browser/index.js"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/no-ext/index.js": `
				export let node = 'node'
			`,
			"/Users/user/project/node_modules/demo-pkg/no-ext-browser/index.js": `
				export let browser = 'browser'
			`,
			"/Users/user/project/node_modules/demo-pkg/ext/index.js": `
				export let node = 'node'
			`,
			"/Users/user/project/node_modules/demo-pkg/ext-browser/index.js": `
				export let browser = 'browser'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserNoExt mirrors TestPackageJsonBrowserNodeModulesNoExt
// but the package lives inside the project source tree (imported via "./"
// relative paths) rather than inside node_modules.
func TestPackageJsonBrowserNoExt(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {browser as a} from './demo-pkg/no-ext'
				import {node as b} from './demo-pkg/no-ext.js'
				import {browser as c} from './demo-pkg/ext'
				import {browser as d} from './demo-pkg/ext.js'
				console.log(a)
				console.log(b)
				console.log(c)
				console.log(d)
			`,
			"/Users/user/project/src/demo-pkg/package.json": `
				{
					"browser": {
						"./no-ext": "./no-ext-browser.js",
						"./ext.js": "./ext-browser.js"
					}
				}
			`,
			"/Users/user/project/src/demo-pkg/no-ext.js": `
				export let node = 'node'
			`,
			"/Users/user/project/src/demo-pkg/no-ext-browser.js": `
				export let browser = 'browser'
			`,
			"/Users/user/project/src/demo-pkg/ext.js": `
				export let node = 'node'
			`,
			"/Users/user/project/src/demo-pkg/ext-browser.js": `
				export let browser = 'browser'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserIndexNoExt is the directory-index variant of
// TestPackageJsonBrowserNoExt: a source-tree package whose "browser" map
// targets are directories with "index.js" files.
func TestPackageJsonBrowserIndexNoExt(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {browser as a} from './demo-pkg/no-ext'
				import {node as b} from './demo-pkg/no-ext/index.js'
				import {browser as c} from './demo-pkg/ext'
				import {browser as d} from './demo-pkg/ext/index.js'
				console.log(a)
				console.log(b)
				console.log(c)
				console.log(d)
			`,
			"/Users/user/project/src/demo-pkg/package.json": `
				{
					"browser": {
						"./no-ext": "./no-ext-browser/index.js",
						"./ext/index.js": "./ext-browser/index.js"
					}
				}
			`,
			"/Users/user/project/src/demo-pkg/no-ext/index.js": `
				export let node = 'node'
			`,
			"/Users/user/project/src/demo-pkg/no-ext-browser/index.js": `
				export let browser = 'browser'
			`,
			"/Users/user/project/src/demo-pkg/ext/index.js": `
				export let node = 'node'
			`,
			"/Users/user/project/src/demo-pkg/ext-browser/index.js": `
				export let browser = 'browser'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserIssue2002A is a regression test for issue #2002: a
// "browser" remap of a package subpath ("pkg/sub" -> "./sub/foo.js") whose
// target then requires another package ("sub") resolved through node_modules
// via its "main" field.
// See https://github.com/evanw/esbuild/issues/2002
func TestPackageJsonBrowserIssue2002A(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `require('pkg/sub')`,
			"/Users/user/project/src/node_modules/pkg/package.json": `{
				"browser": {
					"./sub": "./sub/foo.js"
				}
			}`,
			"/Users/user/project/src/node_modules/pkg/sub/foo.js": `require('sub')`,
			"/Users/user/project/src/node_modules/sub/package.json": `{ "main": "./bar" }`,
			"/Users/user/project/src/node_modules/sub/bar.js":       `works()`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserIssue2002B is a variant of issue #2002 where the
// require('sub') from the remapped file is itself remapped by a second
// browser entry ("./sub/sub" -> "./sub/bar.js") within the same package.
func TestPackageJsonBrowserIssue2002B(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `require('pkg/sub')`,
			"/Users/user/project/src/node_modules/pkg/package.json": `{
				"browser": {
					"./sub": "./sub/foo.js",
					"./sub/sub": "./sub/bar.js"
				}
			}`,
			"/Users/user/project/src/node_modules/pkg/sub/foo.js": `require('sub')`,
			"/Users/user/project/src/node_modules/pkg/sub/bar.js": `works()`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonBrowserIssue2002C is a follow-up regression test (issue
// #2239): the browser map contains an extensioned entry "./sub/sub.js", and
// the require('sub') from the remapped file must still resolve to the real
// "sub" package's index.js rather than matching that entry.
// See https://github.com/evanw/esbuild/issues/2239
func TestPackageJsonBrowserIssue2002C(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `require('pkg/sub')`,
			"/Users/user/project/src/node_modules/pkg/package.json": `{
				"browser": {
					"./sub": "./sub/foo.js",
					"./sub/sub.js": "./sub/bar.js"
				}
			}`,
			"/Users/user/project/src/node_modules/pkg/sub/foo.js": `require('sub')`,
			"/Users/user/project/src/node_modules/sub/index.js":   `works()`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonDualPackageHazardImportOnly: a dual CJS/ESM package
// ("main" + "module") referenced only via an ESM import.
func TestPackageJsonDualPackageHazardImportOnly(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = 'main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonDualPackageHazardRequireOnly: the same dual CJS/ESM package
// referenced only via require().
func TestPackageJsonDualPackageHazardRequireOnly(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				console.log(require('demo-pkg'))
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = 'main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// TestPackageJsonDualPackageHazardImportAndRequireSameFile: the dual-package
// hazard proper — the same file both imports and requires the dual CJS/ESM
// package, so the bundler must pick a single instance.
func TestPackageJsonDualPackageHazardImportAndRequireSameFile(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import value from 'demo-pkg'
				console.log(value, require('demo-pkg'))
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = 'main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// Dual-package hazard with `require()` and `import` of the same package split
// across two separate source files.
func TestPackageJsonDualPackageHazardImportAndRequireSeparateFiles(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import './test-main'
				import './test-module'
			`,
			"/Users/user/project/src/test-main.js": `
				console.log(require('demo-pkg'))
			`,
			"/Users/user/project/src/test-module.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = 'main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}

// Same mixed import/require scenario, but with MainFields explicitly set to
// prefer "module" over "main".
func TestPackageJsonDualPackageHazardImportAndRequireForceModuleBeforeMain(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import './test-main'
				import './test-module'
			`,
			"/Users/user/project/src/test-main.js": `
				console.log(require('demo-pkg'))
			`,
			"/Users/user/project/src/test-module.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = 'main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			MainFields:    []string{"module", "main"},
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}

// Mixed import/require where the package has no explicit "main" field: the
// CommonJS entry point is the implicit "index.js".
func TestPackageJsonDualPackageHazardImportAndRequireImplicitMain(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import './test-index'
				import './test-module'
			`,
			"/Users/user/project/src/test-index.js": `
				console.log(require('demo-pkg'))
			`,
			"/Users/user/project/src/test-module.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				module.exports = 'index'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}

// Implicit-main variant of the previous test with MainFields forcing "module"
// ahead of "main".
func TestPackageJsonDualPackageHazardImportAndRequireImplicitMainForceModuleBeforeMain(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import './test-index'
				import './test-module'
			`,
			"/Users/user/project/src/test-index.js": `
				console.log(require('demo-pkg'))
			`,
			"/Users/user/project/src/test-module.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"module": "./module.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				module.exports = 'index'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			MainFields:    []string{"module", "main"},
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// Dual-package hazard combined with a "browser" field that remaps both the
// "main" and "module" entry files to browser-specific replacements.
func TestPackageJsonDualPackageHazardImportAndRequireBrowser(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import './test-main'
				import './test-module'
			`,
			"/Users/user/project/src/test-main.js": `
				console.log(require('demo-pkg'))
			`,
			"/Users/user/project/src/test-module.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "./main.js",
					"module": "./module.js",
					"browser": {
						"./main.js": "./main.browser.js",
						"./module.js": "./module.browser.js"
					}
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/main.js": `
				module.exports = 'main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.js": `
				export default 'module'
			`,
			"/Users/user/project/node_modules/demo-pkg/main.browser.js": `
				module.exports = 'browser main'
			`,
			"/Users/user/project/node_modules/demo-pkg/module.browser.js": `
				export default 'browser module'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
// Custom MainFields: the package declares non-standard fields "a" and "b";
// the resolver is configured to try "a" first, then "b".
func TestPackageJsonMainFieldsA(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"a": "./a.js",
					"b": "./b.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/a.js": `
				module.exports = 'a'
			`,
			"/Users/user/project/node_modules/demo-pkg/b.js": `
				export default 'b'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			MainFields:    []string{"a", "b"},
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}

// Same package as above but with the MainFields priority reversed ("b" first).
func TestPackageJsonMainFieldsB(t *testing.T) {
	packagejson_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import value from 'demo-pkg'
				console.log(value)
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"a": "./a.js",
					"b": "./b.js"
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/a.js": `
				module.exports = 'a'
			`,
			"/Users/user/project/node_modules/demo-pkg/b.js": `
				export default 'b'
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			MainFields:    []string{"b", "a"},
			AbsOutputFile: "/Users/user/project/out.js",
		},
	})
}
func TestPackageJsonNeutralNoDefaultMainFields(t *testing.T) {
packagejson_suite.expectBundled(t, bundled{
files: map[string]string{
"/Users/user/project/src/entry.js": `
import fn from 'demo-pkg'
console.log(fn())
`,
"/Users/user/project/node_modules/demo-pkg/package.json": `
{
"main": "./main.js",
"module": "./main.esm.js"
}
`,
"/Users/user/project/node_modules/demo-pkg/main.js": `
module.exports = function() {
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
package bundler_tests
import (
"regexp"
"testing"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
)
// dce_suite groups the dead-code-elimination (tree-shaking) bundler tests
// under the suite name "dce".
var dce_suite = suite{
	name: "dce",
}
// "sideEffects": false with a *used* named import from an ESM package: the
// imported binding is referenced, so the module must be kept.
func TestPackageJsonSideEffectsFalseKeepNamedImportES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Same as above but the package is CommonJS (uses "exports.foo").
func TestPackageJsonSideEffectsFalseKeepNamedImportCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// "sideEffects": false with a *used* star import (namespace object) from an
// ESM package.
func TestPackageJsonSideEffectsFalseKeepStarImportES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import * as ns from "demo-pkg"
				console.log(ns)
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Star-import variant where the package is CommonJS.
func TestPackageJsonSideEffectsFalseKeepStarImportCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import * as ns from "demo-pkg"
				console.log(ns)
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// "sideEffects": true with a bare import of an ESM package: the package opts
// in to having side effects, so the bare import must be kept.
func TestPackageJsonSideEffectsTrueKeepES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": true
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Same "sideEffects": true scenario with a CommonJS package.
func TestPackageJsonSideEffectsTrueKeepCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": true
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// "sideEffects": false with both a bare import and a require() of the same
// ESM package. The expected scan log records the warning that the bare import
// is ignored because the package was marked side-effect free.
func TestPackageJsonSideEffectsFalseKeepBareImportAndRequireES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				require('demo-pkg')
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `Users/user/project/src/entry.js: WARNING: Ignoring this import because "Users/user/project/node_modules/demo-pkg/index.js" was marked as having no side effects
Users/user/project/node_modules/demo-pkg/package.json: NOTE: "sideEffects" is false in the enclosing "package.json" file:
`,
	})
}

// CommonJS variant of the bare-import-plus-require scenario above.
func TestPackageJsonSideEffectsFalseKeepBareImportAndRequireCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				require('demo-pkg')
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `Users/user/project/src/entry.js: WARNING: Ignoring this import because "Users/user/project/node_modules/demo-pkg/index.js" was marked as having no side effects
Users/user/project/node_modules/demo-pkg/package.json: NOTE: "sideEffects" is false in the enclosing "package.json" file:
`,
	})
}
// "sideEffects": false with only a bare import (nothing used) of an ESM
// package: the import is ignored with a warning, recorded in expectedScanLog.
func TestPackageJsonSideEffectsFalseRemoveBareImportES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `Users/user/project/src/entry.js: WARNING: Ignoring this import because "Users/user/project/node_modules/demo-pkg/index.js" was marked as having no side effects
Users/user/project/node_modules/demo-pkg/package.json: NOTE: "sideEffects" is false in the enclosing "package.json" file:
`,
	})
}

// CommonJS variant of the bare-import removal scenario above.
func TestPackageJsonSideEffectsFalseRemoveBareImportCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `Users/user/project/src/entry.js: WARNING: Ignoring this import because "Users/user/project/node_modules/demo-pkg/index.js" was marked as having no side effects
Users/user/project/node_modules/demo-pkg/package.json: NOTE: "sideEffects" is false in the enclosing "package.json" file:
`,
	})
}
// "sideEffects": false with an *unused* named import from an ESM package:
// nothing from the package is referenced, so it can be dropped.
func TestPackageJsonSideEffectsFalseRemoveNamedImportES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Unused named import, CommonJS package variant.
func TestPackageJsonSideEffectsFalseRemoveNamedImportCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Unused star import of an ESM package marked side-effect free.
func TestPackageJsonSideEffectsFalseRemoveStarImportES6(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import * as ns from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Unused star import, CommonJS package variant.
func TestPackageJsonSideEffectsFalseRemoveStarImportCommonJS(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import * as ns from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				exports.foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// "sideEffects" as an *empty* array: no file is declared to have side
// effects, so the unused import can be removed.
func TestPackageJsonSideEffectsArrayRemove(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": []
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// "sideEffects" array that explicitly lists "./index.js", marking that file
// as having side effects so it must be kept.
func TestPackageJsonSideEffectsArrayKeep(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": ["./index.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// "sideEffects" lists only the "main" entry file, while MainFields selects
// "module": the module entry is used and is not in the sideEffects list.
// Both files log 'TEST FAILED' — neither log is expected to survive.
func TestPackageJsonSideEffectsArrayKeepMainUseModule(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-main.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			MainFields:    []string{"module"},
		},
	})
}

// "sideEffects" lists the "main" entry and MainFields selects "main": the
// selected file IS in the sideEffects list, so its log should be kept.
func TestPackageJsonSideEffectsArrayKeepMainUseMain(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('this should be kept')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-main.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			MainFields:    []string{"main"},
		},
	})
}

// Same layout with no MainFields override: the default selection applies
// (both files log 'TEST FAILED', so neither log should survive).
func TestPackageJsonSideEffectsArrayKeepMainImplicitModule(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-main.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// An extra require() forces "index-main.js" to be selected (see the comment
// inside require-demo-pkg.js); that file is in the sideEffects list and its
// log should be kept.
func TestPackageJsonSideEffectsArrayKeepMainImplicitMain(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				import "./require-demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/src/require-demo-pkg.js": `
				// This causes "index-main.js" to be selected
				require('demo-pkg')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('this should be kept')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-main.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Mirror of the "KeepMain" family: here "sideEffects" lists the *module*
// entry file. MainFields selects "module", which is in the list, so its log
// should be kept.
func TestPackageJsonSideEffectsArrayKeepModuleUseModule(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('this should be kept')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-module.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			MainFields:    []string{"module"},
		},
	})
}

// "sideEffects" lists the module entry but MainFields selects "main": the
// selected file is not in the list (both logs say 'TEST FAILED').
func TestPackageJsonSideEffectsArrayKeepModuleUseMain(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-module.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			MainFields:    []string{"main"},
		},
	})
}

// No MainFields override: the default selection picks the module entry,
// whose log is expected to be kept.
func TestPackageJsonSideEffectsArrayKeepModuleImplicitModule(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('this should be kept')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-module.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// A require() forces "index-main.js" to be selected; that file is NOT in the
// sideEffects list, yet its log is marked 'this should be kept' because the
// file is actually used via require().
func TestPackageJsonSideEffectsArrayKeepModuleImplicitMain(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				import "./require-demo-pkg"
				console.log('unused import')
			`,
			"/Users/user/project/src/require-demo-pkg.js": `
				// This causes "index-main.js" to be selected
				require('demo-pkg')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-main.js": `
				export const foo = 123
				console.log('this should be kept')
			`,
			"/Users/user/project/node_modules/demo-pkg/index-module.js": `
				export const foo = 123
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"main": "index-main.js",
					"module": "index-module.js",
					"sideEffects": ["./index-module.js"]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Glob patterns in the "sideEffects" array. The "./ke?p/*/file.js" pattern
// matches the kept file; the other two patterns are deliberately close-but-
// non-matching (wrong extension "file.j", and "re?ve" does not match
// "remove"), so the second import is ignored with the warning recorded in
// expectedScanLog.
func TestPackageJsonSideEffectsArrayGlob(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg/keep/this/file"
				import "demo-pkg/remove/this/file"
			`,
			"/Users/user/project/node_modules/demo-pkg/keep/this/file.js": `
				console.log('this should be kept')
			`,
			"/Users/user/project/node_modules/demo-pkg/remove/this/file.js": `
				console.log('TEST FAILED')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": [
						"./ke?p/*/file.js",
						"./remove/this/file.j",
						"./re?ve/this/file.js"
					]
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `Users/user/project/src/entry.js: WARNING: Ignoring this import because "Users/user/project/node_modules/demo-pkg/remove/this/file.js" was marked as having no side effects
Users/user/project/node_modules/demo-pkg/package.json: NOTE: It was excluded from the "sideEffects" array in the enclosing "package.json" file:
`,
	})
}

// "sideEffects": false in a package.json several directories above the
// imported file: the flag applies to nested subdirectories too.
func TestPackageJsonSideEffectsNestedDirectoryRemove(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg/a/b/c"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
			"/Users/user/project/node_modules/demo-pkg/a/b/c/index.js": `
				export const foo = 123
				console.log('hello')
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// The default export is a call expression ("exprWithSideEffects()") and the
// default import is used, so the expression must be kept even though the
// package is marked "sideEffects": false.
func TestPackageJsonSideEffectsKeepExportDefaultExpr(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import foo from "demo-pkg"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export default exprWithSideEffects()
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Regression test for issue #999: a side-effect-free package imported from
// *inside* node_modules (demo-pkg importing demo-pkg2). No warning entry is
// expected in the scan log for this case.
func TestPackageJsonSideEffectsFalseNoWarningInNodeModulesIssue999(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import "demo-pkg"
				console.log('used import')
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				import "demo-pkg2"
				console.log('unused import')
			`,
			"/Users/user/project/node_modules/demo-pkg2/index.js": `
				export const foo = 123
				console.log('hello')
			`,
			"/Users/user/project/node_modules/demo-pkg2/package.json": `
				{
					"sideEffects": false
				}
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Re-export through an intermediate side-effect-free file where the final
// binding is never used: the intermediate file's `throw 'REMOVE THIS'` can
// be eliminated along with everything else.
func TestPackageJsonSideEffectsFalseIntermediateFilesUnused(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export {foo} from "./foo.js"
				throw 'REMOVE THIS'
			`,
			"/Users/user/project/node_modules/demo-pkg/foo.js": `
				export const foo = 123
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{ "sideEffects": false }
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Same re-export chain but the binding IS used: the intermediate file's
// `throw 'keep this'` must survive because the file is actually needed.
func TestPackageJsonSideEffectsFalseIntermediateFilesUsed(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "demo-pkg"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/demo-pkg/index.js": `
				export {foo} from "./foo.js"
				throw 'keep this'
			`,
			"/Users/user/project/node_modules/demo-pkg/foo.js": `
				export const foo = 123
			`,
			"/Users/user/project/node_modules/demo-pkg/package.json": `
				{ "sideEffects": false }
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Four-package re-export chain a -> b -> c -> d where EVERY package is
// marked side-effect free; only "b" contains a `throw 'keep this'`.
func TestPackageJsonSideEffectsFalseIntermediateFilesChainAll(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "a"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/a/index.js": `
				export {foo} from "b"
			`,
			"/Users/user/project/node_modules/a/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/b/index.js": `
				export {foo} from "c"
				throw 'keep this'
			`,
			"/Users/user/project/node_modules/b/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/c/index.js": `
				export {foo} from "d"
			`,
			"/Users/user/project/node_modules/c/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/d/index.js": `
				export const foo = 123
			`,
			"/Users/user/project/node_modules/d/package.json": `
				{ "sideEffects": false }
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Same chain but only "b" (the package with the throw) has
// "sideEffects": false; "a", "c", and "d" have no package.json flag.
func TestPackageJsonSideEffectsFalseIntermediateFilesChainOne(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "a"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/a/index.js": `
				export {foo} from "b"
			`,
			"/Users/user/project/node_modules/b/index.js": `
				export {foo} from "c"
				throw 'keep this'
			`,
			"/Users/user/project/node_modules/b/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/c/index.js": `
				export {foo} from "d"
			`,
			"/Users/user/project/node_modules/d/index.js": `
				export const foo = 123
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Diamond-shaped re-export graph: "a" star-exports from both "b1" and "b2",
// which each re-export {foo} from the same "c" and carry their own throw.
func TestPackageJsonSideEffectsFalseIntermediateFilesDiamond(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import {foo} from "a"
				console.log(foo)
			`,
			"/Users/user/project/node_modules/a/index.js": `
				export * from "b1"
				export * from "b2"
			`,
			"/Users/user/project/node_modules/b1/index.js": `
				export {foo} from "c"
				throw 'keep this 1'
			`,
			"/Users/user/project/node_modules/b1/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/b2/index.js": `
				export {foo} from "c"
				throw 'keep this 2'
			`,
			"/Users/user/project/node_modules/b2/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/c/index.js": `
				export {foo} from "d"
			`,
			"/Users/user/project/node_modules/d/index.js": `
				export const foo = 123
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Dynamic import() through a re-export fork where only the middle package
// "b" is marked side-effect free; "c" and "d" are not.
func TestPackageJsonSideEffectsFalseOneFork(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import("a").then(x => assert(x.foo === "foo"))
			`,
			"/Users/user/project/node_modules/a/index.js": `
				export {foo} from "b"
			`,
			"/Users/user/project/node_modules/b/index.js": `
				export {foo, bar} from "c"
				export {baz} from "d"
			`,
			"/Users/user/project/node_modules/b/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/c/index.js": `
				export let foo = "foo"
				export let bar = "bar"
			`,
			"/Users/user/project/node_modules/d/index.js": `
				export let baz = "baz"
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}

// Same fork, but "b", "c", and "d" are ALL marked side-effect free.
func TestPackageJsonSideEffectsFalseAllFork(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import("a").then(x => assert(x.foo === "foo"))
			`,
			"/Users/user/project/node_modules/a/index.js": `
				export {foo} from "b"
			`,
			"/Users/user/project/node_modules/b/index.js": `
				export {foo, bar} from "c"
				export {baz} from "d"
			`,
			"/Users/user/project/node_modules/b/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/c/index.js": `
				export let foo = "foo"
				export let bar = "bar"
			`,
			"/Users/user/project/node_modules/c/package.json": `
				{ "sideEffects": false }
			`,
			"/Users/user/project/node_modules/d/index.js": `
				export let baz = "baz"
			`,
			"/Users/user/project/node_modules/d/package.json": `
				{ "sideEffects": false }
			`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// An unused default import of a JSON file should be removed entirely from the
// bundle (the JSON loader's output is treated as side-effect free).
func TestJSONLoaderRemoveUnused(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import unused from "./example.json"
				console.log('unused import')
			`,
			"/example.json": `{"data": true}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// Same as the JSON-loader test above but for the text loader: an unused text
// import must be dropped from the bundle.
func TestTextLoaderRemoveUnused(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import unused from "./example.txt"
				console.log('unused import')
			`,
			"/example.txt": `some data`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
// An unused import handled by the base64 loader (mapped here via the ".data"
// extension) must be removed from the bundle.
func TestBase64LoaderRemoveUnused(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import unused from "./example.data"
				console.log('unused import')
			`,
			"/example.data": `some data`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".data": config.LoaderBase64,
			},
		},
	})
}
// An unused import handled by the data-URL loader must be removed from the
// bundle.
func TestDataURLLoaderRemoveUnused(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import unused from "./example.data"
				console.log('unused import')
			`,
			"/example.data": `some data`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".data": config.LoaderDataURL,
			},
		},
	})
}
// An unused import handled by the file loader must be removed from the bundle
// (and, presumably, no copied output file should be emitted — confirm against
// the snapshot).
func TestFileLoaderRemoveUnused(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import unused from "./example.data"
				console.log('unused import')
			`,
			"/example.data": `some data`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".data": config.LoaderFile,
			},
		},
	})
}
// A function that references "import.meta" but is never called should still be
// tree-shaken: the import.meta reference alone must not mark it as used.
func TestRemoveUnusedImportMeta(t *testing.T) {
	dce_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				function foo() {
					console.log(import.meta.url, import.meta.path)
				}
				console.log('foo is unused')
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
func TestRemoveUnusedPureCommentCalls(t *testing.T) {
dce_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
function bar() {}
let bare = foo(bar);
let at_yes = /* @__PURE__ */ foo(bar);
let at_no = /* @__PURE__ */ foo(bar());
let new_at_yes = /* @__PURE__ */ new foo(bar);
let new_at_no = /* @__PURE__ */ new foo(bar());
let nospace_at_yes = /*@__PURE__*/ foo(bar);
let nospace_at_no = /*@__PURE__*/ foo(bar());
let nospace_new_at_yes = /*@__PURE__*/ new foo(bar);
let nospace_new_at_no = /*@__PURE__*/ new foo(bar());
let num_yes = /* #__PURE__ */ foo(bar);
let num_no = /* #__PURE__ */ foo(bar());
let new_num_yes = /* #__PURE__ */ new foo(bar);
let new_num_no = /* #__PURE__ */ new foo(bar());
let nospace_num_yes = /*#__PURE__*/ foo(bar);
let nospace_num_no = /*#__PURE__*/ foo(bar());
let nospace_new_num_yes = /*#__PURE__*/ new foo(bar);
let nospace_new_num_no = /*#__PURE__*/ new foo(bar());
let dot_yes = /* @__PURE__ */ foo(sideEffect()).dot(bar);
let dot_no = /* @__PURE__ */ foo(sideEffect()).dot(bar());
let new_dot_yes = /* @__PURE__ */ new foo(sideEffect()).dot(bar);
let new_dot_no = /* @__PURE__ */ new foo(sideEffect()).dot(bar());
let nested_yes = [1, /* @__PURE__ */ foo(bar), 2];
let nested_no = [1, /* @__PURE__ */ foo(bar()), 2];
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/bundler_tests/bundler_importphase_test.go | internal/bundler_tests/bundler_importphase_test.go | package bundler_tests
import (
"testing"
"github.com/evanw/esbuild/internal/config"
)
// importphase_suite groups the import-phase ("import defer" / "import source")
// snapshot tests in this file under the "importphase" suite name.
var importphase_suite = suite{
	name: "importphase",
}
// "import defer" of an external path with the "esm" output format is the one
// supported combination: all six forms (two static imports, four import.defer
// calls, with and without import attributes) should pass through unchanged.
func TestImportDeferExternalESM(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import defer * as foo0 from './foo.json'
				import defer * as foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.defer('./foo.json'),
					import.defer('./foo.json', { with: { type: 'json' } }),
					import.defer(` + "`./${foo}.json`" + `),
					import.defer(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatESModule,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Patterns: []config.WildcardPattern{{Suffix: ".json"}},
				},
			},
		},
	})
}
// Deferred imports cannot be represented in CommonJS output even when the
// target is external: one error is expected per import site (6 total).
func TestImportDeferExternalCommonJS(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import defer * as foo0 from './foo.json'
				import defer * as foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.defer('./foo.json'),
					import.defer('./foo.json', { with: { type: 'json' } }),
					import.defer(` + "`./${foo}.json`" + `),
					import.defer(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Patterns: []config.WildcardPattern{{Suffix: ".json"}},
				},
			},
		},
		expectedScanLog: `entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
`,
	})
}
// Deferred imports cannot be represented in IIFE output even when the target
// is external: one error is expected per import site (6 total).
func TestImportDeferExternalIIFE(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import defer * as foo0 from './foo.json'
				import defer * as foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.defer('./foo.json'),
					import.defer('./foo.json', { with: { type: 'json' } }),
					import.defer(` + "`./${foo}.json`" + `),
					import.defer(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatIIFE,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Patterns: []config.WildcardPattern{{Suffix: ".json"}},
				},
			},
		},
		expectedScanLog: `entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
`,
	})
}
// When the deferred target is NOT external (it would be bundled), deferred
// imports are rejected regardless of the ESM output format: one error per
// import site (6 total).
func TestImportDeferInternalESM(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import defer * as foo0 from './foo.json'
				import defer * as foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.defer('./foo.json'),
					import.defer('./foo.json', { with: { type: 'json' } }),
					import.defer(` + "`./${foo}.json`" + `),
					import.defer(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatESModule,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Bundling with deferred imports is not supported unless they are external
entry.js: ERROR: Bundling with deferred imports is not supported unless they are external
entry.js: ERROR: Bundling with deferred imports is not supported unless they are external
entry.js: ERROR: Bundling with deferred imports is not supported unless they are external
entry.js: ERROR: Bundling with deferred imports is not supported unless they are external
entry.js: ERROR: Bundling with deferred imports is not supported unless they are external
`,
	})
}
// Internal (bundled) deferred imports with CommonJS output: the format-level
// error takes precedence, so the "cjs" message is reported per import site.
func TestImportDeferInternalCommonJS(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import defer * as foo0 from './foo.json'
				import defer * as foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.defer('./foo.json'),
					import.defer('./foo.json', { with: { type: 'json' } }),
					import.defer(` + "`./${foo}.json`" + `),
					import.defer(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "cjs" output format is not supported
`,
	})
}
// Internal (bundled) deferred imports with IIFE output: the format-level
// error takes precedence, so the "iife" message is reported per import site.
func TestImportDeferInternalIIFE(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import defer * as foo0 from './foo.json'
				import defer * as foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.defer('./foo.json'),
					import.defer('./foo.json', { with: { type: 'json' } }),
					import.defer(` + "`./${foo}.json`" + `),
					import.defer(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatIIFE,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
entry.js: ERROR: Bundling deferred imports with the "iife" output format is not supported
`,
	})
}
// "import source" (source phase imports) of an external path with the "esm"
// output format is supported: all six forms should pass through unchanged.
func TestImportSourceExternalESM(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import source foo0 from './foo.json'
				import source foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.source('./foo.json'),
					import.source('./foo.json', { with: { type: 'json' } }),
					import.source(` + "`./${foo}.json`" + `),
					import.source(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatESModule,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Patterns: []config.WildcardPattern{{Suffix: ".json"}},
				},
			},
		},
	})
}
// Source phase imports cannot be represented in CommonJS output even when the
// target is external: one error is expected per import site (6 total).
func TestImportSourceExternalCommonJS(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import source foo0 from './foo.json'
				import source foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.source('./foo.json'),
					import.source('./foo.json', { with: { type: 'json' } }),
					import.source(` + "`./${foo}.json`" + `),
					import.source(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Patterns: []config.WildcardPattern{{Suffix: ".json"}},
				},
			},
		},
		expectedScanLog: `entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
`,
	})
}
// Source phase imports cannot be represented in IIFE output even when the
// target is external: one error is expected per import site (6 total).
func TestImportSourceExternalIIFE(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import source foo0 from './foo.json'
				import source foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.source('./foo.json'),
					import.source('./foo.json', { with: { type: 'json' } }),
					import.source(` + "`./${foo}.json`" + `),
					import.source(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatIIFE,
			AbsOutputFile: "/out.js",
			ExternalSettings: config.ExternalSettings{
				PreResolve: config.ExternalMatchers{
					Patterns: []config.WildcardPattern{{Suffix: ".json"}},
				},
			},
		},
		expectedScanLog: `entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
`,
	})
}
// When the source-phase target is NOT external (it would be bundled), source
// phase imports are rejected regardless of the ESM output format: one error
// per import site (6 total).
func TestImportSourceInternalESM(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import source foo0 from './foo.json'
				import source foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.source('./foo.json'),
					import.source('./foo.json', { with: { type: 'json' } }),
					import.source(` + "`./${foo}.json`" + `),
					import.source(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatESModule,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Bundling with source phase imports is not supported unless they are external
entry.js: ERROR: Bundling with source phase imports is not supported unless they are external
entry.js: ERROR: Bundling with source phase imports is not supported unless they are external
entry.js: ERROR: Bundling with source phase imports is not supported unless they are external
entry.js: ERROR: Bundling with source phase imports is not supported unless they are external
entry.js: ERROR: Bundling with source phase imports is not supported unless they are external
`,
	})
}
// Internal (bundled) source phase imports with CommonJS output: the
// format-level error takes precedence, so the "cjs" message is reported per
// import site.
func TestImportSourceInternalCommonJS(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import source foo0 from './foo.json'
				import source foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.source('./foo.json'),
					import.source('./foo.json', { with: { type: 'json' } }),
					import.source(` + "`./${foo}.json`" + `),
					import.source(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatCommonJS,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "cjs" output format is not supported
`,
	})
}
// Internal (bundled) source phase imports with IIFE output: the format-level
// error takes precedence, so the "iife" message is reported per import site.
func TestImportSourceInternalIIFE(t *testing.T) {
	importphase_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				import source foo0 from './foo.json'
				import source foo1 from './foo.json' with { type: 'json' }
				console.log(
					foo0,
					foo1,
					import.source('./foo.json'),
					import.source('./foo.json', { with: { type: 'json' } }),
					import.source(` + "`./${foo}.json`" + `),
					import.source(` + "`./${foo}.json`" + `, { with: { type: 'json' } }),
				)
			`,
			"/foo.json": `{}`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			OutputFormat:  config.FormatIIFE,
			AbsOutputFile: "/out.js",
		},
		expectedScanLog: `entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
entry.js: ERROR: Bundling source phase imports with the "iife" output format is not supported
`,
	})
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/js_printer/js_printer.go | internal/js_printer/js_printer.go | package js_printer
import (
"bytes"
"fmt"
"math"
"math/big"
"strconv"
"strings"
"unicode/utf8"
"github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/helpers"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/renamer"
"github.com/evanw/esbuild/internal/sourcemap"
)
// Sentinel float values used by printNumber to special-case Infinity.
var positiveInfinity = math.Inf(1)
var negativeInfinity = math.Inf(-1)

// hexChars is the digit alphabet for "\xNN" and "\uNNNN" escape sequences.
const hexChars = "0123456789ABCDEF"

// Printable ASCII range (space through tilde); characters inside this range
// are emitted verbatim on the fast paths below.
const firstASCII = 0x20
const lastASCII = 0x7E

// UTF-16 surrogate ranges, used to detect and pair surrogate halves.
const firstHighSurrogate = 0xD800
const lastHighSurrogate = 0xDBFF
const firstLowSurrogate = 0xDC00
const lastLowSurrogate = 0xDFFF
// QuoteIdentifier appends "name" to "js", escaping any non-printable-ASCII
// characters as "\uNNNN" (or "\u{NNNNNN}" for code points above 0xFFFF when
// the target supports ES6 Unicode escapes). Runs of plain ASCII are copied in
// bulk. Panics if a non-BMP code point must be escaped but the target does
// not support Unicode escapes — callers are expected to check
// CanEscapeIdentifier first.
func QuoteIdentifier(js []byte, name string, unsupportedFeatures compat.JSFeature) []byte {
	isASCII := false
	asciiStart := 0
	for i, c := range name {
		if c >= firstASCII && c <= lastASCII {
			// Fast path: a run of ASCII characters
			if !isASCII {
				isASCII = true
				asciiStart = i
			}
		} else {
			// Slow path: escape non-ASCII characters
			if isASCII {
				js = append(js, name[asciiStart:i]...)
				isASCII = false
			}
			if c <= 0xFFFF {
				js = append(js, '\\', 'u', hexChars[c>>12], hexChars[(c>>8)&15], hexChars[(c>>4)&15], hexChars[c&15])
			} else if !unsupportedFeatures.Has(compat.UnicodeEscapes) {
				js = append(js, fmt.Sprintf("\\u{%X}", c)...)
			} else {
				panic("Internal error: Cannot encode identifier: Unicode escapes are unsupported")
			}
		}
	}
	if isASCII {
		// Print one final run of ASCII characters
		js = append(js, name[asciiStart:]...)
	}
	return js
}
// printUnquotedUTF16 appends the UTF-16 string "text" to the output buffer,
// escaped as the contents of a string literal delimited by "quote" (one of
// '\'', '"', or '`'). The surrounding quote characters themselves are NOT
// printed. When a line-length limit is configured and printQuotedNoWrap is
// not set, overlong content is broken with backslash-newline escapes.
func (p *printer) printUnquotedUTF16(text []uint16, quote rune, flags printQuotedFlags) {
	temp := make([]byte, utf8.UTFMax)
	js := p.js
	i := 0
	n := len(text)

	// Only compute the line length if necessary
	var startLineLength int
	wrapLongLines := false
	if p.options.LineLimit > 0 && (flags&printQuotedNoWrap) == 0 {
		startLineLength = p.currentLineLength()
		if startLineLength > p.options.LineLimit {
			startLineLength = p.options.LineLimit
		}
		wrapLongLines = true
	}

	for i < n {
		// Wrap long lines that are over the limit using escaped newlines
		if wrapLongLines && startLineLength+i >= p.options.LineLimit {
			js = append(js, "\\\n"...)
			startLineLength -= p.options.LineLimit
		}

		c := text[i]
		i++

		switch c {
		// Special-case the null character since it may mess with code written in C
		// that treats null characters as the end of the string.
		case '\x00':
			// We don't want "\x001" to be written as "\01"
			if i < n && text[i] >= '0' && text[i] <= '9' {
				js = append(js, "\\x00"...)
			} else {
				js = append(js, "\\0"...)
			}

		// Special-case the bell character since it may cause dumping this file to
		// the terminal to make a sound, which is undesirable. Note that we can't
		// use an octal literal to print this shorter since octal literals are not
		// allowed in strict mode (or in template strings).
		case '\x07':
			js = append(js, "\\x07"...)

		case '\b':
			js = append(js, "\\b"...)

		case '\f':
			js = append(js, "\\f"...)

		case '\n':
			if quote == '`' {
				// Template literals may contain literal newlines
				startLineLength = -i // Printing a real newline resets the line length
				js = append(js, '\n')
			} else {
				js = append(js, "\\n"...)
			}

		case '\r':
			js = append(js, "\\r"...)

		case '\v':
			js = append(js, "\\v"...)

		case '\x1B':
			js = append(js, "\\x1B"...)

		case '\\':
			js = append(js, "\\\\"...)

		case '/':
			// Avoid generating the sequence "</script" in JS code
			if !p.options.UnsupportedFeatures.Has(compat.InlineScript) && i >= 2 && text[i-2] == '<' && i+6 <= len(text) {
				script := "script"
				matches := true
				for j := 0; j < 6; j++ {
					a := text[i+j]
					b := uint16(script[j])
					// Case-insensitive comparison against "script"
					if a >= 'A' && a <= 'Z' {
						a += 'a' - 'A'
					}
					if a != b {
						matches = false
						break
					}
				}
				if matches {
					js = append(js, '\\')
				}
			}
			js = append(js, '/')

		case '\'':
			if quote == '\'' {
				js = append(js, '\\')
			}
			js = append(js, '\'')

		case '"':
			if quote == '"' {
				js = append(js, '\\')
			}
			js = append(js, '"')

		case '`':
			if quote == '`' {
				js = append(js, '\\')
			}
			js = append(js, '`')

		case '$':
			// "${" starts a substitution inside a template literal
			if quote == '`' && i < n && text[i] == '{' {
				js = append(js, '\\')
			}
			js = append(js, '$')

		case '\u2028':
			js = append(js, "\\u2028"...)

		case '\u2029':
			js = append(js, "\\u2029"...)

		case '\uFEFF':
			js = append(js, "\\uFEFF"...)

		default:
			switch {
			// Common case: just append a single byte
			case c <= lastASCII:
				js = append(js, byte(c))

			// Is this a high surrogate?
			case c >= firstHighSurrogate && c <= lastHighSurrogate:
				// Is there a next character?
				if i < n {
					c2 := text[i]

					// Is it a low surrogate?
					if c2 >= firstLowSurrogate && c2 <= lastLowSurrogate {
						r := (rune(c) << 10) + rune(c2) + (0x10000 - (firstHighSurrogate << 10) - firstLowSurrogate)
						i++

						// Escape this character if UTF-8 isn't allowed
						if p.options.ASCIIOnly {
							if !p.options.UnsupportedFeatures.Has(compat.UnicodeEscapes) {
								js = append(js, fmt.Sprintf("\\u{%X}", r)...)
							} else {
								// Fall back to a surrogate pair of "\uNNNN" escapes
								js = append(js,
									'\\', 'u', hexChars[c>>12], hexChars[(c>>8)&15], hexChars[(c>>4)&15], hexChars[c&15],
									'\\', 'u', hexChars[c2>>12], hexChars[(c2>>8)&15], hexChars[(c2>>4)&15], hexChars[c2&15],
								)
							}
							continue
						}

						// Otherwise, encode to UTF-8
						width := utf8.EncodeRune(temp, r)
						js = append(js, temp[:width]...)
						continue
					}
				}

				// Write an unpaired high surrogate
				js = append(js, '\\', 'u', hexChars[c>>12], hexChars[(c>>8)&15], hexChars[(c>>4)&15], hexChars[c&15])

			// Is this an unpaired low surrogate or four-digit hex escape?
			case (c >= firstLowSurrogate && c <= lastLowSurrogate) || (p.options.ASCIIOnly && c > 0xFF):
				js = append(js, '\\', 'u', hexChars[c>>12], hexChars[(c>>8)&15], hexChars[(c>>4)&15], hexChars[c&15])

			// Can this be a two-digit hex escape?
			case p.options.ASCIIOnly:
				js = append(js, '\\', 'x', hexChars[c>>4], hexChars[c&15])

			// Otherwise, just encode to UTF-8
			default:
				width := utf8.EncodeRune(temp, rune(c))
				js = append(js, temp[:width]...)
			}
		}
	}

	p.js = js
}
// JSX tag syntax doesn't support character escapes so non-ASCII identifiers
// must be printed as UTF-8 even when the charset is set to ASCII.
//
// printJSXTag prints a JSX element tag: a string value, a (possibly renamed)
// identifier, a dotted member chain (printed recursively), or any other
// expression. A nil tag (fragment) prints nothing.
func (p *printer) printJSXTag(tagOrNil js_ast.Expr) {
	switch e := tagOrNil.Data.(type) {
	case *js_ast.EString:
		p.addSourceMapping(tagOrNil.Loc)
		p.print(helpers.UTF16ToString(e.Value))

	case *js_ast.EIdentifier:
		name := p.renamer.NameForSymbol(e.Ref)
		p.addSourceMappingForName(tagOrNil.Loc, name, e.Ref)
		p.print(name)

	case *js_ast.EDot:
		p.printJSXTag(e.Target)
		p.print(".")
		p.addSourceMapping(e.NameLoc)
		p.print(e.Name)

	default:
		if tagOrNil.Data != nil {
			p.printExpr(tagOrNil, js_ast.LLowest, 0)
		}
	}
}
// printer holds all state for printing one JavaScript AST to text, including
// the output buffer itself and source-map bookkeeping.
type printer struct {
	symbols       ast.SymbolMap
	astHelpers    js_ast.HelperContext
	renamer       renamer.Renamer // maps symbol refs to their final printed names
	importRecords []ast.ImportRecord
	callTarget    js_ast.E

	// Comments attached to expressions by location; printedExprComments tracks
	// which have already been emitted so each is printed at most once.
	exprComments        map[logger.Loc][]string
	printedExprComments map[logger.Loc]bool

	hasLegalComment        map[string]struct{}
	extractedLegalComments []string

	// js is the output buffer; everything printed is appended here.
	js                  []byte
	jsonMetadataImports []string
	binaryExprStack     []binaryExprVisitor
	options             Options
	builder             sourcemap.ChunkBuilder // accumulates source-map mappings

	printNextIndentAsSpace bool // when set, the next printIndent emits one space instead

	// Various byte offsets into "js" and counters tracking printing context.
	// NOTE(review): exact semantics of each offset field depend on code outside
	// this view — confirm before relying on them.
	stmtStart            int
	exportDefaultStart   int
	arrowExprStart       int
	forOfInitStart       int
	withNesting          int // depth of enclosing "with" statements (see printNumber)
	prevOpEnd            int
	needSpaceBeforeDot   int
	prevRegExpEnd        int
	noLeadingNewlineHere int
	oldLineStart         int
	oldLineEnd           int
	intToBytesBuffer     [64]byte
	needsSemicolon       bool
	wasLazyExport        bool
	prevOp               js_ast.OpCode
	moduleType           js_ast.ModuleType
}
// print appends raw text to the output buffer with no escaping or formatting.
func (p *printer) print(text string) {
	p.js = append(p.js, text...)
}
// This is the same as "print(string(bytes))" without any unnecessary temporary
// allocations
func (p *printer) printBytes(bytes []byte) {
	p.js = append(p.js, bytes...)
}
// printQuotedFlags adjusts how quoted string values are printed.
type printQuotedFlags uint8

const (
	// printQuotedAllowBacktick presumably permits printing the string as a
	// template literal — confirm at the printQuotedUTF16 implementation.
	printQuotedAllowBacktick printQuotedFlags = 1 << iota

	// printQuotedNoWrap disables the escaped-newline wrapping that the
	// LineLimit option otherwise applies (see printUnquotedUTF16).
	printQuotedNoWrap
)
// printQuotedUTF8 converts a UTF-8 string to UTF-16 and prints it as a quoted
// string via printQuotedUTF16.
func (p *printer) printQuotedUTF8(text string, flags printQuotedFlags) {
	p.printQuotedUTF16(helpers.StringToUTF16(text), flags)
}
// addSourceMapping records a source-map mapping from the given source
// location to the current output position, without an original name. It is a
// no-op unless source maps are enabled.
func (p *printer) addSourceMapping(loc logger.Loc) {
	if !p.options.AddSourceMappings {
		return
	}
	p.builder.AddSourceMapping(loc, "", p.js)
}
// addSourceMappingForName records a source-map mapping from "loc" to the
// current output position. If the symbol's original name differs from the
// printed "name" (i.e. it was renamed), the original name is attached to the
// mapping so debuggers can show it; otherwise no name is recorded. No-op
// unless source maps are enabled.
func (p *printer) addSourceMappingForName(loc logger.Loc, name string, ref ast.Ref) {
	if !p.options.AddSourceMappings {
		return
	}
	original := p.symbols.Get(ast.FollowSymbols(p.symbols, ref)).OriginalName
	if original == name {
		original = "" // unchanged names don't need to be recorded
	}
	p.builder.AddSourceMapping(loc, original, p.js)
}
// printIndent emits the current indentation (two spaces per level). It does
// nothing when minifying whitespace. If printNextIndentAsSpace is set, a
// single space is printed instead and the flag is cleared. When a line-length
// limit is active, indentation is capped at half the limit.
func (p *printer) printIndent() {
	if p.options.MinifyWhitespace {
		return
	}

	if p.printNextIndentAsSpace {
		p.printNextIndentAsSpace = false
		p.print(" ")
		return
	}

	level := p.options.Indent
	if limit := p.options.LineLimit; limit > 0 && level*2 >= limit {
		level = limit / 2
	}
	for ; level > 0; level-- {
		p.print("  ")
	}
}
// mangledPropName returns the printed name for a property symbol: the mangled
// name from the options when one was assigned, otherwise the renamer's name.
func (p *printer) mangledPropName(ref ast.Ref) string {
	followed := ast.FollowSymbols(p.symbols, ref)
	if mangled, ok := p.options.MangledProps[followed]; ok {
		return mangled
	}
	return p.renamer.NameForSymbol(followed)
}
// tryToGetImportedEnumValue returns the inlinable TypeScript enum value for
// "target.name" when "target" is an imported identifier that resolves to a TS
// enum symbol present in options.TSEnums. The boolean result reports whether
// such a value was found.
func (p *printer) tryToGetImportedEnumValue(target js_ast.Expr, name string) (js_ast.TSEnumValue, bool) {
	if id, ok := target.Data.(*js_ast.EImportIdentifier); ok {
		ref := ast.FollowSymbols(p.symbols, id.Ref)
		if symbol := p.symbols.Get(ref); symbol.Kind == ast.SymbolTSEnum {
			if enum, ok := p.options.TSEnums[ref]; ok {
				value, ok := enum[name]
				return value, ok
			}
		}
	}
	return js_ast.TSEnumValue{}, false
}
// tryToGetImportedEnumValueUTF16 is the UTF-16 variant of
// tryToGetImportedEnumValue. It also returns the UTF-8 form of the member
// name so the caller doesn't have to convert it again.
func (p *printer) tryToGetImportedEnumValueUTF16(target js_ast.Expr, name []uint16) (js_ast.TSEnumValue, string, bool) {
	if id, ok := target.Data.(*js_ast.EImportIdentifier); ok {
		ref := ast.FollowSymbols(p.symbols, id.Ref)
		if symbol := p.symbols.Get(ref); symbol.Kind == ast.SymbolTSEnum {
			if enum, ok := p.options.TSEnums[ref]; ok {
				name := helpers.UTF16ToString(name)
				value, ok := enum[name]
				return value, name, ok
			}
		}
	}
	return js_ast.TSEnumValue{}, "", false
}
// printClauseAlias prints an import/export clause alias: as a bare identifier
// when the alias is a valid identifier, otherwise as a quoted string (string
// alias syntax, e.g. export {x as "a-b"}).
func (p *printer) printClauseAlias(loc logger.Loc, alias string) {
	if js_ast.IsIdentifier(alias) {
		p.printSpaceBeforeIdentifier()
		p.addSourceMapping(loc)
		p.printIdentifier(alias)
	} else {
		p.addSourceMapping(loc)
		p.printQuotedUTF8(alias, 0)
	}
}
// Note: The functions below check whether something can be printed as an
// identifier or if it needs to be quoted (e.g. "x.y" vs. "x['y']") using the
// ES5 identifier validity test to maximize cross-platform portability. Even
// though newer JavaScript environments can handle more Unicode characters,
// there isn't a published document that says which Unicode versions are
// supported by which browsers. Even if a character is considered valid in the
// latest version of Unicode, we don't know if the browser we're targeting
// contains an older version of Unicode or not. So for safety, we quote
// anything that isn't guaranteed to be compatible with ES5, the oldest
// JavaScript language target that we support.

// CanEscapeIdentifier reports whether "name" can be printed as an identifier,
// possibly using escapes: it must be a valid ES5/ESNext identifier and, when
// ASCII-only output is requested, must either be escapable with "\u{...}"
// (Unicode escapes supported) or contain no non-BMP code points.
func CanEscapeIdentifier(name string, UnsupportedFeatures compat.JSFeature, asciiOnly bool) bool {
	return js_ast.IsIdentifierES5AndESNext(name) && (!asciiOnly ||
		!UnsupportedFeatures.Has(compat.UnicodeEscapes) ||
		!helpers.ContainsNonBMPCodePoint(name))
}
// canPrintIdentifier is CanEscapeIdentifier specialized to this printer's
// options (ASCIIOnly and UnsupportedFeatures).
func (p *printer) canPrintIdentifier(name string) bool {
	return js_ast.IsIdentifierES5AndESNext(name) && (!p.options.ASCIIOnly ||
		!p.options.UnsupportedFeatures.Has(compat.UnicodeEscapes) ||
		!helpers.ContainsNonBMPCodePoint(name))
}
// canPrintIdentifierUTF16 is canPrintIdentifier for a UTF-16 encoded name.
func (p *printer) canPrintIdentifierUTF16(name []uint16) bool {
	return js_ast.IsIdentifierES5AndESNextUTF16(name) && (!p.options.ASCIIOnly ||
		!p.options.UnsupportedFeatures.Has(compat.UnicodeEscapes) ||
		!helpers.ContainsNonBMPCodePointUTF16(name))
}
// printIdentifier prints an identifier, escaping non-ASCII characters via
// QuoteIdentifier when ASCII-only output is requested.
func (p *printer) printIdentifier(name string) {
	if !p.options.ASCIIOnly {
		p.print(name)
		return
	}
	p.js = QuoteIdentifier(p.js, name, p.options.UnsupportedFeatures)
}
// This is the same as "printIdentifier(StringToUTF16(bytes))" without any
// unnecessary temporary allocations
func (p *printer) printIdentifierUTF16(name []uint16) {
	var temp [utf8.UTFMax]byte
	n := len(name)

	for i := 0; i < n; i++ {
		c := rune(name[i])

		// Combine a valid surrogate pair into one code point
		if c >= firstHighSurrogate && c <= lastHighSurrogate && i+1 < n {
			if c2 := rune(name[i+1]); c2 >= firstLowSurrogate && c2 <= lastLowSurrogate {
				c = (c << 10) + c2 + (0x10000 - (firstHighSurrogate << 10) - firstLowSurrogate)
				i++
			}
		}

		// Escape non-ASCII code points when ASCII-only output is requested
		if p.options.ASCIIOnly && c > lastASCII {
			if c <= 0xFFFF {
				p.js = append(p.js, '\\', 'u', hexChars[c>>12], hexChars[(c>>8)&15], hexChars[(c>>4)&15], hexChars[c&15])
			} else if !p.options.UnsupportedFeatures.Has(compat.UnicodeEscapes) {
				p.js = append(p.js, fmt.Sprintf("\\u{%X}", c)...)
			} else {
				panic("Internal error: Cannot encode identifier: Unicode escapes are unsupported")
			}
			continue
		}

		width := utf8.EncodeRune(temp[:], c)
		p.js = append(p.js, temp[:width]...)
	}
}
// printNumber prints a float64 literal, wrapping in parentheses as required
// by the surrounding precedence "level". NaN and ±Infinity get special
// handling: inside a "with" block (or when minifying, for Infinity) they are
// printed as the expressions "0 / 0" and "1 / 0" because the bare identifiers
// could be shadowed by properties of the "with" object.
func (p *printer) printNumber(value float64, level js_ast.L) {
	absValue := math.Abs(value)

	// NaN is the only value that compares unequal to itself
	if value != value {
		p.printSpaceBeforeIdentifier()
		if p.withNesting != 0 {
			// "with (x) NaN" really means "x.NaN" so avoid identifiers when "with" is present
			wrap := level >= js_ast.LMultiply
			if wrap {
				p.print("(")
			}
			if p.options.MinifyWhitespace {
				p.print("0/0")
			} else {
				p.print("0 / 0")
			}
			if wrap {
				p.print(")")
			}
		} else {
			p.print("NaN")
		}
	} else if value == positiveInfinity || value == negativeInfinity {
		// "with (x) Infinity" really means "x.Infinity" so avoid identifiers when "with" is present
		wrap := ((p.options.MinifySyntax || p.withNesting != 0) && level >= js_ast.LMultiply) ||
			(value == negativeInfinity && level >= js_ast.LPrefix)
		if wrap {
			p.print("(")
		}
		if value == negativeInfinity {
			p.printSpaceBeforeOperator(js_ast.UnOpNeg)
			p.print("-")
		} else {
			p.printSpaceBeforeIdentifier()
		}
		if !p.options.MinifySyntax && p.withNesting == 0 {
			p.print("Infinity")
		} else if p.options.MinifyWhitespace {
			p.print("1/0")
		} else {
			p.print("1 / 0")
		}
		if wrap {
			p.print(")")
		}
	} else {
		if !math.Signbit(value) {
			p.printSpaceBeforeIdentifier()
			p.printNonNegativeFloat(absValue)
		} else if level >= js_ast.LPrefix {
			// Expressions such as "(-1).toString" need to wrap negative numbers.
			// Instead of testing for "value < 0" we test for "signbit(value)" and
			// "!isNaN(value)" because we need this to be true for "-0" and "-0 < 0"
			// is false.
			p.print("(-")
			p.printNonNegativeFloat(absValue)
			p.print(")")
		} else {
			p.printSpaceBeforeOperator(js_ast.UnOpNeg)
			p.print("-")
			p.printNonNegativeFloat(absValue)
		}
	}
}
// willPrintExprCommentsAtLoc reports whether there are not-yet-printed
// expression comments attached to this location. Comments are never printed
// when minifying whitespace.
func (p *printer) willPrintExprCommentsAtLoc(loc logger.Loc) bool {
	if p.options.MinifyWhitespace || p.printedExprComments[loc] {
		return false
	}
	return p.exprComments[loc] != nil
}
// willPrintExprCommentsForAnyOf reports whether any of the given expressions
// has unprinted comments attached to its location.
func (p *printer) willPrintExprCommentsForAnyOf(exprs []js_ast.Expr) bool {
	for i := range exprs {
		if p.willPrintExprCommentsAtLoc(exprs[i].Loc) {
			return true
		}
	}
	return false
}
// printBinding prints a destructuring target: a missing array slot, a single
// identifier, an array pattern, or an object pattern. Array and object
// patterns recurse through their items/properties and switch to a multi-line
// layout when the pattern was multi-line in the source or when attached
// comments must be printed.
func (p *printer) printBinding(binding js_ast.Binding) {
	switch b := binding.Data.(type) {
	case *js_ast.BMissing:
		p.addSourceMapping(binding.Loc)

	case *js_ast.BIdentifier:
		name := p.renamer.NameForSymbol(b.Ref)
		p.printSpaceBeforeIdentifier()
		p.addSourceMappingForName(binding.Loc, name, b.Ref)
		p.printIdentifier(name)

	case *js_ast.BArray:
		// Multi-line if the source was multi-line or if any item (or the "]")
		// has comments that must be printed on their own lines
		isMultiLine := (len(b.Items) > 0 && !b.IsSingleLine) || p.willPrintExprCommentsAtLoc(b.CloseBracketLoc)
		if !p.options.MinifyWhitespace && !isMultiLine {
			for _, item := range b.Items {
				if p.willPrintExprCommentsAtLoc(item.Loc) {
					isMultiLine = true
					break
				}
			}
		}
		p.addSourceMapping(binding.Loc)
		p.print("[")
		if len(b.Items) > 0 || isMultiLine {
			if isMultiLine {
				p.options.Indent++
			}
			for i, item := range b.Items {
				if i != 0 {
					p.print(",")
				}
				if p.options.LineLimit <= 0 || !p.printNewlinePastLineLimit() {
					if isMultiLine {
						p.printNewline()
						p.printIndent()
					} else if i != 0 {
						p.printSpace()
					}
				}
				p.printExprCommentsAtLoc(item.Loc)
				// A spread can only appear on the last item
				if b.HasSpread && i+1 == len(b.Items) {
					p.addSourceMapping(item.Loc)
					p.print("...")
					p.printExprCommentsAtLoc(item.Binding.Loc)
				}
				p.printBinding(item.Binding)
				if item.DefaultValueOrNil.Data != nil {
					p.printSpace()
					p.print("=")
					p.printSpace()
					p.printExprWithoutLeadingNewline(item.DefaultValueOrNil, js_ast.LComma, 0)
				}
				// Make sure there's a comma after trailing missing items
				if _, ok := item.Binding.Data.(*js_ast.BMissing); ok && i == len(b.Items)-1 {
					p.print(",")
				}
			}
			if isMultiLine {
				p.printNewline()
				p.printExprCommentsAfterCloseTokenAtLoc(b.CloseBracketLoc)
				p.options.Indent--
				p.printIndent()
			}
		}
		p.addSourceMapping(b.CloseBracketLoc)
		p.print("]")

	case *js_ast.BObject:
		// Multi-line if the source was multi-line or if any property (or the
		// "}") has comments that must be printed on their own lines
		isMultiLine := (len(b.Properties) > 0 && !b.IsSingleLine) || p.willPrintExprCommentsAtLoc(b.CloseBraceLoc)
		if !p.options.MinifyWhitespace && !isMultiLine {
			for _, property := range b.Properties {
				if p.willPrintExprCommentsAtLoc(property.Loc) {
					isMultiLine = true
					break
				}
			}
		}
		p.addSourceMapping(binding.Loc)
		p.print("{")
		if len(b.Properties) > 0 || isMultiLine {
			if isMultiLine {
				p.options.Indent++
			}
			for i, property := range b.Properties {
				if i != 0 {
					p.print(",")
				}
				if p.options.LineLimit <= 0 || !p.printNewlinePastLineLimit() {
					if isMultiLine {
						p.printNewline()
						p.printIndent()
					} else {
						p.printSpace()
					}
				}
				p.printExprCommentsAtLoc(property.Loc)
				if property.IsSpread {
					p.addSourceMapping(property.Loc)
					p.print("...")
					p.printExprCommentsAtLoc(property.Value.Loc)
				} else {
					if property.IsComputed {
						// Computed key: "[expr]: target"
						p.addSourceMapping(property.Loc)
						isMultiLine := p.willPrintExprCommentsAtLoc(property.Key.Loc) || p.willPrintExprCommentsAtLoc(property.CloseBracketLoc)
						p.print("[")
						if isMultiLine {
							p.printNewline()
							p.options.Indent++
							p.printIndent()
						}
						p.printExpr(property.Key, js_ast.LComma, 0)
						if isMultiLine {
							p.printNewline()
							p.printExprCommentsAfterCloseTokenAtLoc(property.CloseBracketLoc)
							p.options.Indent--
							p.printIndent()
						}
						if property.CloseBracketLoc.Start > property.Loc.Start {
							p.addSourceMapping(property.CloseBracketLoc)
						}
						p.print("]:")
						p.printSpace()
						p.printBinding(property.Value)
						if property.DefaultValueOrNil.Data != nil {
							p.printSpace()
							p.print("=")
							p.printSpace()
							p.printExprWithoutLeadingNewline(property.DefaultValueOrNil, js_ast.LComma, 0)
						}
						continue
					}
					if str, ok := property.Key.Data.(*js_ast.EString); ok && !property.PreferQuotedKey && p.canPrintIdentifierUTF16(str.Value) {
						// Use a shorthand property if the names are the same
						if id, ok := property.Value.Data.(*js_ast.BIdentifier); ok &&
							!p.willPrintExprCommentsAtLoc(property.Value.Loc) &&
							helpers.UTF16EqualsString(str.Value, p.renamer.NameForSymbol(id.Ref)) {
							if p.options.AddSourceMappings {
								p.addSourceMappingForName(property.Key.Loc, helpers.UTF16ToString(str.Value), id.Ref)
							}
							p.printIdentifierUTF16(str.Value)
							if property.DefaultValueOrNil.Data != nil {
								p.printSpace()
								p.print("=")
								p.printSpace()
								p.printExprWithoutLeadingNewline(property.DefaultValueOrNil, js_ast.LComma, 0)
							}
							continue
						}
						p.addSourceMapping(property.Key.Loc)
						p.printIdentifierUTF16(str.Value)
					} else if mangled, ok := property.Key.Data.(*js_ast.ENameOfSymbol); ok {
						// Mangled property name: print as identifier if possible,
						// otherwise fall back to a quoted string key
						name := p.mangledPropName(mangled.Ref)
						if p.canPrintIdentifier(name) {
							p.addSourceMappingForName(property.Key.Loc, name, mangled.Ref)
							p.printIdentifier(name)
							// Use a shorthand property if the names are the same
							if id, ok := property.Value.Data.(*js_ast.BIdentifier); ok &&
								!p.willPrintExprCommentsAtLoc(property.Value.Loc) &&
								name == p.renamer.NameForSymbol(id.Ref) {
								if property.DefaultValueOrNil.Data != nil {
									p.printSpace()
									p.print("=")
									p.printSpace()
									p.printExprWithoutLeadingNewline(property.DefaultValueOrNil, js_ast.LComma, 0)
								}
								continue
							}
						} else {
							p.addSourceMapping(property.Key.Loc)
							p.printQuotedUTF8(name, 0)
						}
					} else {
						p.printExpr(property.Key, js_ast.LLowest, 0)
					}
					p.print(":")
					p.printSpace()
				}
				p.printBinding(property.Value)
				if property.DefaultValueOrNil.Data != nil {
					p.printSpace()
					p.print("=")
					p.printSpace()
					p.printExprWithoutLeadingNewline(property.DefaultValueOrNil, js_ast.LComma, 0)
				}
			}
			if isMultiLine {
				p.printNewline()
				p.printExprCommentsAfterCloseTokenAtLoc(b.CloseBraceLoc)
				p.options.Indent--
				p.printIndent()
			} else {
				// This block is only reached if len(b.Properties) > 0
				p.printSpace()
			}
		}
		p.addSourceMapping(b.CloseBraceLoc)
		p.print("}")

	default:
		panic(fmt.Sprintf("Unexpected binding of type %T", binding.Data))
	}
}
// printSpace emits a single space unless whitespace is being minified.
func (p *printer) printSpace() {
	if p.options.MinifyWhitespace {
		return
	}
	p.print(" ")
}
// printNewline emits a newline unless whitespace is being minified.
func (p *printer) printNewline() {
	if p.options.MinifyWhitespace {
		return
	}
	p.print("\n")
}
// currentLineLength returns the byte length of the line currently being
// written. To stay cheap when called repeatedly, it only scans the bytes
// appended since the previous call (everything before p.oldLineEnd was
// already scanned) and caches the start of the newest line in p.oldLineStart.
func (p *printer) currentLineLength() int {
	js := p.js
	n := len(js)
	stop := p.oldLineEnd

	// Update "oldLineStart" to the start of the current line. If no newline
	// was appended since the last call, the cached value is still correct.
	for i := n; i > stop; i-- {
		if c := js[i-1]; c == '\r' || c == '\n' {
			p.oldLineStart = i
			break
		}
	}

	p.oldLineEnd = n
	return n - p.oldLineStart
}
// printNewlinePastLineLimit wraps to a fresh indented line when the current
// line has reached the configured line-length limit. It reports whether a
// newline was actually printed.
func (p *printer) printNewlinePastLineLimit() bool {
	if p.currentLineLength() >= p.options.LineLimit {
		p.print("\n")
		p.printIndent()
		return true
	}
	return false
}
// printSpaceBeforeOperator inserts a space before the next operator when the
// previous token was an operator that would otherwise merge with it into a
// different token (e.g. "+" followed by "+" must not become "++", and "<"
// followed by "!--" must not form the legacy "<!--" comment open).
func (p *printer) printSpaceBeforeOperator(next js_ast.OpCode) {
	// Only relevant if the operator we recorded is still the last thing printed
	if p.prevOpEnd == len(p.js) {
		prev := p.prevOp

		// "+ + y" => "+ +y"
		// "+ ++ y" => "+ ++y"
		// "x + + y" => "x+ +y"
		// "x ++ + y" => "x+++y"
		// "x + ++ y" => "x+ ++y"
		// "-- >" => "-- >"
		// "< ! --" => "<! --"
		if ((prev == js_ast.BinOpAdd || prev == js_ast.UnOpPos) && (next == js_ast.BinOpAdd || next == js_ast.UnOpPos || next == js_ast.UnOpPreInc)) ||
			((prev == js_ast.BinOpSub || prev == js_ast.UnOpNeg) && (next == js_ast.BinOpSub || next == js_ast.UnOpNeg || next == js_ast.UnOpPreDec)) ||
			(prev == js_ast.UnOpPostDec && next == js_ast.BinOpGt) ||
			(prev == js_ast.UnOpNot && next == js_ast.UnOpPreDec && len(p.js) > 1 && p.js[len(p.js)-2] == '<') {
			p.print(" ")
		}
	}
}
// printSemicolonAfterStatement ends a statement. When minifying whitespace
// the semicolon is deferred (via needsSemicolon) so that it can be omitted
// entirely before a closing brace or at the end of the file.
func (p *printer) printSemicolonAfterStatement() {
	if p.options.MinifyWhitespace {
		p.needsSemicolon = true
		return
	}
	p.print(";\n")
}
// printSemicolonIfNeeded flushes a semicolon that was deferred by
// printSemicolonAfterStatement while minifying whitespace.
func (p *printer) printSemicolonIfNeeded() {
	if !p.needsSemicolon {
		return
	}
	p.print(";")
	p.needsSemicolon = false
}
// printSpaceBeforeIdentifier inserts a space when the previously-printed byte
// would otherwise merge with an upcoming identifier (e.g. after a keyword),
// or when the output currently ends with a regular expression literal.
func (p *printer) printSpaceBeforeIdentifier() {
	last, _ := utf8.DecodeLastRune(p.js)
	if js_ast.IsIdentifierContinue(last) || p.prevRegExpEnd == len(p.js) {
		p.print(" ")
	}
}
// fnArgsOpts controls how printFnArgs renders an argument list.
type fnArgsOpts struct {
	openParenLoc              logger.Loc // source location used for the "(" mapping
	addMappingForOpenParenLoc bool       // emit a source mapping at openParenLoc
	hasRestArg                bool       // the last argument is a "..." rest argument
	isArrow                   bool       // printing an arrow function (enables the "a=>{}" minification)
}
// printFnArgs prints a function or arrow argument list including decorators,
// a trailing rest argument, and default values. The surrounding parentheses
// are omitted for a single-identifier arrow argument when minifying
// whitespace ("(a) => {}" becomes "a=>{}").
func (p *printer) printFnArgs(args []js_ast.Arg, opts fnArgsOpts) {
	needsParens := true
	if p.options.MinifyWhitespace && opts.isArrow && !opts.hasRestArg && len(args) == 1 {
		_, isIdentifier := args[0].Binding.Data.(*js_ast.BIdentifier)
		if isIdentifier && args[0].DefaultOrNil.Data == nil {
			needsParens = false
		}
	}

	if needsParens {
		if opts.addMappingForOpenParenLoc {
			p.addSourceMapping(opts.openParenLoc)
		}
		p.print("(")
	}

	for i, arg := range args {
		if i != 0 {
			p.print(",")
			p.printSpace()
		}
		p.printDecorators(arg.Decorators, printSpaceAfterDecorator)
		if opts.hasRestArg && i+1 == len(args) {
			p.print("...")
		}
		p.printBinding(arg.Binding)
		if arg.DefaultOrNil.Data != nil {
			p.printSpace()
			p.print("=")
			p.printSpace()
			p.printExprWithoutLeadingNewline(arg.DefaultOrNil, js_ast.LComma, 0)
		}
	}

	if needsParens {
		p.print(")")
	}
}
// printFn prints a function's argument list followed by its body block.
func (p *printer) printFn(fn js_ast.Fn) {
	p.printFnArgs(fn.Args, fnArgsOpts{hasRestArg: fn.HasRestArg})
	p.printSpace()
	p.printBlock(fn.Body.Loc, fn.Body.Block)
}
// printAfterDecorator selects the separator printed after each decorator.
type printAfterDecorator uint8

const (
	// printNewlineAfterDecorator puts each decorator on its own line
	printNewlineAfterDecorator printAfterDecorator = iota

	// printSpaceAfterDecorator separates decorators with a single space
	printSpaceAfterDecorator
)
// printDecorators prints a list of "@..." decorators. For each decorator it
// first walks down the decorator expression to decide whether the expression
// can follow "@" directly ("@foo", "@foo.bar", "@foo()") or must be wrapped
// in parentheses ("@(foo + bar)"). The return value reports whether the last
// decorator was followed by a space (so the caller should not re-indent).
func (p *printer) printDecorators(decorators []js_ast.Decorator, defaultMode printAfterDecorator) (omitIndentAfter bool) {
	oldMode := defaultMode

	for _, decorator := range decorators {
		wrap := false
		wasCallTarget := false
		expr := decorator.Value
		mode := defaultMode
		if decorator.OmitNewlineAfter {
			mode = printSpaceAfterDecorator
		}

		// Walk down the target chain of the expression. "break outer" means the
		// expression is valid after a bare "@"; falling out of the switch via a
		// plain "break" means it must be parenthesized.
	outer:
		for {
			isCallTarget := wasCallTarget
			wasCallTarget = false
			switch e := expr.Data.(type) {
			case *js_ast.EIdentifier:
				// "@foo"
				break outer

			case *js_ast.ECall:
				// "@foo()"
				expr = e.Target
				wasCallTarget = true
				continue

			case *js_ast.EDot:
				// "@foo.bar"
				if p.canPrintIdentifier(e.Name) {
					expr = e.Target
					continue
				}

				// "@foo.\u30FF" => "@(foo['\u30FF'])"
				break

			case *js_ast.EIndex:
				if _, ok := e.Index.Data.(*js_ast.EPrivateIdentifier); ok {
					// "@foo.#bar"
					expr = e.Target
					continue
				}

				// "@(foo[bar])"
				break

			case *js_ast.EImportIdentifier:
				ref := ast.FollowSymbols(p.symbols, e.Ref)
				symbol := p.symbols.Get(ref)

				if symbol.ImportItemStatus == ast.ImportItemMissing {
					// "@(void 0)"
					break
				}

				if symbol.NamespaceAlias != nil && isCallTarget && e.WasOriginallyIdentifier {
					// "@((0, import_ns.fn)())"
					break
				}

				if value := p.options.ConstValues[ref]; value.Kind != js_ast.ConstValueNone {
					// "@(<inlined constant>)"
					break
				}

				// "@foo"
				// "@import_ns.fn"
				break outer

			default:
				// "@(foo + bar)"
				// "@(() => {})"
				break
			}

			wrap = true
			break outer
		}

		p.addSourceMapping(decorator.AtLoc)
		// Indent only if the previous decorator ended with a newline
		if oldMode == printNewlineAfterDecorator {
			p.printIndent()
		}

		p.print("@")
		if wrap {
			p.print("(")
		}
		p.printExpr(decorator.Value, js_ast.LLowest, 0)
		if wrap {
			p.print(")")
		}

		switch mode {
		case printNewlineAfterDecorator:
			p.printNewline()

		case printSpaceAfterDecorator:
			p.printSpace()
		}
		oldMode = mode
	}

	omitIndentAfter = oldMode == printSpaceAfterDecorator
	return
}
// printClass prints a class body (and the "extends" clause), starting after
// the class name. Properties are printed one per line; static blocks and
// class fields get special handling for semicolons.
func (p *printer) printClass(class js_ast.Class) {
	if class.ExtendsOrNil.Data != nil {
		p.print(" extends")
		p.printSpace()
		// LNew-1 forces parentheses around anything lower-precedence than "new"
		p.printExpr(class.ExtendsOrNil, js_ast.LNew-1, 0)
	}
	p.printSpace()

	p.addSourceMapping(class.BodyLoc)
	p.print("{")
	p.printNewline()
	p.options.Indent++

	for _, item := range class.Properties {
		p.printSemicolonIfNeeded()
		omitIndent := p.printDecorators(item.Decorators, printNewlineAfterDecorator)
		if !omitIndent {
			p.printIndent()
		}

		if item.Kind == js_ast.PropertyClassStaticBlock {
			p.addSourceMapping(item.Loc)
			p.print("static")
			p.printSpace()
			p.printBlock(item.ClassStaticBlock.Loc, item.ClassStaticBlock.Block)
			p.printNewline()
			continue
		}

		p.printProperty(item)

		// Need semicolons after class fields
		if item.ValueOrNil.Data == nil {
			p.printSemicolonAfterStatement()
		} else {
			p.printNewline()
		}
	}

	// Any pending semicolon is unnecessary right before "}"
	p.needsSemicolon = false
	p.printExprCommentsAfterCloseTokenAtLoc(class.CloseBraceLoc)
	p.options.Indent--
	p.printIndent()
	if class.CloseBraceLoc.Start > class.BodyLoc.Start {
		p.addSourceMapping(class.CloseBraceLoc)
	}
	p.print("}")
}
func (p *printer) printProperty(property js_ast.Property) {
p.printExprCommentsAtLoc(property.Loc)
if property.Kind == js_ast.PropertySpread {
p.addSourceMapping(property.Loc)
p.print("...")
p.printExpr(property.ValueOrNil, js_ast.LComma, 0)
return
}
// Handle key syntax compression for cross-module constant inlining of enums
var keyFlags printExprFlags
if p.options.MinifySyntax && property.Flags.Has(js_ast.PropertyIsComputed) {
property.Key = p.lateConstantFoldUnaryOrBinaryOrIfExpr(property.Key)
keyFlags |= parentWasUnaryOrBinaryOrIfTest
if key, ok := property.Key.Data.(*js_ast.EInlinedEnum); ok {
property.Key = key.Value
}
// Remove the computed flag if it's no longer needed
switch key := property.Key.Data.(type) {
case *js_ast.ENumber:
property.Flags &= ^js_ast.PropertyIsComputed
case *js_ast.EString:
if !helpers.UTF16EqualsString(key.Value, "__proto__") &&
!helpers.UTF16EqualsString(key.Value, "constructor") &&
!helpers.UTF16EqualsString(key.Value, "prototype") {
property.Flags &= ^js_ast.PropertyIsComputed
}
}
}
if property.Flags.Has(js_ast.PropertyIsStatic) {
p.printSpaceBeforeIdentifier()
p.addSourceMapping(property.Loc)
p.print("static")
p.printSpace()
}
switch property.Kind {
case js_ast.PropertyGetter:
p.printSpaceBeforeIdentifier()
p.addSourceMapping(property.Loc)
p.print("get")
p.printSpace()
case js_ast.PropertySetter:
p.printSpaceBeforeIdentifier()
p.addSourceMapping(property.Loc)
p.print("set")
p.printSpace()
case js_ast.PropertyAutoAccessor:
p.printSpaceBeforeIdentifier()
p.addSourceMapping(property.Loc)
p.print("accessor")
p.printSpace()
}
if fn, ok := property.ValueOrNil.Data.(*js_ast.EFunction); property.Kind.IsMethodDefinition() && ok {
if fn.Fn.IsAsync {
p.printSpaceBeforeIdentifier()
p.addSourceMapping(property.Loc)
p.print("async")
p.printSpace()
}
if fn.Fn.IsGenerator {
p.addSourceMapping(property.Loc)
p.print("*")
}
}
isComputed := property.Flags.Has(js_ast.PropertyIsComputed)
// Automatically print numbers that would cause a syntax error as computed properties
if !isComputed {
if key, ok := property.Key.Data.(*js_ast.ENumber); ok {
if math.Signbit(key.Value) || (key.Value == positiveInfinity && p.options.MinifySyntax) {
// "{ -1: 0 }" must be printed as "{ [-1]: 0 }"
// "{ 1/0: 0 }" must be printed as "{ [1/0]: 0 }"
isComputed = true
}
}
}
if isComputed {
p.addSourceMapping(property.Loc)
isMultiLine := p.willPrintExprCommentsAtLoc(property.Key.Loc) || p.willPrintExprCommentsAtLoc(property.CloseBracketLoc)
p.print("[")
if isMultiLine {
p.printNewline()
p.options.Indent++
p.printIndent()
}
p.printExpr(property.Key, js_ast.LComma, keyFlags)
if isMultiLine {
p.printNewline()
p.printExprCommentsAfterCloseTokenAtLoc(property.CloseBracketLoc)
p.options.Indent--
p.printIndent()
}
if property.CloseBracketLoc.Start > property.Loc.Start {
p.addSourceMapping(property.CloseBracketLoc)
}
p.print("]")
if property.ValueOrNil.Data != nil {
if fn, ok := property.ValueOrNil.Data.(*js_ast.EFunction); property.Kind.IsMethodDefinition() && ok {
p.printFn(fn.Fn)
return
}
p.print(":")
p.printSpace()
p.printExprWithoutLeadingNewline(property.ValueOrNil, js_ast.LComma, 0)
}
if property.InitializerOrNil.Data != nil {
p.printSpace()
p.print("=")
p.printSpace()
p.printExprWithoutLeadingNewline(property.InitializerOrNil, js_ast.LComma, 0)
}
return
}
switch key := property.Key.Data.(type) {
case *js_ast.EPrivateIdentifier:
name := p.renamer.NameForSymbol(key.Ref)
p.addSourceMappingForName(property.Key.Loc, name, key.Ref)
p.printIdentifier(name)
case *js_ast.ENameOfSymbol:
name := p.mangledPropName(key.Ref)
if p.canPrintIdentifier(name) {
p.printSpaceBeforeIdentifier()
p.addSourceMappingForName(property.Key.Loc, name, key.Ref)
p.printIdentifier(name)
// Use a shorthand property if the names are the same
if !p.options.UnsupportedFeatures.Has(compat.ObjectExtensions) && property.ValueOrNil.Data != nil && !p.willPrintExprCommentsAtLoc(property.ValueOrNil.Loc) {
switch e := property.ValueOrNil.Data.(type) {
case *js_ast.EIdentifier:
if name == p.renamer.NameForSymbol(e.Ref) {
if property.InitializerOrNil.Data != nil {
p.printSpace()
p.print("=")
p.printSpace()
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/js_printer/js_printer_test.go | internal/js_printer/js_printer_test.go | package js_printer
import (
"strings"
"testing"
"github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/js_parser"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/renamer"
"github.com/evanw/esbuild/internal/test"
)
// expectPrintedCommon parses "contents", asserts that parsing produced no
// errors, prints the resulting AST with the given options, and compares the
// printed output against "expected". Each check runs as a subtest named "name".
func expectPrintedCommon(t *testing.T, name string, contents string, expected string, options config.Options) {
	t.Helper()
	t.Run(name, func(t *testing.T) {
		t.Helper()
		log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, nil)
		tree, ok := js_parser.Parse(log, test.SourceForTest(contents), js_parser.OptionsFromConfig(&options))

		// Collect error-level diagnostics and assert that there were none
		var errText strings.Builder
		for _, msg := range log.Done() {
			if msg.Kind == logger.Error {
				errText.WriteString(msg.String(logger.OutputOptions{}, logger.TerminalInfo{}))
			}
		}
		test.AssertEqualWithDiff(t, errText.String(), "")
		if !ok {
			t.Fatal("Parse error")
		}

		// Print the parsed tree without renaming any symbols
		symbols := ast.NewSymbolMap(1)
		symbols.SymbolsForSource[0] = tree.Symbols
		noOp := renamer.NewNoOpRenamer(symbols)
		js := Print(tree, symbols, noOp, Options{
			ASCIIOnly:           options.ASCIIOnly,
			MinifySyntax:        options.MinifySyntax,
			MinifyWhitespace:    options.MinifyWhitespace,
			UnsupportedFeatures: options.UnsupportedJSFeatures,
		}).JS
		test.AssertEqualWithDiff(t, string(js), expected)
	})
}
// expectPrinted asserts the pretty-printed output for "contents" with all
// options at their defaults.
func expectPrinted(t *testing.T, contents, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents, contents, expected, config.Options{})
}
func expectPrintedMinify(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [minified]", contents, expected, config.Options{
MinifyWhitespace: true,
})
}
func expectPrintedMangle(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [mangled]", contents, expected, config.Options{
MinifySyntax: true,
})
}
func expectPrintedMangleMinify(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [mangled, minified]", contents, expected, config.Options{
MinifySyntax: true,
MinifyWhitespace: true,
})
}
func expectPrintedASCII(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [ascii]", contents, expected, config.Options{
ASCIIOnly: true,
})
}
func expectPrintedMinifyASCII(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [ascii]", contents, expected, config.Options{
MinifyWhitespace: true,
ASCIIOnly: true,
})
}
func expectPrintedTarget(t *testing.T, esVersion int, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents, contents, expected, config.Options{
UnsupportedJSFeatures: compat.UnsupportedJSFeatures(map[compat.Engine]compat.Semver{
compat.ES: {Parts: []int{esVersion}},
}),
})
}
func expectPrintedTargetMinify(t *testing.T, esVersion int, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [minified]", contents, expected, config.Options{
UnsupportedJSFeatures: compat.UnsupportedJSFeatures(map[compat.Engine]compat.Semver{
compat.ES: {Parts: []int{esVersion}},
}),
MinifyWhitespace: true,
})
}
func expectPrintedTargetMangle(t *testing.T, esVersion int, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [mangled]", contents, expected, config.Options{
UnsupportedJSFeatures: compat.UnsupportedJSFeatures(map[compat.Engine]compat.Semver{
compat.ES: {Parts: []int{esVersion}},
}),
MinifySyntax: true,
})
}
func expectPrintedTargetASCII(t *testing.T, esVersion int, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [ascii]", contents, expected, config.Options{
UnsupportedJSFeatures: compat.UnsupportedJSFeatures(map[compat.Engine]compat.Semver{
compat.ES: {Parts: []int{esVersion}},
}),
ASCIIOnly: true,
})
}
func expectPrintedJSX(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents, contents, expected, config.Options{
JSX: config.JSXOptions{
Parse: true,
Preserve: true,
},
})
}
func expectPrintedJSXASCII(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents, contents, expected, config.Options{
JSX: config.JSXOptions{
Parse: true,
Preserve: true,
},
ASCIIOnly: true,
})
}
func expectPrintedJSXMinify(t *testing.T, contents string, expected string) {
t.Helper()
expectPrintedCommon(t, contents+" [minified]", contents, expected, config.Options{
JSX: config.JSXOptions{
Parse: true,
Preserve: true,
},
MinifyWhitespace: true,
})
}
// TestNumber checks how float64 values are formatted as JavaScript number
// literals, both pretty-printed and minified, including exponent notation,
// precision loss near integer-range boundaries, the hex-vs-decimal choice
// when minifying, and the space needed before a following "." access.
func TestNumber(t *testing.T) {
	// Check "1eN"
	expectPrinted(t, "x = 1e-100", "x = 1e-100;\n")
	expectPrinted(t, "x = 1e-4", "x = 1e-4;\n")
	expectPrinted(t, "x = 1e-3", "x = 1e-3;\n")
	expectPrinted(t, "x = 1e-2", "x = 0.01;\n")
	expectPrinted(t, "x = 1e-1", "x = 0.1;\n")
	expectPrinted(t, "x = 1e0", "x = 1;\n")
	expectPrinted(t, "x = 1e1", "x = 10;\n")
	expectPrinted(t, "x = 1e2", "x = 100;\n")
	expectPrinted(t, "x = 1e3", "x = 1e3;\n")
	expectPrinted(t, "x = 1e4", "x = 1e4;\n")
	expectPrinted(t, "x = 1e100", "x = 1e100;\n")
	expectPrintedMinify(t, "x = 1e-100", "x=1e-100;")
	expectPrintedMinify(t, "x = 1e-5", "x=1e-5;")
	expectPrintedMinify(t, "x = 1e-4", "x=1e-4;")
	expectPrintedMinify(t, "x = 1e-3", "x=.001;")
	expectPrintedMinify(t, "x = 1e-2", "x=.01;")
	expectPrintedMinify(t, "x = 1e-1", "x=.1;")
	expectPrintedMinify(t, "x = 1e0", "x=1;")
	expectPrintedMinify(t, "x = 1e1", "x=10;")
	expectPrintedMinify(t, "x = 1e2", "x=100;")
	expectPrintedMinify(t, "x = 1e3", "x=1e3;")
	expectPrintedMinify(t, "x = 1e4", "x=1e4;")
	expectPrintedMinify(t, "x = 1e100", "x=1e100;")

	// Check "12eN"
	expectPrinted(t, "x = 12e-100", "x = 12e-100;\n")
	expectPrinted(t, "x = 12e-5", "x = 12e-5;\n")
	expectPrinted(t, "x = 12e-4", "x = 12e-4;\n")
	expectPrinted(t, "x = 12e-3", "x = 0.012;\n")
	expectPrinted(t, "x = 12e-2", "x = 0.12;\n")
	expectPrinted(t, "x = 12e-1", "x = 1.2;\n")
	expectPrinted(t, "x = 12e0", "x = 12;\n")
	expectPrinted(t, "x = 12e1", "x = 120;\n")
	expectPrinted(t, "x = 12e2", "x = 1200;\n")
	expectPrinted(t, "x = 12e3", "x = 12e3;\n")
	expectPrinted(t, "x = 12e4", "x = 12e4;\n")
	expectPrinted(t, "x = 12e100", "x = 12e100;\n")
	expectPrintedMinify(t, "x = 12e-100", "x=12e-100;")
	expectPrintedMinify(t, "x = 12e-6", "x=12e-6;")
	expectPrintedMinify(t, "x = 12e-5", "x=12e-5;")
	expectPrintedMinify(t, "x = 12e-4", "x=.0012;")
	expectPrintedMinify(t, "x = 12e-3", "x=.012;")
	expectPrintedMinify(t, "x = 12e-2", "x=.12;")
	expectPrintedMinify(t, "x = 12e-1", "x=1.2;")
	expectPrintedMinify(t, "x = 12e0", "x=12;")
	expectPrintedMinify(t, "x = 12e1", "x=120;")
	expectPrintedMinify(t, "x = 12e2", "x=1200;")
	expectPrintedMinify(t, "x = 12e3", "x=12e3;")
	expectPrintedMinify(t, "x = 12e4", "x=12e4;")
	expectPrintedMinify(t, "x = 12e100", "x=12e100;")

	// Check cases for "A.BeX" => "ABeY" simplification
	expectPrinted(t, "x = 123456789", "x = 123456789;\n")
	expectPrinted(t, "x = 1123456789", "x = 1123456789;\n")
	expectPrinted(t, "x = 10123456789", "x = 10123456789;\n")
	expectPrinted(t, "x = 100123456789", "x = 100123456789;\n")
	expectPrinted(t, "x = 1000123456789", "x = 1000123456789;\n")
	expectPrinted(t, "x = 10000123456789", "x = 10000123456789;\n")
	expectPrinted(t, "x = 100000123456789", "x = 100000123456789;\n")
	expectPrinted(t, "x = 1000000123456789", "x = 1000000123456789;\n")
	expectPrinted(t, "x = 10000000123456789", "x = 10000000123456788;\n")
	expectPrinted(t, "x = 100000000123456789", "x = 100000000123456780;\n")
	expectPrinted(t, "x = 1000000000123456789", "x = 1000000000123456800;\n")
	expectPrinted(t, "x = 10000000000123456789", "x = 10000000000123458e3;\n")
	expectPrinted(t, "x = 100000000000123456789", "x = 10000000000012345e4;\n")

	// Check numbers around the ends of various integer ranges. These were
	// crashing in the WebAssembly build due to a bug in the Go runtime.

	// int32
	expectPrinted(t, "x = 0x7fff_ffff", "x = 2147483647;\n")
	expectPrinted(t, "x = 0x8000_0000", "x = 2147483648;\n")
	expectPrinted(t, "x = 0x8000_0001", "x = 2147483649;\n")
	expectPrinted(t, "x = -0x7fff_ffff", "x = -2147483647;\n")
	expectPrinted(t, "x = -0x8000_0000", "x = -2147483648;\n")
	expectPrinted(t, "x = -0x8000_0001", "x = -2147483649;\n")

	// uint32
	expectPrinted(t, "x = 0xffff_ffff", "x = 4294967295;\n")
	expectPrinted(t, "x = 0x1_0000_0000", "x = 4294967296;\n")
	expectPrinted(t, "x = 0x1_0000_0001", "x = 4294967297;\n")
	expectPrinted(t, "x = -0xffff_ffff", "x = -4294967295;\n")
	expectPrinted(t, "x = -0x1_0000_0000", "x = -4294967296;\n")
	expectPrinted(t, "x = -0x1_0000_0001", "x = -4294967297;\n")

	// int64
	expectPrinted(t, "x = 0x7fff_ffff_ffff_fdff", "x = 9223372036854775e3;\n")
	expectPrinted(t, "x = 0x8000_0000_0000_0000", "x = 9223372036854776e3;\n")
	expectPrinted(t, "x = 0x8000_0000_0000_3000", "x = 9223372036854788e3;\n")
	expectPrinted(t, "x = -0x7fff_ffff_ffff_fdff", "x = -9223372036854775e3;\n")
	expectPrinted(t, "x = -0x8000_0000_0000_0000", "x = -9223372036854776e3;\n")
	expectPrinted(t, "x = -0x8000_0000_0000_3000", "x = -9223372036854788e3;\n")

	// uint64
	expectPrinted(t, "x = 0xffff_ffff_ffff_fbff", "x = 1844674407370955e4;\n")
	expectPrinted(t, "x = 0x1_0000_0000_0000_0000", "x = 18446744073709552e3;\n")
	expectPrinted(t, "x = 0x1_0000_0000_0000_1000", "x = 18446744073709556e3;\n")
	expectPrinted(t, "x = -0xffff_ffff_ffff_fbff", "x = -1844674407370955e4;\n")
	expectPrinted(t, "x = -0x1_0000_0000_0000_0000", "x = -18446744073709552e3;\n")
	expectPrinted(t, "x = -0x1_0000_0000_0000_1000", "x = -18446744073709556e3;\n")

	// Check the hex vs. decimal decision boundary when minifying
	expectPrinted(t, "x = 999999999999", "x = 999999999999;\n")
	expectPrinted(t, "x = 1000000000001", "x = 1000000000001;\n")
	expectPrinted(t, "x = 0x0FFF_FFFF_FFFF_FF80", "x = 1152921504606846800;\n")
	expectPrinted(t, "x = 0x1000_0000_0000_0000", "x = 1152921504606847e3;\n")
	expectPrinted(t, "x = 0xFFFF_FFFF_FFFF_F000", "x = 18446744073709548e3;\n")
	expectPrinted(t, "x = 0xFFFF_FFFF_FFFF_F800", "x = 1844674407370955e4;\n")
	expectPrinted(t, "x = 0xFFFF_FFFF_FFFF_FFFF", "x = 18446744073709552e3;\n")
	expectPrintedMinify(t, "x = 999999999999", "x=999999999999;")
	expectPrintedMinify(t, "x = 1000000000001", "x=0xe8d4a51001;")
	expectPrintedMinify(t, "x = 0x0FFF_FFFF_FFFF_FF80", "x=0xfffffffffffff80;")
	expectPrintedMinify(t, "x = 0x1000_0000_0000_0000", "x=1152921504606847e3;")
	expectPrintedMinify(t, "x = 0xFFFF_FFFF_FFFF_F000", "x=0xfffffffffffff000;")
	expectPrintedMinify(t, "x = 0xFFFF_FFFF_FFFF_F800", "x=1844674407370955e4;")
	expectPrintedMinify(t, "x = 0xFFFF_FFFF_FFFF_FFFF", "x=18446744073709552e3;")

	// Check printing a space in between a number and a subsequent "."
	expectPrintedMinify(t, "x = 0.0001 .y", "x=1e-4.y;")
	expectPrintedMinify(t, "x = 0.001 .y", "x=.001.y;")
	expectPrintedMinify(t, "x = 0.01 .y", "x=.01.y;")
	expectPrintedMinify(t, "x = 0.1 .y", "x=.1.y;")
	expectPrintedMinify(t, "x = 0 .y", "x=0 .y;")
	expectPrintedMinify(t, "x = 10 .y", "x=10 .y;")
	expectPrintedMinify(t, "x = 100 .y", "x=100 .y;")
	expectPrintedMinify(t, "x = 1000 .y", "x=1e3.y;")
	expectPrintedMinify(t, "x = 12345 .y", "x=12345 .y;")
	expectPrintedMinify(t, "x = 0xFFFF_0000_FFFF_0000 .y", "x=0xffff0000ffff0000.y;")
}
// TestArray checks how array literals with trailing holes are printed
// (a hole at the end needs a trailing comma to survive a round-trip).
func TestArray(t *testing.T) {
	expectPrinted(t, "[]", "[];\n")
	expectPrinted(t, "[,]", "[,];\n")
	expectPrinted(t, "[,,]", "[, ,];\n")
}
// TestSplat checks that spread of a comma expression keeps its parentheses
// in array literals, call arguments, and object literals.
func TestSplat(t *testing.T) {
	expectPrinted(t, "[...(a, b)]", "[...(a, b)];\n")
	expectPrinted(t, "x(...(a, b))", "x(...(a, b));\n")
	expectPrinted(t, "({...(a, b)})", "({ ...(a, b) });\n")
}
// TestNew checks parenthesization of "new" expressions (so that member and
// call expressions bind to the intended target) and preservation of
// Webpack-style magic comments inside "new Worker(...)" arguments.
func TestNew(t *testing.T) {
	expectPrinted(t, "new x", "new x();\n")
	expectPrinted(t, "new x()", "new x();\n")
	expectPrinted(t, "new (x)", "new x();\n")
	expectPrinted(t, "new (x())", "new (x())();\n")
	expectPrinted(t, "new (new x())", "new new x()();\n")
	expectPrinted(t, "new (x + x)", "new (x + x)();\n")
	expectPrinted(t, "(new x)()", "new x()();\n")

	expectPrinted(t, "new foo().bar", "new foo().bar;\n")
	expectPrinted(t, "new (foo().bar)", "new (foo()).bar();\n")
	expectPrinted(t, "new (foo()).bar", "new (foo()).bar();\n")
	expectPrinted(t, "new foo()[bar]", "new foo()[bar];\n")
	expectPrinted(t, "new (foo()[bar])", "new (foo())[bar]();\n")
	expectPrinted(t, "new (foo())[bar]", "new (foo())[bar]();\n")

	expectPrinted(t, "new (import('foo').bar)", "new (import(\"foo\")).bar();\n")
	expectPrinted(t, "new (import('foo')).bar", "new (import(\"foo\")).bar();\n")
	expectPrinted(t, "new (import('foo')[bar])", "new (import(\"foo\"))[bar]();\n")
	expectPrinted(t, "new (import('foo'))[bar]", "new (import(\"foo\"))[bar]();\n")

	expectPrintedMinify(t, "new x", "new x;")
	expectPrintedMinify(t, "new x.y", "new x.y;")
	expectPrintedMinify(t, "(new x).y", "new x().y;")
	expectPrintedMinify(t, "new x().y", "new x().y;")
	expectPrintedMinify(t, "new x() + y", "new x+y;")
	expectPrintedMinify(t, "new x() ** 2", "new x**2;")

	// Test preservation of Webpack-specific comments
	expectPrinted(t, "new Worker(// webpackFoo: 1\n // webpackBar: 2\n 'path');", "new Worker(\n  // webpackFoo: 1\n  // webpackBar: 2\n  \"path\"\n);\n")
	expectPrinted(t, "new Worker(/* webpackFoo: 1 */ /* webpackBar: 2 */ 'path');", "new Worker(\n  /* webpackFoo: 1 */\n  /* webpackBar: 2 */\n  \"path\"\n);\n")
	expectPrinted(t, "new Worker(\n    /* multi\n     * line\n     * webpackBar: */ 'path');", "new Worker(\n  /* multi\n   * line\n   * webpackBar: */\n  \"path\"\n);\n")
	expectPrinted(t, "new Worker(/* webpackFoo: 1 */ 'path' /* webpackBar:2 */);", "new Worker(\n  /* webpackFoo: 1 */\n  \"path\"\n  /* webpackBar:2 */\n);\n")
	expectPrinted(t, "new Worker(/* webpackFoo: 1 */ 'path' /* webpackBar:2 */ ,);", "new Worker(\n  /* webpackFoo: 1 */\n  \"path\"\n);\n") // Not currently handled
	expectPrinted(t, "new Worker(/* webpackFoo: 1 */ 'path', /* webpackBar:2 */ );", "new Worker(\n  /* webpackFoo: 1 */\n  \"path\"\n  /* webpackBar:2 */\n);\n")
	expectPrinted(t, "new Worker(new URL('path', /* webpackFoo: these can go anywhere */ import.meta.url))",
		"new Worker(new URL(\n  \"path\",\n  /* webpackFoo: these can go anywhere */\n  import.meta.url\n));\n")
}
// TestCall checks printing of call chains and of direct vs. indirect "eval"
// (the "(0, eval)" form must be preserved because it changes eval semantics).
func TestCall(t *testing.T) {
	expectPrinted(t, "x()()()", "x()()();\n")
	expectPrinted(t, "x().y()[z]()", "x().y()[z]();\n")
	expectPrinted(t, "(--x)();", "(--x)();\n")
	expectPrinted(t, "(x--)();", "(x--)();\n")

	expectPrinted(t, "eval(x)", "eval(x);\n")
	expectPrinted(t, "eval?.(x)", "eval?.(x);\n")
	expectPrinted(t, "(eval)(x)", "eval(x);\n")
	expectPrinted(t, "(eval)?.(x)", "eval?.(x);\n")

	expectPrinted(t, "eval(x, y)", "eval(x, y);\n")
	expectPrinted(t, "eval?.(x, y)", "eval?.(x, y);\n")
	expectPrinted(t, "(1, eval)(x)", "(1, eval)(x);\n")
	expectPrinted(t, "(1, eval)?.(x)", "(1, eval)?.(x);\n")
	expectPrintedMangle(t, "(1 ? eval : 2)(x)", "(0, eval)(x);\n")
	expectPrintedMangle(t, "(1 ? eval : 2)?.(x)", "eval?.(x);\n")

	expectPrintedMinify(t, "eval?.(x)", "eval?.(x);")
	expectPrintedMinify(t, "eval(x,y)", "eval(x,y);")
	expectPrintedMinify(t, "eval?.(x,y)", "eval?.(x,y);")
	expectPrintedMinify(t, "(1, eval)(x)", "(1,eval)(x);")
	expectPrintedMinify(t, "(1, eval)?.(x)", "(1,eval)?.(x);")
	expectPrintedMangleMinify(t, "(1 ? eval : 2)(x)", "(0,eval)(x);")
	expectPrintedMangleMinify(t, "(1 ? eval : 2)?.(x)", "eval?.(x);")
}
// TestMember checks printing of member-access chains, including the
// parentheses required around a parenthesized arithmetic receiver.
func TestMember(t *testing.T) {
	cases := []struct {
		input    string
		expected string
	}{
		{"x.y[z]", "x.y[z];\n"},
		{"((x+1).y+1)[z]", "((x + 1).y + 1)[z];\n"},
	}
	for _, c := range cases {
		expectPrinted(t, c.input, c.expected)
	}
}
// TestComma checks parenthesization of comma expressions: they are
// flattened at statement level, but must stay parenthesized anywhere a
// single expression is required (declaration initializers, assignment
// right-hand sides, and a single call argument).
func TestComma(t *testing.T) {
	expectPrinted(t, "1, 2, 3", "1, 2, 3;\n")
	expectPrinted(t, "(1, 2), 3", "1, 2, 3;\n")
	expectPrinted(t, "1, (2, 3)", "1, 2, 3;\n")
	expectPrinted(t, "a ? (b, c) : (d, e)", "a ? (b, c) : (d, e);\n")
	expectPrinted(t, "let x = (a, b)", "let x = (a, b);\n")
	expectPrinted(t, "(x = a), b", "x = a, b;\n")
	expectPrinted(t, "x = (a, b)", "x = (a, b);\n")
	// Without the parentheses this would be a two-argument call
	expectPrinted(t, "x((1, 2))", "x((1, 2));\n")
}
// TestUnary checks that a prefix unary operator applied to a postfix
// update expression prints without redundant parentheses.
func TestUnary(t *testing.T) {
	cases := []struct {
		input    string
		expected string
	}{
		{"+(x--)", "+x--;\n"},
		{"-(x++)", "-x++;\n"},
	}
	for _, c := range cases {
		expectPrinted(t, c.input, c.expected)
	}
}
// TestNullish checks parenthesization around "??". The ECMAScript
// grammar forbids "??" from mixing directly with "||" or "&&" in either
// direction, so those combinations must always print with explicit
// parentheses to remain valid syntax.
func TestNullish(t *testing.T) {
	// "??" can't directly contain "||" or "&&"
	expectPrinted(t, "(a && b) ?? c", "(a && b) ?? c;\n")
	expectPrinted(t, "(a || b) ?? c", "(a || b) ?? c;\n")
	expectPrinted(t, "a ?? (b && c)", "a ?? (b && c);\n")
	expectPrinted(t, "a ?? (b || c)", "a ?? (b || c);\n")

	// "||" and "&&" can't directly contain "??"
	expectPrinted(t, "a && (b ?? c)", "a && (b ?? c);\n")
	expectPrinted(t, "a || (b ?? c)", "a || (b ?? c);\n")
	expectPrinted(t, "(a ?? b) && c", "(a ?? b) && c;\n")
	expectPrinted(t, "(a ?? b) || c", "(a ?? b) || c;\n")
}
// TestString checks string-literal printing: quote-character selection,
// escape sequences (null, hex, and Unicode escapes), and surrogate
// handling. Note that many inputs below use Go-level escapes to embed
// raw control characters or code points into the JS source.
func TestString(t *testing.T) {
	expectPrinted(t, "let x = ''", "let x = \"\";\n")
	expectPrinted(t, "let x = '\b'", "let x = \"\\b\";\n")
	expectPrinted(t, "let x = '\f'", "let x = \"\\f\";\n")
	expectPrinted(t, "let x = '\t'", "let x = \"\t\";\n")
	expectPrinted(t, "let x = '\v'", "let x = \"\\v\";\n")
	expectPrinted(t, "let x = '\\n'", "let x = \"\\n\";\n")
	// Quote selection: prefer double quotes, fall back to single quotes
	// or backticks when that avoids escaping
	expectPrinted(t, "let x = '\\''", "let x = \"'\";\n")
	expectPrinted(t, "let x = '\\\"'", "let x = '\"';\n")
	expectPrinted(t, "let x = '\\'\"'", "let x = `'\"`;\n")
	expectPrinted(t, "let x = '\\\\'", "let x = \"\\\\\";\n")
	// "\0" is only safe when not followed by a digit
	expectPrinted(t, "let x = '\x00'", "let x = \"\\0\";\n")
	expectPrinted(t, "let x = '\x00!'", "let x = \"\\0!\";\n")
	expectPrinted(t, "let x = '\x001'", "let x = \"\\x001\";\n")
	expectPrinted(t, "let x = '\\0'", "let x = \"\\0\";\n")
	expectPrinted(t, "let x = '\\0!'", "let x = \"\\0!\";\n")
	expectPrinted(t, "let x = '\x07'", "let x = \"\\x07\";\n")
	expectPrinted(t, "let x = '\x07!'", "let x = \"\\x07!\";\n")
	expectPrinted(t, "let x = '\x071'", "let x = \"\\x071\";\n")
	expectPrinted(t, "let x = '\\7'", "let x = \"\\x07\";\n")
	expectPrinted(t, "let x = '\\7!'", "let x = \"\\x07!\";\n")
	expectPrinted(t, "let x = '\\01'", "let x = \"\x01\";\n")
	expectPrinted(t, "let x = '\x10'", "let x = \"\x10\";\n")
	expectPrinted(t, "let x = '\\x10'", "let x = \"\x10\";\n")
	expectPrinted(t, "let x = '\x1B'", "let x = \"\\x1B\";\n")
	expectPrinted(t, "let x = '\\x1B'", "let x = \"\\x1B\";\n")
	expectPrinted(t, "let x = '\uABCD'", "let x = \"\uABCD\";\n")
	expectPrinted(t, "let x = '\\uABCD'", "let x = \"\uABCD\";\n")
	expectPrinted(t, "let x = '\U000123AB'", "let x = \"\U000123AB\";\n")
	expectPrinted(t, "let x = '\\u{123AB}'", "let x = \"\U000123AB\";\n")
	// A valid surrogate pair prints as the combined code point
	expectPrinted(t, "let x = '\\uD808\\uDFAB'", "let x = \"\U000123AB\";\n")
	// Lone surrogate halves are not valid UTF-8 so they must stay escaped
	expectPrinted(t, "let x = '\\uD808'", "let x = \"\\uD808\";\n")
	expectPrinted(t, "let x = '\\uD808X'", "let x = \"\\uD808X\";\n")
	expectPrinted(t, "let x = '\\uDFAB'", "let x = \"\\uDFAB\";\n")
	expectPrinted(t, "let x = '\\uDFABX'", "let x = \"\\uDFABX\";\n")
	expectPrinted(t, "let x = '\\x80'", "let x = \"\U00000080\";\n")
	expectPrinted(t, "let x = '\\xFF'", "let x = \"\U000000FF\";\n")
	expectPrinted(t, "let x = '\\xF0\\x9F\\x8D\\x95'", "let x = \"\U000000F0\U0000009F\U0000008D\U00000095\";\n")
	expectPrinted(t, "let x = '\\uD801\\uDC02\\uDC03\\uD804'", "let x = \"\U00010402\\uDC03\\uD804\";\n")
}
// TestTemplate checks template-literal printing: escape handling in
// cooked vs. raw ("String.raw") templates, "${" sequences that must
// stay escaped, and the parenthesization rules when tagged templates
// interact with "await" and "new".
func TestTemplate(t *testing.T) {
	expectPrinted(t, "let x = `\\0`", "let x = `\\0`;\n")
	expectPrinted(t, "let x = `\\x01`", "let x = `\x01`;\n")
	expectPrinted(t, "let x = `\\0${0}`", "let x = `\\0${0}`;\n")
	expectPrinted(t, "let x = `\\x01${0}`", "let x = `\x01${0}`;\n")
	expectPrinted(t, "let x = `${0}\\0`", "let x = `${0}\\0`;\n")
	expectPrinted(t, "let x = `${0}\\x01`", "let x = `${0}\x01`;\n")
	expectPrinted(t, "let x = `${0}\\0${1}`", "let x = `${0}\\0${1}`;\n")
	expectPrinted(t, "let x = `${0}\\x01${1}`", "let x = `${0}\x01${1}`;\n")
	// "String.raw" templates must keep the original escape text verbatim
	expectPrinted(t, "let x = String.raw`\\1`", "let x = String.raw`\\1`;\n")
	expectPrinted(t, "let x = String.raw`\\x01`", "let x = String.raw`\\x01`;\n")
	expectPrinted(t, "let x = String.raw`\\1${0}`", "let x = String.raw`\\1${0}`;\n")
	expectPrinted(t, "let x = String.raw`\\x01${0}`", "let x = String.raw`\\x01${0}`;\n")
	expectPrinted(t, "let x = String.raw`${0}\\1`", "let x = String.raw`${0}\\1`;\n")
	expectPrinted(t, "let x = String.raw`${0}\\x01`", "let x = String.raw`${0}\\x01`;\n")
	expectPrinted(t, "let x = String.raw`${0}\\1${1}`", "let x = String.raw`${0}\\1${1}`;\n")
	expectPrinted(t, "let x = String.raw`${0}\\x01${1}`", "let x = String.raw`${0}\\x01${1}`;\n")
	// "${" must stay escaped so it isn't parsed as a substitution
	expectPrinted(t, "let x = `${y}`", "let x = `${y}`;\n")
	expectPrinted(t, "let x = `$(y)`", "let x = `$(y)`;\n")
	expectPrinted(t, "let x = `{y}$`", "let x = `{y}$`;\n")
	expectPrinted(t, "let x = `$}y{`", "let x = `$}y{`;\n")
	expectPrinted(t, "let x = `\\${y}`", "let x = `\\${y}`;\n")
	expectPrinted(t, "let x = `$\\{y}`", "let x = `\\${y}`;\n")
	// Tagged templates combined with "await"
	expectPrinted(t, "await tag`x`", "await tag`x`;\n")
	expectPrinted(t, "await (tag`x`)", "await tag`x`;\n")
	expectPrinted(t, "(await tag)`x`", "(await tag)`x`;\n")
	expectPrinted(t, "await tag`${x}`", "await tag`${x}`;\n")
	expectPrinted(t, "await (tag`${x}`)", "await tag`${x}`;\n")
	expectPrinted(t, "(await tag)`${x}`", "(await tag)`${x}`;\n")
	// Tagged templates combined with "new"; minified output may drop the
	// empty argument list
	expectPrinted(t, "new tag`x`", "new tag`x`();\n")
	expectPrinted(t, "new (tag`x`)", "new tag`x`();\n")
	expectPrinted(t, "new tag()`x`", "new tag()`x`;\n")
	expectPrinted(t, "(new tag)`x`", "new tag()`x`;\n")
	expectPrintedMinify(t, "new tag`x`", "new tag`x`;")
	expectPrintedMinify(t, "new (tag`x`)", "new tag`x`;")
	expectPrintedMinify(t, "new tag()`x`", "new tag()`x`;")
	expectPrintedMinify(t, "(new tag)`x`", "new tag()`x`;")
	expectPrinted(t, "new tag`${x}`", "new tag`${x}`();\n")
	expectPrinted(t, "new (tag`${x}`)", "new tag`${x}`();\n")
	expectPrinted(t, "new tag()`${x}`", "new tag()`${x}`;\n")
	expectPrinted(t, "(new tag)`${x}`", "new tag()`${x}`;\n")
	expectPrintedMinify(t, "new tag`${x}`", "new tag`${x}`;")
	expectPrintedMinify(t, "new (tag`${x}`)", "new tag`${x}`;")
	expectPrintedMinify(t, "new tag()`${x}`", "new tag()`${x}`;")
	expectPrintedMinify(t, "(new tag)`${x}`", "new tag()`${x}`;")
}
// TestObject checks object-literal printing: the parentheses required
// when an object literal starts an expression statement or arrow body,
// and the special-casing of "__proto__", whose shorthand form changes
// meaning and therefore must never be produced by the printer.
func TestObject(t *testing.T) {
	expectPrinted(t, "let x = {'(':')'}", "let x = { \"(\": \")\" };\n")
	// An object literal at the start of a statement needs parentheses so
	// it isn't parsed as a block
	expectPrinted(t, "({})", "({});\n")
	expectPrinted(t, "({}.x)", "({}).x;\n")
	expectPrinted(t, "({} = {})", "({} = {});\n")
	expectPrinted(t, "(x, {} = {})", "x, {} = {};\n")
	expectPrinted(t, "let x = () => ({})", "let x = () => ({});\n")
	expectPrinted(t, "let x = () => ({}.x)", "let x = () => ({}).x;\n")
	expectPrinted(t, "let x = () => ({} = {})", "let x = () => ({} = {});\n")
	expectPrinted(t, "let x = () => (x, {} = {})", "let x = () => (x, {} = {});\n")

	// "{ __proto__: __proto__ }" must not become "{ __proto__ }"
	expectPrinted(t, "function foo(__proto__) { return { __proto__: __proto__ } }", "function foo(__proto__) {\n  return { __proto__: __proto__ };\n}\n")
	expectPrinted(t, "function foo(__proto__) { return { '__proto__': __proto__ } }", "function foo(__proto__) {\n  return { \"__proto__\": __proto__ };\n}\n")
	expectPrinted(t, "function foo(__proto__) { return { ['__proto__']: __proto__ } }", "function foo(__proto__) {\n  return { [\"__proto__\"]: __proto__ };\n}\n")
	expectPrinted(t, "import { __proto__ } from 'foo'; let foo = () => ({ __proto__: __proto__ })", "import { __proto__ } from \"foo\";\nlet foo = () => ({ __proto__: __proto__ });\n")
	expectPrinted(t, "import { __proto__ } from 'foo'; let foo = () => ({ '__proto__': __proto__ })", "import { __proto__ } from \"foo\";\nlet foo = () => ({ \"__proto__\": __proto__ });\n")
	expectPrinted(t, "import { __proto__ } from 'foo'; let foo = () => ({ ['__proto__']: __proto__ })", "import { __proto__ } from \"foo\";\nlet foo = () => ({ [\"__proto__\"]: __proto__ });\n")

	// Don't use ES6+ features (such as a shorthand or computed property name) in ES5
	expectPrintedTarget(t, 5, "function foo(__proto__) { return { __proto__ } }", "function foo(__proto__) {\n  return { __proto__: __proto__ };\n}\n")
}
// TestSwitch checks how comments attached to switch case clauses are
// printed (they currently survive, attached before each clause).
func TestSwitch(t *testing.T) {
	// Ideally comments on case clauses would be preserved
	expectPrinted(t, "switch (x) { /* 1 */ case 1: /* 2 */ case 2: /* default */ default: break }",
		"switch (x) {\n  /* 1 */\n  case 1:\n  /* 2 */\n  case 2:\n  /* default */\n  default:\n    break;\n}\n")
}
// TestFor checks for-loop printing, primarily the parentheses needed
// around "in" operators inside a C-style for initializer (where a bare
// "in" would be parsed as a for-in loop) and around comma expressions
// in for-of heads.
func TestFor(t *testing.T) {
	// Make sure "in" expressions are forbidden in the right places
	expectPrinted(t, "for ((a in b);;);", "for ((a in b); ; ) ;\n")
	expectPrinted(t, "for (a ? b : (c in d);;);", "for (a ? b : (c in d); ; ) ;\n")
	expectPrinted(t, "for ((a ? b : c in d).foo;;);", "for ((a ? b : c in d).foo; ; ) ;\n")
	expectPrinted(t, "for (var x = (a in b);;);", "for (var x = (a in b); ; ) ;\n")
	expectPrinted(t, "for (x = (a in b);;);", "for (x = (a in b); ; ) ;\n")
	expectPrinted(t, "for (x == (a in b);;);", "for (x == (a in b); ; ) ;\n")
	expectPrinted(t, "for (1 * (x == a in b);;);", "for (1 * (x == a in b); ; ) ;\n")
	expectPrinted(t, "for (a ? b : x = (c in d);;);", "for (a ? b : x = (c in d); ; ) ;\n")
	expectPrinted(t, "for (var x = y = (a in b);;);", "for (var x = y = (a in b); ; ) ;\n")
	expectPrinted(t, "for ([a in b];;);", "for ([a in b]; ; ) ;\n")
	expectPrinted(t, "for (x(a in b);;);", "for (x(a in b); ; ) ;\n")
	expectPrinted(t, "for (x[a in b];;);", "for (x[a in b]; ; ) ;\n")
	expectPrinted(t, "for (x?.[a in b];;);", "for (x?.[a in b]; ; ) ;\n")
	expectPrinted(t, "for ((x => a in b);;);", "for (((x) => a in b); ; ) ;\n")

	// Make sure for-of loops with commas are wrapped in parentheses
	expectPrinted(t, "for (let a in b, c);", "for (let a in b, c) ;\n")
	expectPrinted(t, "for (let a of (b, c));", "for (let a of (b, c)) ;\n")
}
// TestFunction checks printing of function parameter lists: comma
// expressions in default values, rest parameters, computed keys in
// destructuring defaults, and array holes in binding patterns.
func TestFunction(t *testing.T) {
	cases := []struct {
		input    string
		expected string
	}{
		{"function foo(a = (b, c), ...d) {}",
			"function foo(a = (b, c), ...d) {\n}\n"},
		{"function foo({[1 + 2]: a = 3} = {[1 + 2]: 3}) {}",
			"function foo({ [1 + 2]: a = 3 } = { [1 + 2]: 3 }) {\n}\n"},
		{"function foo([a = (1, 2), ...[b, ...c]] = [1, [2, 3]]) {}",
			"function foo([a = (1, 2), ...[b, ...c]] = [1, [2, 3]]) {\n}\n"},
		{"function foo([] = []) {}",
			"function foo([] = []) {\n}\n"},
		{"function foo([,] = [,]) {}",
			"function foo([,] = [,]) {\n}\n"},
		{"function foo([,,] = [,,]) {}",
			"function foo([, ,] = [, ,]) {\n}\n"},
	}
	for _, c := range cases {
		expectPrinted(t, c.input, c.expected)
	}
}
// TestCommentsAndParentheses checks where leading comments end up when
// the printer has to add parentheses around the commented expression
// (IIFEs, export defaults, arrow bodies, for-of heads, and yield).
func TestCommentsAndParentheses(t *testing.T) {
	expectPrinted(t, "(/* foo */ { x() { foo() } }.x());", "/* foo */\n({ x() {\n  foo();\n} }).x();\n")
	expectPrinted(t, "(/* foo */ function f() { foo(f) }());", "/* foo */\n(function f() {\n  foo(f);\n})();\n")
	expectPrinted(t, "(/* foo */ class x { static y() { foo(x) } }.y());", "/* foo */\n(class x {\n  static y() {\n    foo(x);\n  }\n}).y();\n")
	expectPrinted(t, "(/* @__PURE__ */ (() => foo())());", "/* @__PURE__ */ (() => foo())();\n")
	expectPrinted(t, "export default (/* foo */ function f() {});", "export default (\n  /* foo */\n  (function f() {\n  })\n);\n")
	expectPrinted(t, "export default (/* foo */ class x {});", "export default (\n  /* foo */\n  class x {\n  }\n);\n")
	expectPrinted(t, "x = () => (/* foo */ {});", "x = () => (\n  /* foo */\n  {}\n);\n")
	expectPrinted(t, "for ((/* foo */ let).x of y) ;", "for (\n  /* foo */\n  (let).x of y\n) ;\n")
	expectPrinted(t, "for (/* foo */ (let).x of y) ;", "for (\n  /* foo */\n  (let).x of y\n) ;\n")
	expectPrinted(t, "function *x() { yield (/* foo */ y) }", "function* x() {\n  yield (\n    /* foo */\n    y\n  );\n}\n")
}
// TestPureComment checks that "/* @__PURE__ */" annotations on call and
// new expressions are normalized and re-printed, including on IIFEs and
// export-default expressions.
func TestPureComment(t *testing.T) {
	cases := []struct {
		input    string
		expected string
	}{
		{"(function() { foo() })",
			"(function() {\n  foo();\n});\n"},
		{"(function() { foo() })()",
			"(function() {\n  foo();\n})();\n"},
		{"/*@__PURE__*/(function() { foo() })()",
			"/* @__PURE__ */ (function() {\n  foo();\n})();\n"},
		{"new (function() {})",
			"new (function() {\n})();\n"},
		{"new (function() {})()",
			"new (function() {\n})();\n"},
		{"/*@__PURE__*/new (function() {})()",
			"/* @__PURE__ */ new (function() {\n})();\n"},
		{"export default (function() { foo() })",
			"export default (function() {\n  foo();\n});\n"},
		{"export default (function() { foo() })()",
			"export default (function() {\n  foo();\n})();\n"},
		{"export default /*@__PURE__*/(function() { foo() })()",
			"export default /* @__PURE__ */ (function() {\n  foo();\n})();\n"},
	}
	for _, c := range cases {
		expectPrinted(t, c.input, c.expected)
	}
}
// TestGenerator checks printing of generator functions and methods: the
// "*" placement on declarations, expressions, class methods, static
// methods, and computed-name methods.
func TestGenerator(t *testing.T) {
	cases := []struct {
		input    string
		expected string
	}{
		{"function* foo() {}",
			"function* foo() {\n}\n"},
		{"(function* () {})",
			"(function* () {\n});\n"},
		{"(function* foo() {})",
			"(function* foo() {\n});\n"},
		{"class Foo { *foo() {} }",
			"class Foo {\n  *foo() {\n  }\n}\n"},
		{"class Foo { static *foo() {} }",
			"class Foo {\n  static *foo() {\n  }\n}\n"},
		{"class Foo { *[foo]() {} }",
			"class Foo {\n  *[foo]() {\n  }\n}\n"},
		{"class Foo { static *[foo]() {} }",
			"class Foo {\n  static *[foo]() {\n  }\n}\n"},
		{"(class { *foo() {} })",
			"(class {\n  *foo() {\n  }\n});\n"},
		{"(class { static *foo() {} })",
			"(class {\n  static *foo() {\n  }\n});\n"},
		{"(class { *[foo]() {} })",
			"(class {\n  *[foo]() {\n  }\n});\n"},
		{"(class { static *[foo]() {} })",
			"(class {\n  static *[foo]() {\n  }\n});\n"},
	}
	for _, c := range cases {
		expectPrinted(t, c.input, c.expected)
	}
}
// TestArrow checks arrow-function printing: parenthesized single
// parameters, comma expressions in bodies and defaults, destructuring
// parameter patterns, and expressions that merely look like arrow
// parameter lists but are not.
func TestArrow(t *testing.T) {
	expectPrinted(t, "() => {}", "() => {\n};\n")
	expectPrinted(t, "x => (x, 0)", "(x) => (x, 0);\n")
	expectPrinted(t, "x => {y}", "(x) => {\n  y;\n};\n")
	expectPrinted(t,
		"(a = (b, c), ...d) => {}",
		"(a = (b, c), ...d) => {\n};\n")
	expectPrinted(t,
		"({[1 + 2]: a = 3} = {[1 + 2]: 3}) => {}",
		"({ [1 + 2]: a = 3 } = { [1 + 2]: 3 }) => {\n};\n")
	expectPrinted(t,
		"([a = (1, 2), ...[b, ...c]] = [1, [2, 3]]) => {}",
		"([a = (1, 2), ...[b, ...c]] = [1, [2, 3]]) => {\n};\n")
	expectPrinted(t,
		"([] = []) => {}",
		"([] = []) => {\n};\n")
	expectPrinted(t,
		"([,] = [,]) => {}",
		"([,] = [,]) => {\n};\n")
	expectPrinted(t,
		"([,,] = [,,]) => {}",
		"([, ,] = [, ,]) => {\n};\n")
	expectPrinted(t,
		"a = () => {}",
		"a = () => {\n};\n")
	expectPrinted(t,
		"a || (() => {})",
		"a || (() => {\n});\n")
	expectPrinted(t,
		"({a = b, c = d}) => {}",
		"({ a = b, c = d }) => {\n};\n")
	expectPrinted(t,
		"([{a = b, c = d} = {}] = []) => {}",
		"([{ a = b, c = d } = {}] = []) => {\n};\n")
	expectPrinted(t,
		"({a: [b = c] = []} = {}) => {}",
		"({ a: [b = c] = [] } = {}) => {\n};\n")

	// These are not arrow functions but initially look like one
	expectPrinted(t, "(a = b, c)", "a = b, c;\n")
	expectPrinted(t, "([...a = b])", "[...a = b];\n")
	expectPrinted(t, "([...a, ...b])", "[...a, ...b];\n")
	expectPrinted(t, "({a: b, c() {}})", "({ a: b, c() {\n} });\n")
	expectPrinted(t, "({a: b, get c() {}})", "({ a: b, get c() {\n} });\n")
	expectPrinted(t, "({a: b, set c(x) {}})", "({ a: b, set c(x) {\n} });\n")
}
// TestClass checks printing of class declarations: comma expressions in
// "extends" clauses plus getter, setter, and static member variants.
func TestClass(t *testing.T) {
	cases := []struct {
		input    string
		expected string
	}{
		{"class Foo extends (a, b) {}", "class Foo extends (a, b) {\n}\n"},
		{"class Foo { get foo() {} }", "class Foo {\n  get foo() {\n  }\n}\n"},
		{"class Foo { set foo(x) {} }", "class Foo {\n  set foo(x) {\n  }\n}\n"},
		{"class Foo { static foo() {} }", "class Foo {\n  static foo() {\n  }\n}\n"},
		{"class Foo { static get foo() {} }", "class Foo {\n  static get foo() {\n  }\n}\n"},
		{"class Foo { static set foo(x) {} }", "class Foo {\n  static set foo(x) {\n  }\n}\n"},
	}
	for _, c := range cases {
		expectPrinted(t, c.input, c.expected)
	}
}
// TestAutoAccessors checks printing of the "accessor" class-member
// modifier (instance, static, and computed-name forms), including the
// whitespace rules when minifying.
func TestAutoAccessors(t *testing.T) {
	expectPrinted(t, "class Foo { accessor x; static accessor y }", "class Foo {\n  accessor x;\n  static accessor y;\n}\n")
	expectPrinted(t, "class Foo { accessor [x]; static accessor [y] }", "class Foo {\n  accessor [x];\n  static accessor [y];\n}\n")

	expectPrintedMinify(t, "class Foo { accessor x; static accessor y }", "class Foo{accessor x;static accessor y}")
	expectPrintedMinify(t, "class Foo { accessor [x]; static accessor [y] }", "class Foo{accessor[x];static accessor[y]}")
}
// TestPrivateIdentifiers checks printing of private names, including
// the "#foo in this" brand-check syntax and the space required between
// a private name and "in" when minifying.
func TestPrivateIdentifiers(t *testing.T) {
	expectPrinted(t, "class Foo { #foo; foo() { return #foo in this } }", "class Foo {\n  #foo;\n  foo() {\n    return #foo in this;\n  }\n}\n")
	expectPrintedMinify(t, "class Foo { #foo; foo() { return #foo in this } }", "class Foo{#foo;foo(){return#foo in this}}")
}
func TestDecorators(t *testing.T) {
example := "class Foo {\n@w\nw; @x x; @a1\n@b1@b2\n@c1@c2@c3\ny = @y1 @y2 class {}; @a1\n@b1@b2\n@c1@c2@c3 z =\n@z1\n@z2\nclass {}}"
expectPrinted(t, example, "class Foo {\n @w\n w;\n @x x;\n @a1\n @b1 @b2\n @c1 @c2 @c3\n "+
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/compat/compat.go | internal/compat/compat.go | package compat
import (
"strconv"
"strings"
"github.com/evanw/esbuild/internal/ast"
)
// v is a compact fixed-size version triple (major.minor.patch) used as
// the engine version type in the generated feature tables.
type v struct {
	major uint16
	minor uint8
	patch uint8
}

// Semver is a version parsed from a user-provided string, with an
// arbitrary number of numeric parts and an optional pre-release suffix.
//
// "1.2.3-alpha" => { Parts: {1, 2, 3}, PreRelease: "-alpha" }
type Semver struct {
	Parts      []int
	PreRelease string
}

// String re-serializes the version: the numeric parts joined by "."
// with the pre-release suffix (if any) appended verbatim.
func (s Semver) String() string {
	parts := make([]string, len(s.Parts))
	for i, part := range s.Parts {
		parts[i] = strconv.Itoa(part)
	}
	return strings.Join(parts, ".") + s.PreRelease
}

// compareVersions orders an engine version triple against a parsed
// semver. Missing parts of "b" are treated as zero and any parts past
// the third are ignored; a pre-release suffix ranks "b" lower.
//
// Returns <0 if "a < b"
// Returns 0 if "a == b"
// Returns >0 if "a > b"
func compareVersions(a v, b Semver) int {
	for i, got := range [3]int{int(a.major), int(a.minor), int(a.patch)} {
		want := 0
		if i < len(b.Parts) {
			want = b.Parts[i]
		}
		if got != want {
			return got - want
		}
	}
	if len(b.PreRelease) != 0 {
		return 1 // "1.0.0" > "1.0.0-alpha"
	}
	return 0
}
// versionRange is one contiguous span of engine versions over which a
// feature is supported.
// The start is inclusive and the end is exclusive
type versionRange struct {
	start v
	end v // Use 0.0.0 for "no end"
}
// isVersionSupported reports whether "version" falls inside any of the
// given ranges (start inclusive, end exclusive; a zero-value end means
// the range is open-ended).
func isVersionSupported(ranges []versionRange, version Semver) bool {
	for _, r := range ranges {
		if compareVersions(r.start, version) > 0 {
			continue // this range starts after the version
		}
		if r.end != (v{}) && compareVersions(r.end, version) <= 0 {
			continue // the version is at or beyond the exclusive end
		}
		return true
	}
	return false
}
// SymbolFeature maps a private-name symbol kind to the JSFeature bit
// that gates it, or 0 for symbol kinds that don't correspond to any
// gated feature.
func SymbolFeature(kind ast.SymbolKind) JSFeature {
	switch kind {
	case ast.SymbolPrivateField:
		return ClassPrivateField
	case ast.SymbolPrivateMethod:
		return ClassPrivateMethod
	case ast.SymbolPrivateGet, ast.SymbolPrivateSet, ast.SymbolPrivateGetSetPair:
		return ClassPrivateAccessor
	case ast.SymbolPrivateStaticField:
		return ClassPrivateStaticField
	case ast.SymbolPrivateStaticMethod:
		return ClassPrivateStaticMethod
	case ast.SymbolPrivateStaticGet, ast.SymbolPrivateStaticSet, ast.SymbolPrivateStaticGetSetPair:
		return ClassPrivateStaticAccessor
	default:
		return 0
	}
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/compat/js_table.go | internal/compat/js_table.go | // This file was automatically generated by "js_table.ts"
package compat
// Engine identifies a JavaScript runtime (or the "es" language-version
// pseudo-engine) that feature support can be gated on.
//
// NOTE: this file is generated by "js_table.ts"; change the generator
// rather than editing by hand.
type Engine uint8

const (
	Chrome Engine = iota
	Deno
	Edge
	ES
	Firefox
	Hermes
	IE
	IOS
	Node
	Opera
	Rhino
	Safari
)
// String returns the lowercase name for the engine (e.g. "chrome",
// "node"), or "" for a value outside the defined constants.
func (e Engine) String() string {
	switch e {
	case Chrome:
		return "chrome"
	case Deno:
		return "deno"
	case Edge:
		return "edge"
	case ES:
		return "es"
	case Firefox:
		return "firefox"
	case Hermes:
		return "hermes"
	case IE:
		return "ie"
	case IOS:
		return "ios"
	case Node:
		return "node"
	case Opera:
		return "opera"
	case Rhino:
		return "rhino"
	case Safari:
		return "safari"
	}
	return ""
}
// IsBrowser reports whether the engine is a web browser, as opposed to
// a server-side or embedded runtime (Deno, ES, Hermes, Node, Rhino).
func (e Engine) IsBrowser() bool {
	switch e {
	case Chrome, Edge, Firefox, IE, IOS, Opera, Safari:
		return true
	}
	return false
}
// JSFeature is a bit set of JavaScript language features. Each constant
// below occupies a single bit so feature sets can be combined with "|"
// and queried with Has / ApplyOverrides.
type JSFeature uint64

const (
	ArbitraryModuleNamespaceNames JSFeature = 1 << iota
	ArraySpread
	Arrow
	AsyncAwait
	AsyncGenerator
	Bigint
	Class
	ClassField
	ClassPrivateAccessor
	ClassPrivateBrandCheck
	ClassPrivateField
	ClassPrivateMethod
	ClassPrivateStaticAccessor
	ClassPrivateStaticField
	ClassPrivateStaticMethod
	ClassStaticBlocks
	ClassStaticField
	ConstAndLet
	Decorators
	DefaultArgument
	Destructuring
	DynamicImport
	ExponentOperator
	ExportStarAs
	ForAwait
	ForOf
	FromBase64
	FunctionNameConfigurable
	FunctionOrClassPropertyAccess
	Generator
	Hashbang
	ImportAssertions
	ImportAttributes
	ImportDefer
	ImportMeta
	ImportSource
	InlineScript
	LogicalAssignment
	NestedRestBinding
	NewTarget
	NodeColonPrefixImport
	NodeColonPrefixRequire
	NullishCoalescing
	ObjectAccessors
	ObjectExtensions
	ObjectRestSpread
	OptionalCatchBinding
	OptionalChain
	RegexpDotAllFlag
	RegexpLookbehindAssertions
	RegexpMatchIndices
	RegexpNamedCaptureGroups
	RegexpSetNotation
	RegexpStickyAndUnicodeFlags
	RegexpUnicodePropertyEscapes
	RestArgument
	TemplateLiteral
	TopLevelAwait
	TypeofExoticObjectIsObject
	UnicodeEscapes
	Using
)
// StringToJSFeature maps the kebab-case spelling of each feature name
// to its JSFeature bit.
var StringToJSFeature = map[string]JSFeature{
	"arbitrary-module-namespace-names": ArbitraryModuleNamespaceNames,
	"array-spread":                     ArraySpread,
	"arrow":                            Arrow,
	"async-await":                      AsyncAwait,
	"async-generator":                  AsyncGenerator,
	"bigint":                           Bigint,
	"class":                            Class,
	"class-field":                      ClassField,
	"class-private-accessor":           ClassPrivateAccessor,
	"class-private-brand-check":        ClassPrivateBrandCheck,
	"class-private-field":              ClassPrivateField,
	"class-private-method":             ClassPrivateMethod,
	"class-private-static-accessor":    ClassPrivateStaticAccessor,
	"class-private-static-field":       ClassPrivateStaticField,
	"class-private-static-method":      ClassPrivateStaticMethod,
	"class-static-blocks":              ClassStaticBlocks,
	"class-static-field":               ClassStaticField,
	"const-and-let":                    ConstAndLet,
	"decorators":                       Decorators,
	"default-argument":                 DefaultArgument,
	"destructuring":                    Destructuring,
	"dynamic-import":                   DynamicImport,
	"exponent-operator":                ExponentOperator,
	"export-star-as":                   ExportStarAs,
	"for-await":                        ForAwait,
	"for-of":                           ForOf,
	"from-base64":                      FromBase64,
	"function-name-configurable":       FunctionNameConfigurable,
	"function-or-class-property-access": FunctionOrClassPropertyAccess,
	"generator":                        Generator,
	"hashbang":                         Hashbang,
	"import-assertions":                ImportAssertions,
	"import-attributes":                ImportAttributes,
	"import-defer":                     ImportDefer,
	"import-meta":                      ImportMeta,
	"import-source":                    ImportSource,
	"inline-script":                    InlineScript,
	"logical-assignment":               LogicalAssignment,
	"nested-rest-binding":              NestedRestBinding,
	"new-target":                       NewTarget,
	"node-colon-prefix-import":         NodeColonPrefixImport,
	"node-colon-prefix-require":        NodeColonPrefixRequire,
	"nullish-coalescing":               NullishCoalescing,
	"object-accessors":                 ObjectAccessors,
	"object-extensions":                ObjectExtensions,
	"object-rest-spread":               ObjectRestSpread,
	"optional-catch-binding":           OptionalCatchBinding,
	"optional-chain":                   OptionalChain,
	"regexp-dot-all-flag":              RegexpDotAllFlag,
	"regexp-lookbehind-assertions":     RegexpLookbehindAssertions,
	"regexp-match-indices":             RegexpMatchIndices,
	"regexp-named-capture-groups":      RegexpNamedCaptureGroups,
	"regexp-set-notation":              RegexpSetNotation,
	"regexp-sticky-and-unicode-flags":  RegexpStickyAndUnicodeFlags,
	"regexp-unicode-property-escapes":  RegexpUnicodePropertyEscapes,
	"rest-argument":                    RestArgument,
	"template-literal":                 TemplateLiteral,
	"top-level-await":                  TopLevelAwait,
	"typeof-exotic-object-is-object":   TypeofExoticObjectIsObject,
	"unicode-escapes":                  UnicodeEscapes,
	"using":                            Using,
}
// Has reports whether any of the bits in "feature" are present in the
// set (note: "any", not "all", since this tests for a non-zero overlap).
func (features JSFeature) Has(feature JSFeature) bool {
	return (features & feature) != 0
}
// ApplyOverrides replaces the bits selected by "mask" with the
// corresponding bits from "overrides", leaving all other bits unchanged.
func (features JSFeature) ApplyOverrides(overrides JSFeature, mask JSFeature) JSFeature {
	return (features & ^mask) | (overrides & mask)
}
var jsTable = map[JSFeature]map[Engine][]versionRange{
ArbitraryModuleNamespaceNames: {
Chrome: {{start: v{90, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{87, 0, 0}}},
IOS: {{start: v{14, 5, 0}}},
Node: {{start: v{16, 0, 0}}},
Safari: {{start: v{14, 1, 0}}},
},
ArraySpread: {
// Note: The latest version of "IE" failed 15 tests including: spread syntax for iterable objects: spreading non-iterables is a runtime error
// Note: The latest version of "Rhino" failed 15 tests including: spread syntax for iterable objects: spreading non-iterables is a runtime error
Chrome: {{start: v{46, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{13, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{36, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{5, 0, 0}}},
Opera: {{start: v{33, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
Arrow: {
// Note: The latest version of "Hermes" failed 3 tests including: arrow functions: lexical "super" binding in constructors
// Note: The latest version of "IE" failed 13 tests including: arrow functions: "this" unchanged by call or apply
// Note: The latest version of "Rhino" failed 3 tests including: arrow functions: lexical "new.target" binding
Chrome: {{start: v{49, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{13, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{45, 0, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{36, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
AsyncAwait: {
// Note: The latest version of "Hermes" failed 4 tests including: async functions: async arrow functions
// Note: The latest version of "IE" failed 16 tests including: async functions: async arrow functions
// Note: The latest version of "Rhino" failed 16 tests including: async functions: async arrow functions
Chrome: {{start: v{55, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{15, 0, 0}}},
ES: {{start: v{2017, 0, 0}}},
Firefox: {{start: v{52, 0, 0}}},
IOS: {{start: v{11, 0, 0}}},
Node: {{start: v{7, 6, 0}}},
Opera: {{start: v{42, 0, 0}}},
Safari: {{start: v{11, 0, 0}}},
},
AsyncGenerator: {
// Note: The latest version of "Hermes" failed this test: Asynchronous Iterators: async generators
// Note: The latest version of "IE" failed this test: Asynchronous Iterators: async generators
// Note: The latest version of "Rhino" failed this test: Asynchronous Iterators: async generators
Chrome: {{start: v{63, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2018, 0, 0}}},
Firefox: {{start: v{57, 0, 0}}},
IOS: {{start: v{12, 0, 0}}},
Node: {{start: v{10, 0, 0}}},
Opera: {{start: v{50, 0, 0}}},
Safari: {{start: v{12, 0, 0}}},
},
Bigint: {
// Note: The latest version of "IE" failed this test: BigInt: basic functionality
Chrome: {{start: v{67, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2020, 0, 0}}},
Firefox: {{start: v{68, 0, 0}}},
Hermes: {{start: v{0, 12, 0}}},
IOS: {{start: v{14, 0, 0}}},
Node: {{start: v{10, 4, 0}}},
Opera: {{start: v{54, 0, 0}}},
Rhino: {{start: v{1, 7, 14}}},
Safari: {{start: v{14, 0, 0}}},
},
Class: {
// Note: The latest version of "Hermes" failed 24 tests including: class: accessor properties
// Note: The latest version of "IE" failed 24 tests including: class: accessor properties
// Note: The latest version of "Rhino" failed 24 tests including: class: accessor properties
Chrome: {{start: v{49, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{13, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{45, 0, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{36, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
ClassField: {
// Note: The latest version of "Hermes" failed 2 tests including: instance class fields: computed instance class fields
// Note: The latest version of "IE" failed 2 tests including: instance class fields: computed instance class fields
// Note: The latest version of "Rhino" failed 2 tests including: instance class fields: computed instance class fields
Chrome: {{start: v{73, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{69, 0, 0}}},
IOS: {{start: v{14, 0, 0}}},
Node: {{start: v{12, 0, 0}}},
Opera: {{start: v{60, 0, 0}}},
Safari: {{start: v{14, 0, 0}}},
},
ClassPrivateAccessor: {
// Note: The latest version of "Hermes" failed this test: private class methods: private accessor properties
// Note: The latest version of "IE" failed this test: private class methods: private accessor properties
// Note: The latest version of "Rhino" failed this test: private class methods: private accessor properties
Chrome: {{start: v{84, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{84, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{15, 0, 0}}},
Node: {{start: v{14, 6, 0}}},
Opera: {{start: v{70, 0, 0}}},
Safari: {{start: v{15, 0, 0}}},
},
ClassPrivateBrandCheck: {
// Note: The latest version of "Hermes" failed this test: Ergonomic brand checks for private fields
// Note: The latest version of "IE" failed this test: Ergonomic brand checks for private fields
// Note: The latest version of "Rhino" failed this test: Ergonomic brand checks for private fields
Chrome: {{start: v{91, 0, 0}}},
Deno: {{start: v{1, 9, 0}}},
Edge: {{start: v{91, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{15, 0, 0}}},
Node: {{start: v{16, 4, 0}}},
Opera: {{start: v{77, 0, 0}}},
Safari: {{start: v{15, 0, 0}}},
},
ClassPrivateField: {
// Note: The latest version of "Hermes" failed 4 tests including: instance class fields: optional deep private instance class fields access
// Note: The latest version of "IE" failed 4 tests including: instance class fields: optional deep private instance class fields access
// Note: The latest version of "Rhino" failed 4 tests including: instance class fields: optional deep private instance class fields access
Chrome: {{start: v{84, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{84, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{14, 5, 0}}},
Node: {{start: v{14, 6, 0}}},
Opera: {{start: v{70, 0, 0}}},
Safari: {{start: v{14, 1, 0}}},
},
ClassPrivateMethod: {
// Note: The latest version of "Hermes" failed this test: private class methods: private instance methods
// Note: The latest version of "IE" failed this test: private class methods: private instance methods
// Note: The latest version of "Rhino" failed this test: private class methods: private instance methods
Chrome: {{start: v{84, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{84, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{15, 0, 0}}},
Node: {{start: v{14, 6, 0}}},
Opera: {{start: v{70, 0, 0}}},
Safari: {{start: v{15, 0, 0}}},
},
ClassPrivateStaticAccessor: {
// Note: The latest version of "Hermes" failed this test: private class methods: private static accessor properties
// Note: The latest version of "IE" failed this test: private class methods: private static accessor properties
// Note: The latest version of "Rhino" failed this test: private class methods: private static accessor properties
Chrome: {{start: v{84, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{84, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{15, 0, 0}}},
Node: {{start: v{14, 6, 0}}},
Opera: {{start: v{70, 0, 0}}},
Safari: {{start: v{15, 0, 0}}},
},
ClassPrivateStaticField: {
// Note: The latest version of "Hermes" failed this test: static class fields: private static class fields
// Note: The latest version of "IE" failed this test: static class fields: private static class fields
// Note: The latest version of "Rhino" failed this test: static class fields: private static class fields
Chrome: {{start: v{74, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{14, 5, 0}}},
Node: {{start: v{12, 0, 0}}},
Opera: {{start: v{62, 0, 0}}},
Safari: {{start: v{14, 1, 0}}},
},
ClassPrivateStaticMethod: {
// Note: The latest version of "Hermes" failed this test: private class methods: private static methods
// Note: The latest version of "IE" failed this test: private class methods: private static methods
// Note: The latest version of "Rhino" failed this test: private class methods: private static methods
Chrome: {{start: v{84, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{84, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{90, 0, 0}}},
IOS: {{start: v{15, 0, 0}}},
Node: {{start: v{14, 6, 0}}},
Opera: {{start: v{70, 0, 0}}},
Safari: {{start: v{15, 0, 0}}},
},
ClassStaticBlocks: {
Chrome: {{start: v{91, 0, 0}}},
Deno: {{start: v{1, 14, 0}}},
Edge: {{start: v{94, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{93, 0, 0}}},
IOS: {{start: v{16, 4, 0}}},
Node: {{start: v{16, 11, 0}}},
Opera: {{start: v{80, 0, 0}}},
Safari: {{start: v{16, 4, 0}}},
},
ClassStaticField: {
// Note: The latest version of "Hermes" failed 2 tests including: static class fields: computed static class fields
// Note: The latest version of "IE" failed 2 tests including: static class fields: computed static class fields
// Note: The latest version of "Rhino" failed 2 tests including: static class fields: computed static class fields
Chrome: {{start: v{73, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{75, 0, 0}}},
IOS: {{start: v{14, 5, 0}}},
Node: {{start: v{12, 0, 0}}},
Opera: {{start: v{60, 0, 0}}},
Safari: {{start: v{14, 1, 0}}},
},
ConstAndLet: {
// Note: The latest version of "Hermes" failed 20 tests including: const: for loop statement scope
// Note: The latest version of "IE" failed 6 tests including: const: for-in loop iteration scope
// Note: The latest version of "Rhino" failed 22 tests including: const: cannot be in statements
Chrome: {{start: v{49, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{14, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{51, 0, 0}}},
IOS: {{start: v{11, 0, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{36, 0, 0}}},
Safari: {{start: v{11, 0, 0}}},
},
Decorators: {},
DefaultArgument: {
// Note: The latest version of "Hermes" failed 2 tests including: default function parameters: separate scope
// Note: The latest version of "IE" failed 7 tests including: default function parameters: arguments object interaction
// Note: The latest version of "Rhino" failed 2 tests including: default function parameters: separate scope
Chrome: {{start: v{49, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{14, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{53, 0, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{36, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
Destructuring: {
// Note: The latest version of "Hermes" failed 3 tests including: destructuring, declarations: defaults, let temporal dead zone
// Note: The latest version of "IE" failed 71 tests including: destructuring, assignment: chained iterable destructuring
// Note: The latest version of "Rhino" failed 28 tests including: destructuring, assignment: computed properties
Chrome: {{start: v{51, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{18, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{53, 0, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{6, 5, 0}}},
Opera: {{start: v{38, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
DynamicImport: {
Chrome: {{start: v{63, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{67, 0, 0}}},
IOS: {{start: v{11, 0, 0}}},
Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{13, 2, 0}}},
Opera: {{start: v{50, 0, 0}}},
Safari: {{start: v{11, 1, 0}}},
},
ExponentOperator: {
// Note: The latest version of "IE" failed 3 tests including: exponentiation (**) operator: assignment
Chrome: {{start: v{52, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{14, 0, 0}}},
ES: {{start: v{2016, 0, 0}}},
Firefox: {{start: v{52, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 3, 0}}},
Node: {{start: v{7, 0, 0}}},
Opera: {{start: v{39, 0, 0}}},
Rhino: {{start: v{1, 7, 14}}},
Safari: {{start: v{10, 1, 0}}},
},
ExportStarAs: {
Chrome: {{start: v{72, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2020, 0, 0}}},
Firefox: {{start: v{80, 0, 0}}},
IOS: {{start: v{14, 5, 0}}},
Node: {{start: v{13, 2, 0}}},
Opera: {{start: v{60, 0, 0}}},
Safari: {{start: v{14, 1, 0}}},
},
ForAwait: {
// Note: The latest version of "Hermes" failed this test: Asynchronous Iterators: for-await-of loops
// Note: The latest version of "IE" failed this test: Asynchronous Iterators: for-await-of loops
// Note: The latest version of "Rhino" failed this test: Asynchronous Iterators: for-await-of loops
Chrome: {{start: v{63, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2018, 0, 0}}},
Firefox: {{start: v{57, 0, 0}}},
IOS: {{start: v{12, 0, 0}}},
Node: {{start: v{10, 0, 0}}},
Opera: {{start: v{50, 0, 0}}},
Safari: {{start: v{12, 0, 0}}},
},
ForOf: {
// Note: The latest version of "IE" failed 9 tests including: for..of loops: iterator closing, break
// Note: The latest version of "Rhino" failed 2 tests including: for..of loops: iterator closing, break
Chrome: {{start: v{51, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{15, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{53, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{6, 5, 0}}},
Opera: {{start: v{38, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
FromBase64: {
Chrome: {{start: v{140, 0, 0}}},
Deno: {{start: v{2, 5, 0}}},
Edge: {{start: v{140, 0, 0}}},
Firefox: {{start: v{133, 0, 0}}},
IOS: {{start: v{18, 2, 0}}},
Node: {{start: v{25, 0, 0}}},
Opera: {{start: v{124, 0, 0}}},
Safari: {{start: v{18, 2, 0}}},
},
FunctionNameConfigurable: {
// Note: The latest version of "IE" failed this test: function "name" property: isn't writable, is configurable
Chrome: {{start: v{43, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{12, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{38, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{4, 0, 0}}},
Opera: {{start: v{30, 0, 0}}},
Rhino: {{start: v{1, 7, 15}}},
Safari: {{start: v{10, 0, 0}}},
},
FunctionOrClassPropertyAccess: {
Chrome: {{start: v{0, 0, 0}}},
Deno: {{start: v{0, 0, 0}}},
Edge: {{start: v{0, 0, 0}}},
ES: {{start: v{0, 0, 0}}},
Firefox: {{start: v{0, 0, 0}}},
Hermes: {{start: v{0, 0, 0}}},
IE: {{start: v{0, 0, 0}}},
IOS: {{start: v{0, 0, 0}}},
Node: {{start: v{0, 0, 0}}},
Opera: {{start: v{0, 0, 0}}},
Rhino: {{start: v{0, 0, 0}}},
Safari: {{start: v{16, 3, 0}}},
},
Generator: {
// Note: The latest version of "Hermes" failed 3 tests including: generators: computed shorthand generators, classes
// Note: The latest version of "IE" failed 27 tests including: generators: %GeneratorPrototype%
// Note: The latest version of "Rhino" failed 8 tests including: generators: %GeneratorPrototype%
Chrome: {{start: v{50, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{13, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{53, 0, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{37, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
Hashbang: {
// Note: The latest version of "IE" failed this test: Hashbang Grammar
Chrome: {{start: v{74, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2023, 0, 0}}},
Firefox: {{start: v{67, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{13, 4, 0}}},
Node: {{start: v{12, 5, 0}}},
Opera: {{start: v{62, 0, 0}}},
Rhino: {{start: v{1, 7, 15}}},
Safari: {{start: v{13, 1, 0}}},
},
ImportAssertions: {
Chrome: {{start: v{91, 0, 0}}},
Deno: {{start: v{1, 17, 0}}},
Edge: {{start: v{91, 0, 0}}},
Node: {{start: v{16, 14, 0}, end: v{22, 0, 0}}},
},
ImportAttributes: {
Chrome: {{start: v{123, 0, 0}}},
Deno: {{start: v{1, 37, 0}}},
Edge: {{start: v{123, 0, 0}}},
Firefox: {{start: v{138, 0, 0}}},
IOS: {{start: v{17, 2, 0}}},
Node: {{start: v{18, 20, 0}, end: v{19, 0, 0}}, {start: v{20, 10, 0}}},
Opera: {{start: v{109, 0, 0}}},
Safari: {{start: v{17, 2, 0}}},
},
ImportDefer: {},
ImportMeta: {
Chrome: {{start: v{64, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2020, 0, 0}}},
Firefox: {{start: v{62, 0, 0}}},
IOS: {{start: v{12, 0, 0}}},
Node: {{start: v{10, 4, 0}}},
Opera: {{start: v{51, 0, 0}}},
Safari: {{start: v{11, 1, 0}}},
},
ImportSource: {},
InlineScript: {},
LogicalAssignment: {
// Note: The latest version of "IE" failed 9 tests including: Logical Assignment: &&= basic support
// Note: The latest version of "Rhino" failed 3 tests including: Logical Assignment: &&= setter not unecessarily invoked
Chrome: {{start: v{85, 0, 0}}},
Deno: {{start: v{1, 2, 0}}},
Edge: {{start: v{85, 0, 0}}},
ES: {{start: v{2021, 0, 0}}},
Firefox: {{start: v{79, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{14, 0, 0}}},
Node: {{start: v{15, 0, 0}}},
Opera: {{start: v{71, 0, 0}}},
Safari: {{start: v{14, 0, 0}}},
},
NestedRestBinding: {
// Note: The latest version of "IE" failed 2 tests including: nested rest destructuring, declarations
// Note: The latest version of "Rhino" failed 2 tests including: nested rest destructuring, declarations
Chrome: {{start: v{49, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{14, 0, 0}}},
ES: {{start: v{2016, 0, 0}}},
Firefox: {{start: v{47, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 3, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{36, 0, 0}}},
Safari: {{start: v{10, 1, 0}}},
},
NewTarget: {
// Note: The latest version of "IE" failed 2 tests including: new.target: assignment is an early error
// Note: The latest version of "Rhino" failed 2 tests including: new.target: assignment is an early error
Chrome: {{start: v{46, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{14, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{41, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{5, 0, 0}}},
Opera: {{start: v{33, 0, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
NodeColonPrefixImport: {
ES: {{start: v{0, 0, 0}}},
Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{14, 13, 1}}},
},
NodeColonPrefixRequire: {
ES: {{start: v{0, 0, 0}}},
Node: {{start: v{14, 18, 0}, end: v{15, 0, 0}}, {start: v{16, 0, 0}}},
},
NullishCoalescing: {
// Note: The latest version of "IE" failed this test: nullish coalescing operator (??)
Chrome: {{start: v{80, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{80, 0, 0}}},
ES: {{start: v{2020, 0, 0}}},
Firefox: {{start: v{72, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{13, 4, 0}}},
Node: {{start: v{14, 0, 0}}},
Opera: {{start: v{67, 0, 0}}},
Rhino: {{start: v{1, 8, 0}}},
Safari: {{start: v{13, 1, 0}}},
},
ObjectAccessors: {
Chrome: {{start: v{5, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{12, 0, 0}}},
ES: {{start: v{5, 0, 0}}},
Firefox: {{start: v{2, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IE: {{start: v{9, 0, 0}}},
IOS: {{start: v{6, 0, 0}}},
Node: {{start: v{0, 4, 0}}},
Opera: {{start: v{10, 10, 0}}},
Rhino: {{start: v{1, 7, 13}}},
Safari: {{start: v{3, 1, 0}}},
},
ObjectExtensions: {
// Note: The latest version of "IE" failed 6 tests including: object literal extensions: computed accessors
Chrome: {{start: v{44, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{12, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{34, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{10, 0, 0}}},
Node: {{start: v{4, 0, 0}}},
Opera: {{start: v{31, 0, 0}}},
Rhino: {{start: v{1, 8, 0}}},
Safari: {{start: v{10, 0, 0}}},
},
ObjectRestSpread: {
// Note: The latest version of "IE" failed 2 tests including: object rest/spread properties: object rest properties
// Note: The latest version of "Rhino" failed 2 tests including: object rest/spread properties: object rest properties
Chrome: {{start: v{60, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2018, 0, 0}}},
Firefox: {{start: v{55, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{11, 3, 0}}},
Node: {{start: v{8, 3, 0}}},
Opera: {{start: v{47, 0, 0}}},
Safari: {{start: v{11, 1, 0}}},
},
OptionalCatchBinding: {
// Note: The latest version of "IE" failed 3 tests including: optional catch binding: await
// Note: The latest version of "Rhino" failed this test: optional catch binding: await
Chrome: {{start: v{66, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2019, 0, 0}}},
Firefox: {{start: v{58, 0, 0}}},
Hermes: {{start: v{0, 12, 0}}},
IOS: {{start: v{11, 3, 0}}},
Node: {{start: v{10, 0, 0}}},
Opera: {{start: v{53, 0, 0}}},
Safari: {{start: v{11, 1, 0}}},
},
OptionalChain: {
// Note: The latest version of "IE" failed 5 tests including: optional chaining operator (?.): optional bracket access
// Note: The latest version of "Rhino" failed this test: optional chaining operator (?.): spread parameters after optional chaining
Chrome: {{start: v{91, 0, 0}}},
Deno: {{start: v{1, 9, 0}}},
Edge: {{start: v{91, 0, 0}}},
ES: {{start: v{2020, 0, 0}}},
Firefox: {{start: v{74, 0, 0}}},
Hermes: {{start: v{0, 12, 0}}},
IOS: {{start: v{13, 4, 0}}},
Node: {{start: v{16, 1, 0}}},
Opera: {{start: v{77, 0, 0}}},
Safari: {{start: v{13, 1, 0}}},
},
RegexpDotAllFlag: {
// Note: The latest version of "IE" failed this test: s (dotAll) flag for regular expressions
Chrome: {{start: v{62, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2018, 0, 0}}},
Firefox: {{start: v{78, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{11, 3, 0}}},
Node: {{start: v{8, 10, 0}}},
Opera: {{start: v{49, 0, 0}}},
Rhino: {{start: v{1, 7, 15}}},
Safari: {{start: v{11, 1, 0}}},
},
RegexpLookbehindAssertions: {
// Note: The latest version of "IE" failed this test: RegExp Lookbehind Assertions
// Note: The latest version of "Rhino" failed this test: RegExp Lookbehind Assertions
Chrome: {{start: v{62, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2018, 0, 0}}},
Firefox: {{start: v{78, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{16, 4, 0}}},
Node: {{start: v{8, 10, 0}}},
Opera: {{start: v{49, 0, 0}}},
Safari: {{start: v{16, 4, 0}}},
},
RegexpMatchIndices: {
Chrome: {{start: v{90, 0, 0}}},
Deno: {{start: v{1, 8, 0}}},
Edge: {{start: v{90, 0, 0}}},
ES: {{start: v{2022, 0, 0}}},
Firefox: {{start: v{88, 0, 0}}},
IOS: {{start: v{15, 0, 0}}},
Node: {{start: v{16, 0, 0}}},
Opera: {{start: v{76, 0, 0}}},
Safari: {{start: v{15, 0, 0}}},
},
RegexpNamedCaptureGroups: {
// Note: The latest version of "Hermes" failed this test: RegExp named capture groups
// Note: The latest version of "IE" failed this test: RegExp named capture groups
// Note: The latest version of "Rhino" failed this test: RegExp named capture groups
Chrome: {{start: v{64, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{79, 0, 0}}},
ES: {{start: v{2018, 0, 0}}},
Firefox: {{start: v{78, 0, 0}}},
IOS: {{start: v{11, 3, 0}}},
Node: {{start: v{10, 0, 0}}},
Opera: {{start: v{51, 0, 0}}},
Safari: {{start: v{11, 1, 0}}},
},
RegexpSetNotation: {
ES: {{start: v{2024, 0, 0}}},
},
RegexpStickyAndUnicodeFlags: {
// Note: The latest version of "IE" failed 6 tests including: RegExp "y" and "u" flags: "u" flag
// Note: The latest version of "Rhino" failed 4 tests including: RegExp "y" and "u" flags: "u" flag
Chrome: {{start: v{50, 0, 0}}},
Deno: {{start: v{1, 0, 0}}},
Edge: {{start: v{13, 0, 0}}},
ES: {{start: v{2015, 0, 0}}},
Firefox: {{start: v{46, 0, 0}}},
Hermes: {{start: v{0, 7, 0}}},
IOS: {{start: v{12, 0, 0}}},
Node: {{start: v{6, 0, 0}}},
Opera: {{start: v{37, 0, 0}}},
Safari: {{start: v{12, 0, 0}}},
},
RegexpUnicodePropertyEscapes: {
// Note: The latest version of "Chrome" failed 2 tests including: RegExp Unicode Property Escapes: Unicode 16.0
// Note: The latest version of "Edge" failed 2 tests including: RegExp Unicode Property Escapes: Unicode 16.0
// Note: The latest version of "Firefox" failed this test: RegExp Unicode Property Escapes: Unicode 17.0
// Note: The latest version of "Hermes" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11
// Note: The latest version of "IE" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11
// Note: The latest version of "IOS" failed this test: RegExp Unicode Property Escapes: Unicode 17.0
// Note: The latest version of "Node" failed this test: RegExp Unicode Property Escapes: Unicode 17.0
// Note: The latest version of "Rhino" failed 9 tests including: RegExp Unicode Property Escapes: Unicode 11
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/compat/compat_test.go | internal/compat/compat_test.go | package compat
import (
"fmt"
"testing"
"github.com/evanw/esbuild/internal/test"
)
func TestCompareVersions(t *testing.T) {
	t.Helper()

	// expect runs one comparison as its own subtest, named after the two
	// version strings being compared (e.g. `"1.2.0" ? "1.2"`).
	expect := func(lhs v, rhs Semver, want rune) {
		t.Helper()
		lhsText := fmt.Sprintf("%d.%d.%d", lhs.major, lhs.minor, lhs.patch)
		rhsText := rhs.String()
		t.Run(fmt.Sprintf("%q ? %q", lhsText, rhsText), func(t *testing.T) {
			// Collapse the three-way comparison result into '<', '=', or '>'
			var got rune
			switch result := compareVersions(lhs, rhs); {
			case result < 0:
				got = '<'
			case result > 0:
				got = '>'
			default:
				got = '='
			}
			if got != want {
				test.AssertEqual(t, fmt.Sprintf("%c", got), fmt.Sprintf("%c", want))
			}
		})
	}

	// The zero Semver (no parts) compares equal to version 0.0.0
	expect(v{0, 0, 0}, Semver{}, '=')
	expect(v{1, 0, 0}, Semver{}, '>')
	expect(v{0, 1, 0}, Semver{}, '>')
	expect(v{0, 0, 1}, Semver{}, '>')
	expect(v{0, 0, 0}, Semver{Parts: []int{1}}, '<')
	expect(v{0, 0, 0}, Semver{Parts: []int{0, 1}}, '<')
	expect(v{0, 0, 0}, Semver{Parts: []int{0, 0, 1}}, '<')
	expect(v{0, 4, 0}, Semver{Parts: []int{0, 5, 0}}, '<')
	expect(v{0, 5, 0}, Semver{Parts: []int{0, 5, 0}}, '=')
	expect(v{0, 6, 0}, Semver{Parts: []int{0, 5, 0}}, '>')
	expect(v{0, 5, 0}, Semver{Parts: []int{0, 5, 1}}, '<')
	expect(v{0, 5, 0}, Semver{Parts: []int{0, 5, 0}}, '=')
	expect(v{0, 5, 1}, Semver{Parts: []int{0, 5, 0}}, '>')
	// A Semver with fewer parts behaves as if the missing parts were zero
	expect(v{0, 5, 0}, Semver{Parts: []int{0, 5}}, '=')
	expect(v{0, 5, 1}, Semver{Parts: []int{0, 5}}, '>')
	expect(v{1, 0, 0}, Semver{Parts: []int{1}}, '=')
	expect(v{1, 1, 0}, Semver{Parts: []int{1}}, '>')
	expect(v{1, 0, 1}, Semver{Parts: []int{1}}, '>')
	// These cases show a pre-release tag ordering before the plain release
	expect(v{1, 2, 0}, Semver{Parts: []int{1, 2}, PreRelease: "-pre"}, '>')
	expect(v{1, 2, 1}, Semver{Parts: []int{1, 2}, PreRelease: "-pre"}, '>')
	expect(v{1, 1, 0}, Semver{Parts: []int{1, 2}, PreRelease: "-pre"}, '<')
	expect(v{1, 2, 3}, Semver{Parts: []int{1, 2, 3}, PreRelease: "-pre"}, '>')
	expect(v{1, 2, 2}, Semver{Parts: []int{1, 2, 3}, PreRelease: "-pre"}, '<')
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/compat/css_table.go | internal/compat/css_table.go | // This file was automatically generated by "css_table.ts"
package compat
import (
"github.com/evanw/esbuild/internal/css_ast"
)
// CSSFeature identifies a CSS feature that may be unsupported by some of the
// configured target environments. Each constant below is a distinct bit, so a
// set of features can be stored and combined in a single CSSFeature bitmask.
type CSSFeature uint16

const (
	ColorFunctions CSSFeature = 1 << iota
	GradientDoublePosition
	GradientInterpolation
	GradientMidpoints
	HWB
	HexRGBA
	InlineStyle
	InsetProperty
	IsPseudoClass
	MediaRange
	Modern_RGB_HSL
	Nesting
	RebeccaPurple
)
// StringToCSSFeature maps the externally-visible kebab-case feature name to
// its bit flag. Presumably used to parse user-specified feature overrides —
// NOTE(review): confirm against the callers of this table.
var StringToCSSFeature = map[string]CSSFeature{
	"color-functions":          ColorFunctions,
	"gradient-double-position": GradientDoublePosition,
	"gradient-interpolation":   GradientInterpolation,
	"gradient-midpoints":       GradientMidpoints,
	"hwb":                      HWB,
	"hex-rgba":                 HexRGBA,
	"inline-style":             InlineStyle,
	"inset-property":           InsetProperty,
	"is-pseudo-class":          IsPseudoClass,
	"media-range":              MediaRange,
	"modern-rgb-hsl":           Modern_RGB_HSL,
	"nesting":                  Nesting,
	"rebecca-purple":           RebeccaPurple,
}
// Has reports whether any bit of feature is present in this feature set.
func (features CSSFeature) Has(feature CSSFeature) bool {
	return features&feature != 0
}
// ApplyOverrides replaces the bits selected by mask with the corresponding
// bits from overrides, leaving all bits outside the mask unchanged.
func (features CSSFeature) ApplyOverrides(overrides CSSFeature, mask CSSFeature) CSSFeature {
	kept := features &^ mask
	return kept | (overrides & mask)
}
// cssTable records, for each CSS feature, the version ranges of each engine
// that support it. An engine that is absent from a feature's inner map does
// not support that feature at all (see UnsupportedCSSFeatures below).
var cssTable = map[CSSFeature]map[Engine][]versionRange{
	ColorFunctions: {
		Chrome: {{start: v{111, 0, 0}}},
		Edge: {{start: v{111, 0, 0}}},
		Firefox: {{start: v{113, 0, 0}}},
		IOS: {{start: v{15, 4, 0}}},
		Opera: {{start: v{97, 0, 0}}},
		Safari: {{start: v{15, 4, 0}}},
	},
	GradientDoublePosition: {
		Chrome: {{start: v{72, 0, 0}}},
		Edge: {{start: v{79, 0, 0}}},
		Firefox: {{start: v{83, 0, 0}}},
		IOS: {{start: v{12, 2, 0}}},
		Opera: {{start: v{60, 0, 0}}},
		Safari: {{start: v{12, 1, 0}}},
	},
	GradientInterpolation: {
		Chrome: {{start: v{111, 0, 0}}},
		Edge: {{start: v{111, 0, 0}}},
		IOS: {{start: v{16, 2, 0}}},
		Opera: {{start: v{97, 0, 0}}},
		Safari: {{start: v{16, 2, 0}}},
	},
	GradientMidpoints: {
		Chrome: {{start: v{40, 0, 0}}},
		Edge: {{start: v{79, 0, 0}}},
		Firefox: {{start: v{36, 0, 0}}},
		IOS: {{start: v{7, 0, 0}}},
		Opera: {{start: v{27, 0, 0}}},
		Safari: {{start: v{7, 0, 0}}},
	},
	HWB: {
		Chrome: {{start: v{101, 0, 0}}},
		Edge: {{start: v{101, 0, 0}}},
		Firefox: {{start: v{96, 0, 0}}},
		IOS: {{start: v{15, 0, 0}}},
		Opera: {{start: v{87, 0, 0}}},
		Safari: {{start: v{15, 0, 0}}},
	},
	HexRGBA: {
		Chrome: {{start: v{62, 0, 0}}},
		Edge: {{start: v{79, 0, 0}}},
		Firefox: {{start: v{49, 0, 0}}},
		IOS: {{start: v{9, 3, 0}}},
		Opera: {{start: v{49, 0, 0}}},
		Safari: {{start: v{10, 0, 0}}},
	},
	// InlineStyle is intentionally empty: it is purely user-specified and is
	// skipped by UnsupportedCSSFeatures
	InlineStyle: {},
	InsetProperty: {
		Chrome: {{start: v{87, 0, 0}}},
		Edge: {{start: v{87, 0, 0}}},
		Firefox: {{start: v{66, 0, 0}}},
		IOS: {{start: v{14, 5, 0}}},
		Opera: {{start: v{73, 0, 0}}},
		Safari: {{start: v{14, 1, 0}}},
	},
	IsPseudoClass: {
		Chrome: {{start: v{88, 0, 0}}},
		Edge: {{start: v{88, 0, 0}}},
		Firefox: {{start: v{78, 0, 0}}},
		IOS: {{start: v{14, 0, 0}}},
		Opera: {{start: v{75, 0, 0}}},
		Safari: {{start: v{14, 0, 0}}},
	},
	MediaRange: {
		Chrome: {{start: v{104, 0, 0}}},
		Edge: {{start: v{104, 0, 0}}},
		Firefox: {{start: v{63, 0, 0}}},
		IOS: {{start: v{16, 4, 0}}},
		Opera: {{start: v{91, 0, 0}}},
		Safari: {{start: v{16, 4, 0}}},
	},
	Modern_RGB_HSL: {
		Chrome: {{start: v{66, 0, 0}}},
		Edge: {{start: v{79, 0, 0}}},
		Firefox: {{start: v{52, 0, 0}}},
		IOS: {{start: v{12, 2, 0}}},
		Opera: {{start: v{53, 0, 0}}},
		Safari: {{start: v{12, 1, 0}}},
	},
	Nesting: {
		Chrome: {{start: v{120, 0, 0}}},
		Edge: {{start: v{120, 0, 0}}},
		Firefox: {{start: v{117, 0, 0}}},
		IOS: {{start: v{17, 2, 0}}},
		Opera: {{start: v{106, 0, 0}}},
		Safari: {{start: v{17, 2, 0}}},
	},
	RebeccaPurple: {
		Chrome: {{start: v{38, 0, 0}}},
		Edge: {{start: v{12, 0, 0}}},
		Firefox: {{start: v{33, 0, 0}}},
		IE: {{start: v{11, 0, 0}}},
		IOS: {{start: v{8, 0, 0}}},
		Opera: {{start: v{25, 0, 0}}},
		Safari: {{start: v{9, 0, 0}}},
	},
}
// UnsupportedCSSFeatures returns the set of CSS features that are missing
// from at least one of the browser environments in the given constraints.
// Non-browser engines (e.g. "--target=es2020") are ignored entirely.
func UnsupportedCSSFeatures(constraints map[Engine]Semver) (unsupported CSSFeature) {
	for feature, engines := range cssTable {
		// "inline-style" is purely user-specified, never derived from targets
		if feature == InlineStyle {
			continue
		}
		for engine, version := range constraints {
			// Specifying "--target=es2020" shouldn't affect CSS
			if !engine.IsBrowser() {
				continue
			}
			ranges, ok := engines[engine]
			if !ok || !isVersionSupported(ranges, version) {
				unsupported |= feature
			}
		}
	}
	return
}
// CSSPrefix is a bitmask of vendor prefixes that must be emitted for a given
// CSS declaration to cover the configured target environments. NoPrefix (the
// zero value) means no prefixing is required.
type CSSPrefix uint8

const (
	KhtmlPrefix CSSPrefix = 1 << iota
	MozPrefix
	MsPrefix
	OPrefix
	WebkitPrefix
	NoPrefix CSSPrefix = 0
)
// prefixData describes one engine's vendor-prefix requirement for a single
// CSS property (see cssPrefixTable below).
type prefixData struct {
	// Note: In some cases, earlier versions did not require a prefix but later
	// ones do. This is the case for Microsoft Edge for example, which switched
	// the underlying browser engine from a custom one to the one from Chrome.
	// However, we assume that users specifying a browser version for CSS mean
	// "works in this version or newer", so we still add a prefix when a target
	// is an old Edge version.
	// engine is the browser engine this entry applies to.
	engine Engine
	// withoutPrefix is the first version that no longer needs the prefix. The
	// zero value means the prefix is always required (see CSSPrefixData).
	withoutPrefix v
	// prefix is the vendor-prefix bit to add for older target versions.
	prefix CSSPrefix
}
// cssPrefixTable records, per CSS property, which engines need a vendor
// prefix and (when known) the first version that no longer needs it. An
// entry with a zero withoutPrefix always gets the prefix for that engine.
var cssPrefixTable = map[css_ast.D][]prefixData{
	css_ast.DAppearance: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{84, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{84, 0, 0}},
		{engine: Firefox, prefix: MozPrefix, withoutPrefix: v{80, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{73, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DBackdropFilter: {
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{18, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{18, 0, 0}},
	},
	css_ast.DBackgroundClip: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: MsPrefix, withoutPrefix: v{15, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{5, 0, 0}},
	},
	css_ast.DBoxDecorationBreak: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{130, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{130, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{116, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DClipPath: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{55, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{13, 0, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{42, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{13, 1, 0}},
	},
	css_ast.DFontKerning: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{33, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 0, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{20, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{9, 1, 0}},
	},
	css_ast.DHeight: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Firefox, prefix: WebkitPrefix},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{122, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DHyphens: {
		{engine: Edge, prefix: MsPrefix, withoutPrefix: v{79, 0, 0}},
		{engine: Firefox, prefix: MozPrefix, withoutPrefix: v{43, 0, 0}},
		{engine: IE, prefix: MsPrefix},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{17, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{17, 0, 0}},
	},
	css_ast.DInitialLetter: {
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DMask: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaskComposite: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaskImage: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaskOrigin: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaskPosition: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaskRepeat: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaskSize: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DMaxHeight: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{122, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DMaxWidth: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{122, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DMinHeight: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{122, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DMinWidth: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{122, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DPosition: {
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{13, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{13, 0, 0}},
	},
	css_ast.DPrintColorAdjust: {
		{engine: Chrome, prefix: WebkitPrefix},
		{engine: Edge, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}},
	},
	css_ast.DTabSize: {
		{engine: Firefox, prefix: MozPrefix, withoutPrefix: v{91, 0, 0}},
		{engine: Opera, prefix: OPrefix, withoutPrefix: v{15, 0, 0}},
	},
	css_ast.DTextDecorationColor: {
		{engine: Firefox, prefix: MozPrefix, withoutPrefix: v{36, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 2, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{12, 1, 0}},
	},
	css_ast.DTextDecorationLine: {
		{engine: Firefox, prefix: MozPrefix, withoutPrefix: v{36, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 2, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{12, 1, 0}},
	},
	css_ast.DTextDecorationSkip: {
		{engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{12, 2, 0}},
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{12, 1, 0}},
	},
	css_ast.DTextEmphasisColor: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{85, 0, 0}},
	},
	css_ast.DTextEmphasisPosition: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{85, 0, 0}},
	},
	css_ast.DTextEmphasisStyle: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{99, 0, 0}},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{85, 0, 0}},
	},
	css_ast.DTextOrientation: {
		{engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}},
	},
	css_ast.DTextSizeAdjust: {
		{engine: Edge, prefix: MsPrefix, withoutPrefix: v{79, 0, 0}},
		{engine: IOS, prefix: WebkitPrefix},
	},
	css_ast.DUserSelect: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{54, 0, 0}},
		{engine: Edge, prefix: MsPrefix, withoutPrefix: v{79, 0, 0}},
		{engine: Firefox, prefix: MozPrefix, withoutPrefix: v{69, 0, 0}},
		{engine: IE, prefix: MsPrefix},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{41, 0, 0}},
		{engine: Safari, prefix: KhtmlPrefix, withoutPrefix: v{3, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
	css_ast.DWidth: {
		{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{138, 0, 0}},
		{engine: Firefox, prefix: WebkitPrefix},
		{engine: IOS, prefix: WebkitPrefix},
		{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{122, 0, 0}},
		{engine: Safari, prefix: WebkitPrefix},
	},
}
// CSSPrefixData computes, for each CSS property in the prefix table, the set
// of vendor prefixes required by the given browser version constraints. The
// result map is nil when no property needs any prefix.
func CSSPrefixData(constraints map[Engine]Semver) (entries map[css_ast.D]CSSPrefix) {
	for property, items := range cssPrefixTable {
		needed := NoPrefix
		for engine, minVersion := range constraints {
			// Only browser targets matter here. Specifying "--target=es2020"
			// shouldn't affect CSS.
			if !engine.IsBrowser() {
				continue
			}
			for _, item := range items {
				if item.engine != engine {
					continue
				}
				// A zero "withoutPrefix" version means the prefix is always
				// required for this engine
				if item.withoutPrefix == (v{}) || compareVersions(item.withoutPrefix, minVersion) > 0 {
					needed |= item.prefix
				}
			}
		}
		if needed == NoPrefix {
			continue
		}
		if entries == nil {
			entries = make(map[css_ast.D]CSSPrefix)
		}
		entries[property] = needed
	}
	return
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/logger_linux.go | internal/logger/logger_linux.go | //go:build linux
// +build linux
package logger
import (
"os"
"golang.org/x/sys/unix"
)
const SupportsColorEscapes = true
// GetTerminalInfo reports whether the file is a terminal and, if so, its
// dimensions and whether ANSI color escapes should be used.
func GetTerminalInfo(file *os.File) TerminalInfo {
	var info TerminalInfo
	fd := int(file.Fd())

	// A successful TCGETS ioctl means this descriptor refers to a terminal
	if _, err := unix.IoctlGetTermios(fd, unix.TCGETS); err != nil {
		return info
	}
	info.IsTTY = true
	info.UseColorEscapes = !hasNoColorEnvironmentVariable()

	// Best-effort query of the window size; leave zero on failure
	if size, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ); err == nil {
		info.Width = int(size.Col)
		info.Height = int(size.Row)
	}
	return info
}
// writeStringWithColor writes the text verbatim. Linux terminals interpret
// ANSI escape sequences natively, so no translation is needed here.
func writeStringWithColor(file *os.File, text string) {
	_, _ = file.WriteString(text)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/msg_ids.go | internal/logger/msg_ids.go | package logger
// Most non-error log messages are given a message ID that can be used to set
// the log level for that message. Errors do not get a message ID because you
// cannot turn errors into non-errors (otherwise the build would incorrectly
// succeed). Some internal log messages do not get a message ID because they
// are part of verbose and/or internal debugging output. These messages use
// "MsgID_None" instead.
type MsgID = uint8

// The FIRST/LAST markers bracket groups of related ids so that range loops
// (see "StringToMsgIDs") can apply one user-facing name to a whole group.
// New ids should be added within the appropriate group.
const (
	MsgID_None MsgID = iota

	// JavaScript
	MsgID_JS_AssertToWith
	MsgID_JS_AssertTypeJSON
	MsgID_JS_AssignToConstant
	MsgID_JS_AssignToDefine
	MsgID_JS_AssignToImport
	MsgID_JS_BigInt
	MsgID_JS_CallImportNamespace
	MsgID_JS_ClassNameWillThrow
	MsgID_JS_CommonJSVariableInESM
	MsgID_JS_DeleteSuperProperty
	MsgID_JS_DirectEval
	MsgID_JS_DuplicateCase
	MsgID_JS_DuplicateClassMember
	MsgID_JS_DuplicateObjectKey
	MsgID_JS_EmptyImportMeta
	MsgID_JS_EqualsNaN
	MsgID_JS_EqualsNegativeZero
	MsgID_JS_EqualsNewObject
	MsgID_JS_HTMLCommentInJS
	MsgID_JS_ImpossibleTypeof
	MsgID_JS_IndirectRequire
	MsgID_JS_PrivateNameWillThrow
	MsgID_JS_SemicolonAfterReturn
	MsgID_JS_SuspiciousBooleanNot
	MsgID_JS_SuspiciousDefine
	MsgID_JS_SuspiciousLogicalOperator
	MsgID_JS_SuspiciousNullishCoalescing
	MsgID_JS_ThisIsUndefinedInESM
	MsgID_JS_UnsupportedDynamicImport
	MsgID_JS_UnsupportedJSXComment
	MsgID_JS_UnsupportedRegExp
	MsgID_JS_UnsupportedRequireCall

	// CSS
	MsgID_CSS_CSSSyntaxError
	MsgID_CSS_InvalidAtCharset
	MsgID_CSS_InvalidAtImport
	MsgID_CSS_InvalidAtLayer
	MsgID_CSS_InvalidCalc
	MsgID_CSS_JSCommentInCSS
	MsgID_CSS_UndefinedComposesFrom
	MsgID_CSS_UnsupportedAtCharset
	MsgID_CSS_UnsupportedAtNamespace
	MsgID_CSS_UnsupportedCSSProperty
	MsgID_CSS_UnsupportedCSSNesting

	// Bundler
	MsgID_Bundler_AmbiguousReexport
	MsgID_Bundler_DifferentPathCase
	MsgID_Bundler_EmptyGlob
	MsgID_Bundler_IgnoredBareImport
	MsgID_Bundler_IgnoredDynamicImport
	MsgID_Bundler_ImportIsUndefined
	MsgID_Bundler_RequireResolveNotExternal

	// Source maps
	MsgID_SourceMap_InvalidSourceMappings
	MsgID_SourceMap_MissingSourceMap
	MsgID_SourceMap_UnsupportedSourceMapComment

	// package.json
	MsgID_PackageJSON_FIRST // Keep this first
	MsgID_PackageJSON_DeadCondition
	MsgID_PackageJSON_InvalidBrowser
	MsgID_PackageJSON_InvalidImportsOrExports
	MsgID_PackageJSON_InvalidSideEffects
	MsgID_PackageJSON_InvalidType
	MsgID_PackageJSON_LAST // Keep this last

	// tsconfig.json
	MsgID_TSConfigJSON_FIRST // Keep this first
	MsgID_TSConfigJSON_Cycle
	MsgID_TSConfigJSON_InvalidImportsNotUsedAsValues
	MsgID_TSConfigJSON_InvalidJSX
	MsgID_TSConfigJSON_InvalidPaths
	MsgID_TSConfigJSON_InvalidTarget
	MsgID_TSConfigJSON_InvalidTopLevelOption
	MsgID_TSConfigJSON_Missing
	MsgID_TSConfigJSON_LAST // Keep this last

	MsgID_END // Keep this at the end (used only for tests)
)
// StringToMsgIDs records the given log level in "overrides" for every message
// id that the user-facing name "str" refers to. Most names map to exactly one
// id; "package.json" and "tsconfig.json" expand to their whole FIRST..LAST
// ranges. Unknown names are deliberately ignored (see the "default" case).
// This is the inverse of "MsgIDToString".
func StringToMsgIDs(str string, logLevel LogLevel, overrides map[MsgID]LogLevel) {
	switch str {
	// JS
	case "assert-to-with":
		overrides[MsgID_JS_AssertToWith] = logLevel
	case "assert-type-json":
		overrides[MsgID_JS_AssertTypeJSON] = logLevel
	case "assign-to-constant":
		overrides[MsgID_JS_AssignToConstant] = logLevel
	case "assign-to-define":
		overrides[MsgID_JS_AssignToDefine] = logLevel
	case "assign-to-import":
		overrides[MsgID_JS_AssignToImport] = logLevel
	case "bigint":
		overrides[MsgID_JS_BigInt] = logLevel
	case "call-import-namespace":
		overrides[MsgID_JS_CallImportNamespace] = logLevel
	case "class-name-will-throw":
		overrides[MsgID_JS_ClassNameWillThrow] = logLevel
	case "commonjs-variable-in-esm":
		overrides[MsgID_JS_CommonJSVariableInESM] = logLevel
	case "delete-super-property":
		overrides[MsgID_JS_DeleteSuperProperty] = logLevel
	case "direct-eval":
		overrides[MsgID_JS_DirectEval] = logLevel
	case "duplicate-case":
		overrides[MsgID_JS_DuplicateCase] = logLevel
	case "duplicate-class-member":
		overrides[MsgID_JS_DuplicateClassMember] = logLevel
	case "duplicate-object-key":
		overrides[MsgID_JS_DuplicateObjectKey] = logLevel
	case "empty-import-meta":
		overrides[MsgID_JS_EmptyImportMeta] = logLevel
	case "equals-nan":
		overrides[MsgID_JS_EqualsNaN] = logLevel
	case "equals-negative-zero":
		overrides[MsgID_JS_EqualsNegativeZero] = logLevel
	case "equals-new-object":
		overrides[MsgID_JS_EqualsNewObject] = logLevel
	case "html-comment-in-js":
		overrides[MsgID_JS_HTMLCommentInJS] = logLevel
	case "impossible-typeof":
		overrides[MsgID_JS_ImpossibleTypeof] = logLevel
	case "indirect-require":
		overrides[MsgID_JS_IndirectRequire] = logLevel
	case "private-name-will-throw":
		overrides[MsgID_JS_PrivateNameWillThrow] = logLevel
	case "semicolon-after-return":
		overrides[MsgID_JS_SemicolonAfterReturn] = logLevel
	case "suspicious-boolean-not":
		overrides[MsgID_JS_SuspiciousBooleanNot] = logLevel
	case "suspicious-define":
		overrides[MsgID_JS_SuspiciousDefine] = logLevel
	case "suspicious-logical-operator":
		overrides[MsgID_JS_SuspiciousLogicalOperator] = logLevel
	case "suspicious-nullish-coalescing":
		overrides[MsgID_JS_SuspiciousNullishCoalescing] = logLevel
	case "this-is-undefined-in-esm":
		overrides[MsgID_JS_ThisIsUndefinedInESM] = logLevel
	case "unsupported-dynamic-import":
		overrides[MsgID_JS_UnsupportedDynamicImport] = logLevel
	case "unsupported-jsx-comment":
		overrides[MsgID_JS_UnsupportedJSXComment] = logLevel
	case "unsupported-regexp":
		overrides[MsgID_JS_UnsupportedRegExp] = logLevel
	case "unsupported-require-call":
		overrides[MsgID_JS_UnsupportedRequireCall] = logLevel

	// CSS
	case "css-syntax-error":
		overrides[MsgID_CSS_CSSSyntaxError] = logLevel
	case "invalid-@charset":
		overrides[MsgID_CSS_InvalidAtCharset] = logLevel
	case "invalid-@import":
		overrides[MsgID_CSS_InvalidAtImport] = logLevel
	case "invalid-@layer":
		overrides[MsgID_CSS_InvalidAtLayer] = logLevel
	case "invalid-calc":
		overrides[MsgID_CSS_InvalidCalc] = logLevel
	case "js-comment-in-css":
		overrides[MsgID_CSS_JSCommentInCSS] = logLevel
	case "undefined-composes-from":
		overrides[MsgID_CSS_UndefinedComposesFrom] = logLevel
	case "unsupported-@charset":
		overrides[MsgID_CSS_UnsupportedAtCharset] = logLevel
	case "unsupported-@namespace":
		overrides[MsgID_CSS_UnsupportedAtNamespace] = logLevel
	case "unsupported-css-property":
		overrides[MsgID_CSS_UnsupportedCSSProperty] = logLevel
	case "unsupported-css-nesting":
		overrides[MsgID_CSS_UnsupportedCSSNesting] = logLevel

	// Bundler
	case "ambiguous-reexport":
		overrides[MsgID_Bundler_AmbiguousReexport] = logLevel
	case "different-path-case":
		overrides[MsgID_Bundler_DifferentPathCase] = logLevel
	case "empty-glob":
		overrides[MsgID_Bundler_EmptyGlob] = logLevel
	case "ignored-bare-import":
		overrides[MsgID_Bundler_IgnoredBareImport] = logLevel
	case "ignored-dynamic-import":
		overrides[MsgID_Bundler_IgnoredDynamicImport] = logLevel
	case "import-is-undefined":
		overrides[MsgID_Bundler_ImportIsUndefined] = logLevel
	case "require-resolve-not-external":
		overrides[MsgID_Bundler_RequireResolveNotExternal] = logLevel

	// Source maps
	case "invalid-source-mappings":
		overrides[MsgID_SourceMap_InvalidSourceMappings] = logLevel
	case "missing-source-map":
		overrides[MsgID_SourceMap_MissingSourceMap] = logLevel
	case "unsupported-source-map-comment":
		overrides[MsgID_SourceMap_UnsupportedSourceMapComment] = logLevel

	// These names fan out to every id in their group
	case "package.json":
		for i := MsgID_PackageJSON_FIRST; i <= MsgID_PackageJSON_LAST; i++ {
			overrides[i] = logLevel
		}

	case "tsconfig.json":
		for i := MsgID_TSConfigJSON_FIRST; i <= MsgID_TSConfigJSON_LAST; i++ {
			overrides[i] = logLevel
		}

	default:
		// Ignore invalid entries since this message id may have
		// been renamed/removed since when this code was written
	}
}
// MsgIDToString returns the user-facing name for a message id, or "" when the
// id has no external name. Every id in the package.json and tsconfig.json
// ranges shares its group's name. This is the inverse of "StringToMsgIDs".
func MsgIDToString(id MsgID) string {
	switch id {
	// JS
	case MsgID_JS_AssertToWith:
		return "assert-to-with"
	case MsgID_JS_AssertTypeJSON:
		return "assert-type-json"
	case MsgID_JS_AssignToConstant:
		return "assign-to-constant"
	case MsgID_JS_AssignToDefine:
		return "assign-to-define"
	case MsgID_JS_AssignToImport:
		return "assign-to-import"
	case MsgID_JS_BigInt:
		return "bigint"
	case MsgID_JS_CallImportNamespace:
		return "call-import-namespace"
	case MsgID_JS_ClassNameWillThrow:
		return "class-name-will-throw"
	case MsgID_JS_CommonJSVariableInESM:
		return "commonjs-variable-in-esm"
	case MsgID_JS_DeleteSuperProperty:
		return "delete-super-property"
	case MsgID_JS_DirectEval:
		return "direct-eval"
	case MsgID_JS_DuplicateCase:
		return "duplicate-case"
	case MsgID_JS_DuplicateClassMember:
		return "duplicate-class-member"
	case MsgID_JS_DuplicateObjectKey:
		return "duplicate-object-key"
	case MsgID_JS_EmptyImportMeta:
		return "empty-import-meta"
	case MsgID_JS_EqualsNaN:
		return "equals-nan"
	case MsgID_JS_EqualsNegativeZero:
		return "equals-negative-zero"
	case MsgID_JS_EqualsNewObject:
		return "equals-new-object"
	case MsgID_JS_HTMLCommentInJS:
		return "html-comment-in-js"
	case MsgID_JS_ImpossibleTypeof:
		return "impossible-typeof"
	case MsgID_JS_IndirectRequire:
		return "indirect-require"
	case MsgID_JS_PrivateNameWillThrow:
		return "private-name-will-throw"
	case MsgID_JS_SemicolonAfterReturn:
		return "semicolon-after-return"
	case MsgID_JS_SuspiciousBooleanNot:
		return "suspicious-boolean-not"
	case MsgID_JS_SuspiciousDefine:
		return "suspicious-define"
	case MsgID_JS_SuspiciousLogicalOperator:
		return "suspicious-logical-operator"
	case MsgID_JS_SuspiciousNullishCoalescing:
		return "suspicious-nullish-coalescing"
	case MsgID_JS_ThisIsUndefinedInESM:
		return "this-is-undefined-in-esm"
	case MsgID_JS_UnsupportedDynamicImport:
		return "unsupported-dynamic-import"
	case MsgID_JS_UnsupportedJSXComment:
		return "unsupported-jsx-comment"
	case MsgID_JS_UnsupportedRegExp:
		return "unsupported-regexp"
	case MsgID_JS_UnsupportedRequireCall:
		return "unsupported-require-call"

	// CSS
	case MsgID_CSS_CSSSyntaxError:
		return "css-syntax-error"
	case MsgID_CSS_InvalidAtCharset:
		return "invalid-@charset"
	case MsgID_CSS_InvalidAtImport:
		return "invalid-@import"
	case MsgID_CSS_InvalidAtLayer:
		return "invalid-@layer"
	case MsgID_CSS_InvalidCalc:
		return "invalid-calc"
	case MsgID_CSS_JSCommentInCSS:
		return "js-comment-in-css"
	case MsgID_CSS_UndefinedComposesFrom:
		return "undefined-composes-from"
	case MsgID_CSS_UnsupportedAtCharset:
		return "unsupported-@charset"
	case MsgID_CSS_UnsupportedAtNamespace:
		return "unsupported-@namespace"
	case MsgID_CSS_UnsupportedCSSProperty:
		return "unsupported-css-property"
	case MsgID_CSS_UnsupportedCSSNesting:
		return "unsupported-css-nesting"

	// Bundler
	case MsgID_Bundler_AmbiguousReexport:
		return "ambiguous-reexport"
	case MsgID_Bundler_DifferentPathCase:
		return "different-path-case"
	case MsgID_Bundler_EmptyGlob:
		return "empty-glob"
	case MsgID_Bundler_IgnoredBareImport:
		return "ignored-bare-import"
	case MsgID_Bundler_IgnoredDynamicImport:
		return "ignored-dynamic-import"
	case MsgID_Bundler_ImportIsUndefined:
		return "import-is-undefined"
	case MsgID_Bundler_RequireResolveNotExternal:
		return "require-resolve-not-external"

	// Source maps
	case MsgID_SourceMap_InvalidSourceMappings:
		return "invalid-source-mappings"
	case MsgID_SourceMap_MissingSourceMap:
		return "missing-source-map"
	case MsgID_SourceMap_UnsupportedSourceMapComment:
		return "unsupported-source-map-comment"

	default:
		// Grouped ids all share their group's user-facing name
		if id >= MsgID_PackageJSON_FIRST && id <= MsgID_PackageJSON_LAST {
			return "package.json"
		}
		if id >= MsgID_TSConfigJSON_FIRST && id <= MsgID_TSConfigJSON_LAST {
			return "tsconfig.json"
		}
	}

	return ""
}
// Some message IDs are more diverse internally than externally (in case we
// want to expand the set of them later on). So just map these to the largest
// one arbitrarily since you can't tell the difference externally anyway.
func StringToMaximumMsgID(id string) MsgID {
	levels := make(map[MsgID]LogLevel)
	StringToMsgIDs(id, LevelInfo, levels)
	result := MsgID_None
	for candidate := range levels {
		if candidate > result {
			result = candidate
		}
	}
	return result
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/logger_test.go | internal/logger/logger_test.go | package logger_test
import (
"testing"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/test"
)
// TestMsgIDs checks that MsgIDToString and StringToMsgIDs round-trip: every
// id with an external name must map back to a set of ids that all share that
// name and receive the requested log level.
func TestMsgIDs(t *testing.T) {
	for id := logger.MsgID_None; id <= logger.MsgID_END; id++ {
		name := logger.MsgIDToString(id)
		if name == "" {
			continue // Some internal ids have no external name
		}
		overrides := map[logger.MsgID]logger.LogLevel{}
		logger.StringToMsgIDs(name, logger.LevelError, overrides)
		if len(overrides) == 0 {
			t.Fatalf("Failed to find message id(s) for the string %q", name)
		}
		for gotID, gotLevel := range overrides {
			test.AssertEqual(t, logger.MsgIDToString(gotID), name)
			test.AssertEqual(t, gotLevel, logger.LevelError)
		}
	}
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/logger_other.go | internal/logger/logger_other.go | //go:build !darwin && !linux && !windows
// +build !darwin,!linux,!windows
package logger
import "os"
const SupportsColorEscapes = false
// GetTerminalInfo returns an all-zero TerminalInfo on platforms without
// terminal support (not a TTY, no size, no color escapes).
func GetTerminalInfo(*os.File) TerminalInfo {
	var none TerminalInfo
	return none
}
// writeStringWithColor writes the text verbatim since this platform has no
// color escape support.
func writeStringWithColor(file *os.File, text string) {
	_, _ = file.WriteString(text)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/logger_windows.go | internal/logger/logger_windows.go | //go:build windows
// +build windows
package logger
import (
"os"
"strings"
"syscall"
"unsafe"
)
const SupportsColorEscapes = true
var kernel32 = syscall.NewLazyDLL("kernel32.dll")
var getConsoleMode = kernel32.NewProc("GetConsoleMode")
var setConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
var getConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
// consoleScreenBufferInfo is the output structure filled in by the
// GetConsoleScreenBufferInfo call below. The field layout presumably mirrors
// the Win32 CONSOLE_SCREEN_BUFFER_INFO struct — confirm against the Windows
// API docs before reordering anything.
type consoleScreenBufferInfo struct {
	dwSizeX              int16 // Buffer width in character columns
	dwSizeY              int16 // Buffer height in character rows
	dwCursorPositionX    int16
	dwCursorPositionY    int16
	wAttributes          uint16
	srWindowLeft         int16
	srWindowTop          int16
	srWindowRight        int16
	srWindowBottom       int16
	dwMaximumWindowSizeX int16
	dwMaximumWindowSizeY int16
}
// GetTerminalInfo reports whether the file is a Windows console and, if so,
// its dimensions and whether color output should be used.
func GetTerminalInfo(file *os.File) TerminalInfo {
	fd := file.Fd()

	// GetConsoleMode only succeeds when the handle refers to a console
	var mode uint32
	isConsole, _, _ := syscall.Syscall(getConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&mode)), 0)

	// Query the screen buffer dimensions (best-effort; fields stay zero on failure)
	var screen consoleScreenBufferInfo
	syscall.Syscall(getConsoleScreenBufferInfo.Addr(), 2, fd, uintptr(unsafe.Pointer(&screen)), 0)

	return TerminalInfo{
		IsTTY:           isConsole != 0,
		Width:           int(screen.dwSizeX) - 1,
		Height:          int(screen.dwSizeY) - 1,
		UseColorEscapes: !hasNoColorEnvironmentVariable(),
	}
}
// Console character attribute bits, combined below to emulate ANSI colors via
// SetConsoleTextAttribute. The names and bit order match the Windows console
// attribute constants.
const (
	FOREGROUND_BLUE uint8 = 1 << iota
	FOREGROUND_GREEN
	FOREGROUND_RED
	FOREGROUND_INTENSITY
	BACKGROUND_BLUE
	BACKGROUND_GREEN
	BACKGROUND_RED
	BACKGROUND_INTENSITY
)
// windowsEscapeSequenceMap translates each ANSI escape sequence emitted by
// this package into the console attribute bits that approximate it. It is
// consulted by "writeStringWithColor" below; sequences missing from this map
// are passed through unhandled.
var windowsEscapeSequenceMap = map[string]uint8{
	TerminalColors.Reset: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Dim:   FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Bold:  FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY,

	// Apparently underlines only work with the CJK locale on Windows :(
	TerminalColors.Underline: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE,

	TerminalColors.Red:     FOREGROUND_RED,
	TerminalColors.Green:   FOREGROUND_GREEN,
	TerminalColors.Blue:    FOREGROUND_BLUE,
	TerminalColors.Cyan:    FOREGROUND_GREEN | FOREGROUND_BLUE,
	TerminalColors.Magenta: FOREGROUND_RED | FOREGROUND_BLUE,
	TerminalColors.Yellow:  FOREGROUND_RED | FOREGROUND_GREEN,

	TerminalColors.RedBgRed:         FOREGROUND_RED | BACKGROUND_RED,
	TerminalColors.RedBgWhite:       FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_RED,
	TerminalColors.GreenBgGreen:     FOREGROUND_GREEN | BACKGROUND_GREEN,
	TerminalColors.GreenBgWhite:     FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN,
	TerminalColors.BlueBgBlue:       FOREGROUND_BLUE | BACKGROUND_BLUE,
	TerminalColors.BlueBgWhite:      FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_BLUE,
	TerminalColors.CyanBgCyan:       FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.CyanBgBlack:      BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.MagentaBgMagenta: FOREGROUND_RED | FOREGROUND_BLUE | BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.MagentaBgBlack:   BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.YellowBgYellow:   FOREGROUND_RED | FOREGROUND_GREEN | BACKGROUND_RED | BACKGROUND_GREEN,
	TerminalColors.YellowBgBlack:    BACKGROUND_RED | BACKGROUND_GREEN,
}
// writeStringWithColor emulates ANSI color escape sequences on the Windows
// console: the text between escapes is written verbatim and each recognized
// escape is replaced by a SetConsoleTextAttribute call. Unrecognized escapes
// are left in the output untouched.
func writeStringWithColor(file *os.File, text string) {
	fd := file.Fd()
	i := 0

	for i < len(text) {
		// Find the escape (033 is octal for the ESC byte, 0x1B)
		if text[i] != 033 {
			i++
			continue
		}

		// Find the 'm' that terminates the sequence. The search window is
		// capped at 8 bytes; presumably every sequence in the map fits —
		// confirm if new TerminalColors entries are added.
		window := text[i:]
		if len(window) > 8 {
			window = window[:8]
		}
		m := strings.IndexByte(window, 'm')
		if m == -1 {
			i++
			continue
		}
		m += i + 1

		// Find the escape sequence; skip over unknown ones unchanged
		attributes, ok := windowsEscapeSequenceMap[text[i:m]]
		if !ok {
			i++
			continue
		}

		// Write out the text before the escape sequence
		file.WriteString(text[:i])

		// Apply the escape sequence, then restart the scan after it
		text = text[m:]
		i = 0
		setConsoleTextAttribute.Call(fd, uintptr(attributes))
	}

	// Write out the remaining text
	file.WriteString(text)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/logger.go | internal/logger/logger.go | package logger
// Logging is either done to stderr (via "NewStderrLog") or to an in-memory
// array (via "NewDeferLog"). In-memory arrays are used to capture messages
// from parsing individual files because during incremental builds, log
// messages for a given file can be replayed from memory if the file ends up
// not being reparsed.
//
// Errors are streamed asynchronously as they happen, each error contains the
// contents of the line with the error, and the error count is limited by
// default.
import (
"encoding/binary"
"fmt"
"os"
"runtime"
"sort"
"strings"
"sync"
"time"
"unicode/utf8"
)
// Fallback width used when the terminal width can't be determined — TODO
// confirm against the code that reads it (not visible in this chunk).
const defaultTerminalWidth = 80

// Log is a set of callbacks implementing a logger (see the package comment:
// either a stderr logger or an in-memory deferred logger).
type Log struct {
	AddMsg    func(Msg)    // Record one message
	HasErrors func() bool  // Whether any error-level message was recorded
	Peek      func() []Msg // Read the messages so far without finishing
	Done      func() []Msg // Finish logging and return all messages

	Level     LogLevel
	Overrides map[MsgID]LogLevel // Per-message-id log level overrides
}

// LogLevel values are ordered from most verbose to most silent.
type LogLevel int8

const (
	LevelNone LogLevel = iota
	LevelVerbose
	LevelDebug
	LevelInfo
	LevelWarning
	LevelError
	LevelSilent
)

// MsgKind is the severity of a single message. The declaration order matters:
// SortableMsgs.Less compares kinds numerically.
type MsgKind uint8

const (
	Error MsgKind = iota
	Warning
	Info
	Note
	Debug
	Verbose
)
// String returns the upper-case label used when printing a message of this
// kind. It panics for values outside the known set, which indicates a bug.
func (kind MsgKind) String() string {
	names := [...]string{
		Error:   "ERROR",
		Warning: "WARNING",
		Info:    "INFO",
		Note:    "NOTE",
		Debug:   "DEBUG",
		Verbose: "VERBOSE",
	}
	if int(kind) < len(names) {
		return names[kind]
	}
	panic("Internal error")
}
// Icon returns the glyph printed next to a message of this kind. The legacy
// Windows command prompt only renders a limited character set, so a few kinds
// fall back to plainer glyphs there. Panics for unknown kinds.
func (kind MsgKind) Icon() string {
	cmd := isProbablyWindowsCommandPrompt()
	switch kind {
	case Error:
		if cmd {
			return "X"
		}
		return "✘"
	case Warning:
		return "▲"
	case Info:
		if cmd {
			return "►"
		}
		return "▶"
	case Note:
		return "→"
	case Debug:
		return "●"
	case Verbose:
		if cmd {
			return "♦"
		}
		return "⬥"
	default:
		panic("Internal error")
	}
}
// Cached result of the one-time Windows Command Prompt detection below.
var windowsCommandPrompt struct {
	mutex         sync.Mutex
	once          bool
	isProbablyCMD bool
}

// isProbablyWindowsCommandPrompt guesses whether output is going to the
// legacy Windows Command Prompt, which cannot render emoji or most Unicode.
// The answer is computed once and cached.
func isProbablyWindowsCommandPrompt() bool {
	windowsCommandPrompt.mutex.Lock()
	defer windowsCommandPrompt.mutex.Unlock()

	if windowsCommandPrompt.once {
		return windowsCommandPrompt.isProbablyCMD
	}
	windowsCommandPrompt.once = true

	// Assume the legacy prompt on Windows unless the WT_SESSION environment
	// variable is present, which means we're running in the new Windows
	// Terminal instead.
	if runtime.GOOS == "windows" {
		_, isWindowsTerminal := os.LookupEnv("WT_SESSION")
		windowsCommandPrompt.isProbablyCMD = !isWindowsTerminal
	}
	return windowsCommandPrompt.isProbablyCMD
}
// Msg is a single log message plus any attached notes.
type Msg struct {
	Notes      []MsgData // Secondary locations/explanations attached to this message
	PluginName string    // Name of the plugin that generated this message, if any
	Data       MsgData   // The primary text and location
	Kind       MsgKind
	ID         MsgID // Used to look up per-message log level overrides
}

// MsgData is the text and optional source location of a message or note.
type MsgData struct {
	// Optional user-specified data that is passed through unmodified
	UserDetail interface{}

	Location *MsgLocation // nil for messages without a source location
	Text     string

	DisableMaximumWidth bool // When set, don't wrap this text to the terminal width
}

// MsgLocation identifies the source span a message points at.
type MsgLocation struct {
	File       PrettyPaths
	Namespace  string
	LineText   string // The full text of the line, for display in the message
	Suggestion string // Optional replacement text to suggest
	Line       int    // 1-based
	Column     int    // 0-based, in bytes
	Length     int    // in bytes
}

type Loc struct {
	// This is the 0-based index of this location from the start of the file, in bytes
	Start int32
}

// Range is a byte span in a source file: [Loc.Start, Loc.Start+Len).
type Range struct {
	Loc Loc
	Len int32
}
// End returns the byte offset one past the last byte covered by the range.
func (r Range) End() int32 {
	return r.Len + r.Loc.Start
}
// ExpandBy grows the range in place so it also covers "b". An empty receiver
// (Len == 0) is simply replaced by "b".
func (a *Range) ExpandBy(b Range) {
	if a.Len == 0 {
		*a = b
		return
	}
	end := a.End()
	if other := b.End(); other > end {
		end = other
	}
	if b.Loc.Start < a.Loc.Start {
		a.Loc.Start = b.Loc.Start
	}
	a.Len = end - a.Loc.Start
}
// Span is a piece of source text together with the range it came from.
type Span struct {
	Text  string
	Range Range
}
// This type is just so we can use Go's native sort function
type SortableMsgs []Msg

func (a SortableMsgs) Len() int          { return len(a) }
func (a SortableMsgs) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }

// Less orders messages by file, then line, then column, then kind, then text.
// Messages without a location sort before messages that have one.
func (a SortableMsgs) Less(i int, j int) bool {
	mi, mj := a[i], a[j]
	li, lj := mi.Data.Location, mj.Data.Location

	// Location-less messages come first
	if li == nil || lj == nil {
		return li == nil && lj != nil
	}
	if li.File != lj.File {
		if li.File.Abs != lj.File.Abs {
			return li.File.Abs < lj.File.Abs
		}
		return li.File.Rel < lj.File.Rel
	}
	if li.Line != lj.Line {
		return li.Line < lj.Line
	}
	if li.Column != lj.Column {
		return li.Column < lj.Column
	}
	if mi.Kind != mj.Kind {
		return mi.Kind < mj.Kind
	}
	return mi.Data.Text < mj.Data.Text
}
// This is used to represent both file system paths (Namespace == "file") and
// abstract module paths (Namespace != "file"). Abstract module paths represent
// "virtual modules" when used for an input file and "package paths" when used
// to represent an external module.
type Path struct {
	Text      string
	Namespace string

	// This feature was added to support ancient CSS libraries that append things
	// like "?#iefix" and "#icons" to some of their import paths as a hack for IE6.
	// The intent is for these suffix parts to be ignored but passed through to
	// the output. This is supported by other bundlers, so we also support this.
	IgnoredSuffix string

	// Import attributes (the "with" keyword after an import) can affect path
	// resolution. In other words, two paths in the same file that are otherwise
	// equal but that have different import attributes may resolve to different
	// paths.
	ImportAttributes ImportAttributes

	Flags PathFlags
}

// We rely on paths as map keys. Go doesn't support custom hash codes and
// only implements hash codes for certain types. In particular, hash codes
// are implemented for strings but not for arrays of strings. So we have to
// pack these import attributes into a string.
//
// The packed format is a sequence of length-prefixed strings alternating
// between keys and values (see "EncodeImportAttributes" below).
type ImportAttributes struct {
	packedData string
}

// ImportAttribute is one decoded key/value pair from an import's "with" clause.
type ImportAttribute struct {
	Key   string
	Value string
}
// This returns a sorted array instead of a map to make determinism easier
// (keys were sorted when the data was packed by "EncodeImportAttributes").
func (attrs ImportAttributes) DecodeIntoArray() (result []ImportAttribute) {
	if attrs.packedData == "" {
		return nil
	}
	remaining := []byte(attrs.packedData)
	for len(remaining) > 0 {
		// Each entry is a 4-byte little-endian length followed by the bytes
		keyEnd := 4 + binary.LittleEndian.Uint32(remaining[:4])
		key := string(remaining[4:keyEnd])
		remaining = remaining[keyEnd:]
		valueEnd := 4 + binary.LittleEndian.Uint32(remaining[:4])
		value := string(remaining[4:valueEnd])
		remaining = remaining[valueEnd:]
		result = append(result, ImportAttribute{Key: key, Value: value})
	}
	return result
}
// DecodeIntoMap unpacks the attributes into a map, or nil when there are none.
func (attrs ImportAttributes) DecodeIntoMap() map[string]string {
	array := attrs.DecodeIntoArray()
	if len(array) == 0 {
		return nil
	}
	result := make(map[string]string, len(array))
	for _, attr := range array {
		result[attr.Key] = attr.Value
	}
	return result
}
// EncodeImportAttributes packs a key/value map into the deterministic string
// form used by ImportAttributes: keys are sorted, then each key and value is
// written as a 4-byte little-endian length followed by its bytes.
func EncodeImportAttributes(value map[string]string) ImportAttributes {
	if len(value) == 0 {
		return ImportAttributes{}
	}

	sortedKeys := make([]string, 0, len(value))
	for key := range value {
		sortedKeys = append(sortedKeys, key)
	}
	sort.Strings(sortedKeys)

	var packed strings.Builder
	writeLengthPrefixed := func(s string) {
		var length [4]byte
		binary.LittleEndian.PutUint32(length[:], uint32(len(s)))
		packed.Write(length[:])
		packed.WriteString(s)
	}
	for _, key := range sortedKeys {
		writeLengthPrefixed(key)
		writeLengthPrefixed(value[key])
	}
	return ImportAttributes{packedData: packed.String()}
}
type PathFlags uint8

const (
	// This corresponds to a value of "false" in the "browser" package.json field
	PathDisabled PathFlags = 1 << iota
)

// IsDisabled reports whether this path was disabled (e.g. via the "browser"
// package.json field).
func (p Path) IsDisabled() bool {
	return p.Flags&PathDisabled != 0
}
var noColorResult bool
var noColorOnce sync.Once

// hasNoColorEnvironmentVariable reports whether the "NO_COLOR" environment
// variable is set. This is a convention that some software follows. See
// https://no-color.org/ for more information. The lookup happens once and is
// cached.
func hasNoColorEnvironmentVariable() bool {
	noColorOnce.Do(func() {
		_, noColorResult = os.LookupEnv("NO_COLOR")
	})
	return noColorResult
}
// This has a custom implementation instead of using "filepath.Dir/Base/Ext"
// because it should work the same on Unix and Windows. These names end up in
// the generated output and the generated output should not depend on the OS.
//
// The directory is returned without a trailing slash except for file system
// roots ("/" or "C:\"), the base has its extension stripped, and the
// extension includes the leading dot. A trailing ".module.css" is treated as
// a single extension.
func PlatformIndependentPathDirBaseExt(path string) (dir string, base string, ext string) {
	absRootSlash := -1

	// Make sure we don't strip off the slash for the root of the file system
	if len(path) > 0 && (path[0] == '/' || path[0] == '\\') {
		absRootSlash = 0 // Unix
	} else if len(path) > 2 && path[1] == ':' && (path[2] == '/' || path[2] == '\\') {
		// Bug fix: the lower-case check previously used "c < 'z'", which
		// incorrectly excluded the drive letter "z" itself
		if c := path[0]; (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') {
			absRootSlash = 2 // Windows
		}
	}

	for {
		i := strings.LastIndexAny(path, "/\\")

		// Stop if there are no more slashes
		if i < 0 {
			base = path
			break
		}

		// Stop if we found a non-trailing slash
		if i == absRootSlash {
			// Keep the root's trailing slash (e.g. "/" or "C:\")
			dir, base = path[:i+1], path[i+1:]
			break
		}
		if i+1 != len(path) {
			dir, base = path[:i], path[i+1:]
			break
		}

		// Ignore trailing slashes
		path = path[:i]
	}

	// Strip off the extension
	if dot := strings.LastIndexByte(base, '.'); dot >= 0 {
		ext = base[dot:]

		// We default to the "local-css" loader for ".module.css" files. Make sure
		// the string names generated by this don't all have "_module_" in them.
		if ext == ".css" {
			if dot2 := strings.LastIndexByte(base[:dot], '.'); dot2 >= 0 && base[dot2:] == ".module.css" {
				dot = dot2
				ext = base[dot:]
			}
		}

		base = base[:dot]
	}
	return
}
// PrettyPaths holds both display forms of a file path so either can be chosen
// later via "Select".
type PrettyPaths struct {
	// This option exists to help people that run esbuild in many different
	// directories and want a unified way of reporting file paths. It avoids
	// needing to code to convert from relative paths back to absolute paths
	// to find the original file. It means builds are not reproducible across
	// machines, however.
	Abs string

	// This is a mostly platform-independent path. It's relative to the current
	// working directory and always uses standard path separators. This is the
	// default behavior since it leads to reproducible builds across machines.
	//
	// Note that these paths still use the original case of the path, so they may
	// still work differently on file systems that are case-insensitive vs.
	// case-sensitive.
	Rel string
}

// PathStyle selects which of the two path forms above to display.
type PathStyle uint8

const (
	RelPath PathStyle = iota
	AbsPath
)
// Select returns the path form matching the given style (relative by default).
func (paths *PrettyPaths) Select(style PathStyle) string {
	switch style {
	case AbsPath:
		return paths.Abs
	default:
		return paths.Rel
	}
}
// Source is one input file (or virtual module) being processed.
type Source struct {
	// This is used for error messages and the metadata JSON file.
	PrettyPaths PrettyPaths

	// An identifier that is mixed in to automatically-generated symbol names to
	// improve readability. For example, if the identifier is "util" then the
	// symbol for an "export default" statement will be called "util_default".
	IdentifierName string

	// The full text of the file.
	Contents string

	// This is used as a unique key to identify this source file. It should never
	// be shown to the user (e.g. never print this to the terminal).
	//
	// If it's marked as an absolute path, it's a platform-dependent path that
	// includes environment-specific things such as Windows backslash path
	// separators and potentially the user's home directory. Only use this for
	// passing to syscalls for reading and writing to the file system. Do not
	// include this in any output data.
	//
	// If it's marked as not an absolute path, it's an opaque string that is used
	// to refer to an automatically-generated module.
	KeyPath Path

	// Index of this source among all sources — presumably assigned by the
	// bundler; confirm against the code that constructs Source values.
	Index uint32
}
// TextForRange returns the slice of source text covered by the range.
func (s *Source) TextForRange(r Range) string {
	start := r.Loc.Start
	return s.Contents[start : start+r.Len]
}
// LocBeforeWhitespace walks backward from "loc" over spaces, tabs, and
// newlines, returning the location just after the last non-whitespace byte.
func (s *Source) LocBeforeWhitespace(loc Loc) Loc {
	for loc.Start > 0 {
		r, size := utf8.DecodeLastRuneInString(s.Contents[:loc.Start])
		switch r {
		case ' ', '\t', '\r', '\n':
			loc.Start -= int32(size)
		default:
			return loc
		}
	}
	return loc
}
// RangeOfOperatorBefore returns the range of the last occurrence of "op"
// before "loc", or a zero-length range at "loc" when it isn't found.
func (s *Source) RangeOfOperatorBefore(loc Loc, op string) Range {
	if index := strings.LastIndex(s.Contents[:loc.Start], op); index >= 0 {
		return Range{Loc: Loc{Start: int32(index)}, Len: int32(len(op))}
	}
	return Range{Loc: loc}
}
// RangeOfOperatorAfter returns the range of the first occurrence of "op" at
// or after "loc", or a zero-length range at "loc" when it isn't found.
func (s *Source) RangeOfOperatorAfter(loc Loc, op string) Range {
	if index := strings.Index(s.Contents[loc.Start:], op); index >= 0 {
		return Range{Loc: Loc{Start: loc.Start + int32(index)}, Len: int32(len(op))}
	}
	return Range{Loc: loc}
}
// RangeOfString returns the range of the string literal starting at "loc",
// including both quote characters. Backslash escapes are skipped over. For
// template literals, only no-substitution templates get a range; hitting
// "${" (or never finding the closing quote) yields a zero-length range.
func (s *Source) RangeOfString(loc Loc) Range {
	text := s.Contents[loc.Start:]
	if len(text) == 0 {
		return Range{Loc: loc, Len: 0}
	}

	switch quote := text[0]; quote {
	case '"', '\'', '`':
		// Search for the matching quote character
		for i := 1; i < len(text); i++ {
			switch c := text[i]; {
			case c == quote:
				return Range{Loc: loc, Len: int32(i + 1)}
			case c == '\\':
				i++ // Skip the escaped character
			case quote == '`' && c == '$' && i+1 < len(text) && text[i+1] == '{':
				// Only return the range for no-substitution template literals
				return Range{Loc: loc, Len: 0}
			}
		}
	}
	return Range{Loc: loc, Len: 0}
}
// RangeOfNumber returns the range of the numeric literal starting at "loc",
// or a zero-length range when the text there doesn't begin with a digit. The
// literal is extended over digits, ASCII letters, '.', and '_' so that forms
// like hex, exponents, and numeric separators are covered.
func (s *Source) RangeOfNumber(loc Loc) (r Range) {
	text := s.Contents[loc.Start:]
	r = Range{Loc: loc}

	if len(text) == 0 || text[0] < '0' || text[0] > '9' {
		return
	}

	isNumberChar := func(c byte) bool {
		return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') ||
			(c >= 'A' && c <= 'Z') || c == '.' || c == '_'
	}
	r.Len = 1
	for int(r.Len) < len(text) && isNumberChar(text[r.Len]) {
		r.Len++
	}
	return
}
// RangeOfLegacyOctalEscape returns the range of a legacy octal escape
// sequence starting at "loc": a backslash, the character after it, and up to
// two further decimal digits. Returns a zero-length range when the text does
// not start with a backslash.
func (s *Source) RangeOfLegacyOctalEscape(loc Loc) (r Range) {
	text := s.Contents[loc.Start:]
	r = Range{Loc: loc}

	if len(text) < 2 || text[0] != '\\' {
		return
	}
	r.Len = 2
	for r.Len < 4 && int(r.Len) < len(text) {
		if c := text[r.Len]; c < '0' || c > '9' {
			break
		}
		r.Len++
	}
	return
}
// CommentTextWithoutIndent returns the text of a "/* ... */" comment with the
// common leading indentation removed from every line after the first, so the
// comment can be re-emitted at a different indent level. Non-block comments
// are returned unchanged. Lines are re-joined with "\n" regardless of the
// original newline style.
func (s *Source) CommentTextWithoutIndent(r Range) string {
	text := s.Contents[r.Loc.Start:r.End()]
	if len(text) < 2 || !strings.HasPrefix(text, "/*") {
		return text
	}
	prefix := s.Contents[:r.Loc.Start]

	// Figure out the initial indent: the number of runes between the previous
	// newline and the "/*" opener
	indent := 0
seekBackwardToNewline:
	for len(prefix) > 0 {
		c, size := utf8.DecodeLastRuneInString(prefix)
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			break seekBackwardToNewline
		}
		prefix = prefix[:len(prefix)-size]
		indent++
	}

	// Split the comment into lines
	var lines []string
	start := 0
	for i, c := range text {
		switch c {
		case '\r', '\n':
			// Don't double-append for Windows style "\r\n" newlines
			if start <= i {
				lines = append(lines, text[start:i])
			}
			start = i + 1

			// Ignore the second part of Windows style "\r\n" newlines
			if c == '\r' && start < len(text) && text[start] == '\n' {
				start++
			}

		case '\u2028', '\u2029':
			lines = append(lines, text[start:i])
			start = i + 3 // U+2028 and U+2029 are 3 bytes long in UTF-8
		}
	}
	lines = append(lines, text[start:])

	// Find the minimum indent over all lines after the first line
	for _, line := range lines[1:] {
		lineIndent := 0
		for _, c := range line {
			if c != ' ' && c != '\t' {
				break
			}
			lineIndent++
		}
		if indent > lineIndent {
			indent = lineIndent
		}
	}

	// Trim the indent off of all lines after the first line
	for i, line := range lines {
		if i > 0 {
			lines[i] = line[indent:]
		}
	}
	return strings.Join(lines, "\n")
}
// plural formats "count" occurrences of "prefix" as a human-readable phrase,
// e.g. "1 error", "2 warnings", "1 of 3 errors", or "all 2 errors". The
// "shown" argument says how many were actually printed, and "someAreMissing"
// says whether any message in another category was withheld.
func plural(prefix string, count int, shown int, someAreMissing bool) string {
	suffix := "s"
	if count == 1 {
		suffix = ""
	}
	text := fmt.Sprintf("%d %s%s", count, prefix, suffix)
	switch {
	case shown < count:
		// Only some of the messages in this category were printed
		text = fmt.Sprintf("%d of %s", shown, text)
	case someAreMissing && count > 1:
		// Everything in this category was printed, but another category
		// was truncated, so emphasize that this one is complete
		text = "all " + text
	}
	return text
}
// errorAndWarningSummary returns a phrase summarizing how many errors and
// warnings were generated, and how many of those were actually shown.
func errorAndWarningSummary(errors int, warnings int, shownErrors int, shownWarnings int) string {
	someAreMissing := shownErrors < errors || shownWarnings < warnings
	if errors == 0 {
		return plural("warning", warnings, shownWarnings, someAreMissing)
	}
	if warnings == 0 {
		return plural("error", errors, shownErrors, someAreMissing)
	}
	return fmt.Sprintf("%s and %s",
		plural("warning", warnings, shownWarnings, someAreMissing),
		plural("error", errors, shownErrors, someAreMissing))
}
// APIKind identifies which of esbuild's public entry points is in use so
// that messages can be tailored to it (see the "API" variable below).
type APIKind uint8

const (
	GoAPI APIKind = iota
	CLIAPI
	JSAPI
)

// This can be used to customize error messages for the current API kind
var API APIKind
// TerminalInfo describes the terminal (if any) behind an output stream.
type TerminalInfo struct {
	IsTTY           bool // whether the stream is attached to a terminal
	UseColorEscapes bool // whether ANSI color escape codes should be emitted
	Width           int  // window width in columns (0 if unknown)
	Height          int  // window height in rows (0 if unknown)
}
// NewStderrLog returns a Log that formats messages and prints them to stderr
// as they arrive, filtered by the log level and capped by the message limit
// in "options". Near the limit, warnings may be deferred so a failing build
// always has room to show at least one error; Done prints any still-deferred
// warnings plus a final summary line and returns all recorded messages.
func NewStderrLog(options OutputOptions) Log {
	var mutex sync.Mutex
	var msgs SortableMsgs
	terminalInfo := GetTerminalInfo(os.Stderr)
	errors := 0
	warnings := 0
	shownErrors := 0
	shownWarnings := 0
	hasErrors := false
	remainingMessagesBeforeLimit := options.MessageLimit
	if remainingMessagesBeforeLimit == 0 {
		// A limit of zero means "effectively unlimited"
		remainingMessagesBeforeLimit = 0x7FFFFFFF
	}
	var deferredWarnings []Msg
	finalizeLog := func() {
		// Print the deferred warning now if there was no error after all
		for remainingMessagesBeforeLimit > 0 && len(deferredWarnings) > 0 {
			shownWarnings++
			writeStringWithColor(os.Stderr, deferredWarnings[0].String(options, terminalInfo))
			deferredWarnings = deferredWarnings[1:]
			remainingMessagesBeforeLimit--
		}
		// Print out a summary
		if options.MessageLimit > 0 && errors+warnings > options.MessageLimit {
			writeStringWithColor(os.Stderr, fmt.Sprintf("%s shown (disable the message limit with --log-limit=0)\n",
				errorAndWarningSummary(errors, warnings, shownErrors, shownWarnings)))
		} else if options.LogLevel <= LevelInfo && (warnings != 0 || errors != 0) {
			writeStringWithColor(os.Stderr, fmt.Sprintf("%s\n",
				errorAndWarningSummary(errors, warnings, shownErrors, shownWarnings)))
		}
	}
	// Explicit color flags override terminal detection
	switch options.Color {
	case ColorNever:
		terminalInfo.UseColorEscapes = false
	case ColorAlways:
		terminalInfo.UseColorEscapes = SupportsColorEscapes
	}
	return Log{
		Level:     options.LogLevel,
		Overrides: options.Overrides,
		AddMsg: func(msg Msg) {
			mutex.Lock()
			defer mutex.Unlock()
			msgs = append(msgs, msg)
			// Print informational kinds immediately; count errors/warnings
			// (their printing is handled below, subject to the limit)
			switch msg.Kind {
			case Verbose:
				if options.LogLevel <= LevelVerbose {
					writeStringWithColor(os.Stderr, msg.String(options, terminalInfo))
				}
			case Debug:
				if options.LogLevel <= LevelDebug {
					writeStringWithColor(os.Stderr, msg.String(options, terminalInfo))
				}
			case Info:
				if options.LogLevel <= LevelInfo {
					writeStringWithColor(os.Stderr, msg.String(options, terminalInfo))
				}
			case Error:
				hasErrors = true
				if options.LogLevel <= LevelError {
					errors++
				}
			case Warning:
				if options.LogLevel <= LevelWarning {
					warnings++
				}
			}
			// Be silent if we're past the limit so we don't flood the terminal
			if remainingMessagesBeforeLimit == 0 {
				return
			}
			switch msg.Kind {
			case Error:
				if options.LogLevel <= LevelError {
					shownErrors++
					writeStringWithColor(os.Stderr, msg.String(options, terminalInfo))
					remainingMessagesBeforeLimit--
				}
			case Warning:
				if options.LogLevel <= LevelWarning {
					if remainingMessagesBeforeLimit > (options.MessageLimit+1)/2 {
						shownWarnings++
						writeStringWithColor(os.Stderr, msg.String(options, terminalInfo))
						remainingMessagesBeforeLimit--
					} else {
						// If we have less than half of the slots left, wait for potential
						// future errors instead of using up all of the slots with warnings.
						// We want the log for a failed build to always have at least one
						// error in it.
						deferredWarnings = append(deferredWarnings, msg)
					}
				}
			}
		},
		HasErrors: func() bool {
			mutex.Lock()
			defer mutex.Unlock()
			return hasErrors
		},
		Peek: func() []Msg {
			// Return a sorted copy without finalizing the log
			mutex.Lock()
			defer mutex.Unlock()
			sort.Stable(msgs)
			return append([]Msg{}, msgs...)
		},
		Done: func() []Msg {
			mutex.Lock()
			defer mutex.Unlock()
			finalizeLog()
			sort.Stable(msgs)
			return msgs
		},
	}
}
// PrintErrorToStderr logs a single error message to stderr using log options
// derived from the given command-line arguments.
func PrintErrorToStderr(osArgs []string, text string) {
	PrintMessageToStderr(osArgs, Msg{Kind: Error, Data: MsgData{Text: text}})
}
func PrintErrorWithNoteToStderr(osArgs []string, text string, note string) {
msg := Msg{
Kind: Error,
Data: MsgData{Text: text},
}
if note != "" {
msg.Notes = []MsgData{{Text: note}}
}
PrintMessageToStderr(osArgs, msg)
}
// OutputOptionsForArgs scans the raw command-line arguments for the logging
// flags "--color" and "--log-level" and returns matching output options.
// This mini argument parser exists so these flags always work, even before
// the general-purpose argument parsing code has run.
func OutputOptionsForArgs(osArgs []string) OutputOptions {
	result := OutputOptions{IncludeSource: true}
	for _, arg := range osArgs {
		switch arg {
		case "--color":
			result.Color = ColorAlways
		case "--color=true":
			result.Color = ColorAlways
		case "--color=false":
			result.Color = ColorNever
		case "--log-level=info":
			result.LogLevel = LevelInfo
		case "--log-level=warning":
			result.LogLevel = LevelWarning
		case "--log-level=error":
			result.LogLevel = LevelError
		case "--log-level=silent":
			result.LogLevel = LevelSilent
		}
	}
	return result
}
// PrintMessageToStderr logs a single message to stderr using log options
// derived from the given command-line arguments.
func PrintMessageToStderr(osArgs []string, msg Msg) {
	log := NewStderrLog(OutputOptionsForArgs(osArgs))
	log.AddMsg(msg)
	log.Done()
}
// Colors is a set of ANSI escape code strings used to style terminal output.
// The zero value (all empty strings) produces completely unstyled output.
// The "XBgY" fields combine a background color with a foreground color.
type Colors struct {
	Reset     string
	Bold      string
	Dim       string
	Underline string

	Red     string
	Green   string
	Blue    string
	Cyan    string
	Magenta string
	Yellow  string

	RedBgRed         string
	RedBgWhite       string
	GreenBgGreen     string
	GreenBgWhite     string
	BlueBgBlue       string
	BlueBgWhite      string
	CyanBgCyan       string
	CyanBgBlack      string
	MagentaBgMagenta string
	MagentaBgBlack   string
	YellowBgYellow   string
	YellowBgBlack    string
}
// TerminalColors holds the actual ANSI escape code sequences used when color
// output is enabled (see the Colors struct above).
var TerminalColors = Colors{
	Reset:     "\033[0m",
	Bold:      "\033[1m",
	Dim:       "\033[37m",
	Underline: "\033[4m",

	Red:     "\033[31m",
	Green:   "\033[32m",
	Blue:    "\033[34m",
	Cyan:    "\033[36m",
	Magenta: "\033[35m",
	Yellow:  "\033[33m",

	RedBgRed:         "\033[41;31m",
	RedBgWhite:       "\033[41;97m",
	GreenBgGreen:     "\033[42;32m",
	GreenBgWhite:     "\033[42;97m",
	BlueBgBlue:       "\033[44;34m",
	BlueBgWhite:      "\033[44;97m",
	CyanBgCyan:       "\033[46;36m",
	CyanBgBlack:      "\033[46;30m",
	MagentaBgMagenta: "\033[45;35m",
	MagentaBgBlack:   "\033[45;30m",
	YellowBgYellow:   "\033[43;33m",
	YellowBgBlack:    "\033[43;30m",
}
// PrintText writes the text produced by "callback" to "file" unless the
// log level derived from "osArgs" filters out messages at "level". Color
// flags in "osArgs" are respected.
func PrintText(file *os.File, level LogLevel, osArgs []string, callback func(Colors) string) {
	options := OutputOptionsForArgs(osArgs)
	// Skip logging these if these logs are disabled
	if options.LogLevel > level {
		return
	}
	PrintTextWithColor(file, options.Color, callback)
}
// PrintTextWithColor writes the text produced by "callback" to "file". The
// callback receives either the full set of terminal color escape codes or a
// zero Colors value, depending on "useColor" and on whether "file" is a
// color-capable terminal.
func PrintTextWithColor(file *os.File, useColor UseColor, callback func(Colors) string) {
	enabled := false
	switch useColor {
	case ColorAlways:
		enabled = SupportsColorEscapes
	case ColorIfTerminal:
		enabled = GetTerminalInfo(file).UseColorEscapes
	case ColorNever:
		// Colors stay disabled
	}
	var colors Colors
	if enabled {
		colors = TerminalColors
	}
	writeStringWithColor(file, callback(colors))
}
// SummaryTableEntry is one row in the table of output files that is printed
// at the end of a build.
type SummaryTableEntry struct {
	Dir         string
	Base        string
	Size        string
	Bytes       int
	IsSourceMap bool
}

// SummaryTable exists so we can use Go's native sort function. Rows sort
// with source maps last, then by descending size, then alphabetically by
// directory and finally by file name.
type SummaryTable []SummaryTableEntry

func (t SummaryTable) Len() int          { return len(t) }
func (t SummaryTable) Swap(i int, j int) { t[i], t[j] = t[j], t[i] }

func (t SummaryTable) Less(i int, j int) bool {
	a, b := t[i], t[j]

	// Source maps always sort after everything else
	if a.IsSourceMap != b.IsSourceMap {
		return b.IsSourceMap
	}

	// Bigger files sort earlier
	if a.Bytes != b.Bytes {
		return a.Bytes > b.Bytes
	}

	// Break ties alphabetically by directory, then by file name
	if a.Dir != b.Dir {
		return a.Dir < b.Dir
	}
	return a.Base < b.Base
}
// Show a warning icon next to output files that are 1mb or larger
const sizeWarningThreshold = 1024 * 1024

// PrintSummary prints the table of output files (plus the elapsed time when
// "start" is non-nil) to stderr. The table is sorted, truncated to fit the
// terminal height, and paths are shortened with "..." to fit each row on a
// single line.
func PrintSummary(useColor UseColor, table SummaryTable, start *time.Time) {
	PrintTextWithColor(os.Stderr, useColor, func(colors Colors) string {
		// Note: this local deliberately shadows the function of the same name
		isProbablyWindowsCommandPrompt := isProbablyWindowsCommandPrompt()
		sb := strings.Builder{}
		if len(table) > 0 {
			info := GetTerminalInfo(os.Stderr)
			// Truncate the table in case it's really long
			maxLength := info.Height / 2
			if info.Height == 0 {
				maxLength = 20
			} else if maxLength < 5 {
				maxLength = 5
			}
			length := len(table)
			sort.Sort(table)
			if length > maxLength {
				table = table[:maxLength]
			}
			// Compute the maximum width of the size column
			spacingBetweenColumns := 2
			hasSizeWarning := false
			maxPath := 0
			maxSize := 0
			for _, entry := range table {
				path := len(entry.Dir) + len(entry.Base)
				size := len(entry.Size) + spacingBetweenColumns
				if path > maxPath {
					maxPath = path
				}
				if size > maxSize {
					maxSize = size
				}
				if !entry.IsSourceMap && entry.Bytes >= sizeWarningThreshold {
					hasSizeWarning = true
				}
			}
			margin := " "
			layoutWidth := info.Width
			if layoutWidth < 1 {
				layoutWidth = defaultTerminalWidth
			}
			layoutWidth -= 2 * len(margin)
			if hasSizeWarning {
				// Add space for the warning icon
				layoutWidth -= 2
			}
			if layoutWidth > maxPath+maxSize {
				layoutWidth = maxPath + maxSize
			}
			sb.WriteByte('\n')
			for _, entry := range table {
				dir, base := entry.Dir, entry.Base
				pathWidth := layoutWidth - maxSize
				// Truncate the path with "..." to fit on one line
				if len(dir)+len(base) > pathWidth {
					// Trim the directory from the front, leaving the trailing slash
					if len(dir) > 0 {
						n := pathWidth - len(base) - 3
						if n < 1 {
							n = 1
						}
						dir = "..." + dir[len(dir)-n:]
					}
					// Trim the file name from the back
					if len(dir)+len(base) > pathWidth {
						n := pathWidth - len(dir) - 3
						if n < 0 {
							n = 0
						}
						base = base[:n] + "..."
					}
				}
				spacer := layoutWidth - len(entry.Size) - len(dir) - len(base)
				if spacer < 0 {
					spacer = 0
				}
				// Put a warning next to the size if it's above a certain threshold
				sizeColor := colors.Cyan
				sizeWarning := ""
				if !entry.IsSourceMap && entry.Bytes >= sizeWarningThreshold {
					sizeColor = colors.Yellow
					// Emoji don't work in Windows Command Prompt
					if !isProbablyWindowsCommandPrompt {
						sizeWarning = " ⚠️"
					}
				}
				sb.WriteString(fmt.Sprintf("%s%s%s%s%s%s%s%s%s%s%s%s\n",
					margin,
					colors.Dim,
					dir,
					colors.Reset,
					colors.Bold,
					base,
					colors.Reset,
					strings.Repeat(" ", spacer),
					sizeColor,
					entry.Size,
					sizeWarning,
					colors.Reset,
				))
			}
			// Say how many remaining files are not shown
			if length > maxLength {
				plural := "s"
				if length == maxLength+1 {
					plural = ""
				}
				sb.WriteString(fmt.Sprintf("%s%s...and %d more output file%s...%s\n", margin, colors.Dim, length-maxLength, plural, colors.Reset))
			}
		}
		sb.WriteByte('\n')
		lightningSymbol := "⚡ "
		// Emoji don't work in Windows Command Prompt
		if isProbablyWindowsCommandPrompt {
			lightningSymbol = ""
		}
		// Printing the time taken is optional
		if start != nil {
			sb.WriteString(fmt.Sprintf("%s%sDone in %dms%s\n",
				lightningSymbol,
				colors.Green,
				time.Since(*start).Milliseconds(),
				colors.Reset,
			))
		}
		return sb.String()
	})
}
// DeferLogKind controls which kinds of messages a deferred log records.
type DeferLogKind uint8

const (
	// DeferLogAll keeps every message, including verbose and debug ones
	DeferLogAll DeferLogKind = iota

	// DeferLogNoVerboseOrDebug silently drops verbose and debug messages
	DeferLogNoVerboseOrDebug
)
// NewDeferLog returns a Log that records messages in memory instead of
// printing them. The recorded messages can be retrieved later with Peek
// (which copies without sorting... actually copies as-is) or Done (which
// stably sorts them first). Depending on "kind", verbose and debug messages
// may be dropped entirely.
func NewDeferLog(kind DeferLogKind, overrides map[MsgID]LogLevel) Log {
	var (
		mutex     sync.Mutex
		recorded  SortableMsgs
		hasErrors bool
	)
	return Log{
		Level:     LevelInfo,
		Overrides: overrides,
		AddMsg: func(msg Msg) {
			// Optionally drop noisy message kinds
			if kind == DeferLogNoVerboseOrDebug && (msg.Kind == Verbose || msg.Kind == Debug) {
				return
			}
			mutex.Lock()
			defer mutex.Unlock()
			hasErrors = hasErrors || msg.Kind == Error
			recorded = append(recorded, msg)
		},
		HasErrors: func() bool {
			mutex.Lock()
			defer mutex.Unlock()
			return hasErrors
		},
		Peek: func() []Msg {
			// Return a copy so the caller can't mutate our internal state
			mutex.Lock()
			defer mutex.Unlock()
			return append([]Msg{}, recorded...)
		},
		Done: func() []Msg {
			mutex.Lock()
			defer mutex.Unlock()
			sort.Stable(recorded)
			return recorded
		},
	}
}
// UseColor controls when ANSI color escape codes are used in output.
type UseColor uint8

const (
	// ColorIfTerminal uses color only when the output is a color-capable terminal
	ColorIfTerminal UseColor = iota

	// ColorNever disables color entirely
	ColorNever

	// ColorAlways uses color whenever the platform supports escape codes
	ColorAlways
)
// OutputOptions controls how log messages are filtered and formatted when
// they are printed.
type OutputOptions struct {
	MessageLimit  int  // maximum number of messages to print (0 means unlimited)
	IncludeSource bool // whether to include source code excerpts with messages
	Color         UseColor
	LogLevel      LogLevel
	PathStyle     PathStyle
	Overrides     map[MsgID]LogLevel // per-message-ID log level overrides
}
// String formats the message and all of its notes for display, honoring the
// output options and the capabilities of the target terminal.
func (msg Msg) String(options OutputOptions, terminalInfo TerminalInfo) string {
	// Format the message
	var text strings.Builder
	text.WriteString(msgString(options.IncludeSource, options.PathStyle, terminalInfo, msg.ID, msg.Kind, msg.Data, msg.PluginName))
	// Format the notes
	var oldData MsgData
	for i, note := range msg.Notes {
		// Insert a separating newline before the first note, and before any
		// note that follows multi-line text or text with a source location
		if options.IncludeSource && (i == 0 || strings.IndexByte(oldData.Text, '\n') >= 0 || oldData.Location != nil) {
			text.WriteString("\n")
		}
		text.WriteString(msgString(options.IncludeSource, options.PathStyle, terminalInfo, MsgID_None, Note, note, ""))
		oldData = note
	}
	// Add extra spacing between messages if source code is present
	if options.IncludeSource {
		text.WriteString("\n")
	}
	return text.String()
}
// The number of margin characters in addition to the line number
const extraMarginChars = 9

// marginWithLineText returns the gutter text for a source excerpt line,
// containing the right-aligned line number followed by a vertical rule.
func marginWithLineText(maxMargin int, line int) string {
	number := fmt.Sprintf("%d", line)
	padding := strings.Repeat(" ", maxMargin-len(number))
	return fmt.Sprintf(" %s%s │ ", padding, number)
}
func emptyMarginText(maxMargin int, isLast bool) string {
space := strings.Repeat(" ", maxMargin)
if isLast {
return fmt.Sprintf(" %s ╵ ", space)
}
return fmt.Sprintf(" %s │ ", space)
}
func msgString(includeSource bool, pathStyle PathStyle, terminalInfo TerminalInfo, id MsgID, kind MsgKind, data MsgData, pluginName string) string {
if !includeSource {
if loc := data.Location; loc != nil {
return fmt.Sprintf("%s: %s: %s\n", loc.File.Select(pathStyle), kind.String(), data.Text)
}
return fmt.Sprintf("%s: %s\n", kind.String(), data.Text)
}
var colors Colors
if terminalInfo.UseColorEscapes {
colors = TerminalColors
}
var iconColor string
var kindColorBrackets string
var kindColorText string
location := ""
if data.Location != nil {
maxMargin := len(fmt.Sprintf("%d", data.Location.Line))
d := detailStruct(data, pathStyle, terminalInfo, maxMargin)
if d.Suggestion != "" {
location = fmt.Sprintf("\n %s:%d:%d:\n%s%s%s%s%s%s\n%s%s%s%s%s\n%s%s%s%s%s\n%s",
d.Path, d.Line, d.Column,
colors.Dim, d.SourceBefore, colors.Green, d.SourceMarked, colors.Dim, d.SourceAfter,
emptyMarginText(maxMargin, false), d.Indent, colors.Green, d.Marker, colors.Dim,
emptyMarginText(maxMargin, true), d.Indent, colors.Green, d.Suggestion, colors.Reset,
d.ContentAfter,
)
} else {
location = fmt.Sprintf("\n %s:%d:%d:\n%s%s%s%s%s%s\n%s%s%s%s%s\n%s",
d.Path, d.Line, d.Column,
colors.Dim, d.SourceBefore, colors.Green, d.SourceMarked, colors.Dim, d.SourceAfter,
emptyMarginText(maxMargin, true), d.Indent, colors.Green, d.Marker, colors.Reset,
d.ContentAfter,
)
}
}
switch kind {
case Verbose:
iconColor = colors.Cyan
kindColorBrackets = colors.CyanBgCyan
kindColorText = colors.CyanBgBlack
case Debug:
iconColor = colors.Green
kindColorBrackets = colors.GreenBgGreen
kindColorText = colors.GreenBgWhite
case Info:
iconColor = colors.Blue
kindColorBrackets = colors.BlueBgBlue
kindColorText = colors.BlueBgWhite
case Error:
iconColor = colors.Red
kindColorBrackets = colors.RedBgRed
kindColorText = colors.RedBgWhite
case Warning:
iconColor = colors.Yellow
kindColorBrackets = colors.YellowBgYellow
kindColorText = colors.YellowBgBlack
case Note:
sb := strings.Builder{}
for _, line := range strings.Split(data.Text, "\n") {
// Special-case word wrapping
if wrapWidth := terminalInfo.Width; wrapWidth > 2 {
if !data.DisableMaximumWidth && wrapWidth > 100 {
wrapWidth = 100 // Enforce a maximum paragraph width for readability
}
for _, run := range wrapWordsInString(line, wrapWidth-2) {
sb.WriteString(" ")
sb.WriteString(linkifyText(run, colors.Underline, colors.Reset))
sb.WriteByte('\n')
}
continue
}
// Otherwise, just write an indented line
sb.WriteString(" ")
sb.WriteString(linkifyText(line, colors.Underline, colors.Reset))
sb.WriteByte('\n')
}
sb.WriteString(location)
return sb.String()
}
if pluginName != "" {
pluginName = fmt.Sprintf(" %s%s[plugin %s]%s", colors.Bold, colors.Magenta, pluginName, colors.Reset)
}
msgID := MsgIDToString(id)
if msgID != "" {
msgID = fmt.Sprintf(" [%s]", msgID)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | true |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/logger/logger_darwin.go | internal/logger/logger_darwin.go | //go:build darwin
// +build darwin
package logger
import (
"os"
"golang.org/x/sys/unix"
)
// Terminals on this platform interpret ANSI escape codes natively
const SupportsColorEscapes = true

// GetTerminalInfo reports whether the given file is attached to a terminal
// and, if so, whether color escapes should be used and how big the window
// is. Width and height are left as zero when the window size is unavailable.
func GetTerminalInfo(file *os.File) (info TerminalInfo) {
	fd := file.Fd()
	// Is this file descriptor a terminal?
	if _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA); err == nil {
		info.IsTTY = true
		// Color is enabled unless disabled via the environment (see
		// hasNoColorEnvironmentVariable — presumably the NO_COLOR convention,
		// defined elsewhere)
		info.UseColorEscapes = !hasNoColorEnvironmentVariable()
		// Get the width of the window
		if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil {
			info.Width = int(w.Col)
			info.Height = int(w.Row)
		}
	}
	return
}
// writeStringWithColor writes the text to the file unmodified. On this
// platform no escape-code translation is needed, so any embedded ANSI
// escapes pass straight through to the terminal.
func writeStringWithColor(file *os.File, text string) {
	file.WriteString(text)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/cache/cache_fs.go | internal/cache/cache_fs.go | package cache
import (
"sync"
"github.com/evanw/esbuild/internal/fs"
)
// This cache uses information from the "stat" syscall to try to avoid re-
// reading files from the file system during subsequent builds if the file
// hasn't changed. The assumption is reading the file metadata is faster than
// reading the file contents.
// FSCache caches the contents of files read from the file system, keyed by
// path (see the comment above for the caching strategy).
type FSCache struct {
	entries map[string]*fsEntry
	mutex   sync.Mutex
}

// fsEntry is a single cached file.
type fsEntry struct {
	contents       string
	modKey         fs.ModKey // the file metadata observed when the file was read
	isModKeyUsable bool      // false when the modification key couldn't be computed
}
// ReadFile returns the contents of the file at "path", reusing a previously
// cached copy when the file's modification key indicates it hasn't changed
// since it was last read.
func (c *FSCache) ReadFile(fs fs.FS, path string) (contents string, canonicalError error, originalError error) {
	c.mutex.Lock()
	cached := c.entries[path]
	c.mutex.Unlock()

	// If the file's modification key hasn't changed since it was cached, assume
	// the contents of the file are also the same and skip reading the file.
	modKey, modKeyErr := fs.ModKey(path)
	if cached != nil && cached.isModKeyUsable && modKeyErr == nil && cached.modKey == modKey {
		return cached.contents, nil, nil
	}

	fileText, readErr, originalError := fs.ReadFile(path)
	if readErr != nil {
		return "", readErr, originalError
	}

	// Remember this file for next time
	c.mutex.Lock()
	defer c.mutex.Unlock()
	c.entries[path] = &fsEntry{
		contents:       fileText,
		modKey:         modKey,
		isModKeyUsable: modKeyErr == nil,
	}
	return fileText, nil, nil
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/cache/cache.go | internal/cache/cache.go | package cache
import (
"sync"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/runtime"
)
// This is a cache of the parsed contents of a set of files. The idea is to be
// able to reuse the results of parsing between builds and make subsequent
// builds faster by avoiding redundant parsing work. This only works if:
//
// - The AST information in the cache must be considered immutable. There is
// no way to enforce this in Go, but please be disciplined about this. The
// ASTs are shared in between builds. Any information that must be mutated
// in the AST during a build must be done on a shallow clone of the data if
// the mutation happens after parsing (i.e. a clone that clones everything
// that will be mutated and shares only the parts that won't be mutated).
//
// - The information in the cache must not depend at all on the contents of
// any file other than the file being cached. Invalidating an entry in the
// cache does not also invalidate any entries that depend on that file, so
// caching information that depends on other files can result in incorrect
// results due to reusing stale data. For example, do not "bake in" some
// value imported from another file.
//
// - Cached ASTs must only be reused if the parsing options are identical
// between builds. For example, it would be bad if the AST parser depended
// on options inherited from a nearby "package.json" file but those options
// were not part of the cache key. Then the cached AST could incorrectly be
// reused even if the contents of that "package.json" file have changed.
// CacheSet groups all of the individual caches that are shared between
// builds (see the comment above for the invariants these caches rely on).
type CacheSet struct {
	FSCache          FSCache
	CSSCache         CSSCache
	JSONCache        JSONCache
	JSCache          JSCache
	SourceIndexCache SourceIndexCache
}
// MakeCacheSet creates an empty cache set with all internal maps initialized
// and the source index counter started just past the reserved runtime index.
func MakeCacheSet() *CacheSet {
	set := &CacheSet{}
	set.SourceIndexCache.globEntries = make(map[uint64]uint32)
	set.SourceIndexCache.entries = make(map[sourceIndexKey]uint32)
	set.SourceIndexCache.nextSourceIndex = runtime.SourceIndex + 1
	set.FSCache.entries = make(map[string]*fsEntry)
	set.CSSCache.entries = make(map[logger.Path]*cssCacheEntry)
	set.JSONCache.entries = make(map[logger.Path]*jsonCacheEntry)
	set.JSCache.entries = make(map[logger.Path]*jsCacheEntry)
	return set
}
// SourceIndexCache hands out source indices, reusing the same index for the
// same file across builds so indices remain stable.
type SourceIndexCache struct {
	globEntries     map[uint64]uint32         // keyed by (parent source index << 32) | glob index
	entries         map[sourceIndexKey]uint32 // keyed by (path, kind)
	mutex           sync.Mutex
	nextSourceIndex uint32 // the next index to hand out
}

// SourceIndexKind distinguishes different uses of the same path, each of
// which needs its own source index.
type SourceIndexKind uint8

const (
	SourceIndexNormal SourceIndexKind = iota
	SourceIndexJSStubForCSS
)

// sourceIndexKey is the map key for SourceIndexCache.entries.
type sourceIndexKey struct {
	path logger.Path
	kind SourceIndexKind
}
// LenHint returns an upper-bound estimate of how many source indices will be
// in use, suitable for pre-sizing data structures indexed by source index.
func (c *SourceIndexCache) LenHint() uint32 {
	// Add some extra room at the end for a new file or two without reallocating
	const someExtraRoom = 16

	c.mutex.Lock()
	defer c.mutex.Unlock()
	return c.nextSourceIndex + someExtraRoom
}
// Get returns the source index for the given path and kind, assigning a
// fresh index the first time a given combination is seen.
func (c *SourceIndexCache) Get(path logger.Path, kind SourceIndexKind) uint32 {
	key := sourceIndexKey{path: path, kind: kind}
	c.mutex.Lock()
	defer c.mutex.Unlock()
	if existing, ok := c.entries[key]; ok {
		return existing
	}
	assigned := c.nextSourceIndex
	c.nextSourceIndex++
	c.entries[key] = assigned
	return assigned
}
// GetGlob returns the source index for a glob pattern, identified by the
// source index of the file containing it plus the glob's position within
// that file. A fresh index is assigned the first time a pair is seen.
func (c *SourceIndexCache) GetGlob(parentSourceIndex uint32, globIndex uint32) uint32 {
	key := (uint64(parentSourceIndex) << 32) | uint64(globIndex)
	c.mutex.Lock()
	defer c.mutex.Unlock()
	if existing, ok := c.globEntries[key]; ok {
		return existing
	}
	assigned := c.nextSourceIndex
	c.nextSourceIndex++
	c.globEntries[key] = assigned
	return assigned
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/cache/cache_ast.go | internal/cache/cache_ast.go | package cache
import (
"sync"
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_parser"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_parser"
"github.com/evanw/esbuild/internal/logger"
)
// This cache intends to avoid unnecessarily re-parsing files in subsequent
// builds. For a given path, parsing can be avoided if the contents of the file
// and the options for the parser are the same as last time. Even if the
// contents of the file are the same, the options for the parser may have
// changed if they depend on some other file ("package.json" for example).
//
// This cache checks if the file contents have changed even though we have
// the ability to detect if a file has changed on the file system by reading
// its metadata. First of all, if the file contents are cached then they should
// be the same pointer, which makes the comparison trivial. Also we want to
// cache the AST for plugins in the common case that the plugin output stays
// the same.
////////////////////////////////////////////////////////////////////////////////
// CSS
// CSSCache memoizes the result of parsing CSS files, keyed by path (see the
// comment at the top of this file for when reuse is valid).
type CSSCache struct {
	entries map[logger.Path]*cssCacheEntry
	mutex   sync.Mutex
}

// cssCacheEntry stores everything needed to replay a previous parse: the
// exact source and options used, the resulting AST, and the log messages
// that the parse generated.
type cssCacheEntry struct {
	source  logger.Source
	msgs    []logger.Msg
	ast     css_ast.AST
	options css_parser.Options
}
// Parse returns the AST for the given CSS source, reusing the cached AST
// when both the source and the parser options are unchanged from last time.
// Log messages from the (possibly cached) parse are replayed into "log".
func (c *CSSCache) Parse(log logger.Log, source logger.Source, options css_parser.Options) css_ast.AST {
	// Check for a cache hit first
	c.mutex.Lock()
	cached := c.entries[source.KeyPath]
	c.mutex.Unlock()
	if cached != nil && cached.source == source && cached.options.Equal(&options) {
		for _, msg := range cached.msgs {
			log.AddMsg(msg)
		}
		return cached.ast
	}

	// Cache miss: parse into a deferred log so the messages can both be
	// forwarded now and replayed on future cache hits
	tempLog := logger.NewDeferLog(logger.DeferLogAll, log.Overrides)
	ast := css_parser.Parse(tempLog, source, options)
	msgs := tempLog.Done()
	for _, msg := range msgs {
		log.AddMsg(msg)
	}

	// Remember the result for next time
	c.mutex.Lock()
	defer c.mutex.Unlock()
	c.entries[source.KeyPath] = &cssCacheEntry{
		source:  source,
		options: options,
		ast:     ast,
		msgs:    msgs,
	}
	return ast
}
////////////////////////////////////////////////////////////////////////////////
// JSON
// JSONCache memoizes the result of parsing JSON files, keyed by path.
type JSONCache struct {
	entries map[logger.Path]*jsonCacheEntry
	mutex   sync.Mutex
}

// jsonCacheEntry stores the source, options, and outcome of a previous
// parse, including the log messages it generated.
type jsonCacheEntry struct {
	expr    js_ast.Expr
	msgs    []logger.Msg
	source  logger.Source
	options js_parser.JSONOptions
	ok      bool // whether the parse succeeded
}
// Parse returns the parsed expression for the given JSON source, reusing a
// cached result when both the source and the options are unchanged. Log
// messages from the (possibly cached) parse are replayed into "log".
func (c *JSONCache) Parse(log logger.Log, source logger.Source, options js_parser.JSONOptions) (js_ast.Expr, bool) {
	// Check for a cache hit first
	c.mutex.Lock()
	cached := c.entries[source.KeyPath]
	c.mutex.Unlock()
	if cached != nil && cached.source == source && cached.options == options {
		for _, msg := range cached.msgs {
			log.AddMsg(msg)
		}
		return cached.expr, cached.ok
	}

	// Cache miss: parse into a deferred log so the messages can both be
	// forwarded now and replayed on future cache hits
	tempLog := logger.NewDeferLog(logger.DeferLogAll, log.Overrides)
	expr, ok := js_parser.ParseJSON(tempLog, source, options)
	msgs := tempLog.Done()
	for _, msg := range msgs {
		log.AddMsg(msg)
	}

	// Remember the result for next time
	c.mutex.Lock()
	defer c.mutex.Unlock()
	c.entries[source.KeyPath] = &jsonCacheEntry{
		source:  source,
		options: options,
		expr:    expr,
		ok:      ok,
		msgs:    msgs,
	}
	return expr, ok
}
////////////////////////////////////////////////////////////////////////////////
// JS
// JSCache memoizes the result of parsing JavaScript/TypeScript files, keyed
// by path.
type JSCache struct {
	entries map[logger.Path]*jsCacheEntry
	mutex   sync.Mutex
}

// jsCacheEntry stores the source, options, and outcome of a previous parse,
// including the log messages it generated.
type jsCacheEntry struct {
	source  logger.Source
	msgs    []logger.Msg
	options js_parser.Options
	ast     js_ast.AST
	ok      bool // whether the parse succeeded
}
// Parse returns the AST for the given JS source, reusing a cached result
// when both the source and the parser options are unchanged from last time.
// Log messages from the (possibly cached) parse are replayed into "log".
func (c *JSCache) Parse(log logger.Log, source logger.Source, options js_parser.Options) (js_ast.AST, bool) {
	// Check for a cache hit first
	c.mutex.Lock()
	cached := c.entries[source.KeyPath]
	c.mutex.Unlock()
	if cached != nil && cached.source == source && cached.options.Equal(&options) {
		for _, msg := range cached.msgs {
			log.AddMsg(msg)
		}
		return cached.ast, cached.ok
	}

	// Cache miss: parse into a deferred log so the messages can both be
	// forwarded now and replayed on future cache hits
	tempLog := logger.NewDeferLog(logger.DeferLogAll, log.Overrides)
	ast, ok := js_parser.Parse(tempLog, source, options)
	msgs := tempLog.Done()
	for _, msg := range msgs {
		log.AddMsg(msg)
	}

	// Remember the result for next time
	c.mutex.Lock()
	defer c.mutex.Unlock()
	c.entries[source.KeyPath] = &jsCacheEntry{
		source:  source,
		options: options,
		ast:     ast,
		ok:      ok,
		msgs:    msgs,
	}
	return ast, ok
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/css_lexer/css_lexer_test.go | internal/css_lexer/css_lexer_test.go | package css_lexer
import (
"strings"
"testing"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/test"
)
// lexToken tokenizes "contents" and returns the kind and decoded text of the
// first token, or (TEndOfFile, "") when no tokens are produced.
func lexToken(contents string) (T, string) {
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, nil)
	result := Tokenize(log, test.SourceForTest(contents), Options{})
	if len(result.Tokens) > 0 {
		t := result.Tokens[0]
		return t.Kind, t.DecodedText(contents)
	}
	return TEndOfFile, ""
}
// lexerError tokenizes "contents" and returns the concatenated formatted log
// messages that tokenizing produced. An empty string means no diagnostics.
func lexerError(contents string) string {
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, nil)
	Tokenize(log, test.SourceForTest(contents), Options{})
	var text strings.Builder
	for _, msg := range log.Done() {
		text.WriteString(msg.String(logger.OutputOptions{}, logger.TerminalInfo{}))
	}
	return text.String()
}
// TestTokens checks that each example input produces the expected kind for
// its first token.
//
// NOTE(review): the "text" field (the human-readable token name) is never
// asserted by this test — confirm whether it is intentionally documentation-
// only or should be checked against a token-name function.
func TestTokens(t *testing.T) {
	expected := []struct {
		contents string
		text     string
		token    T
	}{
		{"", "end of file", TEndOfFile},
		{"@media", "@-keyword", TAtKeyword},
		{"url(x y", "bad URL token", TBadURL},
		{"-->", "\"-->\"", TCDC},
		{"<!--", "\"<!--\"", TCDO},
		{"}", "\"}\"", TCloseBrace},
		{"]", "\"]\"", TCloseBracket},
		{")", "\")\"", TCloseParen},
		{":", "\":\"", TColon},
		{",", "\",\"", TComma},
		{"?", "delimiter", TDelim},
		{"&", "\"&\"", TDelimAmpersand},
		{"*", "\"*\"", TDelimAsterisk},
		{"|", "\"|\"", TDelimBar},
		{"^", "\"^\"", TDelimCaret},
		{"$", "\"$\"", TDelimDollar},
		{".", "\".\"", TDelimDot},
		{"=", "\"=\"", TDelimEquals},
		{"!", "\"!\"", TDelimExclamation},
		{">", "\">\"", TDelimGreaterThan},
		{"+", "\"+\"", TDelimPlus},
		{"/", "\"/\"", TDelimSlash},
		{"~", "\"~\"", TDelimTilde},
		{"1px", "dimension", TDimension},
		{"max(", "function token", TFunction},
		{"#name", "hash token", THash},
		{"name", "identifier", TIdent},
		{"123", "number", TNumber},
		{"{", "\"{\"", TOpenBrace},
		{"[", "\"[\"", TOpenBracket},
		{"(", "\"(\"", TOpenParen},
		{"50%", "percentage", TPercentage},
		{";", "\";\"", TSemicolon},
		{"'abc'", "string token", TString},
		{"url(test)", "URL token", TURL},
		{" ", "whitespace", TWhitespace},
	}
	for _, it := range expected {
		contents := it.contents
		token := it.token
		t.Run(contents, func(t *testing.T) {
			kind, _ := lexToken(contents)
			test.AssertEqual(t, kind, token)
		})
	}
}
// TestStringParsing checks that string tokens decode escape sequences
// (line continuations, hex escapes, out-of-range code points) correctly.
func TestStringParsing(t *testing.T) {
	contentsOfStringToken := func(contents string) string {
		t.Helper()
		kind, text := lexToken(contents)
		test.AssertEqual(t, kind, TString)
		return text
	}
	test.AssertEqual(t, contentsOfStringToken("\"foo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\oo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\"o\""), "f\"o")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\\o\""), "f\\o")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\no\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\ro\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\r\no\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\fo\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\6fo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\6f o\""), "foo")
	// NOTE(review): this assertion looks identical to the previous one yet
	// expects a different result — the original presumably has two spaces
	// after "\6f" (one terminating the escape, one literal); confirm that
	// whitespace wasn't collapsed in transit.
	test.AssertEqual(t, contentsOfStringToken("\"f\\6f o\""), "fo o")
	test.AssertEqual(t, contentsOfStringToken("\"f\\fffffffo\""), "f\uFFFDfo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\10abcdeo\""), "f\U0010ABCDeo")
}
// TestURLParsing checks that url() tokens decode escape sequences correctly
// and that invalid URLs produce bad-URL tokens.
func TestURLParsing(t *testing.T) {
	contentsOfURLToken := func(expected T, contents string) string {
		t.Helper()
		kind, text := lexToken(contents)
		test.AssertEqual(t, kind, expected)
		return text
	}
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(foo)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url( foo\t\t)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\oo)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\\"o)"), "f\"o")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\'o)"), "f'o")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\)o)"), "f)o")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\6fo)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\6f o)"), "foo")
	// NOTE(review): this input looks identical to the previous TURL case yet
	// expects a bad-URL token — the original presumably contains two spaces
	// after "\6f" (an unescaped space inside an unquoted URL is invalid);
	// confirm whitespace wasn't collapsed in transit.
	test.AssertEqual(t, contentsOfURLToken(TBadURL, "url(f\\6f o)"), "url(f\\6f o)")
}
// TestComment checks comment diagnostics: an unterminated "/*" comment is an
// error (with a note pointing at its start), a complete comment is silent,
// and a "//" comment produces a warning since CSS has no single-line comments.
func TestComment(t *testing.T) {
	test.AssertEqualWithDiff(t, lexerError("/*"), "<stdin>: ERROR: Expected \"*/\" to terminate multi-line comment\n<stdin>: NOTE: The multi-line comment starts here:\n")
	test.AssertEqualWithDiff(t, lexerError("/*/"), "<stdin>: ERROR: Expected \"*/\" to terminate multi-line comment\n<stdin>: NOTE: The multi-line comment starts here:\n")
	test.AssertEqualWithDiff(t, lexerError("/**/"), "")
	test.AssertEqualWithDiff(t, lexerError("//"), "<stdin>: WARNING: Comments in CSS use \"/* ... */\" instead of \"//\"\n")
}
// TestString checks unterminated-string diagnostics: a string that reaches
// the end of the input (including via a trailing escape that swallows the
// closing quote) warns, while a complete empty string does not.
func TestString(t *testing.T) {
	test.AssertEqualWithDiff(t, lexerError("'"), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("\""), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("'\\'"), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("\"\\\""), "<stdin>: WARNING: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("''"), "")
	test.AssertEqualWithDiff(t, lexerError("\"\""), "")
}
// TestBOM checks that a leading U+FEFF byte order mark is skipped by the
// tokenizer instead of being lexed as the start of an identifier.
func TestBOM(t *testing.T) {
	// A byte order mark should not be parsed as an identifier
	kind, _ := lexToken("\uFEFF.")
	test.AssertEqual(t, kind, TDelimDot)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/css_lexer/css_lexer.go | internal/css_lexer/css_lexer.go | package css_lexer
import (
"strings"
"unicode/utf8"
"github.com/evanw/esbuild/internal/logger"
)
// The lexer converts a source file to a stream of tokens. Unlike esbuild's
// JavaScript lexer, this CSS lexer runs to completion before the CSS parser
// begins, resulting in a single array of all tokens in the file.
// T is the kind of a CSS token. It fits in a single byte to keep the Token
// struct small (see Token below).
type T uint8

// eof is the sentinel code point the lexer uses for "end of input".
const eof = -1

// The token kinds. The declaration order here is significant: it must stay in
// sync with the "tokenToString" table below, which is indexed by token kind.
const (
	TEndOfFile T = iota

	TAtKeyword
	TUnterminatedString
	TBadURL
	TCDC // "-->"
	TCDO // "<!--"
	TCloseBrace
	TCloseBracket
	TCloseParen
	TColon
	TComma
	TDelim
	TDelimAmpersand
	TDelimAsterisk
	TDelimBar
	TDelimCaret
	TDelimDollar
	TDelimDot
	TDelimEquals
	TDelimExclamation
	TDelimGreaterThan
	TDelimLessThan
	TDelimMinus
	TDelimPlus
	TDelimSlash
	TDelimTilde
	TDimension
	TFunction
	THash
	TIdent
	TNumber
	TOpenBrace
	TOpenBracket
	TOpenParen
	TPercentage
	TSemicolon
	TString
	TURL
	TWhitespace

	// This is never something that the lexer generates directly. Instead this is
	// an esbuild-specific token for global/local names that "TIdent" tokens may
	// be changed into.
	TSymbol
)
// tokenToString maps each token kind to a human-readable description for use
// in error messages. It is indexed by T, so it must be kept in sync with the
// declaration order of the token kind constants above.
var tokenToString = []string{
	"end of file",
	"@-keyword",
	"bad string token", // TUnterminatedString
	"bad URL token",
	"\"-->\"",
	"\"<!--\"",
	"\"}\"",
	"\"]\"",
	"\")\"",
	"\":\"",
	"\",\"",
	"delimiter",
	"\"&\"",
	"\"*\"",
	"\"|\"",
	"\"^\"",
	"\"$\"",
	"\".\"",
	"\"=\"",
	"\"!\"",
	"\">\"",
	"\"<\"",
	"\"-\"",
	"\"+\"",
	"\"/\"",
	"\"~\"",
	"dimension",
	"function token",
	"hash token",
	"identifier",
	"number",
	"\"{\"",
	"\"[\"",
	"\"(\"",
	"percentage",
	"\";\"",
	"string token",
	"URL token",
	"whitespace",
	"identifier", // TSymbol is described like a plain identifier
}
// String returns the human-readable description of the token kind for use in
// error messages. It relies on tokenToString matching the constant order.
func (t T) String() string {
	return tokenToString[t]
}

// IsNumeric reports whether the token carries a numeric value: a plain
// number, a percentage, or a number with a unit (dimension).
func (t T) IsNumeric() bool {
	switch t {
	case TNumber, TPercentage, TDimension:
		return true
	}
	return false
}
// TokenFlags holds boolean metadata about a token in a single byte.
type TokenFlags uint8

const (
	// IsID is set on THash tokens whose contents would also form a valid
	// identifier (determined when the hash token is lexed).
	IsID TokenFlags = 1 << iota

	// DidWarnAboutSingleLineComment is set on the TDelimSlash token produced
	// for a "//" sequence that already triggered the "comments in CSS" warning.
	DidWarnAboutSingleLineComment
)

// This token struct is designed to be memory-efficient. It just references a
// range in the input file instead of directly containing the substring of text
// since a range takes up less memory than a string.
type Token struct {
	Range      logger.Range // 8 bytes
	UnitOffset uint16       // 2 bytes; for TDimension, offset of the unit from the token start
	Kind       T            // 1 byte
	Flags      TokenFlags   // 1 byte
}
// DecodedText returns the meaningful text of the token with any kind-specific
// affixes removed and all backslash escapes decoded:
//
//   - TAtKeyword/THash: the leading "@"/"#" is dropped
//   - TFunction: the trailing "(" is dropped
//   - TString: the surrounding quotes are dropped
//   - TURL: the "url(" prefix, the ")" suffix (if present), and any
//     surrounding whitespace are dropped
//
// All other token kinds return their raw source text unchanged. The caller
// passes the contents of the source file the token came from.
func (token Token) DecodedText(contents string) string {
	raw := contents[token.Range.Loc.Start:token.Range.End()]

	switch token.Kind {
	case TIdent, TDimension:
		return decodeEscapesInToken(raw)

	case TAtKeyword, THash:
		return decodeEscapesInToken(raw[1:])

	case TFunction:
		return decodeEscapesInToken(raw[:len(raw)-1])

	case TString:
		return decodeEscapesInToken(raw[1 : len(raw)-1])

	case TURL:
		start := 4 // Skip over the "url(" prefix
		end := len(raw)

		// Note: URL tokens with syntax errors may not have a trailing ")"
		if raw[end-1] == ')' {
			end--
		}

		// Trim leading and trailing whitespace
		for start < end && isWhitespace(rune(raw[start])) {
			start++
		}
		for start < end && isWhitespace(rune(raw[end-1])) {
			end--
		}

		return decodeEscapesInToken(raw[start:end])
	}

	return raw
}
// lexer holds the mutable state for one tokenization pass over a single file.
type lexer struct {
	Options
	log                     logger.Log
	source                  logger.Source
	allComments             []logger.Range // Only filled in when RecordAllComments is set
	legalCommentsBefore     []Comment      // Legal comments waiting to be attached to the next token
	sourceMappingURL        logger.Span    // Contents of a "sourceMappingURL=" comment, if seen
	tracker                 logger.LineColumnTracker
	approximateNewlineCount int        // See the comment in "step" for why this is approximate
	current                 int        // Byte offset just past "codePoint" in "source.Contents"
	oldSingleLineCommentEnd logger.Loc // End of the last "//" comment that was warned about
	codePoint               rune       // The code point at the current position, or "eof"
	Token                   Token      // The token currently being built
}

// Comment is a legal comment that must be preserved in esbuild's output.
type Comment struct {
	Text            string     // The comment text with leading indentation removed
	Loc             logger.Loc // Where the comment starts in the source
	TokenIndexAfter uint32     // Index of the token that follows this comment
}

// TokenizeResult is everything produced by a call to Tokenize.
type TokenizeResult struct {
	Tokens           []Token
	AllComments      []logger.Range // Only filled in when Options.RecordAllComments is set
	LegalComments    []Comment
	SourceMapComment logger.Span // The "sourceMappingURL=" comment, if present

	// Approximate because "step" only counts "\n" (see the comment there)
	ApproximateLineCount int32
}

// Options configures a call to Tokenize.
type Options struct {
	RecordAllComments bool
}
// Tokenize lexes the entire source file up front and returns all tokens plus
// the comments and source map information discovered along the way. Unlike
// esbuild's JavaScript lexer, the CSS lexer runs to completion before parsing
// begins (see the comment at the top of this file).
func Tokenize(log logger.Log, source logger.Source, options Options) TokenizeResult {
	lexer := lexer{
		Options: options,
		log:     log,
		source:  source,
		tracker: logger.MakeLineColumnTracker(&source),
	}
	lexer.step()

	// The U+FEFF character is usually a zero-width non-breaking space. However,
	// when it's used at the start of a text stream it is called a BOM (byte order
	// mark) instead and indicates that the text stream is UTF-8 encoded. This is
	// problematic for us because CSS does not treat U+FEFF as whitespace. Only
	// " \t\r\n\f" characters are treated as whitespace. Skip over the BOM if it
	// is present so it doesn't cause us trouble when we try to parse it.
	if lexer.codePoint == '\uFEFF' {
		lexer.step()
	}

	lexer.next()
	var tokens []Token
	var legalComments []Comment

	// Attach any pending legal comments to the index of the next token so the
	// printer knows where to re-insert them. This used to be duplicated both
	// inside and after the token loop below.
	flushLegalComments := func() {
		for _, comment := range lexer.legalCommentsBefore {
			comment.TokenIndexAfter = uint32(len(tokens))
			legalComments = append(legalComments, comment)
		}
		lexer.legalCommentsBefore = nil
	}

	for lexer.Token.Kind != TEndOfFile {
		if lexer.legalCommentsBefore != nil {
			flushLegalComments()
		}
		tokens = append(tokens, lexer.Token)
		lexer.next()
	}

	// Also flush comments that trail the final token
	if lexer.legalCommentsBefore != nil {
		flushLegalComments()
	}

	return TokenizeResult{
		Tokens:               tokens,
		AllComments:          lexer.allComments,
		LegalComments:        legalComments,
		ApproximateLineCount: int32(lexer.approximateNewlineCount) + 1,
		SourceMapComment:     lexer.sourceMappingURL,
	}
}
// step advances the lexer by one code point, updating "codePoint", "current",
// and the length of the token currently being built. The end of the input is
// represented by the sentinel code point "eof".
func (lexer *lexer) step() {
	codePoint, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])

	// Use -1 to indicate the end of the file
	if width == 0 {
		codePoint = eof
	}

	// Track the approximate number of newlines in the file so we can preallocate
	// the line offset table in the printer for source maps. The line offset table
	// is the #1 highest allocation in the heap profile, so this is worth doing.
	// This count is approximate because it handles "\n" and "\r\n" (the common
	// cases) but not "\r" or "\u2028" or "\u2029". Getting this wrong is harmless
	// because it's only a preallocation. The array will just grow if it's too small.
	if codePoint == '\n' {
		lexer.approximateNewlineCount++
	}

	lexer.codePoint = codePoint
	lexer.Token.Range.Len = int32(lexer.current) - lexer.Token.Range.Loc.Start
	lexer.current += width
}
// next advances "lexer.Token" to the next token in the file. This is the main
// tokenizer dispatch: it switches on the current code point and delegates to
// the "consume*" helpers for multi-character tokens. Comments are skipped
// (via "continue") rather than emitted as tokens.
//
// Reference: https://www.w3.org/TR/css-syntax-3/
func (lexer *lexer) next() {
	for {
		// Start the new token where the previous one ended
		lexer.Token = Token{Range: logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}}}

		switch lexer.codePoint {
		case eof:
			lexer.Token.Kind = TEndOfFile

		case '/':
			lexer.step()
			switch lexer.codePoint {
			case '*':
				// A "/*" comment is skipped entirely and the loop restarts
				lexer.step()
				lexer.consumeToEndOfMultiLineComment(lexer.Token.Range)
				continue
			case '/':
				// Warn when people use "//" comments, which are invalid in CSS.
				// "oldSingleLineCommentEnd" prevents warning twice about the
				// same comment (the second "/" is lexed as its own token).
				loc := lexer.Token.Range.Loc
				if loc.Start >= lexer.oldSingleLineCommentEnd.Start {
					contents := lexer.source.Contents
					end := lexer.current
					for end < len(contents) && !isNewline(rune(contents[end])) {
						end++
					}
					lexer.log.AddID(logger.MsgID_CSS_JSCommentInCSS, logger.Warning, &lexer.tracker, logger.Range{Loc: loc, Len: 2},
						"Comments in CSS use \"/* ... */\" instead of \"//\"")
					lexer.oldSingleLineCommentEnd.Start = int32(end)
					lexer.Token.Flags |= DidWarnAboutSingleLineComment
				}
			}
			lexer.Token.Kind = TDelimSlash

		case ' ', '\t', '\n', '\r', '\f':
			lexer.step()
			for {
				if isWhitespace(lexer.codePoint) {
					lexer.step()
				} else if lexer.codePoint == '/' && lexer.current < len(lexer.source.Contents) && lexer.source.Contents[lexer.current] == '*' {
					// Comments inside a whitespace run are folded into the
					// surrounding whitespace token
					startRange := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 2}
					lexer.step()
					lexer.step()
					lexer.consumeToEndOfMultiLineComment(startRange)
				} else {
					break
				}
			}
			lexer.Token.Kind = TWhitespace

		case '"', '\'':
			lexer.Token.Kind = lexer.consumeString()

		case '#':
			lexer.step()
			if IsNameContinue(lexer.codePoint) || lexer.isValidEscape() {
				lexer.Token.Kind = THash
				// Mark hashes that would also be valid identifiers (ID selectors)
				if lexer.wouldStartIdentifier() {
					lexer.Token.Flags |= IsID
				}
				lexer.consumeName()
			} else {
				lexer.Token.Kind = TDelim
			}

		case '(':
			lexer.step()
			lexer.Token.Kind = TOpenParen

		case ')':
			lexer.step()
			lexer.Token.Kind = TCloseParen

		case '[':
			lexer.step()
			lexer.Token.Kind = TOpenBracket

		case ']':
			lexer.step()
			lexer.Token.Kind = TCloseBracket

		case '{':
			lexer.step()
			lexer.Token.Kind = TOpenBrace

		case '}':
			lexer.step()
			lexer.Token.Kind = TCloseBrace

		case ',':
			lexer.step()
			lexer.Token.Kind = TComma

		case ':':
			lexer.step()
			lexer.Token.Kind = TColon

		case ';':
			lexer.step()
			lexer.Token.Kind = TSemicolon

		case '+':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimPlus
			}

		case '.':
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimDot
			}

		case '-':
			// A "-" may start a number, the "-->" CDC token, or an identifier
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else if lexer.current+2 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+2] == "->" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDC
			} else if lexer.wouldStartIdentifier() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimMinus
			}

		case '<':
			// A "<" may start the "<!--" CDO token
			if lexer.current+3 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+3] == "!--" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDO
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimLessThan
			}

		case '@':
			lexer.step()
			if lexer.wouldStartIdentifier() {
				lexer.consumeName()
				lexer.Token.Kind = TAtKeyword
			} else {
				lexer.Token.Kind = TDelim
			}

		case '\\':
			if lexer.isValidEscape() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.log.AddError(&lexer.tracker, lexer.Token.Range, "Invalid escape")
				lexer.Token.Kind = TDelim
			}

		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			lexer.Token.Kind = lexer.consumeNumeric()

		case '>':
			lexer.step()
			lexer.Token.Kind = TDelimGreaterThan

		case '~':
			lexer.step()
			lexer.Token.Kind = TDelimTilde

		case '&':
			lexer.step()
			lexer.Token.Kind = TDelimAmpersand

		case '*':
			lexer.step()
			lexer.Token.Kind = TDelimAsterisk

		case '|':
			lexer.step()
			lexer.Token.Kind = TDelimBar

		case '!':
			lexer.step()
			lexer.Token.Kind = TDelimExclamation

		case '=':
			lexer.step()
			lexer.Token.Kind = TDelimEquals

		case '^':
			lexer.step()
			lexer.Token.Kind = TDelimCaret

		case '$':
			lexer.step()
			lexer.Token.Kind = TDelimDollar

		default:
			if IsNameStart(lexer.codePoint) {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}
		}

		return
	}
}
// consumeToEndOfMultiLineComment scans from just past the opening "/*" to
// just past the closing "*/". Along the way it records the contents of any
// "sourceMappingURL=" comment, records the comment range when
// RecordAllComments is set, and remembers legal comments ("/*!" or ones
// containing "@preserve"/"@license"). An unterminated comment is an error.
func (lexer *lexer) consumeToEndOfMultiLineComment(startRange logger.Range) {
	startOfSourceMappingURL := 0
	isLegalComment := false

	switch lexer.codePoint {
	case '#', '@':
		// Keep track of the contents of the "sourceMappingURL=" comment
		if strings.HasPrefix(lexer.source.Contents[lexer.current:], " sourceMappingURL=") {
			startOfSourceMappingURL = lexer.current + len(" sourceMappingURL=")
		}

	case '!':
		// Remember if this is a legal comment
		isLegalComment = true
	}

	for {
		switch lexer.codePoint {
		case '*':
			endOfSourceMappingURL := lexer.current - 1
			lexer.step()
			if lexer.codePoint == '/' {
				commentEnd := lexer.current
				lexer.step()

				// Record the source mapping URL (up to the first whitespace)
				if startOfSourceMappingURL != 0 {
					r := logger.Range{Loc: logger.Loc{Start: int32(startOfSourceMappingURL)}}
					text := lexer.source.Contents[startOfSourceMappingURL:endOfSourceMappingURL]
					for int(r.Len) < len(text) && !isWhitespace(rune(text[r.Len])) {
						r.Len++
					}
					lexer.sourceMappingURL = logger.Span{Text: text[:r.Len], Range: r}
				}

				// Record all comments
				commentRange := logger.Range{Loc: startRange.Loc, Len: int32(commentEnd) - startRange.Loc.Start}
				if lexer.RecordAllComments {
					lexer.allComments = append(lexer.allComments, commentRange)
				}

				// Record legal comments
				if text := lexer.source.Contents[startRange.Loc.Start:commentEnd]; isLegalComment || containsAtPreserveOrAtLicense(text) {
					text = lexer.source.CommentTextWithoutIndent(commentRange)
					lexer.legalCommentsBefore = append(lexer.legalCommentsBefore, Comment{Loc: startRange.Loc, Text: text})
				}
				return
			}

		case eof: // This indicates the end of the file
			lexer.log.AddErrorWithNotes(&lexer.tracker, logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Expected \"*/\" to terminate multi-line comment",
				[]logger.MsgData{lexer.tracker.MsgData(startRange, "The multi-line comment starts here:")})
			return

		default:
			lexer.step()
		}
	}
}
// containsAtPreserveOrAtLicense reports whether the comment text contains an
// "@preserve" or "@license" annotation, which marks it as a legal comment
// that must be preserved in the output. The hand-rolled scan for "@" followed
// by the keyword is exactly a substring search, so use the standard library.
func containsAtPreserveOrAtLicense(text string) bool {
	return strings.Contains(text, "@preserve") || strings.Contains(text, "@license")
}
// isValidEscape reports whether the current position starts a valid escape
// sequence: a backslash that is not followed by a newline. A backslash at the
// very end of the file counts as valid here (decoding a zero-width rune is
// not a newline); it decodes to U+FFFD in consumeEscape.
func (lexer *lexer) isValidEscape() bool {
	if lexer.codePoint != '\\' {
		return false
	}
	c, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
	return !isNewline(c)
}
// wouldStartIdentifier reports whether an identifier begins at the current
// position: a name-start character, a valid escape, or a "-" followed by a
// name-start character, another "-", or an escape.
func (lexer *lexer) wouldStartIdentifier() bool {
	if IsNameStart(lexer.codePoint) {
		return true
	}

	if lexer.codePoint == '-' {
		// Peek at the character after the "-" without advancing
		c, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
		if c == utf8.RuneError && width <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c) || c == '-' {
			return true
		}
		if c == '\\' {
			// "-\X" starts an identifier unless the escape is a newline
			c2, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current+width:])
			return !isNewline(c2)
		}
		return false
	}

	return lexer.isValidEscape()
}
// WouldStartIdentifierWithoutEscapes reports whether "text" begins an
// identifier, considering only name-start characters and "-" prefixes. Unlike
// the lexer's own check, backslash escape forms are not considered.
func WouldStartIdentifierWithoutEscapes(text string) bool {
	first, firstWidth := utf8.DecodeRuneInString(text)
	switch {
	case first == utf8.RuneError && firstWidth <= 1:
		// An empty string or invalid UTF-8 cannot start an identifier
		return false

	case IsNameStart(first):
		return true

	case first == '-':
		// "-" starts an identifier if followed by a name-start rune or "-"
		second, secondWidth := utf8.DecodeRuneInString(text[firstWidth:])
		if second == utf8.RuneError && secondWidth <= 1 {
			return false // Decoding error
		}
		return IsNameStart(second) || second == '-'
	}
	return false
}
// RangeOfIdentifier returns the source range of the identifier starting at
// "loc", including any backslash escape sequences it contains. This re-scans
// the source text and is used for error reporting, not for tokenization.
func RangeOfIdentifier(source logger.Source, loc logger.Loc) logger.Range {
	text := source.Contents[loc.Start:]
	if len(text) == 0 {
		return logger.Range{Loc: loc, Len: 0}
	}

	i := 0
	n := len(text)

	for {
		c, width := utf8.DecodeRuneInString(text[i:])
		if IsNameContinue(c) {
			i += width
			continue
		}

		// Handle an escape: a backslash not followed by a newline
		if c == '\\' && i+1 < n && !isNewline(rune(text[i+1])) {
			i += width // Skip the backslash
			c, width = utf8.DecodeRuneInString(text[i:])
			if _, ok := isHex(c); ok {
				// A hex escape is up to six hex digits plus one optional
				// trailing whitespace character
				i += width
				c, width = utf8.DecodeRuneInString(text[i:])
				for j := 0; j < 5; j++ {
					if _, ok := isHex(c); !ok {
						break
					}
					i += width
					c, width = utf8.DecodeRuneInString(text[i:])
				}
				if isWhitespace(c) {
					i += width
				}
			}
			continue
		}
		break
	}

	// Don't end with a whitespace
	if i > 0 && isWhitespace(rune(text[i-1])) {
		i--
	}

	return logger.Range{Loc: loc, Len: int32(i)}
}
// wouldStartNumber reports whether a numeric token begins at the current
// position: a digit, a "." followed by a digit, or a "+"/"-" sign followed
// by a digit or by "." and a digit.
func (lexer *lexer) wouldStartNumber() bool {
	switch {
	case lexer.codePoint >= '0' && lexer.codePoint <= '9':
		return true

	case lexer.codePoint == '.':
		// "." only starts a number when a digit follows it
		if contents := lexer.source.Contents; lexer.current < len(contents) {
			next := contents[lexer.current]
			return next >= '0' && next <= '9'
		}

	case lexer.codePoint == '+' || lexer.codePoint == '-':
		// A sign must be followed by a digit, or by "." and then a digit
		contents := lexer.source.Contents
		n := len(contents)
		if lexer.current < n {
			next := contents[lexer.current]
			if next >= '0' && next <= '9' {
				return true
			}
			if next == '.' && lexer.current+1 < n {
				next = contents[lexer.current+1]
				return next >= '0' && next <= '9'
			}
		}
	}
	return false
}
// consumeName consumes a CSS name (identifier body) starting at the current
// position and returns its decoded text. Escapes are decoded; a name with no
// escapes is returned as a substring of the input without allocating.
//
// Note: This function is hot in profiles
func (lexer *lexer) consumeName() string {
	// Common case: no escapes, identifier is a substring of the input. Doing this
	// in a tight loop that avoids UTF-8 decoding and that increments a single
	// number instead of doing "step()" is noticeably faster. For example, doing
	// this sped up end-to-end parsing and printing of a large CSS file from 97ms
	// to 84ms (around 15% faster).
	contents := lexer.source.Contents
	if IsNameContinue(lexer.codePoint) {
		n := len(contents)
		i := lexer.current
		for i < n && IsNameContinue(rune(contents[i])) {
			i++
		}
		lexer.current = i
		lexer.step()
	}

	raw := contents[lexer.Token.Range.Loc.Start:lexer.Token.Range.End()]
	if !lexer.isValidEscape() {
		return raw
	}

	// Uncommon case: escapes, identifier is allocated
	sb := strings.Builder{}
	sb.WriteString(raw)
	sb.WriteRune(lexer.consumeEscape())
	for {
		if IsNameContinue(lexer.codePoint) {
			sb.WriteRune(lexer.codePoint)
			lexer.step()
		} else if lexer.isValidEscape() {
			sb.WriteRune(lexer.consumeEscape())
		} else {
			break
		}
	}
	return sb.String()
}
// consumeEscape consumes a backslash escape sequence and returns the decoded
// rune. Hex escapes are 1-6 hex digits plus at most one trailing whitespace
// character. NUL, surrogate code points, values above U+10FFFF, and a
// backslash at end of file all decode to U+FFFD (utf8.RuneError). The caller
// is expected to have checked isValidEscape first.
func (lexer *lexer) consumeEscape() rune {
	lexer.step() // Skip the backslash
	c := lexer.codePoint

	if hex, ok := isHex(c); ok {
		lexer.step()

		// Consume up to five more hex digits (six total)
		for i := 0; i < 5; i++ {
			if next, ok := isHex(lexer.codePoint); ok {
				lexer.step()
				hex = hex*16 + next
			} else {
				break
			}
		}

		// One whitespace character after the escape is part of the escape
		if isWhitespace(lexer.codePoint) {
			lexer.step()
		}
		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			return utf8.RuneError
		}
		return rune(hex)
	}

	if c == eof {
		return utf8.RuneError
	}

	// A non-hex escape decodes to the escaped character itself
	lexer.step()
	return c
}
// consumeIdentLike consumes an identifier and classifies it as a plain
// identifier, a function token (identifier followed by "("), or — in the
// special case of "url(" not followed by a quote — a URL token.
func (lexer *lexer) consumeIdentLike() T {
	name := lexer.consumeName()

	if lexer.codePoint == '(' {
		matchingLoc := logger.Loc{Start: lexer.Token.Range.End()}
		lexer.step()
		if len(name) == 3 {
			// Case-insensitive check for "url" without allocating
			u, r, l := name[0], name[1], name[2]
			if (u == 'u' || u == 'U') && (r == 'r' || r == 'R') && (l == 'l' || l == 'L') {
				// Save state so we can backtrack if this turns out to be a
				// function token like url("...") rather than a raw URL token
				approximateNewlineCount := lexer.approximateNewlineCount
				codePoint := lexer.codePoint
				tokenRangeLen := lexer.Token.Range.Len
				current := lexer.current

				// Check to see if this is a URL token instead of a function
				for isWhitespace(lexer.codePoint) {
					lexer.step()
				}
				if lexer.codePoint != '"' && lexer.codePoint != '\'' {
					return lexer.consumeURL(matchingLoc)
				}

				// Restore state (i.e. backtrack)
				lexer.approximateNewlineCount = approximateNewlineCount
				lexer.codePoint = codePoint
				lexer.Token.Range.Len = tokenRangeLen
				lexer.current = current
			}
		}
		return TFunction
	}

	return TIdent
}
// consumeURL consumes the remainder of an unquoted "url(...)" token, starting
// just after "url(". It returns TURL on success and TBadURL when the token
// contains a quote, "(", an invalid escape, a non-printable character, or
// interior whitespace not followed by ")". "matchingLoc" points at the "("
// for use in diagnostics.
func (lexer *lexer) consumeURL(matchingLoc logger.Loc) T {
validURL:
	for {
		switch lexer.codePoint {
		case ')':
			lexer.step()
			return TURL

		case eof:
			// An unterminated URL token still counts as a URL (with a warning)
			loc := logger.Loc{Start: lexer.Token.Range.End()}
			lexer.log.AddIDWithNotes(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token",
				[]logger.MsgData{lexer.tracker.MsgData(logger.Range{Loc: matchingLoc, Len: 1}, "The unbalanced \"(\" is here:")})
			return TURL

		case ' ', '\t', '\n', '\r', '\f':
			// Whitespace is only valid just before the closing ")"
			lexer.step()
			for isWhitespace(lexer.codePoint) {
				lexer.step()
			}
			if lexer.codePoint != ')' {
				loc := logger.Loc{Start: lexer.Token.Range.End()}
				lexer.log.AddIDWithNotes(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token",
					[]logger.MsgData{lexer.tracker.MsgData(logger.Range{Loc: matchingLoc, Len: 1}, "The unbalanced \"(\" is here:")})
				if lexer.codePoint == eof {
					return TURL
				}
				break validURL
			}
			lexer.step()
			return TURL

		case '"', '\'', '(':
			r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
			lexer.log.AddIDWithNotes(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, r, "Expected \")\" to end URL token",
				[]logger.MsgData{lexer.tracker.MsgData(logger.Range{Loc: matchingLoc, Len: 1}, "The unbalanced \"(\" is here:")})
			break validURL

		case '\\':
			if !lexer.isValidEscape() {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.AddID(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, r, "Invalid escape")
				break validURL
			}
			lexer.consumeEscape()

		default:
			if isNonPrintable(lexer.codePoint) {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.AddID(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, r, "Unexpected non-printable character in URL token")
				break validURL
			}
			lexer.step()
		}
	}

	// Consume the remnants of a bad url: everything up to the next ")" or the
	// end of the file, skipping over valid escapes (so an escaped ")" doesn't
	// terminate the token early)
	for {
		switch lexer.codePoint {
		case ')', eof:
			lexer.step()
			return TBadURL

		case '\\':
			if lexer.isValidEscape() {
				lexer.consumeEscape()
			}
		}
		lexer.step()
	}
}
// consumeString consumes a quoted string token starting at the opening quote.
// It returns TString for a complete string and TUnterminatedString (with a
// warning) if the string runs into an unescaped newline or the end of file.
func (lexer *lexer) consumeString() T {
	quote := lexer.codePoint
	lexer.step()

	for {
		switch lexer.codePoint {
		case '\\':
			lexer.step()

			// Handle Windows CRLF
			if lexer.codePoint == '\r' {
				lexer.step()
				if lexer.codePoint == '\n' {
					lexer.step()
				}
				continue
			}

			// Otherwise, fall through to ignore the character after the backslash

		case eof, '\n', '\r', '\f':
			lexer.log.AddID(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TUnterminatedString

		case quote:
			lexer.step()
			return TString
		}
		lexer.step()
	}
}
// consumeNumeric consumes a numeric token (sign, digits, optional fraction,
// optional exponent) and classifies it as TNumber, TPercentage (trailing
// "%"), or TDimension (trailing unit identifier). For dimensions, the offset
// of the unit within the token is recorded in Token.UnitOffset.
func (lexer *lexer) consumeNumeric() T {
	// Skip over leading sign
	if lexer.codePoint == '+' || lexer.codePoint == '-' {
		lexer.step()
	}

	// Skip over leading digits
	for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		lexer.step()
	}

	// Skip over digits after dot
	if lexer.codePoint == '.' {
		lexer.step()
		for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
			lexer.step()
		}
	}

	// Skip over exponent
	if lexer.codePoint == 'e' || lexer.codePoint == 'E' {
		contents := lexer.source.Contents

		// Look ahead before advancing to make sure this is an exponent, not a unit
		// (e.g. "1em" has the unit "em", not an exponent)
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			if (c == '+' || c == '-') && lexer.current+1 < len(contents) {
				c = contents[lexer.current+1]
			}

			// Only consume this if it's an exponent
			if c >= '0' && c <= '9' {
				lexer.step()
				if lexer.codePoint == '+' || lexer.codePoint == '-' {
					lexer.step()
				}
				for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
					lexer.step()
				}
			}
		}
	}

	// Determine the numeric type
	if lexer.wouldStartIdentifier() {
		// Remember where the unit starts so DecodedText can separate it
		lexer.Token.UnitOffset = uint16(lexer.Token.Range.Len)
		lexer.consumeName()
		return TDimension
	}
	if lexer.codePoint == '%' {
		lexer.step()
		return TPercentage
	}
	return TNumber
}
// IsNameStart reports whether the code point can begin a CSS name: an ASCII
// letter, "_", any non-ASCII code point, or NUL (which later decodes to the
// replacement character U+FFFD).
func IsNameStart(c rune) bool {
	switch {
	case (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'):
		return true
	case c == '_' || c == '\x00':
		return true
	default:
		return c >= 0x80
	}
}

// IsNameContinue reports whether the code point can appear after the first
// character of a CSS name: a name-start character, an ASCII digit, or "-".
func IsNameContinue(c rune) bool {
	if (c >= '0' && c <= '9') || c == '-' {
		return true
	}
	return IsNameStart(c)
}
// isNewline reports whether the code point terminates a line according to the
// CSS syntax specification: "\n", "\r", or "\f".
func isNewline(c rune) bool {
	return c == '\n' || c == '\r' || c == '\f'
}

// isWhitespace reports whether the code point counts as CSS whitespace: a
// space, a tab, or a newline. Note that this deliberately excludes U+FEFF
// and other Unicode space characters.
func isWhitespace(c rune) bool {
	return c == ' ' || c == '\t' || isNewline(c)
}
// isHex returns the numeric value of a hexadecimal digit (0-15) and whether
// the code point is a hex digit at all. Both cases are accepted.
func isHex(c rune) (int, bool) {
	switch {
	case c >= '0' && c <= '9':
		return int(c - '0'), true
	case c >= 'a' && c <= 'f':
		return int(c-'a') + 10, true
	case c >= 'A' && c <= 'F':
		return int(c-'A') + 10, true
	}
	return 0, false
}
// isNonPrintable reports whether the code point is one of the CSS spec's
// "non-printable code points", which are not allowed in unquoted URL tokens:
// the C0 controls other than tab and the newline characters, plus DEL (0x7F).
func isNonPrintable(c rune) bool {
	switch {
	case c <= 0x08, c == 0x0B, c == 0x7F:
		return true
	default:
		return c >= 0x0E && c <= 0x1F
	}
}
// decodeEscapesInToken decodes all backslash escape sequences in a token's
// inner text and replaces NUL bytes with U+FFFD. If the text contains no
// escapes and no NULs, the input string is returned unchanged (no allocation).
func decodeEscapesInToken(inner string) string {
	// Fast path: scan for the first byte that requires decoding
	i := 0

	for i < len(inner) {
		if c := inner[i]; c == '\\' || c == '\x00' {
			break
		}
		i++
	}

	if i == len(inner) {
		return inner
	}

	// Slow path: build the decoded string
	sb := strings.Builder{}
	sb.WriteString(inner[:i])
	inner = inner[i:]

	for len(inner) > 0 {
		c, width := utf8.DecodeRuneInString(inner)
		inner = inner[width:]

		if c != '\\' {
			// NUL decodes to the replacement character per the CSS spec
			if c == '\x00' {
				c = utf8.RuneError
			}
			sb.WriteRune(c)
			continue
		}

		if len(inner) == 0 {
			// A backslash at the end of the token decodes to U+FFFD
			sb.WriteRune(utf8.RuneError)
			continue
		}

		c, width = utf8.DecodeRuneInString(inner)
		inner = inner[width:]
		hex, ok := isHex(c)

		if !ok {
			// An escaped newline is a line continuation and is removed
			if c == '\n' || c == '\f' {
				continue
			}

			// Handle Windows CRLF
			if c == '\r' {
				c, width = utf8.DecodeRuneInString(inner)
				if c == '\n' {
					inner = inner[width:]
				}
				continue
			}

			// If we get here, this is not a valid escape. However, this is still
			// allowed. In this case the backslash is just ignored.
			sb.WriteRune(c)
			continue
		}

		// Parse up to five additional hex characters (so six in total)
		for i := 0; i < 5 && len(inner) > 0; i++ {
			c, width = utf8.DecodeRuneInString(inner)
			if next, ok := isHex(c); ok {
				inner = inner[width:]
				hex = hex*16 + next
			} else {
				break
			}
		}

		// One whitespace character after a hex escape is part of the escape
		if len(inner) > 0 {
			c, width = utf8.DecodeRuneInString(inner)
			if isWhitespace(c) {
				inner = inner[width:]
			}
		}

		// NUL, surrogates, and out-of-range values decode to U+FFFD
		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			sb.WriteRune(utf8.RuneError)
			continue
		}

		sb.WriteRune(rune(hex))
	}

	return sb.String()
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/comment.go | internal/helpers/comment.go | package helpers
import (
"strings"
)
// EscapeClosingTag inserts a backslash before every case-insensitive
// occurrence of "<" + slashTag in "text" (e.g. slashTag "/script" escapes
// "</script>" as "<\/script>") so the text can be safely embedded inside
// that HTML element. An empty slashTag disables escaping.
func EscapeClosingTag(text string, slashTag string) string {
	if slashTag == "" {
		return text
	}
	next := strings.Index(text, "</")
	if next == -1 {
		return text
	}

	var sb strings.Builder
	for next != -1 {
		// Copy everything up to and including the "<"
		sb.WriteString(text[:next+1])
		text = text[next+1:]

		// Insert a backslash if the "/" begins the closing tag
		if len(text) >= len(slashTag) && strings.EqualFold(text[:len(slashTag)], slashTag) {
			sb.WriteByte('\\')
		}
		next = strings.Index(text, "</")
	}
	sb.WriteString(text)
	return sb.String()
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/mime.go | internal/helpers/mime.go | package helpers
import "strings"
// builtinTypesLower maps lowercase file extensions (including the leading
// ".") to their MIME content type. Text types include an explicit UTF-8
// charset. See MimeTypeByExtension below for why this table exists.
var builtinTypesLower = map[string]string{
	// Text
	".css":         "text/css; charset=utf-8",
	".htm":         "text/html; charset=utf-8",
	".html":        "text/html; charset=utf-8",
	".js":          "text/javascript; charset=utf-8",
	".json":        "application/json; charset=utf-8",
	".markdown":    "text/markdown; charset=utf-8",
	".md":          "text/markdown; charset=utf-8",
	".mjs":         "text/javascript; charset=utf-8",
	".xhtml":       "application/xhtml+xml; charset=utf-8",
	".xml":         "text/xml; charset=utf-8",

	// Images
	".avif": "image/avif",
	".gif":  "image/gif",
	".jpeg": "image/jpeg",
	".jpg":  "image/jpeg",
	".png":  "image/png",
	".svg":  "image/svg+xml",
	".webp": "image/webp",

	// Fonts
	".eot":   "application/vnd.ms-fontobject",
	".otf":   "font/otf",
	".sfnt":  "font/sfnt",
	".ttf":   "font/ttf",
	".woff":  "font/woff",
	".woff2": "font/woff2",

	// Other
	".pdf":         "application/pdf",
	".wasm":        "application/wasm",
	".webmanifest": "application/manifest+json",
}
// MimeTypeByExtension returns the MIME content type for the given file
// extension (including the leading "."), or the empty string if the
// extension is unknown. The extension is tried as-is first and then in
// lowercase.
//
// This is used instead of Go's built-in "mime.TypeByExtension" function
// because that function is broken on Windows:
// https://github.com/golang/go/issues/32350.
func MimeTypeByExtension(ext string) string {
	if contentType, ok := builtinTypesLower[ext]; ok {
		return contentType
	}
	return builtinTypesLower[strings.ToLower(ext)]
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/path.go | internal/helpers/path.go | package helpers
import (
"net/url"
"strings"
"github.com/evanw/esbuild/internal/fs"
)
// IsInsideNodeModules reports whether any directory component of the given
// path is named exactly "node_modules".
//
// This is written in a platform-independent manner because it's run on
// user-specified paths which can be arbitrary non-file-system things. So for
// example Windows paths may end up being used on Unix or URLs may end up
// being used on Windows. Be consistently agnostic to which kind of slash is
// used on all platforms.
func IsInsideNodeModules(path string) bool {
	for {
		slash := strings.LastIndexAny(path, "/\\")
		if slash == -1 {
			return false
		}
		if path[slash+1:] == "node_modules" {
			return true
		}
		path = path[:slash]
	}
}
func IsFileURL(fileURL *url.URL) bool {
return fileURL.Scheme == "file" && (fileURL.Host == "" || fileURL.Host == "localhost") && strings.HasPrefix(fileURL.Path, "/")
}
func FileURLFromFilePath(filePath string) *url.URL {
// Append a trailing slash so that resolving the URL includes the trailing
// directory, and turn Windows-style paths with volumes into URL-style paths:
//
// "/Users/User/Desktop" => "/Users/User/Desktop/"
// "C:\\Users\\User\\Desktop" => "/C:/Users/User/Desktop/"
//
filePath = strings.ReplaceAll(filePath, "\\", "/")
if !strings.HasPrefix(filePath, "/") {
filePath = "/" + filePath
}
return &url.URL{Scheme: "file", Path: filePath}
}
// FilePathFromFileURL converts the path of a "file://" URL back into a file
// system path. Windows is detected by the current working directory not
// starting with "/"; in that case the leading slash is dropped and forward
// slashes become backslashes so "filepath.Rel()" works:
//
//	"/C:/Users/User/foo.js.map" => "C:\\Users\\User\\foo.js.map"
func FilePathFromFileURL(fs fs.FS, fileURL *url.URL) string {
	path := fileURL.Path
	if strings.HasPrefix(fs.Cwd(), "/") {
		// Unix-style paths need no conversion
		return path
	}
	path = strings.TrimPrefix(path, "/")
	return strings.ReplaceAll(path, "/", "\\")
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/bitset.go | internal/helpers/bitset.go | package helpers
import "bytes"
// BitSet is a fixed-size set of bits packed eight to a byte.
type BitSet struct {
	entries []byte
}

// NewBitSet returns a BitSet with capacity for the given number of bits, all
// initially clear.
func NewBitSet(bitCount uint) BitSet {
	return BitSet{entries: make([]byte, (bitCount+7)/8)}
}

// HasBit reports whether the given bit is set.
func (bs BitSet) HasBit(bit uint) bool {
	mask := byte(1) << (bit & 7)
	return bs.entries[bit/8]&mask != 0
}

// SetBit sets the given bit. The receiver is a value, but the underlying
// byte slice is shared, so the mutation is visible to all copies.
func (bs BitSet) SetBit(bit uint) {
	mask := byte(1) << (bit & 7)
	bs.entries[bit/8] |= mask
}

// Equals reports whether two bit sets have identical contents and size.
func (bs BitSet) Equals(other BitSet) bool {
	return bytes.Equal(bs.entries, other.entries)
}

// String returns the raw bytes as a string, suitable for use as a map key.
func (bs BitSet) String() string {
	return string(bs.entries)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/timer.go | internal/helpers/timer.go | package helpers
import (
"fmt"
"strings"
"sync"
"time"
"github.com/evanw/esbuild/internal/logger"
)
// Timer records named begin/end timing events so esbuild can report how long
// each phase took. A nil *Timer is valid: every method on it is a no-op,
// which is how timing is disabled.
type Timer struct {
	data  []timerData
	mutex sync.Mutex
}

// timerData is a single begin or end event for a named timer.
type timerData struct {
	time  time.Time
	name  string
	isEnd bool
}

// Begin records the start of the named timer.
func (t *Timer) Begin(name string) {
	if t == nil {
		return
	}
	t.data = append(t.data, timerData{
		name: name,
		time: time.Now(),
	})
}

// End records the end of the named timer. It must pair with the most recent
// unmatched Begin of the same name.
func (t *Timer) End(name string) {
	if t == nil {
		return
	}
	t.data = append(t.data, timerData{
		name:  name,
		time:  time.Now(),
		isEnd: true,
	})
}

// Fork returns a fresh Timer for use on another goroutine, or nil if this
// timer is nil. Merge the fork's events back with Join when it's done.
func (t *Timer) Fork() *Timer {
	if t == nil {
		return nil
	}
	return &Timer{}
}

// Join appends the events recorded by a forked timer onto this one. This is
// the only method that locks the mutex, since forks may finish concurrently.
func (t *Timer) Join(other *Timer) {
	if t == nil || other == nil {
		return
	}
	t.mutex.Lock()
	defer t.mutex.Unlock()
	t.data = append(t.data, other.data...)
}
// Log renders the recorded begin/end pairs as an indented list of durations
// and emits them to the given log as a single info-level message. The events
// must be strictly balanced: every End must match the most recent Begin, or
// this panics.
func (t *Timer) Log(log logger.Log) {
	if t == nil {
		return
	}

	type pair struct {
		timerData
		index uint32
	}

	var notes []logger.MsgData
	var stack []pair
	indent := 0

	for _, item := range t.data {
		if !item.isEnd {
			// Reserve a note slot now so output appears in begin order; the
			// text is filled in when the matching End supplies the duration
			top := pair{timerData: item, index: uint32(len(notes))}
			notes = append(notes, logger.MsgData{DisableMaximumWidth: true})
			stack = append(stack, top)
			indent++
		} else {
			indent--
			last := len(stack) - 1
			top := stack[last]
			stack = stack[:last]
			if item.name != top.name {
				panic("Internal error")
			}
			notes[top.index].Text = fmt.Sprintf("%s%s: %dms",
				strings.Repeat("  ", indent),
				top.name,
				item.time.Sub(top.time).Milliseconds())
		}
	}

	log.AddIDWithNotes(logger.MsgID_None, logger.Info, nil, logger.Range{},
		"Timing information (times may not nest hierarchically due to parallelism)", notes)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/hash.go | internal/helpers/hash.go | package helpers
// From: http://boost.sourceforge.net/doc/html/boost/hash_combine.html
func HashCombine(seed uint32, hash uint32) uint32 {
return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2))
}
func HashCombineString(seed uint32, text string) uint32 {
seed = HashCombine(seed, uint32(len(text)))
for _, c := range text {
seed = HashCombine(seed, uint32(c))
}
return seed
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/quote.go | internal/helpers/quote.go | package helpers
import "unicode/utf8"
const hexChars = "0123456789ABCDEF"
const firstASCII = 0x20
const lastASCII = 0x7E
const firstHighSurrogate = 0xD800
const firstLowSurrogate = 0xDC00
const lastLowSurrogate = 0xDFFF

// canPrintWithoutEscape reports whether the code point can be emitted as-is
// inside a quoted string: printable ASCII other than backslash and double
// quote, or (when non-ASCII output is allowed) any non-ASCII code point that
// isn't a BOM or a surrogate.
func canPrintWithoutEscape(c rune, asciiOnly bool) bool {
	if c <= lastASCII {
		return c >= firstASCII && c != '\\' && c != '"'
	}
	if asciiOnly || c == '\uFEFF' {
		return false
	}
	return c < firstHighSurrogate || c > lastLowSurrogate
}
// QuoteSingle returns text as a single-quoted string literal, escaping
// characters as needed. When asciiOnly is true, all non-ASCII characters
// are written as "\uXXXX" escapes.
func QuoteSingle(text string, asciiOnly bool) []byte {
	return internalQuote(text, asciiOnly, '\'')
}

// QuoteForJSON returns text as a double-quoted string literal, escaping
// characters as needed. When asciiOnly is true, all non-ASCII characters
// are written as "\uXXXX" escapes.
func QuoteForJSON(text string, asciiOnly bool) []byte {
	return internalQuote(text, asciiOnly, '"')
}
// internalQuote converts text to a quoted string literal delimited by
// quoteChar (either '\'' or '"'), escaping characters that cannot appear
// verbatim. When asciiOnly is true every non-ASCII character is written as a
// "\uXXXX" escape (two escapes, a surrogate pair, for code points above
// 0xFFFF).
func internalQuote(text string, asciiOnly bool, quoteChar byte) []byte {
	// Estimate the required length
	//
	// NOTE(review): this pass iterates UTF-8 runes while the emission loop
	// below decodes WTF-8, so for ill-formed input the estimate may differ
	// slightly from the final size. That's harmless: it's only a capacity
	// hint and append() grows the buffer as needed.
	lenEstimate := 2 // room for the opening and closing quote characters
	for _, c := range text {
		if canPrintWithoutEscape(c, asciiOnly) {
			lenEstimate += utf8.RuneLen(c)
		} else {
			switch c {
			case '\b', '\f', '\n', '\r', '\t', '\\':
				lenEstimate += 2 // two-character escape such as "\n"
			case '"':
				if quoteChar == '"' {
					lenEstimate += 2 // escaped as "\""
				}
				// When '"' is not the delimiter it's emitted verbatim
				// (1 byte) but not counted here — a slight underestimate.
			case '\'':
				if quoteChar == '\'' {
					lenEstimate += 2 // escaped as "\'"
				}
			default:
				if c <= 0xFFFF {
					lenEstimate += 6 // "\uXXXX"
				} else {
					lenEstimate += 12 // surrogate pair: two "\uXXXX" escapes
				}
			}
		}
	}

	// Preallocate the array
	bytes := make([]byte, 0, lenEstimate)
	i := 0
	n := len(text)
	bytes = append(bytes, quoteChar)
	for i < n {
		// DecodeWTF8Rune presumably decodes one WTF-8 sequence, returning
		// the rune and its encoded byte width (defined elsewhere in this
		// package) — TODO confirm.
		c, width := DecodeWTF8Rune(text[i:])

		// Fast path: a run of characters that don't need escaping
		if canPrintWithoutEscape(c, asciiOnly) {
			start := i
			i += width
			for i < n {
				c, width = DecodeWTF8Rune(text[i:])
				if !canPrintWithoutEscape(c, asciiOnly) {
					break
				}
				i += width
			}
			// Copy the whole printable run in one append
			bytes = append(bytes, text[start:i]...)
			continue
		}

		switch c {
		case '\b':
			bytes = append(bytes, "\\b"...)
			i++
		case '\f':
			bytes = append(bytes, "\\f"...)
			i++
		case '\n':
			bytes = append(bytes, "\\n"...)
			i++
		case '\r':
			bytes = append(bytes, "\\r"...)
			i++
		case '\t':
			bytes = append(bytes, "\\t"...)
			i++
		case '\\':
			bytes = append(bytes, "\\\\"...)
			i++
		case '"':
			// Only escape the quote character that matches the delimiter
			if quoteChar == '"' {
				bytes = append(bytes, "\\\""...)
			} else {
				bytes = append(bytes, '"')
			}
			i++
		case '\'':
			if quoteChar == '\'' {
				bytes = append(bytes, "\\'"...)
			} else {
				bytes = append(bytes, '\'')
			}
			i++
		default:
			i += width
			if c <= 0xFFFF {
				// Single "\uXXXX" escape for BMP code points
				bytes = append(
					bytes,
					'\\', 'u', hexChars[c>>12], hexChars[(c>>8)&15], hexChars[(c>>4)&15], hexChars[c&15],
				)
			} else {
				// Code points above the BMP become a UTF-16 surrogate pair.
				// Note: "lo" holds the leading (high) surrogate and "hi" the
				// trailing (low) surrogate — the names reflect output order,
				// not numeric range.
				c -= 0x10000
				lo := firstHighSurrogate + ((c >> 10) & 0x3FF)
				hi := firstLowSurrogate + (c & 0x3FF)
				bytes = append(
					bytes,
					'\\', 'u', hexChars[lo>>12], hexChars[(lo>>8)&15], hexChars[(lo>>4)&15], hexChars[lo&15],
					'\\', 'u', hexChars[hi>>12], hexChars[(hi>>8)&15], hexChars[(hi>>4)&15], hexChars[hi&15],
				)
			}
		}
	}
	return append(bytes, quoteChar)
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/glob.go | internal/helpers/glob.go | package helpers
import "strings"
// GlobWildcard classifies the wildcard that follows a literal prefix.
type GlobWildcard uint8

const (
	GlobNone GlobWildcard = iota
	GlobAllExceptSlash
	GlobAllIncludingSlash
)

// GlobPart is one literal prefix followed by an optional wildcard.
type GlobPart struct {
	Prefix   string
	Wildcard GlobWildcard
}

// ParseGlobPattern splits a glob pattern at each run of "*" characters.
//
// The returned array will always be at least one element. If there are no
// wildcards then it will be exactly one element, and if there are wildcards
// then it will be more than one element.
func ParseGlobPattern(text string) (pattern []GlobPart) {
	for {
		star := strings.IndexByte(text, '*')
		if star < 0 {
			// No more wildcards: the remainder is a literal suffix
			return append(pattern, GlobPart{Prefix: text})
		}

		// Measure the run of consecutive stars
		end := star + 1
		for end < len(text) && text[end] == '*' {
			end++
		}

		// Multiple stars form a "globstar" segment only when they occupy a
		// whole path segment. Allow both "/" and "\" as slashes.
		wildcard := GlobAllExceptSlash
		if end-star > 1 {
			precededBySlash := star == 0 || text[star-1] == '/' || text[star-1] == '\\'
			followedBySlash := end == len(text) || text[end] == '/' || text[end] == '\\'
			if precededBySlash && followedBySlash {
				wildcard = GlobAllIncludingSlash
			}
		}

		pattern = append(pattern, GlobPart{Prefix: text[:star], Wildcard: wildcard})
		text = text[end:]
	}
}

// GlobPatternToString renders a parsed pattern back into its textual form
// ("*" for GlobAllExceptSlash, "**" for GlobAllIncludingSlash).
func GlobPatternToString(pattern []GlobPart) string {
	var sb strings.Builder
	for _, part := range pattern {
		sb.WriteString(part.Prefix)
		switch part.Wildcard {
		case GlobAllExceptSlash:
			sb.WriteByte('*')
		case GlobAllIncludingSlash:
			sb.WriteString("**")
		}
	}
	return sb.String()
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/stack.go | internal/helpers/stack.go | package helpers
import (
"runtime/debug"
"strings"
)
func PrettyPrintedStack() string {
lines := strings.Split(strings.TrimSpace(string(debug.Stack())), "\n")
// Strip the first "goroutine" line
if len(lines) > 0 {
if first := lines[0]; strings.HasPrefix(first, "goroutine ") && strings.HasSuffix(first, ":") {
lines = lines[1:]
}
}
sb := strings.Builder{}
for _, line := range lines {
// Indented lines are source locations
if strings.HasPrefix(line, "\t") {
line = line[1:]
line = strings.TrimPrefix(line, "github.com/evanw/esbuild/")
if offset := strings.LastIndex(line, " +0x"); offset != -1 {
line = line[:offset]
}
sb.WriteString(" (")
sb.WriteString(line)
sb.WriteString(")")
continue
}
// Other lines are function calls
if sb.Len() > 0 {
sb.WriteByte('\n')
}
if strings.HasSuffix(line, ")") {
if paren := strings.LastIndexByte(line, '('); paren != -1 {
line = line[:paren]
}
}
if slash := strings.LastIndexByte(line, '/'); slash != -1 {
line = line[slash+1:]
}
sb.WriteString(line)
}
return sb.String()
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/joiner.go | internal/helpers/joiner.go | package helpers
import (
"bytes"
"strings"
)
// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	strings  []joinerString
	bytes    []joinerBytes
	length   uint32
	lastByte byte
}

// joinerString records a string chunk and its offset in the final buffer.
type joinerString struct {
	data   string
	offset uint32
}

// joinerBytes records a byte-slice chunk and its offset in the final buffer.
type joinerBytes struct {
	data   []byte
	offset uint32
}

// AddString appends a string chunk to the joiner.
func (j *Joiner) AddString(data string) {
	if n := len(data); n > 0 {
		j.lastByte = data[n-1]
	}
	j.strings = append(j.strings, joinerString{data: data, offset: j.length})
	j.length += uint32(len(data))
}

// AddBytes appends a byte-slice chunk to the joiner. The slice is retained
// rather than copied, so callers should not mutate it afterwards.
func (j *Joiner) AddBytes(data []byte) {
	if n := len(data); n > 0 {
		j.lastByte = data[n-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data: data, offset: j.length})
	j.length += uint32(len(data))
}

// LastByte returns the final byte added so far (zero if nothing was added).
func (j *Joiner) LastByte() byte {
	return j.lastByte
}

// Length returns the total number of bytes added so far.
func (j *Joiner) Length() uint32 {
	return j.length
}

// EnsureNewlineAtEnd appends "\n" unless the buffer is empty or already ends
// with a newline.
func (j *Joiner) EnsureNewlineAtEnd() {
	if j.length > 0 && j.lastByte != '\n' {
		j.AddString("\n")
	}
}

// Done assembles and returns the joined buffer.
func (j *Joiner) Done() []byte {
	// No need to allocate if there was only a single byte array written
	if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 {
		return j.bytes[0].data
	}
	result := make([]byte, j.length)
	for _, chunk := range j.strings {
		copy(result[chunk.offset:], chunk.data)
	}
	for _, chunk := range j.bytes {
		copy(result[chunk.offset:], chunk.data)
	}
	return result
}

// Contains reports whether any single added chunk contains s (string chunks)
// or b (byte chunks). It does not detect matches that span chunk boundaries.
func (j *Joiner) Contains(s string, b []byte) bool {
	for _, chunk := range j.strings {
		if strings.Contains(chunk.data, s) {
			return true
		}
	}
	for _, chunk := range j.bytes {
		if bytes.Contains(chunk.data, b) {
			return true
		}
	}
	return false
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/strings.go | internal/helpers/strings.go | package helpers
import (
"fmt"
"strings"
)
// StringArraysEqual reports whether a and b have the same length and
// identical elements in the same order.
func StringArraysEqual(a []string, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}

// StringArrayArraysEqual reports whether a and b have the same length and
// element-wise equal inner string arrays.
func StringArrayArraysEqual(a [][]string, b [][]string) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if !StringArraysEqual(a[i], b[i]) {
			return false
		}
	}
	return true
}
// StringArrayToQuotedCommaSeparatedString renders a as a comma-separated
// list of Go-quoted strings, e.g. []string{"a", "b"} becomes `"a", "b"`.
func StringArrayToQuotedCommaSeparatedString(a []string) string {
	quoted := make([]string, len(a))
	for i, item := range a {
		quoted[i] = fmt.Sprintf("%q", item)
	}
	return strings.Join(quoted, ", ")
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
evanw/esbuild | https://github.com/evanw/esbuild/blob/cd832972927f1f67b6d2cc895c06a8759c1cf309/internal/helpers/waitgroup.go | internal/helpers/waitgroup.go | package helpers
import "sync/atomic"
// Go's "sync.WaitGroup" is not thread-safe. Specifically it's not safe to call
// "Add" concurrently with "Wait", which is problematic because we have a case
// where we would like to do that.
//
// This is a simple alternative implementation of "sync.WaitGroup" that is
// thread-safe and that works for our purposes. We don't need to worry about
// multiple waiters so the implementation can be very simple.
type ThreadSafeWaitGroup struct {
	counter int32
	channel chan struct{}
}

// MakeThreadSafeWaitGroup returns a ready-to-use wait group.
func MakeThreadSafeWaitGroup() *ThreadSafeWaitGroup {
	return &ThreadSafeWaitGroup{
		channel: make(chan struct{}, 1),
	}
}

// Add adjusts the counter by delta. When the counter reaches zero, the
// single waiter is released; a negative counter is a usage error and panics.
func (wg *ThreadSafeWaitGroup) Add(delta int32) {
	switch counter := atomic.AddInt32(&wg.counter, delta); {
	case counter == 0:
		wg.channel <- struct{}{}
	case counter < 0:
		panic("sync: negative WaitGroup counter")
	}
}

// Done decrements the counter by one.
func (wg *ThreadSafeWaitGroup) Done() {
	wg.Add(-1)
}

// Wait blocks until the counter reaches zero. Only one waiter is supported.
func (wg *ThreadSafeWaitGroup) Wait() {
	<-wg.channel
}
| go | MIT | cd832972927f1f67b6d2cc895c06a8759c1cf309 | 2026-01-07T08:35:49.242278Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.