| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | package reflect |
| |
|
| | import ( |
| | "internal/abi" |
| | "internal/goarch" |
| | "iter" |
| | "runtime" |
| | "strconv" |
| | "sync" |
| | "unicode" |
| | "unicode/utf8" |
| | "unsafe" |
| | ) |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Type is the representation of a Go type.
//
// Not all methods apply to all kinds of types. Restrictions,
// if any, are noted in the documentation for each method.
// Kind-specific methods panic when called on a type of the wrong
// kind (see the rtype implementations below), so use the Kind
// method to find out the kind of type before calling
// kind-specific methods.
type Type interface {
	// Methods applicable to all types.

	// Align returns the alignment in bytes of a value of
	// this type when allocated in memory.
	Align() int

	// FieldAlign returns the alignment in bytes of a value of
	// this type when used as a field in a struct.
	FieldAlign() int

	// Method returns the i'th method in the type's method set.
	// It panics if i is not in the range [0, NumMethod()).
	// For a non-interface type, the returned Method's Func field
	// holds a function whose first argument is the receiver.
	Method(int) Method

	// Methods returns an iterator over the type's method set,
	// in the same order as Method.
	Methods() iter.Seq[Method]

	// MethodByName returns the method with that name in the
	// type's method set and a boolean indicating if the method
	// was found.
	MethodByName(string) (Method, bool)

	// NumMethod returns the number of methods accessible using Method.
	// For a non-interface type it counts only exported methods
	// (see the rtype implementation below).
	NumMethod() int

	// Name returns the type's name within its package for a defined type.
	// For other (non-defined) types it returns the empty string.
	Name() string

	// PkgPath returns a defined type's package path, that is, the
	// import path that uniquely identifies the package. If the type
	// is not defined, the package path is the empty string.
	PkgPath() string

	// Size returns the number of bytes needed to store
	// a value of the given type.
	Size() uintptr

	// String returns a string representation of the type.
	String() string

	// Kind returns the specific kind of this type.
	Kind() Kind

	// Implements reports whether the type implements the interface type u.
	Implements(u Type) bool

	// AssignableTo reports whether a value of the type is assignable to type u.
	AssignableTo(u Type) bool

	// ConvertibleTo reports whether a value of the type is convertible to type u.
	ConvertibleTo(u Type) bool

	// Comparable reports whether values of this type are comparable.
	Comparable() bool

	// Methods applicable only to some types, depending on Kind.
	// Each panics when invoked on a type of the wrong kind.

	// Bits returns the size of the type in bits.
	// It panics if the type's Kind is not one of the
	// sized or unsized Int, Uint, Float, or Complex kinds.
	Bits() int

	// ChanDir returns a channel type's direction.
	ChanDir() ChanDir

	// IsVariadic reports whether a function type's final input
	// parameter is a "..." parameter.
	IsVariadic() bool

	// Elem returns a type's element type.
	Elem() Type

	// Field returns a struct type's i'th field.
	Field(i int) StructField

	// Fields returns an iterator over a struct type's fields.
	Fields() iter.Seq[StructField]

	// FieldByIndex returns the nested field corresponding to the
	// index sequence, equivalent to calling Field successively for
	// each index i.
	FieldByIndex(index []int) StructField

	// FieldByName returns the struct field with the given name
	// and a boolean indicating if the field was found.
	FieldByName(name string) (StructField, bool)

	// FieldByNameFunc returns the struct field with a name that
	// satisfies the match function and a boolean indicating if the
	// field was found. It reports no match (false) when multiple
	// fields at the shallowest matching depth satisfy the function
	// (see the structType implementation below).
	FieldByNameFunc(match func(string) bool) (StructField, bool)

	// In returns the type of a function type's i'th input parameter.
	In(i int) Type

	// Ins returns an iterator over a function type's input
	// parameter types.
	Ins() iter.Seq[Type]

	// Key returns a map type's key type.
	Key() Type

	// Len returns an array type's length.
	Len() int

	// NumField returns a struct type's field count.
	NumField() int

	// NumIn returns a function type's input parameter count.
	NumIn() int

	// NumOut returns a function type's output parameter count.
	NumOut() int

	// Out returns the type of a function type's i'th output parameter.
	Out(i int) Type

	// Outs returns an iterator over a function type's output
	// parameter types.
	Outs() iter.Seq[Type]

	// OverflowComplex reports whether the complex128 x cannot be
	// represented by the complex type.
	OverflowComplex(x complex128) bool

	// OverflowFloat reports whether the float64 x cannot be
	// represented by the float type.
	OverflowFloat(x float64) bool

	// OverflowInt reports whether the int64 x cannot be
	// represented by the signed integer type.
	OverflowInt(x int64) bool

	// OverflowUint reports whether the uint64 x cannot be
	// represented by the unsigned integer type.
	OverflowUint(x uint64) bool

	// CanSeq reports whether a one-variable range is possible over
	// a value of this type (see the rtype implementation below).
	CanSeq() bool

	// CanSeq2 reports whether a two-variable range is possible
	// over a value of this type.
	CanSeq2() bool

	// common and uncommon expose the underlying internal/abi
	// representation; being unexported, they also prevent Type
	// from being implemented outside this package.
	common() *abi.Type
	uncommon() *uncommonType
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
// A Kind represents the specific kind of type that a Type represents.
// The zero Kind is Invalid, not a valid kind.
type Kind uint

// The kinds, in the same order as the internal/abi kind values
// (rtype.Kind converts between the two directly).
const (
	Invalid Kind = iota
	Bool
	Int
	Int8
	Int16
	Int32
	Int64
	Uint
	Uint8
	Uint16
	Uint32
	Uint64
	Uintptr
	Float32
	Float64
	Complex64
	Complex128
	Array
	Chan
	Func
	Interface
	Map
	Pointer
	Slice
	String
	Struct
	UnsafePointer
)

// Ptr is the old name for the Pointer kind.
const Ptr = Pointer
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// uncommonType is present only for defined types or types with methods.
// Using a pointer to this struct reduces the overall size required to
// describe a non-defined type with no methods.
type uncommonType = abi.UncommonType

// common is embedded by other type structs to provide the
// shared abi.Type header.
type common struct {
	abi.Type
}

// rtype is the common implementation of most values.
// It is embedded in other struct types.
type rtype struct {
	t abi.Type
}

// common returns the underlying abi.Type, satisfying the
// unexported Type interface method.
func (t *rtype) common() *abi.Type {
	return &t.t
}

// uncommon returns the type's uncommon data, or nil if the type
// has none.
func (t *rtype) uncommon() *abi.UncommonType {
	return t.t.Uncommon()
}

// Aliases for the offset types used to locate names, types, and
// code in a module's data.
type aNameOff = abi.NameOff
type aTypeOff = abi.TypeOff
type aTextOff = abi.TextOff
| |
|
| | |
// ChanDir represents a channel type's direction.
type ChanDir int

const (
	RecvDir ChanDir = 1 << iota // <-chan
	SendDir                     // chan<-
	BothDir = RecvDir | SendDir // chan
)
| |
|
| | |
// arrayType represents a fixed array type.
type arrayType = abi.ArrayType

// chanType represents a channel type.
type chanType = abi.ChanType

// funcType represents a function type.
//
// See internal/abi.FuncType for the layout details (parameter and
// result types follow the header in memory).
type funcType = abi.FuncType
| |
|
| | |
// interfaceType represents an interface type.
type interfaceType struct {
	abi.InterfaceType
}

// nameOff resolves a name offset relative to this type's module.
func (t *interfaceType) nameOff(off aNameOff) abi.Name {
	return toRType(&t.Type).nameOff(off)
}

// nameOffFor resolves a name offset for an arbitrary *abi.Type.
func nameOffFor(t *abi.Type, off aNameOff) abi.Name {
	return toRType(t).nameOff(off)
}

// typeOffFor resolves a type offset for an arbitrary *abi.Type.
func typeOffFor(t *abi.Type, off aTypeOff) *abi.Type {
	return toRType(t).typeOff(off)
}

// typeOff resolves a type offset relative to this type's module.
func (t *interfaceType) typeOff(off aTypeOff) *abi.Type {
	return toRType(&t.Type).typeOff(off)
}

func (t *interfaceType) common() *abi.Type {
	return &t.Type
}

func (t *interfaceType) uncommon() *abi.UncommonType {
	return t.Uncommon()
}
| |
|
| | |
// ptrType represents a pointer type.
type ptrType struct {
	abi.PtrType
}

// sliceType represents a slice type.
type sliceType struct {
	abi.SliceType
}

// structField describes a single field in a struct type.
type structField = abi.StructField

// structType represents a struct type.
type structType struct {
	abi.StructType
}
| |
|
// pkgPath returns the package-path string stored in the encoded name n,
// or "" if the name carries no package path (flag bit 1<<2 unset in the
// first byte; NOTE(review): this bit presumably matches internal/abi's
// "has pkgPath" flag — confirm against abi.Name's encoding).
func pkgPath(n abi.Name) string {
	if n.Bytes == nil || *n.DataChecked(0, "name flag field")&(1<<2) == 0 {
		return ""
	}
	// Skip the flag byte plus the varint-prefixed name data.
	i, l := n.ReadVarint(1)
	off := 1 + i + l
	if n.HasTag() {
		// Skip the varint-prefixed tag data as well.
		i2, l2 := n.ReadVarint(off)
		off += i2 + l2
	}
	var nameOff int32
	// Note that this field may not be aligned in memory,
	// so we cannot use a direct int32 assignment here;
	// copy the 4 bytes instead.
	copy((*[4]byte)(unsafe.Pointer(&nameOff))[:], (*[4]byte)(unsafe.Pointer(n.DataChecked(off, "name offset field")))[:])
	pkgPathName := abi.Name{Bytes: (*byte)(resolveTypeOff(unsafe.Pointer(n.Bytes), nameOff))}
	return pkgPathName.Name()
}

// newName builds an encoded abi.Name from its components.
func newName(n, tag string, exported, embedded bool) abi.Name {
	return abi.NewName(n, tag, exported, embedded)
}
| |
|
| | |
| | |
| | |
| | |
| |
|
| | |
// Method represents a single method in a type's method set.
type Method struct {
	// Name is the method name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// method name. It is empty for upper case (exported) method names.
	// The combination of PkgPath and Name uniquely identifies a method
	// in a method set.
	PkgPath string

	Type  Type  // method type
	Func  Value // func with receiver as first argument
	Index int   // index for Type.Method
}

// IsExported reports whether the method is exported.
func (m Method) IsExported() bool {
	return m.PkgPath == ""
}
| |
|
| | |
// String returns the name of k, or "kind<N>" for out-of-range values.
func (k Kind) String() string {
	if uint(k) < uint(len(kindNames)) {
		return kindNames[uint(k)]
	}
	return "kind" + strconv.Itoa(int(k))
}

// kindNames maps each Kind to its display name; indexed by Kind value.
var kindNames = []string{
	Invalid:       "invalid",
	Bool:          "bool",
	Int:           "int",
	Int8:          "int8",
	Int16:         "int16",
	Int32:         "int32",
	Int64:         "int64",
	Uint:          "uint",
	Uint8:         "uint8",
	Uint16:        "uint16",
	Uint32:        "uint32",
	Uint64:        "uint64",
	Uintptr:       "uintptr",
	Float32:       "float32",
	Float64:       "float64",
	Complex64:     "complex64",
	Complex128:    "complex128",
	Array:         "array",
	Chan:          "chan",
	Func:          "func",
	Interface:     "interface",
	Map:           "map",
	Pointer:       "ptr",
	Slice:         "slice",
	String:        "string",
	Struct:        "struct",
	UnsafePointer: "unsafe.Pointer",
}
| |
|
| | |
| | |
| | |
| | |
| | |
// resolveNameOff resolves a name offset from a base pointer.
// Implemented elsewhere (presumably linked from the runtime —
// the declaration has no Go body).
func resolveNameOff(ptrInModule unsafe.Pointer, off int32) unsafe.Pointer

// resolveTypeOff resolves an *rtype offset from a base type.
// Implemented elsewhere (no Go body here).
func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// resolveTextOff resolves a function pointer offset from a base type.
// Implemented elsewhere (no Go body here).
func resolveTextOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// addReflectOff adds a pointer to the reflection offset lookup map
// and returns its offset, for use by the resolveReflect* helpers
// below. Implemented elsewhere (no Go body here).
func addReflectOff(ptr unsafe.Pointer) int32
| |
|
| | |
| | |
// resolveReflectName adds a name to the reflection lookup map
// and returns a name offset that can later be resolved again.
func resolveReflectName(n abi.Name) aNameOff {
	return aNameOff(addReflectOff(unsafe.Pointer(n.Bytes)))
}

// resolveReflectType adds a *abi.Type to the reflection lookup map
// and returns a type offset that can later be resolved again.
func resolveReflectType(t *abi.Type) aTypeOff {
	return aTypeOff(addReflectOff(unsafe.Pointer(t)))
}

// resolveReflectText adds a function pointer to the reflection lookup
// map and returns a text offset that can later be resolved again.
func resolveReflectText(ptr unsafe.Pointer) aTextOff {
	return aTextOff(addReflectOff(ptr))
}
| |
|
// nameOff resolves the name at offset off relative to t.
func (t *rtype) nameOff(off aNameOff) abi.Name {
	return abi.Name{Bytes: (*byte)(resolveNameOff(unsafe.Pointer(t), int32(off)))}
}

// typeOff resolves the type at offset off relative to t.
func (t *rtype) typeOff(off aTypeOff) *abi.Type {
	return (*abi.Type)(resolveTypeOff(unsafe.Pointer(t), int32(off)))
}

// textOff resolves the code pointer at offset off relative to t.
func (t *rtype) textOff(off aTextOff) unsafe.Pointer {
	return resolveTextOff(unsafe.Pointer(t), int32(off))
}

// textOffFor is the free-function form of (*rtype).textOff.
func textOffFor(t *abi.Type, off aTextOff) unsafe.Pointer {
	return toRType(t).textOff(off)
}

// String returns the type's string representation, dropping the
// leading '*' that the compiler stores for types carrying the
// TFlagExtraStar flag.
func (t *rtype) String() string {
	s := t.nameOff(t.t.Str).Name()
	if t.t.TFlag&abi.TFlagExtraStar != 0 {
		return s[1:]
	}
	return s
}
| |
|
func (t *rtype) Size() uintptr { return t.t.Size() }

// Bits returns the size of the type in bits; it panics for nil or
// non-arithmetic (outside Int..Complex128) types.
func (t *rtype) Bits() int {
	if t == nil {
		panic("reflect: Bits of nil Type")
	}
	k := t.Kind()
	if k < Int || k > Complex128 {
		panic("reflect: Bits of non-arithmetic Type " + t.String())
	}
	return int(t.t.Size_) * 8
}

func (t *rtype) Align() int { return t.t.Align() }

func (t *rtype) FieldAlign() int { return t.t.FieldAlign() }

func (t *rtype) Kind() Kind { return Kind(t.t.Kind()) }
| |
|
// exportedMethods returns the type's exported methods, or nil if the
// type has no uncommon data.
func (t *rtype) exportedMethods() []abi.Method {
	ut := t.uncommon()
	if ut == nil {
		return nil
	}
	return ut.ExportedMethods()
}

// NumMethod counts all methods for interface types, but only
// exported methods for concrete types.
func (t *rtype) NumMethod() int {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.NumMethod()
	}
	return len(t.exportedMethods())
}
| |
|
// Method returns the i'th method of t. For interfaces it delegates to
// interfaceType.Method; for concrete types it builds a Method whose
// Type is a func with the receiver prepended as the first parameter
// and whose Func is a callable Value.
func (t *rtype) Method(i int) (m Method) {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.Method(i)
	}
	methods := t.exportedMethods()
	if i < 0 || i >= len(methods) {
		panic("reflect: Method index out of range")
	}
	p := methods[i]
	pname := t.nameOff(p.Name)
	m.Name = pname.Name()
	fl := flag(Func)
	mtyp := t.typeOff(p.Mtyp)
	ft := (*funcType)(unsafe.Pointer(mtyp))
	// Build the method's func type with the receiver as In(0).
	in := make([]Type, 0, 1+ft.NumIn())
	in = append(in, t)
	for _, arg := range ft.InSlice() {
		in = append(in, toRType(arg))
	}
	out := make([]Type, 0, ft.NumOut())
	for _, ret := range ft.OutSlice() {
		out = append(out, toRType(ret))
	}
	mt := FuncOf(in, out, ft.IsVariadic())
	m.Type = mt
	// The Value's pointer is a pointer to the code pointer
	// (a func value), not the code pointer itself.
	tfn := t.textOff(p.Tfn)
	fn := unsafe.Pointer(&tfn)
	m.Func = Value{&mt.(*rtype).t, fn, fl}

	m.Index = i
	return m
}
| |
|
// MethodByName looks up the exported method with the given name using
// a binary search over the (name-sorted) exported method table.
func (t *rtype) MethodByName(name string) (m Method, ok bool) {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.MethodByName(name)
	}
	ut := t.uncommon()
	if ut == nil {
		return Method{}, false
	}

	methods := ut.ExportedMethods()

	// We are looking for the first index i where the string becomes >= s.
	// This is a copy of sort.Search, with f(h) replaced by (t.nameOff(methods[h].name).name() >= name).
	i, j := 0, len(methods)
	for i < j {
		h := int(uint(i+j) >> 1) // avoid overflow when computing h
		// i ≤ h < j
		if !(t.nameOff(methods[h].Name).Name() >= name) {
			i = h + 1 // preserves f(i-1) == false
		} else {
			j = h // preserves f(j) == true
		}
	}
	// i == j, f(i-1) == false, and f(j) (= f(i)) == true  =>  answer is i.
	if i < len(methods) && name == t.nameOff(methods[i].Name).Name() {
		return t.Method(i), true
	}

	return Method{}, false
}
| |
|
// PkgPath returns the package path for named (defined) types with
// uncommon data; otherwise the empty string.
func (t *rtype) PkgPath() string {
	if t.t.TFlag&abi.TFlagNamed == 0 {
		return ""
	}
	ut := t.uncommon()
	if ut == nil {
		return ""
	}
	return t.nameOff(ut.PkgPath).Name()
}

// pkgPathFor is the free-function form of (*rtype).PkgPath.
func pkgPathFor(t *abi.Type) string {
	return toRType(t).PkgPath()
}
| |
|
// Name returns the bare type name: everything after the last '.' in
// the string representation, skipping dots inside square brackets so
// that generic type arguments (e.g. pkg.T[other.U]) are not split.
func (t *rtype) Name() string {
	if !t.t.HasName() {
		return ""
	}
	s := t.String()
	i := len(s) - 1
	sqBrackets := 0
	for i >= 0 && (s[i] != '.' || sqBrackets != 0) {
		switch s[i] {
		case ']':
			sqBrackets++
		case '[':
			sqBrackets--
		}
		i--
	}
	return s[i+1:]
}

// nameFor is the free-function form of (*rtype).Name.
func nameFor(t *abi.Type) string {
	return toRType(t).Name()
}
| |
|
// ChanDir returns the channel direction; it panics for non-chan types.
func (t *rtype) ChanDir() ChanDir {
	if t.Kind() != Chan {
		panic("reflect: ChanDir of non-chan type " + t.String())
	}
	tt := (*abi.ChanType)(unsafe.Pointer(t))
	return ChanDir(tt.Dir)
}

// toRType reinterprets an *abi.Type as the package's *rtype wrapper
// (rtype's only field is an abi.Type, so the layouts coincide).
func toRType(t *abi.Type) *rtype {
	return (*rtype)(unsafe.Pointer(t))
}
| |
|
// elem returns t's element type, panicking if t has none
// (abi.Type.Elem returns nil for kinds without an element).
func elem(t *abi.Type) *abi.Type {
	et := t.Elem()
	if et != nil {
		return et
	}
	panic("reflect: Elem of invalid type " + stringFor(t))
}

func (t *rtype) Elem() Type {
	return toType(elem(t.common()))
}
| |
|
// Field returns the i'th struct field; panics for non-struct types.
func (t *rtype) Field(i int) StructField {
	if t.Kind() != Struct {
		panic("reflect: Field of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.Field(i)
}

// FieldByIndex returns the nested field for an index sequence;
// panics for non-struct types.
func (t *rtype) FieldByIndex(index []int) StructField {
	if t.Kind() != Struct {
		panic("reflect: FieldByIndex of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByIndex(index)
}

// FieldByName returns the field with the given name; panics for
// non-struct types.
func (t *rtype) FieldByName(name string) (StructField, bool) {
	if t.Kind() != Struct {
		panic("reflect: FieldByName of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByName(name)
}

// FieldByNameFunc returns the field whose name satisfies match;
// panics for non-struct types.
func (t *rtype) FieldByNameFunc(match func(string) bool) (StructField, bool) {
	if t.Kind() != Struct {
		panic("reflect: FieldByNameFunc of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByNameFunc(match)
}

// Len returns the array length; panics for non-array types.
func (t *rtype) Len() int {
	if t.Kind() != Array {
		panic("reflect: Len of non-array type " + t.String())
	}
	tt := (*arrayType)(unsafe.Pointer(t))
	return int(tt.Len)
}

// NumField returns the struct field count; panics for non-struct types.
func (t *rtype) NumField() int {
	if t.Kind() != Struct {
		panic("reflect: NumField of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return len(tt.Fields)
}
| |
|
// In returns the i'th input parameter type; panics for non-func types.
func (t *rtype) In(i int) Type {
	if t.Kind() != Func {
		panic("reflect: In of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return toType(tt.InSlice()[i])
}

// NumIn returns the input parameter count; panics for non-func types.
func (t *rtype) NumIn() int {
	if t.Kind() != Func {
		panic("reflect: NumIn of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.NumIn()
}

// NumOut returns the output parameter count; panics for non-func types.
func (t *rtype) NumOut() int {
	if t.Kind() != Func {
		panic("reflect: NumOut of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.NumOut()
}

// Out returns the i'th output parameter type; panics for non-func types.
func (t *rtype) Out(i int) Type {
	if t.Kind() != Func {
		panic("reflect: Out of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return toType(tt.OutSlice()[i])
}

// IsVariadic reports whether the final input parameter is "...";
// panics for non-func types.
func (t *rtype) IsVariadic() bool {
	if t.Kind() != Func {
		panic("reflect: IsVariadic of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.IsVariadic()
}
| |
|
// OverflowComplex reports whether x cannot be represented by the
// complex type; panics for non-complex types.
func (t *rtype) OverflowComplex(x complex128) bool {
	k := t.Kind()
	switch k {
	case Complex64:
		return overflowFloat32(real(x)) || overflowFloat32(imag(x))
	case Complex128:
		return false
	}
	panic("reflect: OverflowComplex of non-complex type " + t.String())
}

// OverflowFloat reports whether x cannot be represented by the
// float type; panics for non-float types.
func (t *rtype) OverflowFloat(x float64) bool {
	k := t.Kind()
	switch k {
	case Float32:
		return overflowFloat32(x)
	case Float64:
		return false
	}
	panic("reflect: OverflowFloat of non-float type " + t.String())
}

// OverflowInt reports whether x cannot be represented by the signed
// integer type; panics for non-int types.
func (t *rtype) OverflowInt(x int64) bool {
	k := t.Kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := t.Size() * 8
		// Sign-extend the low bitSize bits back to 64; a change
		// means x did not fit.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic("reflect: OverflowInt of non-int type " + t.String())
}

// OverflowUint reports whether x cannot be represented by the
// unsigned integer type; panics for non-uint types.
func (t *rtype) OverflowUint(x uint64) bool {
	k := t.Kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		bitSize := t.Size() * 8
		// Zero-extend the low bitSize bits back to 64; a change
		// means x did not fit.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic("reflect: OverflowUint of non-uint type " + t.String())
}
| |
|
// CanSeq reports whether a one-variable range works on values of this
// type: integers, arrays, slices, chans, strings, maps, pointers to
// arrays, and range-funcs of the form func(yield func(V) bool).
func (t *rtype) CanSeq() bool {
	switch t.Kind() {
	case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Array, Slice, Chan, String, Map:
		return true
	case Func:
		return canRangeFunc(&t.t)
	case Pointer:
		return t.Elem().Kind() == Array
	}
	return false
}

// canRangeFunc reports whether t has the one-variable range-func
// shape: func(yield func(V) bool) with no results.
func canRangeFunc(t *abi.Type) bool {
	if t.Kind() != abi.Func {
		return false
	}
	f := t.FuncType()
	if f.InCount != 1 || f.OutCount != 0 {
		return false
	}
	y := f.In(0)
	if y.Kind() != abi.Func {
		return false
	}
	yield := y.FuncType()
	return yield.InCount == 1 && yield.OutCount == 1 && yield.Out(0).Kind() == abi.Bool
}

// CanSeq2 reports whether a two-variable range works on values of
// this type: arrays, slices, strings, maps, pointers to arrays, and
// range-funcs of the form func(yield func(K, V) bool).
func (t *rtype) CanSeq2() bool {
	switch t.Kind() {
	case Array, Slice, String, Map:
		return true
	case Func:
		return canRangeFunc2(&t.t)
	case Pointer:
		return t.Elem().Kind() == Array
	}
	return false
}

// canRangeFunc2 reports whether t has the two-variable range-func
// shape: func(yield func(K, V) bool) with no results.
func canRangeFunc2(t *abi.Type) bool {
	if t.Kind() != abi.Func {
		return false
	}
	f := t.FuncType()
	if f.InCount != 1 || f.OutCount != 0 {
		return false
	}
	y := f.In(0)
	if y.Kind() != abi.Func {
		return false
	}
	yield := y.FuncType()
	return yield.InCount == 2 && yield.OutCount == 1 && yield.Out(0).Kind() == abi.Bool
}
| |
|
// Fields returns an iterator over the struct's fields, in index
// order; panics immediately for non-struct types.
func (t *rtype) Fields() iter.Seq[StructField] {
	if t.Kind() != Struct {
		panic("reflect: Fields of non-struct type " + t.String())
	}
	return func(yield func(StructField) bool) {
		for i := range t.NumField() {
			if !yield(t.Field(i)) {
				return
			}
		}
	}
}

// Methods returns an iterator over the type's method set, in the
// same order as Method.
func (t *rtype) Methods() iter.Seq[Method] {
	return func(yield func(Method) bool) {
		for i := range t.NumMethod() {
			if !yield(t.Method(i)) {
				return
			}
		}
	}
}

// Ins returns an iterator over the func's input parameter types;
// panics immediately for non-func types.
func (t *rtype) Ins() iter.Seq[Type] {
	if t.Kind() != Func {
		panic("reflect: Ins of non-func type " + t.String())
	}
	return func(yield func(Type) bool) {
		for i := range t.NumIn() {
			if !yield(t.In(i)) {
				return
			}
		}
	}
}

// Outs returns an iterator over the func's output parameter types;
// panics immediately for non-func types.
func (t *rtype) Outs() iter.Seq[Type] {
	if t.Kind() != Func {
		panic("reflect: Outs of non-func type " + t.String())
	}
	return func(yield func(Type) bool) {
		for i := range t.NumOut() {
			if !yield(t.Out(i)) {
				return
			}
		}
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// add returns p+x. The whySafe string is ignored; callers supply it
// as documentation of why the addition stays within an allocation
// and is therefore safe pointer arithmetic.
func add(p unsafe.Pointer, x uintptr, whySafe string) unsafe.Pointer {
	return unsafe.Pointer(uintptr(p) + x)
}
| |
|
| | func (d ChanDir) String() string { |
| | switch d { |
| | case SendDir: |
| | return "chan<-" |
| | case RecvDir: |
| | return "<-chan" |
| | case BothDir: |
| | return "chan" |
| | } |
| | return "ChanDir" + strconv.Itoa(int(d)) |
| | } |
| |
|
| | |
// Method returns the i'th method in the interface's method list;
// out-of-range i yields the zero Method. Unexported methods carry a
// PkgPath (from the name itself or the interface's package).
// Note that interface methods have no Func value.
func (t *interfaceType) Method(i int) (m Method) {
	if i < 0 || i >= len(t.Methods) {
		return
	}
	p := &t.Methods[i]
	pname := t.nameOff(p.Name)
	m.Name = pname.Name()
	if !pname.IsExported() {
		m.PkgPath = pkgPath(pname)
		if m.PkgPath == "" {
			m.PkgPath = t.PkgPath.Name()
		}
	}
	m.Type = toType(t.typeOff(p.Typ))
	m.Index = i
	return
}

// NumMethod returns the number of interface methods in the type's method set.
func (t *interfaceType) NumMethod() int { return len(t.Methods) }

// MethodByName scans the method list linearly for a name match.
func (t *interfaceType) MethodByName(name string) (m Method, ok bool) {
	if t == nil {
		return
	}
	var p *abi.Imethod
	for i := range t.Methods {
		p = &t.Methods[i]
		if t.nameOff(p.Name).Name() == name {
			return t.Method(i), true
		}
	}
	return
}
| |
|
| | |
// A StructField describes a single field in a struct.
type StructField struct {
	// Name is the field name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// field name. It is empty for upper case (exported) field names.
	PkgPath string

	Type      Type      // field type
	Tag       StructTag // field tag string
	Offset    uintptr   // offset within struct, in bytes
	Index     []int     // index sequence for Type.FieldByIndex
	Anonymous bool      // is an embedded field
}

// IsExported reports whether the field is exported.
func (f StructField) IsExported() bool {
	return f.PkgPath == ""
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// A StructTag is the tag string in a struct field.
//
// By convention, tag strings are a concatenation of optionally
// space-separated key:"value" pairs. Each key is a non-empty string
// consisting of non-control characters other than space, quote, and
// colon; each value is quoted using Go string literal syntax
// (see Lookup for the exact parsing rules).
type StructTag string

// Get returns the value associated with key in the tag string, or ""
// if the key is absent. To distinguish an empty value from a missing
// key, use Lookup.
func (tag StructTag) Get(key string) string {
	v, _ := tag.Lookup(key)
	return v
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// Lookup returns the value associated with key in the tag string and
// reports whether the key was present. Parsing stops (returning
// "", false) at the first malformed pair.
func (tag StructTag) Lookup(key string) (value string, ok bool) {
	// When modifying this code, also update the validateStructTag code
	// in cmd/vet/structtag.go.

	for tag != "" {
		// Skip leading space.
		i := 0
		for i < len(tag) && tag[i] == ' ' {
			i++
		}
		tag = tag[i:]
		if tag == "" {
			break
		}

		// Scan to colon. A space, a quote or a control character is a syntax error.
		// Strictly speaking, control chars include the range [0x7f, 0x9f], not just
		// [0x00, 0x1f], but in practice, we ignore the multi-byte control characters
		// as it is simpler to inspect the tag's bytes than the tag's runes.
		i = 0
		for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
			i++
		}
		if i == 0 || i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' {
			break
		}
		name := string(tag[:i])
		tag = tag[i+1:]

		// Scan quoted string to find value, honoring backslash escapes.
		i = 1
		for i < len(tag) && tag[i] != '"' {
			if tag[i] == '\\' {
				i++
			}
			i++
		}
		if i >= len(tag) {
			break
		}
		qvalue := string(tag[:i+1])
		tag = tag[i+1:]

		if key == name {
			value, err := strconv.Unquote(qvalue)
			if err != nil {
				break
			}
			return value, true
		}
	}
	return "", false
}
| |
|
| | |
// Field returns the i'th field of the struct; panics if i is out
// of range.
func (t *structType) Field(i int) (f StructField) {
	if i < 0 || i >= len(t.Fields) {
		panic("reflect: Field index out of bounds")
	}
	p := &t.Fields[i]
	f.Type = toType(p.Typ)
	f.Name = p.Name.Name()
	f.Anonymous = p.Embedded()
	if !p.Name.IsExported() {
		f.PkgPath = t.PkgPath.Name()
	}
	if tag := p.Name.Tag(); tag != "" {
		f.Tag = StructTag(tag)
	}
	f.Offset = p.Offset

	// For small indexes, avoid allocating the one-element Index
	// slice by aliasing an entry of the runtime's static uint64
	// table (each entry i holds the value i — TODO confirm against
	// runtime.staticuint64s). On 32-bit big-endian systems the int
	// occupies the high half of the uint64, hence the 4-byte shift.
	if i < 256 && runtime.GOOS != "js" && runtime.GOOS != "wasip1" {
		staticuint64s := getStaticuint64s()
		p := unsafe.Pointer(&(*staticuint64s)[i])
		if unsafe.Sizeof(int(0)) == 4 && goarch.BigEndian {
			p = unsafe.Add(p, 4)
		}
		f.Index = unsafe.Slice((*int)(p), 1)
	} else {
		// NOTE: This is the slow path that allocates; it is kept
		// for large indexes and for GOOS values excluded above
		// (presumably where aliasing the static table is not safe —
		// verify before relying on this).
		f.Index = []int{i}
	}
	return
}
| |
|
| | |
| | |
| | |
| | |
| | |
// getStaticuint64s returns a pointer to a 256-entry uint64 table used
// by structType.Field to build allocation-free one-element Index
// slices. Implemented elsewhere (no Go body here); presumably linked
// to the runtime's staticuint64s table.
func getStaticuint64s() *[256]uint64
| |
|
| | |
| | |
| |
|
| | |
// FieldByIndex returns the nested field corresponding to index,
// descending through each element of index in turn and implicitly
// dereferencing pointers to structs along the way.
func (t *structType) FieldByIndex(index []int) (f StructField) {
	f.Type = toType(&t.Type)
	for i, x := range index {
		if i > 0 {
			ft := f.Type
			// Step through a *struct to the struct itself.
			if ft.Kind() == Pointer && ft.Elem().Kind() == Struct {
				ft = ft.Elem()
			}
			f.Type = ft
		}
		f = f.Type.Field(x)
	}
	return
}
| |
|
| | |
// A fieldScan represents an item on the fieldByNameFunc scan work list.
type fieldScan struct {
	typ   *structType
	index []int // index path from the root struct to typ
}
| |
|
| | |
| | |
// FieldByNameFunc returns the struct field with a name that satisfies
// the match function and a boolean to indicate if the field was found.
//
// It performs a breadth-first search through embedded structs, one
// depth level at a time, so that a match at a shallower depth wins.
// Two or more matches at the same (shallowest) depth make the result
// ambiguous, and the function reports no match.
func (t *structType) FieldByNameFunc(match func(string) bool) (result StructField, ok bool) {
	// current and next are the work queues for the current and the
	// next depth level; they are swapped (reusing storage) each round.
	current := []fieldScan{}
	next := []fieldScan{{typ: t}}

	// nextCount records the number of times an embedded struct type
	// appears at the next depth level. A count > 1 means any match
	// inside that type is automatically ambiguous. Allocated lazily,
	// since most searches never need it.
	var nextCount map[*structType]int

	// visited records embedded struct types already scanned at a
	// shallower depth; re-scanning them deeper could only produce
	// fields shadowed by (or ambiguous with) shallower ones.
	visited := map[*structType]bool{}

	for len(next) > 0 {
		current, next = next, current[:0]
		count := nextCount
		nextCount = nil

		// Scan every struct queued at this depth.
		for _, scan := range current {
			t := scan.typ
			if visited[t] {
				// Seen at a shallower level already; anything it
				// would contribute here is shadowed.
				continue
			}
			visited[t] = true
			for i := range t.Fields {
				f := &t.Fields[i]
				fname := f.Name.Name()
				var ntyp *abi.Type
				if f.Embedded() {
					// Embedded field of type T or *T.
					ntyp = f.Typ
					if ntyp.Kind() == abi.Pointer {
						ntyp = ntyp.Elem()
					}
				}

				// Does this field's own name match?
				if match(fname) {
					// A second match at this depth, or a match in a
					// multiply-reachable type, is ambiguous.
					if count[t] > 1 || ok {
						return StructField{}, false
					}
					result = t.Field(i)
					result.Index = nil
					result.Index = append(result.Index, scan.index...)
					result.Index = append(result.Index, i)
					ok = true
					continue
				}

				// Queue embedded struct types for the next level,
				// unless a match was already found at this level.
				if ok || ntyp == nil || ntyp.Kind() != abi.Struct {
					continue
				}
				styp := (*structType)(unsafe.Pointer(ntyp))
				if nextCount[styp] > 0 {
					nextCount[styp] = 2 // cap at 2: "more than once"
					continue
				}
				if nextCount == nil {
					nextCount = map[*structType]int{}
				}
				nextCount[styp] = 1
				if count[t] > 1 {
					// Reached through an already-ambiguous parent.
					nextCount[styp] = 2
				}
				var index []int
				index = append(index, scan.index...)
				index = append(index, i)
				next = append(next, fieldScan{styp, index})
			}
		}
		if ok {
			break
		}
	}
	return
}
| |
|
| | |
| | |
// FieldByName returns the struct field with the given name
// and a boolean to indicate if the field was found.
// A direct (top-level) field always wins; the breadth-first
// FieldByNameFunc search runs only when embedded fields exist and
// no direct field matched.
func (t *structType) FieldByName(name string) (f StructField, present bool) {
	// Quick check for a top-level name match.
	hasEmbeds := false
	if name != "" {
		for i := range t.Fields {
			tf := &t.Fields[i]
			if tf.Name.Name() == name {
				return t.Field(i), true
			}
			if tf.Embedded() {
				hasEmbeds = true
			}
		}
	}
	if !hasEmbeds {
		return
	}
	return t.FieldByNameFunc(func(s string) bool { return s == name })
}
| |
|
| | |
| | |
// TypeOf returns the reflection Type that represents the dynamic type
// of i. If i is a nil interface value, TypeOf returns nil.
func TypeOf(i any) Type {
	return toType(abi.TypeOf(i))
}

// TypeFor returns the Type that represents the type argument T.
func TypeFor[T any]() Type {
	// abi.TypeFor is never nil, so skip the nil check in toType.
	return toRType(abi.TypeFor[T]())
}

// rtypeOf directly extracts the *abi.Type of the provided value.
func rtypeOf(i any) *abi.Type {
	return abi.TypeOf(i)
}
| |
|
| | |
// ptrMap is the cache for PointerTo: map[*rtype]*ptrType.
var ptrMap sync.Map

// PtrTo returns the pointer type with element t.
// For example, if t represents type Foo, PtrTo(t) represents *Foo.
//
// PtrTo is the old spelling of [PointerTo].
// The two functions behave identically.
//
// Deprecated: Superseded by [PointerTo].
func PtrTo(t Type) Type { return PointerTo(t) }

// PointerTo returns the pointer type with element t.
// For example, if t represents type Foo, PointerTo(t) represents *Foo.
func PointerTo(t Type) Type {
	return toRType(t.(*rtype).ptrTo())
}
| |
|
// ptrTo returns the *abi.Type for *t, checking, in order: the type's
// own PtrToThis link, the ptrMap cache, the linker's typesByString
// index, and finally synthesizing a new ptrType from a prototype.
func (t *rtype) ptrTo() *abi.Type {
	at := &t.t
	if at.PtrToThis != 0 {
		return t.typeOff(at.PtrToThis)
	}

	// Check the cache.
	if pi, ok := ptrMap.Load(t); ok {
		return &pi.(*ptrType).Type
	}

	// Look in known types (types compiled into the binary).
	s := "*" + t.String()
	for _, tt := range typesByString(s) {
		p := (*ptrType)(unsafe.Pointer(tt))
		if p.Elem != &t.t {
			// Same string, different element type — not ours.
			continue
		}
		pi, _ := ptrMap.LoadOrStore(t, p)
		return &pi.(*ptrType).Type
	}

	// Create a new ptrType starting with a copy of *unsafe.Pointer,
	// which has all pointer types' common layout.
	var iptr any = (*unsafe.Pointer)(nil)
	prototype := *(**ptrType)(unsafe.Pointer(&iptr))
	pp := *prototype

	pp.Str = resolveReflectName(newName(s, "", false, false))
	pp.PtrToThis = 0

	// For the type structures linked into the binary, the
	// compiler provides a good hash of the string.
	// Create a good hash for the new string by using
	// the FNV-1 hash's mixing function to combine the
	// old hash and the new "*".
	pp.Hash = fnv1(t.t.Hash, '*')

	pp.Elem = at

	// Cache it; LoadOrStore keeps whichever pp another goroutine
	// may have stored first.
	pi, _ := ptrMap.LoadOrStore(t, &pp)
	return &pi.(*ptrType).Type
}

// ptrTo is the free-function form of (*rtype).ptrTo.
func ptrTo(t *abi.Type) *abi.Type {
	return toRType(t).ptrTo()
}
| |
|
| | |
// fnv1 folds each byte of list into the running FNV-1 hash x —
// multiply by the 32-bit FNV prime, then XOR in the byte — and
// returns the updated hash. With no bytes it returns x unchanged.
func fnv1(x uint32, list ...byte) uint32 {
	const prime = 16777619 // 32-bit FNV prime
	h := x
	for i := 0; i < len(list); i++ {
		h = h*prime ^ uint32(list[i])
	}
	return h
}
| |
|
// Implements reports whether t implements the interface type u;
// panics if u is nil or not an interface type.
func (t *rtype) Implements(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.Implements")
	}
	if u.Kind() != Interface {
		panic("reflect: non-interface type passed to Type.Implements")
	}
	return implements(u.common(), t.common())
}

// AssignableTo reports whether a value of type t is assignable to u:
// either directly assignable or t implements the interface u.
func (t *rtype) AssignableTo(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.AssignableTo")
	}
	uu := u.common()
	return directlyAssignable(uu, t.common()) || implements(uu, t.common())
}

// ConvertibleTo reports whether a value of type t is convertible to u.
func (t *rtype) ConvertibleTo(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.ConvertibleTo")
	}
	return convertOp(u.common(), t.common()) != nil
}

// Comparable reports whether values of type t are comparable;
// incomparable types have no equality function.
func (t *rtype) Comparable() bool {
	return t.t.Equal != nil
}
| |
|
| | |
// implements reports whether the type V implements the interface type T.
func implements(T, V *abi.Type) bool {
	if T.Kind() != abi.Interface {
		return false
	}
	t := (*interfaceType)(unsafe.Pointer(T))
	if len(t.Methods) == 0 {
		// The empty interface is implemented by everything.
		return true
	}

	// Both interface method tables and concrete-type method tables
	// are stored sorted by name, so checking T against V walks both
	// lists with a single advancing pointer into T's methods (i):
	// each of T's methods must appear, in order, somewhere in V's
	// list, with matching name, type, and (for unexported methods)
	// package path.

	// V is itself an interface: compare against its Imethod list.
	if V.Kind() == abi.Interface {
		v := (*interfaceType)(unsafe.Pointer(V))
		i := 0
		for j := 0; j < len(v.Methods); j++ {
			tm := &t.Methods[i]
			tmName := t.nameOff(tm.Name)
			vm := &v.Methods[j]
			vmName := nameOffFor(V, vm.Name)
			if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Typ) == t.typeOff(tm.Typ) {
				if !tmName.IsExported() {
					// Unexported methods match only within one package.
					tmPkgPath := pkgPath(tmName)
					if tmPkgPath == "" {
						tmPkgPath = t.PkgPath.Name()
					}
					vmPkgPath := pkgPath(vmName)
					if vmPkgPath == "" {
						vmPkgPath = v.PkgPath.Name()
					}
					if tmPkgPath != vmPkgPath {
						continue
					}
				}
				if i++; i >= len(t.Methods) {
					// All of T's methods found.
					return true
				}
			}
		}
		return false
	}

	// V is a concrete type: compare against its uncommon method table.
	v := V.Uncommon()
	if v == nil {
		return false
	}
	i := 0
	vmethods := v.Methods()
	for j := 0; j < int(v.Mcount); j++ {
		tm := &t.Methods[i]
		tmName := t.nameOff(tm.Name)
		vm := vmethods[j]
		vmName := nameOffFor(V, vm.Name)
		if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Mtyp) == t.typeOff(tm.Typ) {
			if !tmName.IsExported() {
				// Unexported methods match only within one package.
				tmPkgPath := pkgPath(tmName)
				if tmPkgPath == "" {
					tmPkgPath = t.PkgPath.Name()
				}
				vmPkgPath := pkgPath(vmName)
				if vmPkgPath == "" {
					vmPkgPath = nameOffFor(V, v.PkgPath).Name()
				}
				if tmPkgPath != vmPkgPath {
					continue
				}
			}
			if i++; i >= len(t.Methods) {
				// All of T's methods found.
				return true
			}
		}
	}
	return false
}
| |
|
| | |
| | |
| | |
| | |
// specialChannelAssignability reports whether a value x of channel type V
// can be directly assigned (using memmove) to another channel type T.
// https://golang.org/doc/go_spec.html#Assignability
// T and V must be both of Chan kind.
func specialChannelAssignability(T, V *abi.Type) bool {
	// Special case:
	// x is a bidirectional channel value, T is a channel type,
	// x's type V and T have identical element types,
	// and at least one of V or T is not a defined type.
	return V.ChanDir() == abi.BothDir && (nameFor(T) == "" || nameFor(V) == "") && haveIdenticalType(T.Elem(), V.Elem(), true)
}
| |
|
| | |
| | |
| | |
| | |
| | |
// directlyAssignable reports whether a value x of type V can be directly
// assigned (using memmove) to a value of type T.
// https://golang.org/doc/go_spec.html#Assignability
// Ignoring the interface rules (implements, below) and the ideal
// constant rules (no ideal constants at run time).
func directlyAssignable(T, V *abi.Type) bool {
	// x's type V is identical to T?
	if T == V {
		return true
	}

	// Otherwise at least one of T and V must not be defined
	// and they must have the same kind.
	if T.HasName() && V.HasName() || T.Kind() != V.Kind() {
		return false
	}

	if T.Kind() == abi.Chan && specialChannelAssignability(T, V) {
		return true
	}

	// x's type T and V must have identical underlying types.
	return haveIdenticalUnderlyingType(T, V, true)
}
| |
|
// haveIdenticalType reports whether T and V are identical types.
// With cmpTags set, identity is pointer equality (struct tags count);
// otherwise names, kinds, package paths, and underlying types are
// compared while ignoring tags.
func haveIdenticalType(T, V *abi.Type, cmpTags bool) bool {
	if cmpTags {
		return T == V
	}

	if nameFor(T) != nameFor(V) || T.Kind() != V.Kind() || pkgPathFor(T) != pkgPathFor(V) {
		return false
	}

	return haveIdenticalUnderlyingType(T, V, false)
}
| |
|
// haveIdenticalUnderlyingType reports whether T and V have identical
// underlying types, recursing structurally through composite kinds.
// cmpTags is propagated to haveIdenticalType for component types.
func haveIdenticalUnderlyingType(T, V *abi.Type, cmpTags bool) bool {
	if T == V {
		return true
	}

	kind := Kind(T.Kind())
	if kind != Kind(V.Kind()) {
		return false
	}

	// Non-composite types of equal kind have same underlying type
	// (the predefined instance of the type).
	if Bool <= kind && kind <= Complex128 || kind == String || kind == UnsafePointer {
		return true
	}

	// Composite types.
	switch kind {
	case Array:
		return T.Len() == V.Len() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Chan:
		return V.ChanDir() == T.ChanDir() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Func:
		t := (*funcType)(unsafe.Pointer(T))
		v := (*funcType)(unsafe.Pointer(V))
		// Equal counts (InCount carries the variadic bit in its
		// representation — identical counts imply identical
		// variadicity), then pairwise-identical parameter and
		// result types.
		if t.OutCount != v.OutCount || t.InCount != v.InCount {
			return false
		}
		for i := 0; i < t.NumIn(); i++ {
			if !haveIdenticalType(t.In(i), v.In(i), cmpTags) {
				return false
			}
		}
		for i := 0; i < t.NumOut(); i++ {
			if !haveIdenticalType(t.Out(i), v.Out(i), cmpTags) {
				return false
			}
		}
		return true

	case Interface:
		t := (*interfaceType)(unsafe.Pointer(T))
		v := (*interfaceType)(unsafe.Pointer(V))
		if len(t.Methods) == 0 && len(v.Methods) == 0 {
			return true
		}
		// Might have the same methods but still
		// need a run time conversion.
		return false

	case Map:
		return haveIdenticalType(T.Key(), V.Key(), cmpTags) && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Pointer, Slice:
		return haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Struct:
		t := (*structType)(unsafe.Pointer(T))
		v := (*structType)(unsafe.Pointer(V))
		if len(t.Fields) != len(v.Fields) {
			return false
		}
		if t.PkgPath.Name() != v.PkgPath.Name() {
			return false
		}
		for i := range t.Fields {
			tf := &t.Fields[i]
			vf := &v.Fields[i]
			if tf.Name.Name() != vf.Name.Name() {
				return false
			}
			if !haveIdenticalType(tf.Typ, vf.Typ, cmpTags) {
				return false
			}
			if cmpTags && tf.Name.Tag() != vf.Name.Tag() {
				return false
			}
			if tf.Offset != vf.Offset {
				return false
			}
			if tf.Embedded() != vf.Embedded() {
				return false
			}
		}
		return true
	}

	return false
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// typelinks is implemented in package runtime.
// It returns a slice of the sections in each module,
// and a slice of *rtype offsets in each module.
//
// Within each module, the types are sorted by their string representation.
// Note that strings are not unique identifiers for types:
// there can be more than one type with a given string.
func typelinks() (sections []unsafe.Pointer, offset [][]int32)
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// rtypeOff returns the *abi.Type stored at byte offset off within the
// given typelinks section (see typelinks above).
func rtypeOff(section unsafe.Pointer, off int32) *abi.Type {
	return (*abi.Type)(add(section, uintptr(off), "sizeof(rtype) > 0"))
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// typesByString returns the subslice of typelinks() whose elements have
// the given string representation.
// It may be empty (no known types with that string) or may have
// multiple elements (multiple types with that string).
func typesByString(s string) []*abi.Type {
	sections, offset := typelinks()
	var ret []*abi.Type

	for offsI, offs := range offset {
		section := sections[offsI]

		// We are looking for the first index i where the string becomes >= s.
		// This is a copy of sort.Search, with f(h) replaced by (stringFor(typ[h]) >= s).
		i, j := 0, len(offs)
		for i < j {
			h := int(uint(i+j) >> 1) // avoid overflow when computing h
			// i ≤ h < j
			if !(stringFor(rtypeOff(section, offs[h])) >= s) {
				i = h + 1 // preserves f(i-1) == false
			} else {
				j = h // preserves f(j) == true
			}
		}
		// i == j, f(i-1) == false, and f(j) (= f(i)) == true  =>  answer is i.

		// Having found the first match, linearly scan forward collecting
		// every consecutive type with the same string.
		for j := i; j < len(offs); j++ {
			typ := rtypeOff(section, offs[j])
			if stringFor(typ) != s {
				break
			}
			ret = append(ret, typ)
		}
	}
	return ret
}
| |
|
| | |
// The lookupCache caches ArrayOf, ChanOf, and SliceOf lookups (map[cacheKey]*rtype).
var lookupCache sync.Map
| |
|
| | |
| | |
| | |
// A cacheKey is the key for use in the lookupCache.
// Four values describe any of the types we are looking for:
// type kind, one or two subtypes, and an extra integer
// (e.g. channel direction or array length).
type cacheKey struct {
	kind Kind
	t1 *abi.Type
	t2 *abi.Type
	extra uintptr
}
| |
|
| | |
| | |
| | |
// The funcLookupCache caches FuncOf lookups.
// FuncOf does not share the common lookupCache since cacheKey is not
// sufficient to represent functions unambiguously.
var funcLookupCache struct {
	sync.Mutex // Guards stores (but not loads) on m.

	// m is a map[uint32][]*abi.Type keyed by the hash calculated in FuncOf.
	// Elements of m are append-only and thus safe for concurrent reading.
	m sync.Map
}
| |
|
| | |
| | |
| | |
| | |
| | |
// ChanOf returns the channel type with the given direction and element type.
// For example, if t represents int, ChanOf(RecvDir, t) represents <-chan int.
//
// The gc runtime imposes a limit of 64 kB on channel element types.
// If t's size is equal to or exceeds this limit, ChanOf panics.
func ChanOf(dir ChanDir, t Type) Type {
	typ := t.common()

	// Look in cache.
	ckey := cacheKey{Chan, typ, nil, uintptr(dir)}
	if ch, ok := lookupCache.Load(ckey); ok {
		return ch.(*rtype)
	}

	// This restriction is imposed by the gc compiler and the runtime.
	if typ.Size_ >= 1<<16 {
		panic("reflect.ChanOf: element size too large")
	}

	// Look in known types.
	var s string
	switch dir {
	default:
		panic("reflect.ChanOf: invalid dir")
	case SendDir:
		s = "chan<- " + stringFor(typ)
	case RecvDir:
		s = "<-chan " + stringFor(typ)
	case BothDir:
		typeStr := stringFor(typ)
		if typeStr[0] == '<' {
			// typ is recv chan, need parentheses as "<-" associates with leftmost
			// chan possible, eg:
			// chan <-chan int is (chan<- chan int)
			// chan (<-chan int) is different
			s = "chan (" + typeStr + ")"
		} else {
			s = "chan " + typeStr
		}
	}
	for _, tt := range typesByString(s) {
		ch := (*chanType)(unsafe.Pointer(tt))
		if ch.Elem == typ && ch.Dir == abi.ChanDir(dir) {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a channel type by cloning the prototype's layout.
	var ichan any = (chan unsafe.Pointer)(nil)
	prototype := *(**chanType)(unsafe.Pointer(&ichan))
	ch := *prototype
	ch.TFlag = abi.TFlagRegularMemory | abi.TFlagDirectIface
	ch.Dir = abi.ChanDir(dir)
	ch.Str = resolveReflectName(newName(s, "", false, false))
	ch.Hash = fnv1(typ.Hash, 'c', byte(dir))
	ch.Elem = typ

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&ch.Type))
	return ti.(Type)
}
| |
|
// funcTypes caches the struct types built by initFuncTypes, indexed by
// the total argument count n; funcTypesMutex guards both variables.
var funcTypes []Type
var funcTypesMutex sync.Mutex
| |
|
// initFuncTypes returns (building and caching on first use) a struct type
// large enough to hold a funcType followed by an array of n *rtype
// argument pointers, matching the in-memory layout FuncOf needs.
func initFuncTypes(n int) Type {
	funcTypesMutex.Lock()
	defer funcTypesMutex.Unlock()
	// Grow the cache slice so index n is addressable.
	if n >= len(funcTypes) {
		newFuncTypes := make([]Type, n+1)
		copy(newFuncTypes, funcTypes)
		funcTypes = newFuncTypes
	}
	if funcTypes[n] != nil {
		return funcTypes[n]
	}

	funcTypes[n] = StructOf([]StructField{
		{
			Name: "FuncType",
			Type: TypeOf(funcType{}),
		},
		{
			Name: "Args",
			Type: ArrayOf(n, TypeOf(&rtype{})),
		},
	})
	return funcTypes[n]
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// FuncOf returns the function type with the given argument and result types.
// For example if k represents int and e represents string,
// FuncOf([]Type{k}, []Type{e}, false) represents func(int) string.
//
// The variadic argument controls whether the function is variadic. FuncOf
// panics if the in[len(in)-1] does not represent a slice and variadic is
// true.
func FuncOf(in, out []Type, variadic bool) Type {
	if variadic && (len(in) == 0 || in[len(in)-1].Kind() != Slice) {
		panic("reflect.FuncOf: last arg of variadic func must be slice")
	}

	// Make a func type.
	var ifunc any = (func())(nil)
	prototype := *(**funcType)(unsafe.Pointer(&ifunc))
	n := len(in) + len(out)

	if n > 128 {
		panic("reflect.FuncOf: too many arguments")
	}

	// o holds a funcType followed by an array of n *rtype, so that the
	// argument types directly follow the funcType in memory as the
	// runtime expects.
	o := New(initFuncTypes(n)).Elem()
	ft := (*funcType)(unsafe.Pointer(o.Field(0).Addr().Pointer()))
	args := unsafe.Slice((**rtype)(unsafe.Pointer(o.Field(1).Addr().Pointer())), n)[0:0:n]
	*ft = *prototype

	// Build a hash and minimally populate ft.
	var hash uint32
	for _, in := range in {
		t := in.(*rtype)
		args = append(args, t)
		hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
	}
	if variadic {
		hash = fnv1(hash, 'v')
	}
	hash = fnv1(hash, '.')
	for _, out := range out {
		t := out.(*rtype)
		args = append(args, t)
		hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
	}

	ft.TFlag = abi.TFlagDirectIface
	ft.Hash = hash
	ft.InCount = uint16(len(in))
	ft.OutCount = uint16(len(out))
	if variadic {
		// The high bit of OutCount records variadicity.
		ft.OutCount |= 1 << 15
	}

	// Look in cache.
	if ts, ok := funcLookupCache.m.Load(hash); ok {
		for _, t := range ts.([]*abi.Type) {
			if haveIdenticalUnderlyingType(&ft.Type, t, true) {
				return toRType(t)
			}
		}
	}

	// Not in cache, lock and retry.
	funcLookupCache.Lock()
	defer funcLookupCache.Unlock()
	if ts, ok := funcLookupCache.m.Load(hash); ok {
		for _, t := range ts.([]*abi.Type) {
			if haveIdenticalUnderlyingType(&ft.Type, t, true) {
				return toRType(t)
			}
		}
	}

	addToCache := func(tt *abi.Type) Type {
		var rts []*abi.Type
		if rti, ok := funcLookupCache.m.Load(hash); ok {
			rts = rti.([]*abi.Type)
		}
		funcLookupCache.m.Store(hash, append(rts, tt))
		return toType(tt)
	}

	// Look in known types for the same string representation.
	str := funcStr(ft)
	for _, tt := range typesByString(str) {
		if haveIdenticalUnderlyingType(&ft.Type, tt, true) {
			return addToCache(tt)
		}
	}

	// Populate the remaining fields of ft and store in cache.
	ft.Str = resolveReflectName(newName(str, "", false, false))
	ft.PtrToThis = 0
	return addToCache(&ft.Type)
}
// stringFor returns the string representation of t, via the rtype wrapper.
func stringFor(t *abi.Type) string {
	return toRType(t).String()
}
| |
|
| | |
// funcStr builds a string representation of a funcType,
// e.g. "func(int, ...string) (bool, error)".
func funcStr(ft *funcType) string {
	repr := make([]byte, 0, 64)
	repr = append(repr, "func("...)
	for i, t := range ft.InSlice() {
		if i > 0 {
			repr = append(repr, ", "...)
		}
		if ft.IsVariadic() && i == int(ft.InCount)-1 {
			// The final variadic parameter prints as "...E", where E is the
			// element type of the slice parameter.
			repr = append(repr, "..."...)
			repr = append(repr, stringFor((*sliceType)(unsafe.Pointer(t)).Elem)...)
		} else {
			repr = append(repr, stringFor(t)...)
		}
	}
	repr = append(repr, ')')
	out := ft.OutSlice()
	// A single result is unparenthesized; multiple results are wrapped in "( )".
	if len(out) == 1 {
		repr = append(repr, ' ')
	} else if len(out) > 1 {
		repr = append(repr, " ("...)
	}
	for i, t := range out {
		if i > 0 {
			repr = append(repr, ", "...)
		}
		repr = append(repr, stringFor(t)...)
	}
	if len(out) > 1 {
		repr = append(repr, ')')
	}
	return string(repr)
}
| |
|
| | |
| | |
// isReflexive reports whether the == operation on the type is reflexive.
// That is, x == x for all values x of type t.
func isReflexive(t *abi.Type) bool {
	switch Kind(t.Kind()) {
	case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, String, UnsafePointer:
		return true
	case Float32, Float64, Complex64, Complex128, Interface:
		// NaN != NaN, and interfaces may hold such float values.
		return false
	case Array:
		tt := (*arrayType)(unsafe.Pointer(t))
		return isReflexive(tt.Elem)
	case Struct:
		tt := (*structType)(unsafe.Pointer(t))
		for _, f := range tt.Fields {
			if !isReflexive(f.Typ) {
				return false
			}
		}
		return true
	default:
		// Func, Map, Slice, Invalid
		panic("isReflexive called on non-key type " + stringFor(t))
	}
}
| |
|
| | |
// needKeyUpdate reports whether map overwrites require the key to be copied.
func needKeyUpdate(t *abi.Type) bool {
	switch Kind(t.Kind()) {
	case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, UnsafePointer:
		return false
	case Float32, Float64, Complex64, Complex128, Interface, String:
		// Float keys can be updated from +0 to -0.
		// String keys can be updated to use a smaller backing store.
		// Interfaces can be updated similarly to strings.
		return true
	case Array:
		tt := (*arrayType)(unsafe.Pointer(t))
		return needKeyUpdate(tt.Elem)
	case Struct:
		tt := (*structType)(unsafe.Pointer(t))
		for _, f := range tt.Fields {
			if needKeyUpdate(f.Typ) {
				return true
			}
		}
		return false
	default:
		// Func, Map, Slice, Invalid
		panic("needKeyUpdate called on non-key type " + stringFor(t))
	}
}
| |
|
| | |
// hashMightPanic reports whether the hash of a map key of type t might panic.
func hashMightPanic(t *abi.Type) bool {
	switch Kind(t.Kind()) {
	case Interface:
		// Hashing an interface panics if the dynamic type is not hashable.
		return true
	case Array:
		tt := (*arrayType)(unsafe.Pointer(t))
		return hashMightPanic(tt.Elem)
	case Struct:
		tt := (*structType)(unsafe.Pointer(t))
		for _, f := range tt.Fields {
			if hashMightPanic(f.Typ) {
				return true
			}
		}
		return false
	default:
		return false
	}
}
| |
|
| | |
| | |
// emitGCMask writes the GC mask for [n]typ into out, starting at bit
// offset base.
func emitGCMask(out []byte, base uintptr, typ *abi.Type, n uintptr) {
	ptrs := typ.PtrBytes / goarch.PtrSize
	words := typ.Size_ / goarch.PtrSize
	mask := typ.GcSlice(0, (ptrs+7)/8)
	for j := uintptr(0); j < ptrs; j++ {
		if (mask[j/8]>>(j%8))&1 != 0 {
			// Word j of the element holds a pointer: set the corresponding
			// bit in each of the n repetitions of the element.
			for i := uintptr(0); i < n; i++ {
				k := base + i*words + j
				out[k/8] |= 1 << (k % 8)
			}
		}
	}
}
| |
|
| | |
| | |
// SliceOf returns the slice type with element type t.
// For example, if t represents int, SliceOf(t) represents []int.
func SliceOf(t Type) Type {
	typ := t.common()

	// Look in cache.
	ckey := cacheKey{Slice, typ, nil, 0}
	if slice, ok := lookupCache.Load(ckey); ok {
		return slice.(Type)
	}

	// Look in known types.
	s := "[]" + stringFor(typ)
	for _, tt := range typesByString(s) {
		slice := (*sliceType)(unsafe.Pointer(tt))
		if slice.Elem == typ {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a slice type by cloning the prototype's layout.
	var islice any = ([]unsafe.Pointer)(nil)
	prototype := *(**sliceType)(unsafe.Pointer(&islice))
	slice := *prototype
	slice.TFlag = 0
	slice.Str = resolveReflectName(newName(s, "", false, false))
	slice.Hash = fnv1(typ.Hash, '[')
	slice.Elem = typ
	slice.PtrToThis = 0

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&slice.Type))
	return ti.(Type)
}
| |
|
| | |
| | |
| | |
// The structLookupCache caches StructOf lookups.
// StructOf does not share the common lookupCache since we need to pin
// the memory associated with the constructed struct types.
var structLookupCache struct {
	sync.Mutex // Guards stores (but not loads) on m.

	// m is a map[uint32][]Type keyed by the hash calculated in StructOf.
	// Elements in m are append-only and thus safe for concurrent reading.
	m sync.Map
}
| |
|
// structTypeUncommon is a structType immediately followed by an
// uncommonType, so StructOf can allocate both in a single object when
// the struct has no methods.
type structTypeUncommon struct {
	structType
	u uncommonType
}
| |
|
| | |
// isLetter reports whether ch may begin a Go identifier: an ASCII letter,
// an underscore, or any non-ASCII rune that Unicode classifies as a letter.
func isLetter(ch rune) bool {
	switch {
	case 'a' <= ch && ch <= 'z':
		return true
	case 'A' <= ch && ch <= 'Z':
		return true
	case ch == '_':
		return true
	case ch >= utf8.RuneSelf:
		return unicode.IsLetter(ch)
	default:
		return false
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// isValidFieldName checks if a string is a valid (struct) field name or not.
//
// According to the language spec, a field name should be an identifier.
//
//	identifier = letter { letter | unicode_digit } .
//	letter     = unicode_letter | "_" .
func isValidFieldName(fieldName string) bool {
	if fieldName == "" {
		return false
	}
	for i, c := range fieldName {
		// A "letter" is an ASCII letter, '_', or a non-ASCII Unicode letter.
		letter := 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c >= utf8.RuneSelf && unicode.IsLetter(c)
		if i == 0 {
			// The first rune must be a letter.
			if !letter {
				return false
			}
			continue
		}
		// Subsequent runes may be letters or Unicode digits.
		if !letter && !unicode.IsDigit(c) {
			return false
		}
	}
	return true
}
| |
|
| | |
// isRegularMemory reports whether t can be compared and hashed as regular
// memory (plain bytewise comparison, no special cases such as floats,
// strings, interfaces, or padding).
func isRegularMemory(t Type) bool {
	switch t.Kind() {
	case Array:
		elem := t.Elem()
		if isRegularMemory(elem) {
			return true
		}
		// A zero-length array of a merely comparable element still
		// compares as regular memory (there is nothing to compare).
		return elem.Comparable() && t.Len() == 0
	case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Chan, Pointer, Bool, UnsafePointer:
		return true
	case Struct:
		num := t.NumField()
		switch num {
		case 0:
			return true
		case 1:
			field := t.Field(0)
			// A single blank field makes the size/identity ambiguous.
			if field.Name == "_" {
				return false
			}
			return isRegularMemory(field.Type)
		default:
			// Every field must itself be regular memory, non-blank,
			// and not followed by padding.
			for i := range num {
				field := t.Field(i)
				if field.Name == "_" || !isRegularMemory(field.Type) || isPaddedField(t, i) {
					return false
				}
			}
			return true
		}
	}
	return false
}
| |
|
| | |
| | |
// isPaddedField reports whether the i'th field of struct type t is followed
// by padding (either before the next field or at the end of the struct).
func isPaddedField(t Type, i int) bool {
	field := t.Field(i)
	if i+1 < t.NumField() {
		return field.Offset+field.Type.Size() != t.Field(i+1).Offset
	}
	return field.Offset+field.Type.Size() != t.Size()
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// StructOf returns the struct type containing fields.
// The Offset and Index fields are ignored and computed as they would be
// by the compiler.
//
// StructOf currently does not support promoted methods of embedded fields
// and panics if passed unexported StructFields.
func StructOf(fields []StructField) Type {
	var (
		hash = fnv1(0, []byte("struct {")...)
		size uintptr
		typalign uint8
		comparable = true
		methods []abi.Method

		fs = make([]structField, len(fields))
		repr = make([]byte, 0, 64)
		fset = map[string]struct{}{} // fields' names
	)

	lastzero := uintptr(0)
	repr = append(repr, "struct {"...)
	pkgpath := ""
	for i, field := range fields {
		if field.Name == "" {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no name")
		}
		if !isValidFieldName(field.Name) {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has invalid name")
		}
		if field.Type == nil {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no type")
		}
		f, fpkgpath := runtimeStructField(field)
		ft := f.Typ
		// All unexported fields must share a single package path.
		if fpkgpath != "" {
			if pkgpath == "" {
				pkgpath = fpkgpath
			} else if pkgpath != fpkgpath {
				panic("reflect.Struct: fields with different PkgPath " + pkgpath + " and " + fpkgpath)
			}
		}

		// Update string and hash.
		name := f.Name.Name()
		hash = fnv1(hash, []byte(name)...)
		if !f.Embedded() {
			repr = append(repr, (" " + name)...)
		} else {
			// Embedded field
			if f.Typ.Kind() == abi.Pointer {
				// Embedded ** and *interface{} are illegal
				elem := ft.Elem()
				if k := elem.Kind(); k == abi.Pointer || k == abi.Interface {
					panic("reflect.StructOf: illegal embedded field type " + stringFor(ft))
				}
			}

			switch Kind(f.Typ.Kind()) {
			case Interface:
				ift := (*interfaceType)(unsafe.Pointer(ft))
				for _, m := range ift.Methods {
					if pkgPath(ift.nameOff(m.Name)) != "" {
						// TODO(sbinet). Issue 15924.
						panic("reflect: embedded interface with unexported method(s) not implemented")
					}

					// Promoted interface methods get a stub that panics when called.
					fnStub := resolveReflectText(unsafe.Pointer(abi.FuncPCABIInternal(embeddedIfaceMethStub)))
					methods = append(methods, abi.Method{
						Name: resolveReflectName(ift.nameOff(m.Name)),
						Mtyp: resolveReflectType(ift.typeOff(m.Typ)),
						Ifn: fnStub,
						Tfn: fnStub,
					})
				}
			case Pointer:
				ptr := (*ptrType)(unsafe.Pointer(ft))
				if unt := ptr.Uncommon(); unt != nil {
					if i > 0 && unt.Mcount > 0 {
						// Issue 15924.
						panic("reflect: embedded type with methods not implemented if type is not first field")
					}
					if len(fields) > 1 {
						panic("reflect: embedded type with methods not implemented if there is more than one field")
					}
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							// TODO(sbinet). Issue 15924.
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
							Ifn: resolveReflectText(textOffFor(ft, m.Ifn)),
							Tfn: resolveReflectText(textOffFor(ft, m.Tfn)),
						})
					}
				}
				// Also promote methods declared on the pointee.
				if unt := ptr.Elem.Uncommon(); unt != nil {
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							// TODO(sbinet). Issue 15924.
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ptr.Elem, m.Mtyp)),
							Ifn: resolveReflectText(textOffFor(ptr.Elem, m.Ifn)),
							Tfn: resolveReflectText(textOffFor(ptr.Elem, m.Tfn)),
						})
					}
				}
			default:
				if unt := ft.Uncommon(); unt != nil {
					if i > 0 && unt.Mcount > 0 {
						// Issue 15924.
						panic("reflect: embedded type with methods not implemented if type is not first field")
					}
					if len(fields) > 1 && ft.IsDirectIface() {
						panic("reflect: embedded type with methods not implemented for non-pointer type")
					}
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							// TODO(sbinet). Issue 15924.
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
							Ifn: resolveReflectText(textOffFor(ft, m.Ifn)),
							Tfn: resolveReflectText(textOffFor(ft, m.Tfn)),
						})

					}
				}
			}
		}
		if _, dup := fset[name]; dup && name != "_" {
			panic("reflect.StructOf: duplicate field " + name)
		}
		fset[name] = struct{}{}

		hash = fnv1(hash, byte(ft.Hash>>24), byte(ft.Hash>>16), byte(ft.Hash>>8), byte(ft.Hash))

		repr = append(repr, (" " + stringFor(ft))...)
		if f.Name.HasTag() {
			hash = fnv1(hash, []byte(f.Name.Tag())...)
			repr = append(repr, (" " + strconv.Quote(f.Name.Tag()))...)
		}
		if i < len(fields)-1 {
			repr = append(repr, ';')
		}

		comparable = comparable && (ft.Equal != nil)

		// Lay out the field: round up to the field's alignment, checking
		// for address-space overflow at each step.
		offset := align(size, uintptr(ft.Align_))
		if offset < size {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
		if ft.Align_ > typalign {
			typalign = ft.Align_
		}
		size = offset + ft.Size_
		if size < offset {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
		f.Offset = offset

		if ft.Size_ == 0 {
			lastzero = size
		}

		fs[i] = f
	}

	if size > 0 && lastzero == size {
		// This is a non-zero sized struct that ends in a
		// zero-sized field. We add an extra byte of padding,
		// to ensure that taking the address of that
		// zero-sized field can't manufacture a pointer to the
		// next object in the heap. See issue 9401.
		size++
		if size == 0 {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
	}

	var typ *structType
	var ut *uncommonType

	if len(methods) == 0 {
		t := new(structTypeUncommon)
		typ = &t.structType
		ut = &t.u
	} else {
		// A *rtype representing a struct is followed directly in memory by an
		// array of method objects representing the methods attached to the
		// struct. To get the same layout for a run time generated type, we
		// need an array directly following the uncommonType memory.
		tt := New(StructOf([]StructField{
			{Name: "S", Type: TypeOf(structType{})},
			{Name: "U", Type: TypeOf(uncommonType{})},
			{Name: "M", Type: ArrayOf(len(methods), TypeOf(methods[0]))},
		}))

		typ = (*structType)(tt.Elem().Field(0).Addr().UnsafePointer())
		ut = (*uncommonType)(tt.Elem().Field(1).Addr().UnsafePointer())

		copy(tt.Elem().Field(2).Slice(0, len(methods)).Interface().([]abi.Method), methods)
	}
	// TODO(sbinet): Once we allow embedding multiple types,
	// methods will need to be sorted like the compiler does.
	// TODO(sbinet): Once we allow non-exported methods, we will
	// need to compute xcount as the number of exported methods.
	ut.Mcount = uint16(len(methods))
	ut.Xcount = ut.Mcount
	ut.Moff = uint32(unsafe.Sizeof(uncommonType{}))

	if len(fs) > 0 {
		repr = append(repr, ' ')
	}
	repr = append(repr, '}')
	hash = fnv1(hash, '}')
	str := string(repr)

	// Round the size up to be a multiple of the alignment.
	s := align(size, uintptr(typalign))
	if s < size {
		panic("reflect.StructOf: struct size would exceed virtual address space")
	}
	size = s

	// Make the struct type.
	var istruct any = struct{}{}
	prototype := *(**structType)(unsafe.Pointer(&istruct))
	*typ = *prototype
	typ.Fields = fs
	if pkgpath != "" {
		typ.PkgPath = newName(pkgpath, "", false, false)
	}

	// Look in cache.
	if ts, ok := structLookupCache.m.Load(hash); ok {
		for _, st := range ts.([]Type) {
			t := st.common()
			if haveIdenticalUnderlyingType(&typ.Type, t, true) {
				return toType(t)
			}
		}
	}

	// Not in cache, lock and retry.
	structLookupCache.Lock()
	defer structLookupCache.Unlock()
	if ts, ok := structLookupCache.m.Load(hash); ok {
		for _, st := range ts.([]Type) {
			t := st.common()
			if haveIdenticalUnderlyingType(&typ.Type, t, true) {
				return toType(t)
			}
		}
	}

	addToCache := func(t Type) Type {
		var ts []Type
		if ti, ok := structLookupCache.m.Load(hash); ok {
			ts = ti.([]Type)
		}
		structLookupCache.m.Store(hash, append(ts, t))
		return t
	}

	// Look in known types.
	for _, t := range typesByString(str) {
		if haveIdenticalUnderlyingType(&typ.Type, t, true) {
			// even if 't' wasn't a structType with methods, we should be ok
			// as the 'u uncommonType' field won't be accessed except when
			// tflag&abi.TFlagUncommon is set.
			return addToCache(toType(t))
		}
	}

	// Populate the remaining fields of typ.
	typ.Str = resolveReflectName(newName(str, "", false, false))
	if isRegularMemory(toType(&typ.Type)) {
		typ.TFlag = abi.TFlagRegularMemory
	} else {
		typ.TFlag = 0
	}
	typ.Hash = hash
	typ.Size_ = size
	typ.PtrBytes = typeptrdata(&typ.Type)
	typ.Align_ = typalign
	typ.FieldAlign_ = typalign
	typ.PtrToThis = 0
	if len(methods) > 0 {
		typ.TFlag |= abi.TFlagUncommon
	}

	// Build the GC pointer mask (or mark it on-demand for large types).
	if typ.PtrBytes == 0 {
		typ.GCData = nil
	} else if typ.PtrBytes <= abi.MaxPtrmaskBytes*8*goarch.PtrSize {
		bv := new(bitVector)
		addTypeBits(bv, 0, &typ.Type)
		typ.GCData = &bv.data[0]
	} else {
		// Runtime will build the mask if needed. We just need to allocate
		// space to store it.
		typ.TFlag |= abi.TFlagGCMaskOnDemand
		typ.GCData = (*byte)(unsafe.Pointer(new(uintptr)))
	}

	// Struct equality compares fields one at a time (only if all fields
	// are themselves comparable).
	typ.Equal = nil
	if comparable {
		typ.Equal = func(p, q unsafe.Pointer) bool {
			for _, ft := range typ.Fields {
				pi := add(p, ft.Offset, "&x.field safe")
				qi := add(q, ft.Offset, "&x.field safe")
				if !ft.Typ.Equal(pi, qi) {
					return false
				}
			}
			return true
		}
	}

	// A single-pointer-sized all-pointer struct is stored directly in
	// interface words.
	switch {
	case typ.Size_ == goarch.PtrSize && typ.PtrBytes == goarch.PtrSize:
		typ.TFlag |= abi.TFlagDirectIface
	default:
		typ.TFlag &^= abi.TFlagDirectIface
	}

	return addToCache(toType(&typ.Type))
}
| |
|
// embeddedIfaceMethStub is installed by StructOf as the implementation of
// methods promoted from an embedded interface; calling one always panics.
func embeddedIfaceMethStub() {
	panic("reflect: StructOf does not support methods of embedded interfaces")
}
| |
|
| | |
| | |
| | |
// runtimeStructField takes a StructField value passed to StructOf and
// returns both the corresponding internal representation, of type
// structField, and the pkgpath value to use for this field.
func runtimeStructField(field StructField) (structField, string) {
	if field.Anonymous && field.PkgPath != "" {
		panic("reflect.StructOf: field \"" + field.Name + "\" is anonymous but has PkgPath set")
	}

	if field.IsExported() {
		// Best-effort check for misuse: an ASCII-lowercase or '_' first
		// character means the name is unexported, which requires PkgPath.
		c := field.Name[0]
		if 'a' <= c && c <= 'z' || c == '_' {
			panic("reflect.StructOf: field \"" + field.Name + "\" is unexported but missing PkgPath")
		}
	}

	resolveReflectType(field.Type.common()) // install in runtime
	f := structField{
		Name: newName(field.Name, string(field.Tag), field.IsExported(), field.Anonymous),
		Typ: field.Type.common(),
		Offset: 0,
	}
	return f, field.PkgPath
}
| |
|
| | |
| | |
| | |
// typeptrdata returns the length in bytes of the prefix of t
// containing pointer data. Anything after this offset is scalar data.
func typeptrdata(t *abi.Type) uintptr {
	switch t.Kind() {
	case abi.Struct:
		st := (*structType)(unsafe.Pointer(t))
		// find the last field that has pointers.
		field := -1
		for i := range st.Fields {
			ft := st.Fields[i].Typ
			if ft.Pointers() {
				field = i
			}
		}
		if field == -1 {
			return 0
		}
		f := st.Fields[field]
		return f.Offset + f.Typ.PtrBytes

	default:
		panic("reflect.typeptrdata: unexpected type, " + stringFor(t))
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
// ArrayOf returns the array type with the given length and element type.
// For example, if t represents int, ArrayOf(5, t) represents [5]int.
//
// If the resulting type would be larger than the available address space,
// ArrayOf panics.
func ArrayOf(length int, elem Type) Type {
	if length < 0 {
		panic("reflect: negative length passed to ArrayOf")
	}

	typ := elem.common()

	// Look in cache.
	ckey := cacheKey{Array, typ, nil, uintptr(length)}
	if array, ok := lookupCache.Load(ckey); ok {
		return array.(Type)
	}

	// Look in known types.
	s := "[" + strconv.Itoa(length) + "]" + stringFor(typ)
	for _, tt := range typesByString(s) {
		array := (*arrayType)(unsafe.Pointer(tt))
		if array.Elem == typ {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make an array type by cloning the prototype's layout.
	var iarray any = [1]unsafe.Pointer{}
	prototype := *(**arrayType)(unsafe.Pointer(&iarray))
	array := *prototype
	array.TFlag = typ.TFlag & abi.TFlagRegularMemory
	array.Str = resolveReflectName(newName(s, "", false, false))
	// Hash mixes in the element hash and the length, a byte at a time.
	array.Hash = fnv1(typ.Hash, '[')
	for n := uint32(length); n > 0; n >>= 8 {
		array.Hash = fnv1(array.Hash, byte(n))
	}
	array.Hash = fnv1(array.Hash, ']')
	array.Elem = typ
	array.PtrToThis = 0
	if typ.Size_ > 0 {
		max := ^uintptr(0) / typ.Size_
		if uintptr(length) > max {
			panic("reflect.ArrayOf: array size would exceed virtual address space")
		}
	}
	array.Size_ = typ.Size_ * uintptr(length)
	if length > 0 && typ.Pointers() {
		// Pointer data stops after the last element's pointer prefix.
		array.PtrBytes = typ.Size_*uintptr(length-1) + typ.PtrBytes
	} else {
		array.PtrBytes = 0
	}
	array.Align_ = typ.Align_
	array.FieldAlign_ = typ.FieldAlign_
	array.Len = uintptr(length)
	array.Slice = &(SliceOf(elem).(*rtype).t)

	switch {
	case array.PtrBytes == 0:
		// No pointers.
		array.GCData = nil

	case length == 1:
		// In memory, a 1-element array looks just like the element.
		array.TFlag |= typ.TFlag & abi.TFlagGCMaskOnDemand
		array.GCData = typ.GCData

	case array.PtrBytes <= abi.MaxPtrmaskBytes*8*goarch.PtrSize:
		// Create pointer mask by repeating the element bitmask Len times.
		n := (array.PtrBytes/goarch.PtrSize + 7) / 8
		// Runtime needs pointer masks to be a multiple of uintptr in size.
		n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
		mask := make([]byte, n)
		emitGCMask(mask, 0, typ, array.Len)
		array.GCData = &mask[0]

	default:
		// Runtime will build the mask if needed. We just need to allocate
		// space to store it.
		array.TFlag |= abi.TFlagGCMaskOnDemand
		array.GCData = (*byte)(unsafe.Pointer(new(uintptr)))
	}

	etyp := typ
	esize := etyp.Size()

	// Array equality compares elements one at a time (only if the element
	// type is itself comparable).
	array.Equal = nil
	if eequal := etyp.Equal; eequal != nil {
		array.Equal = func(p, q unsafe.Pointer) bool {
			for i := 0; i < length; i++ {
				pi := arrayAt(p, i, esize, "i < length")
				qi := arrayAt(q, i, esize, "i < length")
				if !eequal(pi, qi) {
					return false
				}

			}
			return true
		}
	}

	// A single-pointer-sized all-pointer array is stored directly in
	// interface words.
	switch {
	case array.Size_ == goarch.PtrSize && array.PtrBytes == goarch.PtrSize:
		array.TFlag |= abi.TFlagDirectIface
	default:
		array.TFlag &^= abi.TFlagDirectIface
	}

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&array.Type))
	return ti.(Type)
}
| |
|
// appendVarint appends v to x in base-128 varint encoding (7 bits per
// byte, least-significant group first, high bit set on every byte except
// the last) and returns the extended slice.
func appendVarint(x []byte, v uintptr) []byte {
	for {
		if v < 0x80 {
			return append(x, byte(v))
		}
		x = append(x, byte(v)|0x80)
		v >>= 7
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// toType converts from a *abi.Type to a Type that can be returned
// to the client of package reflect. A nil *abi.Type must become a nil
// Type interface value, not an interface holding a typed nil pointer.
func toType(t *abi.Type) Type {
	if t == nil {
		return nil
	}
	return toRType(t)
}
| |
|
// layoutKey is the key for layoutCache: a function signature plus, for
// methods, the receiver type.
type layoutKey struct {
	ftyp *funcType // function signature
	rcvr *abi.Type // receiver type, or nil if none
}

// layoutType is the value stored in layoutCache by funcLayout.
type layoutType struct {
	t *abi.Type
	framePool *sync.Pool
	abid abiDesc
}

// layoutCache caches funcLayout results (map[layoutKey]layoutType).
var layoutCache sync.Map
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// funcLayout computes a struct type representing the layout of the
// stack-assigned function arguments and return values for the function
// type t.
// If rcvr != nil, rcvr specifies the type of the receiver.
// The returned type exists only for GC, so we only fill out GC relevant
// info. Currently, that's just size and the GC mask. We also fill in the
// name for possible debugging use.
func funcLayout(t *funcType, rcvr *abi.Type) (frametype *abi.Type, framePool *sync.Pool, abid abiDesc) {
	if t.Kind() != abi.Func {
		panic("reflect: funcLayout of non-func type " + stringFor(&t.Type))
	}
	if rcvr != nil && rcvr.Kind() == abi.Interface {
		panic("reflect: funcLayout with interface receiver " + stringFor(rcvr))
	}
	k := layoutKey{t, rcvr}
	if lti, ok := layoutCache.Load(k); ok {
		lt := lti.(layoutType)
		return lt.t, lt.framePool, lt.abid
	}

	// Compute the ABI layout.
	abid = newAbiDesc(t, rcvr)

	// build dummy rtype holding gc program
	x := &abi.Type{
		Align_: goarch.PtrSize,
		// Don't add spill space here; it's only necessary in
		// reflectcall's frame, not in the allocated frame.
		// TODO(mknyszek): Remove this comment when register
		// spill space in the frame is no longer required.
		Size_: align(abid.retOffset+abid.ret.stackBytes, goarch.PtrSize),
		PtrBytes: uintptr(abid.stackPtrs.n) * goarch.PtrSize,
	}
	if abid.stackPtrs.n > 0 {
		x.GCData = &abid.stackPtrs.data[0]
	}

	var s string
	if rcvr != nil {
		s = "methodargs(" + stringFor(rcvr) + ")(" + stringFor(&t.Type) + ")"
	} else {
		s = "funcargs(" + stringFor(&t.Type) + ")"
	}
	x.Str = resolveReflectName(newName(s, "", false, false))

	// cache result for future callers
	framePool = &sync.Pool{New: func() any {
		return unsafe_New(x)
	}}
	lti, _ := layoutCache.LoadOrStore(k, layoutType{
		t: x,
		framePool: framePool,
		abid: abid,
	})
	lt := lti.(layoutType)
	return lt.t, lt.framePool, lt.abid
}
| |
|
| | |
// bitVector holds a one-bit-per-pointer-word GC mask under construction.
type bitVector struct {
	n uint32 // number of bits
	data []byte
}
| |
|
| | |
// append a bit to the bitmap.
func (bv *bitVector) append(bit uint8) {
	if bv.n%(8*goarch.PtrSize) == 0 {
		// Runtime needs pointer masks to be a multiple of uintptr in size.
		// Since reflect passes bv.data directly to the runtime as a pointer mask,
		// we append a full uintptr of zeros at a time.
		for i := 0; i < goarch.PtrSize; i++ {
			bv.data = append(bv.data, 0)
		}
	}
	bv.data[bv.n/8] |= bit << (bv.n % 8)
	bv.n++
}
| |
|
// addTypeBits appends the pointer bitmap of a value of type t, located at
// the given byte offset, to bv.
func addTypeBits(bv *bitVector, offset uintptr, t *abi.Type) {
	if !t.Pointers() {
		return
	}

	switch Kind(t.Kind()) {
	case Chan, Func, Map, Pointer, Slice, String, UnsafePointer:
		// 1 pointer at start of representation
		for bv.n < uint32(offset/goarch.PtrSize) {
			bv.append(0)
		}
		bv.append(1)

	case Interface:
		// 2 pointers
		for bv.n < uint32(offset/goarch.PtrSize) {
			bv.append(0)
		}
		bv.append(1)
		bv.append(1)

	case Array:
		// repeat inner type
		tt := (*arrayType)(unsafe.Pointer(t))
		for i := 0; i < int(tt.Len); i++ {
			addTypeBits(bv, offset+uintptr(i)*tt.Elem.Size_, tt.Elem)
		}

	case Struct:
		// apply fields
		tt := (*structType)(unsafe.Pointer(t))
		for i := range tt.Fields {
			f := &tt.Fields[i]
			addTypeBits(bv, offset+f.Offset, f.Typ)
		}
	}
}
| |
|