| | |
| | |
| | |
| |
|
| | package reflect |
| |
|
| | import ( |
| | "errors" |
| | "internal/abi" |
| | "internal/goarch" |
| | "internal/strconv" |
| | "internal/unsafeheader" |
| | "iter" |
| | "math" |
| | "runtime" |
| | "unsafe" |
| | ) |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Value is the reflection interface to a Go value.
//
// The zero Value represents no value. A Value can be obtained from
// unpackEface or the package's constructor functions; most methods
// panic when called on a Value of the wrong Kind.
type Value struct {
	// typ_ holds the type of the value represented by a Value.
	// Access using the typ method to avoid escape of v.
	typ_ *abi.Type

	// ptr is pointer-valued data or, if flagIndir is set,
	// a pointer to the data.
	ptr unsafe.Pointer

	// flag holds metadata about the value.
	//
	// The lowest five bits give the Kind of the value,
	// mirroring typ_.Kind() (see flagKindWidth/flagKindMask below).
	//
	// The next set of bits are flag bits:
	//	- flagStickyRO: obtained via an unexported, not embedded field, so read-only
	//	- flagEmbedRO: obtained via an unexported embedded field, so read-only
	//	- flagAddr: v.CanAddr is true (implies flagIndir)
	//	- flagIndir: ptr holds a pointer to the data
	//	- flagMethod: v is a method value
	//
	// The remaining bits (starting at flagMethodShift) give a method
	// number for method values.
	flag

	// NOTE(review): when flagMethod is set, the value describes a curried
	// method invocation; the method index is flag >> flagMethodShift
	// (see methodReceiver below).
}
| |
|
// flag packs a Value's Kind, read-only status, indirection,
// addressability, and (for method values) a method index
// into a single uintptr. See the Value struct for the layout.
type flag uintptr

const (
	flagKindWidth        = 5 // number of bits holding the Kind
	flagKindMask    flag = 1<<flagKindWidth - 1
	flagStickyRO    flag = 1 << 5  // obtained via unexported, not embedded field
	flagEmbedRO     flag = 1 << 6  // obtained via unexported embedded field
	flagIndir       flag = 1 << 7  // ptr holds a pointer to the data
	flagAddr        flag = 1 << 8  // value is addressable
	flagMethod      flag = 1 << 9  // value is a method value
	flagMethodShift      = 10      // method index occupies the bits above this
	flagRO          flag = flagStickyRO | flagEmbedRO // either read-only bit
)
| |
|
// kind extracts the Kind stored in the low bits of the flag.
func (f flag) kind() Kind {
	return Kind(f & flagKindMask)
}
| |
|
| | func (f flag) ro() flag { |
| | if f&flagRO != 0 { |
| | return flagStickyRO |
| | } |
| | return 0 |
| | } |
| |
|
| | |
| | |
| | |
// typ returns the *abi.Type stored in the Value, laundered through
// abi.NoEscape so that reading the type does not force v (or what it
// points to) to escape to the heap.
func (v Value) typ() *abi.Type {
	// Types are either static (for compiler-created types) or
	// heap-allocated but always reachable (for reflection-created
	// types), so it is safe to hide the type pointer from escape
	// analysis with NoEscape.
	return (*abi.Type)(abi.NoEscape(unsafe.Pointer(v.typ_)))
}
| |
|
| | |
| | |
| | |
// pointer returns the underlying pointer represented by v.
// v's Kind must be a pointer-shaped type (pointer-sized and
// containing pointers); otherwise it panics.
func (v Value) pointer() unsafe.Pointer {
	if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
		panic("can't call pointer on a non-pointer Value")
	}
	// With flagIndir, ptr points at the pointer; dereference once.
	if v.flag&flagIndir != 0 {
		return *(*unsafe.Pointer)(v.ptr)
	}
	return v.ptr
}
| |
|
| | |
// packEface converts v to the empty interface (any).
func packEface(v Value) any {
	// Build an abi.EmptyInterface by hand and reinterpret it as any.
	return *(*any)(unsafe.Pointer(&abi.EmptyInterface{
		Type: v.typ(),
		Data: packEfaceData(v),
	}))
}
| |
|
| | |
| | |
// packEfaceData returns the data pointer to store in an empty
// interface whose type word is v's type.
func packEfaceData(v Value) unsafe.Pointer {
	t := v.typ()
	switch {
	case !t.IsDirectIface():
		// Value stored indirectly in interface.
		if v.flag&flagIndir == 0 {
			panic("bad indir")
		}
		ptr := v.ptr
		if v.flag&flagAddr != 0 {
			// The value is addressable: copy it so the interface
			// holds its own, independent copy of the data.
			c := unsafe_New(t)
			typedmemmove(t, c, ptr)
			ptr = c
		}
		return ptr
	case v.flag&flagIndir != 0:
		// Value is pointer-shaped but stored indirectly in v;
		// load the actual pointer.
		return *(*unsafe.Pointer)(v.ptr)
	default:
		// Pointer-shaped value stored directly.
		return v.ptr
	}
}
| |
|
| | |
// unpackEface converts the empty interface i to a Value.
func unpackEface(i any) Value {
	e := (*abi.EmptyInterface)(unsafe.Pointer(&i))
	// NOTE: don't read e.Data until we know whether it is really a pointer.
	t := e.Type
	if t == nil {
		return Value{}
	}
	f := flag(t.Kind())
	if !t.IsDirectIface() {
		// Interface holds a pointer to the data, not the data itself.
		f |= flagIndir
	}
	return Value{t, e.Data, f}
}
| |
|
| | |
| | |
| | |
// A ValueError occurs when a Value method is invoked on
// a Value that does not support it. Such cases are documented
// in the description of each method.
type ValueError struct {
	Method string // the method that was invoked, e.g. "reflect.Value.Int"
	Kind   Kind   // the actual Kind of the Value; 0 (Invalid) for a zero Value
}
| |
|
| | func (e *ValueError) Error() string { |
| | if e.Kind == 0 { |
| | return "reflect: call of " + e.Method + " on zero Value" |
| | } |
| | return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value" |
| | } |
| |
|
| | |
// valueMethodName returns the name of the exported reflect.Value
// method that the calling code (several frames up) is executing, for
// use in panic messages. It walks up to five caller frames looking
// for a function named "reflect.Value.<Exported>".
func valueMethodName() string {
	var pc [5]uintptr
	n := runtime.Callers(1, pc[:])
	frames := runtime.CallersFrames(pc[:n])
	var frame runtime.Frame
	for more := true; more; {
		const prefix = "reflect.Value."
		frame, more = frames.Next()
		name := frame.Function
		if len(name) > len(prefix) && name[:len(prefix)] == prefix {
			methodName := name[len(prefix):]
			// Only accept exported methods (upper-case first letter),
			// skipping internal helpers like mustBe.
			if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' {
				return name
			}
		}
	}
	return "unknown method"
}
| |
|
| | |
// nonEmptyInterface is the header for an interface value with methods,
// mirroring the runtime's layout (itab word followed by data word).
type nonEmptyInterface struct {
	itab *abi.ITab
	word unsafe.Pointer
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// mustBe panics with a ValueError if f's kind is not expected.
// Making it a method on flag instead of Value keeps Value receivers
// out of the call, which helps escape analysis.
func (f flag) mustBe(expected Kind) {
	// Inline f.kind() here; the comparison compiles to a single mask+compare.
	if Kind(f&flagKindMask) != expected {
		panic(&ValueError{valueMethodName(), f.kind()})
	}
}
| |
|
| | |
| | |
// mustBeExported panics if f records that the value was obtained using
// an unexported field, or if the value is invalid. The fast path is a
// single comparison; the panic machinery lives in the slow path so
// this function can be inlined.
func (f flag) mustBeExported() {
	if f == 0 || f&flagRO != 0 {
		f.mustBeExportedSlow()
	}
}
| |
|
// mustBeExportedSlow carries the panic paths for mustBeExported,
// kept out of line so mustBeExported stays inlinable.
func (f flag) mustBeExportedSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
}
| |
|
| | |
| | |
| | |
// mustBeAssignable panics if f records that the value is not
// assignable (read-only or unaddressable). Fast path only; the panic
// machinery lives in the slow path so this function can be inlined.
func (f flag) mustBeAssignable() {
	if f&flagRO != 0 || f&flagAddr == 0 {
		f.mustBeAssignableSlow()
	}
}
| |
|
// mustBeAssignableSlow carries the panic paths for mustBeAssignable,
// kept out of line so mustBeAssignable stays inlinable.
func (f flag) mustBeAssignableSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	// Assignable if addressable and not read-only.
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
	if f&flagAddr == 0 {
		panic("reflect: " + valueMethodName() + " using unaddressable value")
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
// Addr returns a pointer value representing the address of v.
// It panics if CanAddr() returns false.
// Addr is typically used to obtain a pointer to a struct field
// or slice element in order to call a method that requires a
// pointer receiver.
func (v Value) Addr() Value {
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.Addr of unaddressable value")
	}
	// Preserve flagRO instead of using v.flag.ro() so that
	// v.Addr().Elem() is equivalent to v.
	fl := v.flag & flagRO
	return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
}
| |
|
| | |
| | |
// Bool returns v's underlying value.
// It panics if v's kind is not Bool.
func (v Value) Bool() bool {
	// panicNotBool is split out into a separate function so that
	// this inlineable fast path stays small.
	if v.kind() != Bool {
		v.panicNotBool()
	}
	return *(*bool)(v.ptr)
}
| |
|
// panicNotBool carries the panic path for Bool, kept out of line
// so Bool stays inlinable.
func (v Value) panicNotBool() {
	v.mustBe(Bool)
}
| |
|
| | var bytesType = rtypeOf(([]byte)(nil)) |
| |
|
| | |
| | |
| | |
// Bytes returns v's underlying value.
// It panics if v's underlying value is not a slice of bytes or
// an addressable array of bytes.
func (v Value) Bytes() []byte {
	// bytesSlow is split out to keep the fast path inlineable.
	if v.typ_ == bytesType { // ok to use v.typ_ directly as comparison doesn't cause escape
		return *(*[]byte)(v.ptr)
	}
	return v.bytesSlow()
}
| |
|
// bytesSlow handles the non-[]byte cases for Bytes: named byte-slice
// types and addressable byte arrays.
func (v Value) bytesSlow() []byte {
	switch v.kind() {
	case Slice:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte slice")
		}
		// Slice is always bigger than a word; assume flagIndir.
		return *(*[]byte)(v.ptr)
	case Array:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte array")
		}
		if !v.CanAddr() {
			panic("reflect.Value.Bytes of unaddressable byte array")
		}
		// Addressable arrays are stored indirectly; view the
		// backing memory as a byte slice of the array's length.
		p := (*byte)(v.ptr)
		n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
		return unsafe.Slice(p, n)
	}
	panic(&ValueError{"reflect.Value.Bytes", v.kind()})
}
| |
|
| | |
| | |
| | func (v Value) runes() []rune { |
| | v.mustBe(Slice) |
| | if v.typ().Elem().Kind() != abi.Int32 { |
| | panic("reflect.Value.Bytes of non-rune slice") |
| | } |
| | |
| | return *(*[]rune)(v.ptr) |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
// CanAddr reports whether the value's address can be obtained with
// Addr. Such values are called addressable. A value can be addressed
// if it is an element of a slice, the field of an addressable struct,
// the element of an addressable array, or the result of dereferencing
// a pointer. If CanAddr returns false, calling Addr will panic.
func (v Value) CanAddr() bool {
	return v.flag&flagAddr != 0
}
| |
|
| | |
| | |
| | |
| | |
| | |
// CanSet reports whether the value of v can be changed.
// A Value can be changed only if it is addressable and was not
// obtained by the use of unexported struct fields.
// If CanSet returns false, calling Set or any type-specific
// setter (e.g., SetBool, SetInt) will panic.
func (v Value) CanSet() bool {
	return v.flag&(flagAddr|flagRO) == flagAddr
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Call calls the function v with the input arguments in.
// As in Go, each input argument must be assignable to the
// type of the function's corresponding input parameter.
// Call panics if v's Kind is not Func.
// It panics if v was obtained by accessing unexported struct fields.
// It returns the output results as Values.
func (v Value) Call(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("Call", in)
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// CallSlice calls the variadic function v with the input arguments in,
// assigning the slice in[len(in)-1] to v's final variadic argument.
// For example, if len(in) == 3, v.CallSlice(in) represents the Go call
// v(in[0], in[1], in[2]...).
// CallSlice panics if v's Kind is not Func or if v is not variadic.
// It panics if v was obtained by accessing unexported struct fields.
func (v Value) CallSlice(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("CallSlice", in)
}
| |
|
| | var callGC bool |
| |
|
| | const debugReflectCall = false |
| |
|
// call implements Call and CallSlice; op is "Call" or "CallSlice" and
// selects the variadic-argument handling and panic messages. It
// validates the arguments, lays out the call frame and registers per
// the function's ABI, performs the call, and unpacks the results.
func (v Value) call(op string, in []Value) []Value {
	// Get function pointer, type.
	t := (*funcType)(unsafe.Pointer(v.typ()))
	var (
		fn       unsafe.Pointer
		rcvr     Value
		rcvrtype *abi.Type
	)
	if v.flag&flagMethod != 0 {
		// Method value: resolve the actual method fn and receiver.
		rcvr = v
		rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
	} else if v.flag&flagIndir != 0 {
		fn = *(*unsafe.Pointer)(v.ptr)
	} else {
		fn = v.ptr
	}

	if fn == nil {
		panic("reflect.Value.Call: call of nil function")
	}

	isSlice := op == "CallSlice"
	n := t.NumIn()
	isVariadic := t.IsVariadic()
	if isSlice {
		// CallSlice requires exactly NumIn arguments, the last a slice.
		if !isVariadic {
			panic("reflect: CallSlice of non-variadic function")
		}
		if len(in) < n {
			panic("reflect: CallSlice with too few input arguments")
		}
		if len(in) > n {
			panic("reflect: CallSlice with too many input arguments")
		}
	} else {
		if isVariadic {
			n--
		}
		if len(in) < n {
			panic("reflect: Call with too few input arguments")
		}
		if !isVariadic && len(in) > n {
			panic("reflect: Call with too many input arguments")
		}
	}
	for _, x := range in {
		if x.Kind() == Invalid {
			panic("reflect: " + op + " using zero Value argument")
		}
	}
	for i := 0; i < n; i++ {
		if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(toRType(targ)) {
			panic("reflect: " + op + " using " + xt.String() + " as type " + stringFor(targ))
		}
	}
	if !isSlice && isVariadic {
		// Prepare slice for remaining variadic values.
		m := len(in) - n
		slice := MakeSlice(toRType(t.In(n)), m, m)
		elem := toRType(t.In(n)).Elem()
		for i := 0; i < m; i++ {
			x := in[n+i]
			if xt := x.Type(); !xt.AssignableTo(elem) {
				panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
			}
			slice.Index(i).Set(x)
		}
		origIn := in
		in = make([]Value, n+1)
		copy(in[:n], origIn)
		in[n] = slice
	}

	nin := len(in)
	if nin != t.NumIn() {
		panic("reflect.Value.Call: wrong argument count")
	}
	nout := t.NumOut()

	// Register argument space.
	var regArgs abi.RegArgs

	// Compute frame type and ABI layout (cached by funcLayout).
	frametype, framePool, abid := funcLayout(t, rcvrtype)

	// Allocate a chunk of memory for the stack frame if needed.
	var stackArgs unsafe.Pointer
	if frametype.Size() != 0 {
		if nout == 0 {
			stackArgs = framePool.Get().(unsafe.Pointer)
		} else {
			// Can't use pool if the function has return values:
			// we will leak pointers to the results into the frame
			// and that frame might be reused by a later call.
			stackArgs = unsafe_New(frametype)
		}
	}
	frameSize := frametype.Size()

	if debugReflectCall {
		println("reflect.call", stringFor(&t.Type))
		abid.dump()
	}

	// Copy inputs into args.

	// Handle receiver first, per its ABI assignment.
	inStart := 0
	if rcvrtype != nil {
		// Guaranteed to only be one word in size,
		// so it will only take up exactly 1 abiStep (either
		// in a register or on the stack).
		switch st := abid.call.steps[0]; st.kind {
		case abiStepStack:
			storeRcvr(rcvr, stackArgs)
		case abiStepPointer:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ptrs[st.ireg]))
			fallthrough
		case abiStepIntReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
		case abiStepFloatReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
		default:
			panic("unknown ABI parameter kind")
		}
		inStart = 1
	}

	// Handle arguments: each one may be split across stack and registers.
	for i, v := range in {
		v.mustBeExported()
		targ := toRType(t.In(i))
		// TODO(mknyszek): Figure out if it's possible to get some
		// scratch space for this assignment check. Previously, it
		// was possible to use space in the argument frame.
		v = v.assignTo("reflect.Value.Call", &targ.t, nil)
	stepsLoop:
		for _, st := range abid.call.stepsForValue(i + inStart) {
			switch st.kind {
			case abiStepStack:
				// Copy values to the "stack."
				addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
				if v.flag&flagIndir != 0 {
					typedmemmove(&targ.t, addr, v.ptr)
				} else {
					*(*unsafe.Pointer)(addr) = v.ptr
				}
				// There's only one step for a stack-allocated value.
				break stepsLoop
			case abiStepIntReg, abiStepPointer:
				// Copy values to "integer registers."
				if v.flag&flagIndir != 0 {
					offset := add(v.ptr, st.offset, "precomputed value offset")
					if st.kind == abiStepPointer {
						// Duplicate this pointer in the pointer area of the
						// register space. Otherwise, there's the potential for
						// this to be the last reference to v.ptr.
						regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
					}
					intToReg(&regArgs, st.ireg, st.size, offset)
				} else {
					if st.kind == abiStepPointer {
						// See the comment in the indirect case above.
						regArgs.Ptrs[st.ireg] = v.ptr
					}
					regArgs.Ints[st.ireg] = uintptr(v.ptr)
				}
			case abiStepFloatReg:
				// Copy values to "float registers."
				if v.flag&flagIndir == 0 {
					panic("attempted to copy pointer to FP register")
				}
				offset := add(v.ptr, st.offset, "precomputed value offset")
				floatToReg(&regArgs, st.freg, st.size, offset)
			default:
				panic("unknown ABI part kind")
			}
		}
	}
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	frameSize = align(frameSize, goarch.PtrSize)
	frameSize += abid.spill

	// Mark pointers in registers for the return path.
	regArgs.ReturnIsPtr = abid.outRegPtrs

	if debugReflectCall {
		regArgs.Dump()
	}

	// For testing; see TestCallMethodJump.
	if callGC {
		runtime.GC()
	}

	// Call.
	call(frametype, fn, stackArgs, uint32(frametype.Size()), uint32(abid.retOffset), uint32(frameSize), &regArgs)

	// For testing; see TestCallMethodJump.
	if callGC {
		runtime.GC()
	}

	var ret []Value
	if nout == 0 {
		if stackArgs != nil {
			typedmemclr(frametype, stackArgs)
			framePool.Put(stackArgs)
		}
	} else {
		if stackArgs != nil {
			// Zero the now unused input area of args,
			// because the Values returned by this function contain pointers to the args object,
			// and will thus keep the args object alive indefinitely.
			typedmemclrpartial(frametype, stackArgs, 0, abid.retOffset)
		}

		// Wrap Values around return values in args.
		ret = make([]Value, nout)
		for i := 0; i < nout; i++ {
			tv := t.Out(i)
			if tv.Size() == 0 {
				// For zero-sized return value, args+off may point to the next object.
				// In this case, return the zero value instead.
				ret[i] = Zero(toRType(tv))
				continue
			}
			steps := abid.ret.stepsForValue(i)
			if st := steps[0]; st.kind == abiStepStack {
				// This value is on the stack. If part of a value is stack
				// allocated, the entire value is according to the ABI. So
				// just make an indirection into the allocated frame.
				fl := flagIndir | flag(tv.Kind())
				ret[i] = Value{tv, add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
				// Note: this does introduce false sharing between results -
				// if any result is live, they are all live.
				// (And the space for the args is live as well, but as we've
				// cleared that space it isn't as big a deal.)
				continue
			}

			// Handle pointers passed in registers.
			if tv.IsDirectIface() {
				// Pointer-shaped types are passed in a pointer register.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", stringFor(tv), "\n")
					panic("mismatch between ABI description and types")
				}
				ret[i] = Value{tv, regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
				continue
			}

			// All that's left is values passed in registers that we need to
			// create space for and copy values back into.
			//
			// TODO(mknyszek): We make a new allocation for each register-allocated
			// value, but previously we could always point into the heap-allocated
			// stack frame. This is a regression that could be fixed by adding
			// additional space to the allocated stack frame and storing the
			// register-allocated return values into the allocated stack frame and
			// referring there in the resulting Value.
			s := unsafe_New(tv)
			for _, st := range steps {
				switch st.kind {
				case abiStepIntReg:
					offset := add(s, st.offset, "precomputed value offset")
					intFromReg(&regArgs, st.ireg, st.size, offset)
				case abiStepPointer:
					s := add(s, st.offset, "precomputed value offset")
					*((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
				case abiStepFloatReg:
					offset := add(s, st.offset, "precomputed value offset")
					floatFromReg(&regArgs, st.freg, st.size, offset)
				case abiStepStack:
					panic("register-based return value has stack component")
				default:
					panic("unknown ABI part kind")
				}
			}
			ret[i] = Value{tv, s, flagIndir | flag(tv.Kind())}
		}
	}

	return ret
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// callReflect is the call implementation used by a function
// returned by MakeFunc. In many ways it is the opposite of the
// method Value.call: the method converts a call using Values into a
// call of a function with a concrete argument frame, while callReflect
// converts a call of a function with a concrete argument frame into
// a call using Values.
//
// ctxt is the MakeFunc closure being called; frame points to the
// argument frame; retValid is set to true once the result section of
// frame holds valid results; regs carries register-assigned arguments
// and results.
func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	if callGC {
		// Call GC upon entry during testing.
		// Getting our stack scanned here is the biggest hazard, because
		// our caller (makeFuncStub) could have failed to place the last
		// pointer to a value in regs' pointer space, in which case it
		// won't be visible to the GC.
		runtime.GC()
	}
	ftyp := ctxt.ftyp
	f := ctxt.fn

	_, _, abid := funcLayout(ftyp, nil)

	// Copy arguments into Values.
	ptr := frame
	in := make([]Value, 0, int(ftyp.InCount))
	for i, typ := range ftyp.InSlice() {
		if typ.Size() == 0 {
			in = append(in, Zero(toRType(typ)))
			continue
		}
		v := Value{typ, nil, flag(typ.Kind())}
		steps := abid.call.stepsForValue(i)
		if st := steps[0]; st.kind == abiStepStack {
			if !typ.IsDirectIface() {
				// value cannot be inlined in interface data.
				// Must make a copy, because f might keep a reference to it,
				// and we cannot let f keep a reference to the stack frame
				// after this function returns, not even a read-only reference.
				v.ptr = unsafe_New(typ)
				if typ.Size() > 0 {
					typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
				}
				v.flag |= flagIndir
			} else {
				v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
			}
		} else {
			if !typ.IsDirectIface() {
				// All that's left is values passed in registers that we need to
				// create space for the values.
				v.flag |= flagIndir
				v.ptr = unsafe_New(typ)
				for _, st := range steps {
					switch st.kind {
					case abiStepIntReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intFromReg(regs, st.ireg, st.size, offset)
					case abiStepPointer:
						s := add(v.ptr, st.offset, "precomputed value offset")
						*((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
					case abiStepFloatReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						floatFromReg(regs, st.freg, st.size, offset)
					case abiStepStack:
						panic("register-based return value has stack component")
					default:
						panic("unknown ABI part kind")
					}
				}
			} else {
				// Pointer-shaped types are passed directly in a pointer register.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", stringFor(typ), "\n")
					panic("mismatch between ABI description and types")
				}
				v.ptr = regs.Ptrs[steps[0].ireg]
			}
		}
		in = append(in, v)
	}

	// Call underlying function.
	out := f(in)
	numOut := ftyp.NumOut()
	if len(out) != numOut {
		panic("reflect: wrong return count from function created by MakeFunc")
	}

	// Copy results back into the argument frame and register space.
	if numOut > 0 {
		for i, typ := range ftyp.OutSlice() {
			v := out[i]
			if v.typ() == nil {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned zero Value")
			}
			if v.flag&flagRO != 0 {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned value obtained from unexported field")
			}
			if typ.Size() == 0 {
				continue
			}

			// Convert v to type typ if v is assignable to a variable
			// of type typ in the language spec.
			// See issue 28761.
			//
			// TODO(mknyszek): In the switch to the register ABI we lost
			// the scratch space here for the register cases (and
			// temporarily for all the cases).
			//
			// If/when this happens, take note of the following:
			//
			// We must clear the destination before calling assignTo,
			// in case assignTo writes (with memory barriers) to the
			// target location used as scratch space. See issue 39541.
			v = v.assignTo("reflect.MakeFunc", typ, nil)
		stepsLoop:
			for _, st := range abid.ret.stepsForValue(i) {
				switch st.kind {
				case abiStepStack:
					// Copy values to the "stack."
					addr := add(ptr, st.stkOff, "precomputed stack arg offset")
					// Do not use write barriers. The stack space used
					// for this call is not adequately zeroed, and we
					// are careful to keep the arguments alive until we
					// return to makeFuncStub's caller.
					if v.flag&flagIndir != 0 {
						memmove(addr, v.ptr, st.size)
					} else {
						// This case must be a pointer type.
						*(*uintptr)(addr) = uintptr(v.ptr)
					}
					// There's only one step for a stack-allocated value.
					break stepsLoop
				case abiStepIntReg, abiStepPointer:
					// Copy values to "integer registers."
					if v.flag&flagIndir != 0 {
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intToReg(regs, st.ireg, st.size, offset)
					} else {
						// Only populate the Ints space on the return path.
						// This is safe because out is kept alive until the
						// end of this function, and the return path through
						// makeFuncStub has no preemption, so these pointers
						// are always visible to the GC.
						regs.Ints[st.ireg] = uintptr(v.ptr)
					}
				case abiStepFloatReg:
					// Copy values to "float registers."
					if v.flag&flagIndir == 0 {
						panic("attempted to copy pointer to FP register")
					}
					offset := add(v.ptr, st.offset, "precomputed value offset")
					floatToReg(regs, st.freg, st.size, offset)
				default:
					panic("unknown ABI part kind")
				}
			}
		}
	}

	// Announce that the return values are valid.
	// After this point the runtime can depend on the return values being valid.
	*retValid = true

	// We have to make sure that the out slice lives at least until
	// the runtime knows the return values are valid. Otherwise, the
	// return values might not be scanned by anyone during a GC.
	// (out would be dead, and the return slots not yet alive.)
	runtime.KeepAlive(out)

	// runtime.getArgInfo expects to be able to find ctxt on the
	// stack when it finds our caller, makeFuncStub. Make sure it
	// doesn't get garbage collected.
	runtime.KeepAlive(ctxt)
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// methodReceiver returns information about the receiver
// described by v. The Value v may or may not have the
// flagMethod bit set, so the kind cached in v.flag should
// not be used.
// The return value rcvrtype gives the method's actual receiver type.
// The return value t gives the method type signature (without the receiver).
// The return value fn is a pointer to the method code.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
	i := methodIndex
	if v.typ().Kind() == abi.Interface {
		tt := (*interfaceType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		if !tt.nameOff(m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		iface := (*nonEmptyInterface)(v.ptr)
		if iface.itab == nil {
			panic("reflect: " + op + " of method on nil interface value")
		}
		rcvrtype = iface.itab.Type
		// fn points at the i'th itab function-table slot.
		fn = unsafe.Pointer(&unsafe.Slice(&iface.itab.Fun[0], i+1)[i])
		t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
	} else {
		rcvrtype = v.typ()
		ms := v.typ().ExportedMethods()
		if uint(i) >= uint(len(ms)) {
			panic("reflect: internal error: invalid method index")
		}
		m := ms[i]
		if !nameOffFor(v.typ(), m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		ifn := textOffFor(v.typ(), m.Ifn)
		fn = unsafe.Pointer(&ifn)
		t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
	}
	return
}
| |
|
| | |
| | |
| | |
| | |
// storeRcvr stores the receiver v, which must be one word in size,
// at the location p (a stack slot or a register word).
func storeRcvr(v Value, p unsafe.Pointer) {
	t := v.typ()
	if t.Kind() == abi.Interface {
		// The interface data word becomes the receiver word.
		iface := (*nonEmptyInterface)(v.ptr)
		*(*unsafe.Pointer)(p) = iface.word
	} else if v.flag&flagIndir != 0 && t.IsDirectIface() {
		// Pointer-shaped value stored indirectly: load it first.
		*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
	} else {
		*(*unsafe.Pointer)(p) = v.ptr
	}
}
| |
|
| | |
| | |
// align returns the result of rounding x up to a multiple of n,
// where n must be a power of two.
func align(x, n uintptr) uintptr {
	mask := n - 1
	return (x + mask) &^ mask
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// callMethod is the call implementation used by a function returned
// by makeMethodValue (used by v.Method(i).Interface()).
// It is a streamlined version of the usual reflect call: the caller has
// already laid out the argument frame for us, so we don't have
// to deal with individual Values for each argument.
// It is in this file so that it can be next to the two similar functions above.
// The remainder of the makeMethodValue implementation is in makefunc.go.
//
// ctxt is the method value being called; frame/regs hold the caller's
// (value-ABI) arguments; retValid is set once results are in place.
func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	rcvr := ctxt.rcvr
	rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)

	// There are two ABIs at play here.
	//
	// methodValueCall was invoked with the ABI assuming there was no
	// receiver ("value ABI") and that's what frame and regs hold.
	//
	// Meanwhile, we need to actually call the method with a receiver,
	// which has its own ABI ("method ABI"). Everything that follows is a
	// translation between the two.
	_, _, valueABI := funcLayout(valueFuncType, nil)
	valueFrame, valueRegs := frame, regs
	methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)

	// Make a new frame that is one word bigger so we can store the receiver.
	// This space is used for both arguments and return values.
	methodFrame := methodFramePool.Get().(unsafe.Pointer)
	var methodRegs abi.RegArgs

	// Deposit receiver in its ABI-assigned slot (argument 0).
	switch st := methodABI.call.steps[0]; st.kind {
	case abiStepStack:
		// Only copy the receiver to the stack if the ABI says so.
		// Otherwise, it'll be in a register already.
		storeRcvr(rcvr, methodFrame)
	case abiStepPointer:
		// Put the receiver in a register.
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ptrs[st.ireg]))
		fallthrough
	case abiStepIntReg:
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints[st.ireg]))
	case abiStepFloatReg:
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Floats[st.freg]))
	default:
		panic("unknown ABI parameter kind")
	}

	// Translate the rest of the arguments from value ABI to method ABI.
	for i, t := range valueFuncType.InSlice() {
		valueSteps := valueABI.call.stepsForValue(i)
		methodSteps := methodABI.call.stepsForValue(i + 1) // shifted by the receiver

		// Zero-sized types are trivial: nothing to do.
		if len(valueSteps) == 0 {
			if len(methodSteps) != 0 {
				panic("method ABI and value ABI do not align")
			}
			continue
		}

		// There are four cases to handle in translating each argument:
		// 1. Stack -> stack translation.
		// 2. Stack -> registers translation.
		// 3. Registers -> stack translation.
		// 4. Registers -> registers translation.

		// If the value ABI passes the value on the stack,
		// then the method ABI does too, because it has strictly
		// fewer arguments. Simply copy between the two.
		if vStep := valueSteps[0]; vStep.kind == abiStepStack {
			mStep := methodSteps[0]
			// Handle stack -> stack translation.
			if mStep.kind == abiStepStack {
				if vStep.size != mStep.size {
					panic("method ABI and value ABI do not align")
				}
				typedmemmove(t,
					add(methodFrame, mStep.stkOff, "precomputed stack offset"),
					add(valueFrame, vStep.stkOff, "precomputed stack offset"))
				continue
			}
			// Handle stack -> register translation.
			for _, mStep := range methodSteps {
				from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
				switch mStep.kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
					fallthrough // We need to make sure this ends up in Ints, too.
				case abiStepIntReg:
					intToReg(&methodRegs, mStep.ireg, mStep.size, from)
				case abiStepFloatReg:
					floatToReg(&methodRegs, mStep.freg, mStep.size, from)
				default:
					panic("unexpected method step")
				}
			}
			continue
		}
		// Handle register -> stack translation.
		if mStep := methodSteps[0]; mStep.kind == abiStepStack {
			for _, vStep := range valueSteps {
				to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
				switch vStep.kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					*(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
				case abiStepIntReg:
					intFromReg(valueRegs, vStep.ireg, vStep.size, to)
				case abiStepFloatReg:
					floatFromReg(valueRegs, vStep.freg, vStep.size, to)
				default:
					panic("unexpected value step")
				}
			}
			continue
		}
		// Handle register -> register translation.
		if len(valueSteps) != len(methodSteps) {
			// Because it's the same type for the value, and it's assigned
			// to registers both times, it should always take up the same
			// number of registers for each ABI.
			panic("method ABI and value ABI don't align")
		}
		for i, vStep := range valueSteps {
			mStep := methodSteps[i]
			if mStep.kind != vStep.kind {
				panic("method ABI and value ABI don't align")
			}
			switch vStep.kind {
			case abiStepPointer:
				// Copy this too, so we get a write barrier.
				methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
				fallthrough
			case abiStepIntReg:
				methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
			case abiStepFloatReg:
				methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
			default:
				panic("unexpected value step")
			}
		}
	}

	methodFrameSize := methodFrameType.Size()
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	methodFrameSize = align(methodFrameSize, goarch.PtrSize)
	methodFrameSize += methodABI.spill

	// Mark pointers in registers for the return path.
	methodRegs.ReturnIsPtr = methodABI.outRegPtrs

	// Call.
	// Call copies the arguments from scratch to the stack, calls fn,
	// and then copies the results back into scratch.
	call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.Size()), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)

	// Copy return values.
	//
	// This is somewhat simpler because both ABIs have an identical
	// return value ABI (the types are identical). As a result, register
	// results can simply be copied over. Stack-allocated return values
	// just need to find the same return offset in both frames.
	if valueRegs != nil {
		*valueRegs = methodRegs
	}
	if retSize := methodFrameType.Size() - methodABI.retOffset; retSize > 0 {
		valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
		methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
		// This copies to the stack. Write barriers are not needed.
		memmove(valueRet, methodRet, retSize)
	}

	// Tell the runtime it can now depend on the return values
	// being properly initialized.
	*retValid = true

	// Clear the scratch space and put it back in the pool.
	// This must happen after the statement above, so that the return
	// values will always be scanned by someone.
	typedmemclr(methodFrameType, methodFrame)
	methodFramePool.Put(methodFrame)

	// See the comment in callReflect.
	runtime.KeepAlive(ctxt)

	// Keep valueRegs alive because it may hold live pointer results.
	// The caller (methodValueCall) has it as a stack object, which is only
	// scanned when it is referenced.
	runtime.KeepAlive(valueRegs)
}
| |
|
| | |
// funcName returns the name of f, for use in error messages.
// It recovers the entry PC from the closure's code pointer;
// "closure" is returned when the runtime has no record for it.
func funcName(f func([]Value) []Value) string {
	pc := *(*uintptr)(unsafe.Pointer(&f))
	rf := runtime.FuncForPC(pc)
	if rf != nil {
		return rf.Name()
	}
	return "closure"
}
| |
|
| | |
| | |
// Cap returns v's capacity.
// It panics if v's Kind is not Array, Chan, Slice or pointer to Array.
func (v Value) Cap() int {
	// capNonSlice is split out to keep the common slice path inlineable.
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Cap
	}
	return v.capNonSlice()
}
| |
|
// capNonSlice handles the Array, Chan, and pointer-to-Array cases of Cap.
func (v Value) capNonSlice() int {
	k := v.kind()
	switch k {
	case Array:
		return v.typ().Len()
	case Chan:
		return chancap(v.pointer())
	case Ptr:
		if v.typ().Elem().Kind() == abi.Array {
			// cap(*[N]T) == N.
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Cap", v.kind()})
}
| |
|
| | |
| | |
| | |
// Close closes the channel v.
// It panics if v's Kind is not Chan or
// v is a receive-only channel.
func (v Value) Close() {
	v.mustBe(Chan)
	v.mustBeExported()
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: close of receive-only channel")
	}

	chanclose(v.pointer())
}
| |
|
| | |
| | func (v Value) CanComplex() bool { |
| | switch v.kind() { |
| | case Complex64, Complex128: |
| | return true |
| | default: |
| | return false |
| | } |
| | } |
| |
|
| | |
| | |
// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not Complex64 or Complex128.
func (v Value) Complex() complex128 {
	k := v.kind()
	switch k {
	case Complex64:
		return complex128(*(*complex64)(v.ptr))
	case Complex128:
		return *(*complex128)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Complex", v.kind()})
}
| |
|
| | |
| | |
| | |
| | |
// Elem returns the value that the interface v contains
// or that the pointer v points to.
// It panics if v's Kind is not Interface or Pointer.
// It returns the zero Value if v is nil.
func (v Value) Elem() Value {
	k := v.kind()
	switch k {
	case Interface:
		x := unpackEface(packIfaceValueIntoEmptyIface(v))
		if x.flag != 0 {
			// Propagate the read-only status to the contained value.
			x.flag |= v.flag.ro()
		}
		return x
	case Pointer:
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			if !v.typ().IsDirectIface() {
				// This is a pointer to a not-in-heap object. ptr points to a uintptr
				// in the heap. That uintptr is the address of a not-in-heap object.
				// In general, pointers to not-in-heap objects can be total junk.
				// But Elem() is asking to dereference it, so the user has asserted
				// that at least it is a valid pointer (not just an integer stored in
				// a pointer slot). So let's check, to make sure that it isn't a pointer
				// that the runtime will crash on if it sees it during GC or write barriers.
				// Since it is a not-in-heap pointer, all pointers to the heap are
				// forbidden! That makes the test pretty easy.
				// See issue 48399.
				if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
					panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
				}
			}
			ptr = *(*unsafe.Pointer)(ptr)
		}
		// The returned value's address is v's value.
		if ptr == nil {
			return Value{}
		}
		tt := (*ptrType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		fl := v.flag&flagRO | flagIndir | flagAddr
		fl |= flag(typ.Kind())
		return Value{typ, ptr, fl}
	}
	panic(&ValueError{"reflect.Value.Elem", v.kind()})
}
| |
|
| | |
| | |
// Field returns the i'th field of the struct v.
// It panics if v's Kind is not Struct or i is out of range.
func (v Value) Field(i int) Value {
	if v.kind() != Struct {
		panic(&ValueError{"reflect.Value.Field", v.kind()})
	}
	tt := (*structType)(unsafe.Pointer(v.typ()))
	if uint(i) >= uint(len(tt.Fields)) {
		panic("reflect: Field index out of range")
	}
	field := &tt.Fields[i]
	typ := field.Typ

	// Inherit permission bits from v, but clear flagEmbedRO.
	fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
	// Using an unexported field forces flagRO.
	if !field.Name.IsExported() {
		if field.Embedded() {
			fl |= flagEmbedRO
		} else {
			fl |= flagStickyRO
		}
	}
	if fl&flagIndir == 0 && typ.Size() == 0 {
		// In general, a field of a direct (pointer-shaped) struct value
		// can only be a pointer field, where the pointer is stored
		// directly in the Value, and v.ptr+offset below applies.
		// One exception: a zero-sized field, which has no data,
		// could be at any offset (possibly the size of the struct,
		// pointing past the object). Handle this case specially, returning
		// an indirect zero-valued Value, which has a nil data pointer.
		// See issue 74612.
		return Value{typ, nil, fl | flagIndir}
	}

	// Either flagIndir is set and v.ptr points at struct,
	// or flagIndir is not set and v.ptr is the actual struct data.
	// In the former case, we want v.ptr + offset.
	// In the latter case, we must have field.offset = 0,
	// so v.ptr + field.offset is still the correct address.
	ptr := add(v.ptr, field.Offset, "same as non-reflect &v.field")
	return Value{typ, ptr, fl}
}
| |
|
| | |
| | |
| | |
// FieldByIndex returns the nested field corresponding to index.
// It panics if evaluation requires stepping through a nil
// pointer or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
	if len(index) == 1 {
		return v.Field(index[0])
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Step through an intermediate pointer-to-struct.
			if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					panic("reflect: indirection through nil pointer to embedded struct")
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v
}
| |
|
| | |
| | |
| | |
| | |
// FieldByIndexErr returns the nested field corresponding to index.
// It returns an error if evaluation requires stepping through a nil
// pointer, but panics if it must step through a field that
// is not a struct.
func (v Value) FieldByIndexErr(index []int) (Value, error) {
	if len(index) == 1 {
		return v.Field(index[0]), nil
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Unlike FieldByIndex, a nil embedded pointer yields an
			// error instead of a panic.
			if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v, nil
}
| |
|
| | |
| | |
| | |
// FieldByName returns the struct field with the given name.
// It returns the zero Value if no field was found.
// It panics if v's Kind is not Struct.
func (v Value) FieldByName(name string) Value {
	v.mustBe(Struct)
	if f, ok := toRType(v.typ()).FieldByName(name); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}
| |
|
| | |
| | |
| | |
| | |
| | func (v Value) FieldByNameFunc(match func(string) bool) Value { |
| | if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok { |
| | return v.FieldByIndex(f.Index) |
| | } |
| | return Value{} |
| | } |
| |
|
| | |
| | func (v Value) CanFloat() bool { |
| | switch v.kind() { |
| | case Float32, Float64: |
| | return true |
| | default: |
| | return false |
| | } |
| | } |
| |
|
| | |
| | |
// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not Float32 or Float64.
func (v Value) Float() float64 {
	k := v.kind()
	switch k {
	case Float32:
		return float64(*(*float32)(v.ptr))
	case Float64:
		return *(*float64)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Float", v.kind()})
}
| |
|
| | var uint8Type = rtypeOf(uint8(0)) |
| |
|
| | |
| | |
// Index returns v's i'th element.
// It panics if v's Kind is not Array, Slice, or String or i is out of range.
func (v Value) Index(i int) Value {
	switch v.kind() {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(tt.Len) {
			panic("reflect: array index out of range")
		}
		typ := tt.Elem
		offset := uintptr(i) * typ.Size()

		// Either flagIndir is set and v.ptr points at array,
		// or flagIndir is not set and v.ptr is the actual array data.
		// In the former case, we want v.ptr + offset.
		// In the latter case, we must be doing Index(0), so offset = 0,
		// so v.ptr + offset is still the correct address.
		val := add(v.ptr, offset, "same as &v[i], i < tt.len")
		fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind()) // bits same as overall array
		return Value{typ, val, fl}

	case Slice:
		// Element flag same as Elem of Pointer.
		// Addressable, indirect, possibly read-only.
		s := (*unsafeheader.Slice)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: slice index out of range")
		}
		tt := (*sliceType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
		fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: string index out of range")
		}
		// String bytes are never addressable.
		p := arrayAt(s.Data, i, 1, "i < s.Len")
		fl := v.flag.ro() | flag(Uint8) | flagIndir
		return Value{uint8Type, p, fl}
	}
	panic(&ValueError{"reflect.Value.Index", v.kind()})
}
| |
|
| | |
| | func (v Value) CanInt() bool { |
| | switch v.kind() { |
| | case Int, Int8, Int16, Int32, Int64: |
| | return true |
| | default: |
| | return false |
| | } |
| | } |
| |
|
| | |
| | |
// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) Int() int64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Int:
		return int64(*(*int)(p))
	case Int8:
		return int64(*(*int8)(p))
	case Int16:
		return int64(*(*int16)(p))
	case Int32:
		return int64(*(*int32)(p))
	case Int64:
		return *(*int64)(p)
	}
	panic(&ValueError{"reflect.Value.Int", v.kind()})
}
| |
|
| | |
// CanInterface reports whether [Value.Interface] can be used without panicking.
func (v Value) CanInterface() bool {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.CanInterface", Invalid})
	}
	// Values reached through unexported fields may not be exposed.
	return v.flag&flagRO == 0
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Interface returns v's current value as an interface{}.
// It is equivalent to:
//
//	var i interface{} = (v's underlying value)
//
// It panics if the Value was obtained by accessing unexported struct fields.
func (v Value) Interface() (i any) {
	return valueInterface(v, true)
}
| |
|
// valueInterface is the implementation of Value.Interface; safe controls
// whether values derived from unexported fields are rejected.
func valueInterface(v Value, safe bool) any {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Interface", Invalid})
	}
	if safe && v.flag&flagRO != 0 {
		// Do not allow access to unexported values via Interface,
		// because they might be pointers that should not be
		// writable or methods or function that should not be callable.
		panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
	}
	if v.flag&flagMethod != 0 {
		// A method value must be materialized into a real func first.
		v = makeMethodValue("Interface", v)
	}

	if v.kind() == Interface {
		// v already holds an interface; return its dynamic value directly.
		return packIfaceValueIntoEmptyIface(v)
	}

	return packEface(v)
}
| |
|
| | |
| | |
| | |
// TypeAssert is semantically equivalent to:
//
//	v2, ok := v.Interface().(T)
//
// but avoids the intermediate allocation of an interface value.
// It panics in the same cases Interface would (invalid Value, or a Value
// obtained from an unexported field or method).
func TypeAssert[T any](v Value) (T, bool) {
	if v.flag == 0 {
		panic(&ValueError{"reflect.TypeAssert", Invalid})
	}
	if v.flag&flagRO != 0 {
		// Do not allow access to unexported values via TypeAssert,
		// because they might be pointers that should not be
		// writable or methods or functions that should not be callable.
		panic("reflect.TypeAssert: cannot return value obtained from unexported field or method")
	}

	if v.flag&flagMethod != 0 {
		// Materialize a method value into a callable func first.
		v = makeMethodValue("TypeAssert", v)
	}

	typ := abi.TypeFor[T]()

	// Case 1: v itself holds an interface value.
	// Repack it as an empty interface and let the language's own
	// type assertion do the work.
	if v.kind() == Interface {
		v, ok := packIfaceValueIntoEmptyIface(v).(T)
		return v, ok
	}

	// Case 2: T is an interface type. First check whether v's dynamic
	// type implements T by asserting a data-less interface, and only
	// then fill in the data word — this avoids packing the data when
	// the assertion would fail anyway.
	if typ.Kind() == abi.Interface {
		iface := *(*any)(unsafe.Pointer(&abi.EmptyInterface{Type: v.typ(), Data: nil}))
		if out, ok := iface.(T); ok {
			// The assertion succeeded; now install the actual data
			// pointer into the resulting interface header.
			(*abi.CommonInterface)(unsafe.Pointer(&out)).Data = packEfaceData(v)
			return out, true
		}
		var zero T
		return zero, false
	}

	// Case 3: T is a concrete type, so the dynamic type must match exactly.
	if typ != v.typ() {
		var zero T
		return zero, false
	}
	if v.flag&flagIndir == 0 {
		// Pointer-shaped value stored directly in v.ptr.
		return *(*T)(unsafe.Pointer(&v.ptr)), true
	}
	return *(*T)(v.ptr), true
}
| |
|
| | |
| | |
| | |
// packIfaceValueIntoEmptyIface converts an interface-kinded Value into an
// empty interface (any) holding the same dynamic value. The caller must
// ensure v.kind() == Interface, so v.ptr points at an interface header.
func packIfaceValueIntoEmptyIface(v Value) any {
	// Empty interface: the header already has the right layout.
	if v.NumMethod() == 0 {
		return *(*any)(v.ptr)
	}
	// Non-empty interface: read through a one-method interface type so
	// the itab is unpacked into a plain (type, data) empty interface.
	return *(*interface {
		M()
	})(v.ptr)
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// InterfaceData returns a pair of unspecified uintptr values.
// It panics if v's Kind is not Interface.
//
// Deprecated: The memory representation of interface values is not
// something callers should rely on; use with great care.
func (v Value) InterfaceData() [2]uintptr {
	v.mustBe(Interface)
	// The returned uintptrs alias the interface words, so the data must
	// be forced to the heap to keep the pointers valid.
	escapes(v.ptr)

	// The compiler loses track as soon as the value is returned as
	// uintptr, so treat the words as opaque.
	return *(*[2]uintptr)(v.ptr)
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// IsNil reports whether its argument v is nil. The argument must be
// a chan, func, interface, map, pointer, or slice value; if it is
// not, IsNil panics.
func (v Value) IsNil() bool {
	k := v.kind()
	switch k {
	case Chan, Func, Map, Pointer, UnsafePointer:
		if v.flag&flagMethod != 0 {
			// A method value is a non-nil func by construction.
			return false
		}
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		return ptr == nil
	case Interface, Slice:
		// Both interface and slice are nil exactly when their first
		// word (type word / data pointer) is 0.
		return *(*unsafe.Pointer)(v.ptr) == nil
	}
	panic(&ValueError{"reflect.Value.IsNil", v.kind()})
}
| |
|
| | |
| | |
| | |
| | |
| | |
// IsValid reports whether v represents a value.
// It returns false if v is the zero Value (flag == 0 means no kind is set).
func (v Value) IsValid() bool {
	return v.flag != 0
}
| |
|
| | |
| | |
// IsZero reports whether v is the zero value for its type.
// It panics if the Value is invalid.
func (v Value) IsZero() bool {
	switch v.kind() {
	case Bool:
		return !v.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == 0
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == 0
	case Float32, Float64:
		return v.Float() == 0
	case Complex64, Complex128:
		return v.Complex() == 0
	case Array:
		if v.flag&flagIndir == 0 {
			// Pointer-shaped array (single pointer element) held directly.
			return v.ptr == nil
		}
		if v.ptr == unsafe.Pointer(&zeroVal[0]) {
			// Points into the shared zero block: definitely zero.
			return true
		}
		typ := (*abi.ArrayType)(unsafe.Pointer(v.typ()))
		// Fast path: compare against the zero block with the type's
		// equality function when the value fits in it.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			// v.ptr doesn't escape: Equal only reads the memory.
			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// Types with regular memory layout are zero iff every
			// byte is zero.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}
		// Slow path: check each element recursively.
		n := int(typ.Len)
		for i := 0; i < n; i++ {
			if !v.Index(i).IsZero() {
				return false
			}
		}
		return true
	case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
		return v.IsNil()
	case String:
		return v.Len() == 0
	case Struct:
		if v.flag&flagIndir == 0 {
			// Pointer-shaped struct held directly.
			return v.ptr == nil
		}
		if v.ptr == unsafe.Pointer(&zeroVal[0]) {
			return true
		}
		typ := (*abi.StructType)(unsafe.Pointer(v.typ()))
		// Fast path: use the type's equality function against the zero block.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// Regular memory layout: zero iff every byte is zero.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}

		// Slow path: check each field, skipping blank ("_") fields.
		n := v.NumField()
		for i := 0; i < n; i++ {
			if !v.Field(i).IsZero() && v.Type().Field(i).Name != "_" {
				return false
			}
		}
		return true
	default:
		// This should never happen, but will act as a safeguard for
		// later, as a default value doesn't makes sense here.
		panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
	}
}
| |
|
| | |
| | |
// isZero reports whether every byte of b is zero. It peels unaligned and
// odd-sized edges byte-by-byte, then scans the aligned middle in uint64
// words, 32 words per unrolled iteration.
func isZero(b []byte) bool {
	if len(b) == 0 {
		return true
	}
	const wordsPerChunk = 32
	// Peel leading bytes until the start is 8-byte aligned.
	for uintptr(unsafe.Pointer(&b[0]))%8 != 0 {
		if b[0] != 0 {
			return false
		}
		if b = b[1:]; len(b) == 0 {
			return true
		}
	}
	// Peel trailing bytes until the length is a whole number of words.
	for len(b)%8 != 0 {
		last := len(b) - 1
		if b[last] != 0 {
			return false
		}
		b = b[:last]
	}
	if len(b) == 0 {
		return true
	}
	// Reinterpret the aligned middle as uint64 words.
	w := unsafe.Slice((*uint64)(unsafe.Pointer(&b[0])), len(b)/8)
	// Peel leading words until a whole number of chunks remains.
	for len(w)%wordsPerChunk != 0 {
		if w[0] != 0 {
			return false
		}
		w = w[1:]
	}
	// Unrolled scan: 32 words per iteration.
	for len(w) >= wordsPerChunk {
		if w[0] != 0 || w[1] != 0 || w[2] != 0 || w[3] != 0 ||
			w[4] != 0 || w[5] != 0 || w[6] != 0 || w[7] != 0 ||
			w[8] != 0 || w[9] != 0 || w[10] != 0 || w[11] != 0 ||
			w[12] != 0 || w[13] != 0 || w[14] != 0 || w[15] != 0 ||
			w[16] != 0 || w[17] != 0 || w[18] != 0 || w[19] != 0 ||
			w[20] != 0 || w[21] != 0 || w[22] != 0 || w[23] != 0 ||
			w[24] != 0 || w[25] != 0 || w[26] != 0 || w[27] != 0 ||
			w[28] != 0 || w[29] != 0 || w[30] != 0 || w[31] != 0 {
			return false
		}
		w = w[wordsPerChunk:]
	}
	return true
}
| |
|
| | |
| | |
// SetZero sets v to be the zero value of v's type.
// It panics if [Value.CanSet] returns false.
func (v Value) SetZero() {
	v.mustBeAssignable()
	// Scalar kinds are cleared with a direct typed store; aggregate kinds
	// that may contain pointers go through typedmemclr so the GC sees the
	// write barriers.
	switch v.kind() {
	case Bool:
		*(*bool)(v.ptr) = false
	case Int:
		*(*int)(v.ptr) = 0
	case Int8:
		*(*int8)(v.ptr) = 0
	case Int16:
		*(*int16)(v.ptr) = 0
	case Int32:
		*(*int32)(v.ptr) = 0
	case Int64:
		*(*int64)(v.ptr) = 0
	case Uint:
		*(*uint)(v.ptr) = 0
	case Uint8:
		*(*uint8)(v.ptr) = 0
	case Uint16:
		*(*uint16)(v.ptr) = 0
	case Uint32:
		*(*uint32)(v.ptr) = 0
	case Uint64:
		*(*uint64)(v.ptr) = 0
	case Uintptr:
		*(*uintptr)(v.ptr) = 0
	case Float32:
		*(*float32)(v.ptr) = 0
	case Float64:
		*(*float64)(v.ptr) = 0
	case Complex64:
		*(*complex64)(v.ptr) = 0
	case Complex128:
		*(*complex128)(v.ptr) = 0
	case String:
		*(*string)(v.ptr) = ""
	case Slice:
		*(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
	case Interface:
		*(*abi.EmptyInterface)(v.ptr) = abi.EmptyInterface{}
	case Chan, Func, Map, Pointer, UnsafePointer:
		*(*unsafe.Pointer)(v.ptr) = nil
	case Array, Struct:
		typedmemclr(v.typ(), v.ptr)
	default:
		// This should never happen, but will act as a safeguard for
		// later, as a default value doesn't makes sense here.
		panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
	}
}
| |
|
| | |
| | |
// Kind returns v's Kind.
// If v is the zero Value ([Value.IsValid] returns false), Kind returns Invalid.
func (v Value) Kind() Kind {
	return v.kind()
}
| |
|
| | |
| | |
// Len returns v's length.
// It panics if v's Kind is not Array, Chan, Map, Slice, String,
// or pointer to Array.
func (v Value) Len() int {
	// inlinable fast path for the common slice case
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Len
	}
	return v.lenNonSlice()
}
| |
|
// lenNonSlice handles Len for every kind except Slice,
// which Len itself handles inline.
func (v Value) lenNonSlice() int {
	switch k := v.kind(); k {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		return int(tt.Len)
	case Chan:
		return chanlen(v.pointer())
	case Map:
		return maplen(v.pointer())
	case String:
		// String is bigger than a word; assume flagIndir.
		return (*unsafeheader.String)(v.ptr).Len
	case Ptr:
		// Pointer-to-array: length comes from the array type itself,
		// so no dereference is needed.
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Len", v.kind()})
}
| |
|
| | |
| | |
// copyVal returns a Value containing a copy of the typ value at ptr,
// so later mutations of the source do not show through the result.
func copyVal(typ *abi.Type, fl flag, ptr unsafe.Pointer) Value {
	if !typ.IsDirectIface() {
		// Indirect values: copy the data into fresh storage.
		c := unsafe_New(typ)
		typedmemmove(typ, c, ptr)
		return Value{typ, c, fl | flagIndir}
	}
	// Pointer-shaped values: the single word is the value itself.
	return Value{typ, *(*unsafe.Pointer)(ptr), fl}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Method returns a function value corresponding to v's i'th method.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func (v Value) Method(i int) Value {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.Method", Invalid})
	}
	if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ()).NumMethod()) {
		panic("reflect: Method index out of range")
	}
	if v.typ().Kind() == abi.Interface && v.IsNil() {
		panic("reflect: Method on nil interface value")
	}
	// Encode the method index into the flag word rather than building
	// the func eagerly; the receiver stays in v.ptr.
	fl := v.flag.ro() | (v.flag & flagIndir)
	fl |= flag(Func)
	fl |= flag(i)<<flagMethodShift | flagMethod
	return Value{v.typ(), v.ptr, fl}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | func (v Value) NumMethod() int { |
| | if v.typ() == nil { |
| | panic(&ValueError{"reflect.Value.NumMethod", Invalid}) |
| | } |
| | if v.flag&flagMethod != 0 { |
| | return 0 |
| | } |
| | return toRType(v.typ()).NumMethod() |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | func (v Value) MethodByName(name string) Value { |
| | if v.typ() == nil { |
| | panic(&ValueError{"reflect.Value.MethodByName", Invalid}) |
| | } |
| | if v.flag&flagMethod != 0 { |
| | return Value{} |
| | } |
| | m, ok := toRType(v.typ()).MethodByName(name) |
| | if !ok { |
| | return Value{} |
| | } |
| | return v.Method(m.Index) |
| | } |
| |
|
| | |
| | |
// NumField returns the number of fields in the struct v.
// It panics if v's Kind is not Struct.
func (v Value) NumField() int {
	v.mustBe(Struct)
	tt := (*structType)(unsafe.Pointer(v.typ()))
	return len(tt.Fields)
}
| |
|
| | |
| | |
| | func (v Value) OverflowComplex(x complex128) bool { |
| | k := v.kind() |
| | switch k { |
| | case Complex64: |
| | return overflowFloat32(real(x)) || overflowFloat32(imag(x)) |
| | case Complex128: |
| | return false |
| | } |
| | panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()}) |
| | } |
| |
|
| | |
| | |
| | func (v Value) OverflowFloat(x float64) bool { |
| | k := v.kind() |
| | switch k { |
| | case Float32: |
| | return overflowFloat32(x) |
| | case Float64: |
| | return false |
| | } |
| | panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()}) |
| | } |
| |
|
// overflowFloat32 reports whether the finite float64 x overflows the
// range of a float32. Infinities and NaN are representable (as
// themselves), so they report false.
func overflowFloat32(x float64) bool {
	x = math.Abs(x)
	return math.MaxFloat32 < x && x <= math.MaxFloat64
}
| |
|
| | |
| | |
// OverflowInt reports whether the int64 x cannot be represented by v's type.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) OverflowInt(x int64) bool {
	k := v.kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := v.typ().Size() * 8
		// Sign-extend the low bitSize bits; x overflows iff that
		// round-trip changes its value.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
}
| |
|
| | |
| | |
// OverflowUint reports whether the uint64 x cannot be represented by v's type.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32, or Uint64.
func (v Value) OverflowUint(x uint64) bool {
	k := v.kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		// ok to use v.typ_ directly here: Size only reads the type and
		// does not let the pointer escape.
		bitSize := v.typ_.Size() * 8
		// Zero-extend the low bitSize bits; x overflows iff that
		// round-trip changes its value.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
}
| |
|
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Pointer returns v's value as a uintptr.
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice,
// String, or UnsafePointer.
//
// Prefer [Value.UnsafePointer]: a uintptr is not tracked by the
// garbage collector.
func (v Value) Pointer() uintptr {
	// The returned uintptr is untracked, so force the pointee to the heap.
	escapes(v.ptr)

	k := v.kind()
	switch k {
	case Pointer:
		if !v.typ().Pointers() {
			// Pointer to a notinheap type: the GC does not scan it,
			// so sanity-check the raw value before handing it out.
			val := *(*uintptr)(v.ptr)
			if !verifyNotInHeapPtr(val) {
				panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
			}
			return val
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return uintptr(v.pointer())
	case Func:
		if v.flag&flagMethod != 0 {
			// Method values are all backed by the same trampoline,
			// so return that shared code pointer.
			return methodValueCallCodePtr()
		}
		p := v.pointer()
		// A func value is a pointer to a closure; the code pointer
		// is the closure's first word.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return uintptr(p)
	case Slice:
		return uintptr((*unsafeheader.Slice)(v.ptr).Data)
	case String:
		return uintptr((*unsafeheader.String)(v.ptr).Data)
	}
	panic(&ValueError{"reflect.Value.Pointer", v.kind()})
}
| |
|
| | |
| | |
| | |
| | |
| | |
// Recv receives and returns a value from the channel v.
// It panics if v's Kind is not Chan.
// The receive blocks until a value is ready.
// The boolean value ok is true if the value x corresponds to a send
// on the channel, false if it is a zero value received because the
// channel is closed.
func (v Value) Recv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(false)
}
| |
|
| | |
| | |
// recv receives from the channel v.
// If nb is true the receive is non-blocking; a false selected result
// means no value was available.
func (v Value) recv(nb bool) (val Value, ok bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&RecvDir == 0 {
		panic("reflect: recv on send-only channel")
	}
	t := tt.Elem
	val = Value{t, nil, flag(t.Kind())}
	// p is where chanrecv stores the received element: fresh storage
	// for indirect types, or directly into val.ptr for pointer-shaped ones.
	var p unsafe.Pointer
	if !t.IsDirectIface() {
		p = unsafe_New(t)
		val.ptr = p
		val.flag |= flagIndir
	} else {
		p = unsafe.Pointer(&val.ptr)
	}
	selected, ok := chanrecv(v.pointer(), nb, p)
	if !selected {
		// Non-blocking receive with nothing ready: no value.
		val = Value{}
	}
	return
}
| |
|
| | |
| | |
| | |
// Send sends x on the channel v.
// It panics if v's kind is not Chan or if x's type is not the same
// type as v's element type. The send blocks until the value is sent.
func (v Value) Send(x Value) {
	v.mustBe(Chan)
	v.mustBeExported()
	v.send(x, false)
}
| |
|
| | |
| | |
// send sends x on the channel v.
// If nb is true the send is non-blocking and selected reports whether
// the value was actually sent.
func (v Value) send(x Value, nb bool) (selected bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: send on recv-only channel")
	}
	x.mustBeExported()
	x = x.assignTo("reflect.Value.Send", tt.Elem, nil)
	// chansend needs the address of the element data.
	var p unsafe.Pointer
	if x.flag&flagIndir != 0 {
		p = x.ptr
	} else {
		p = unsafe.Pointer(&x.ptr)
	}
	return chansend(v.pointer(), p, nb)
}
| |
|
| | |
| | |
| | |
| | |
// Set assigns x to the value v.
// It panics if [Value.CanSet] returns false.
// As in Go, x's value must be assignable to v's type and
// must not be derived from use of unexported struct fields.
func (v Value) Set(x Value) {
	v.mustBeAssignable()
	x.mustBeExported() // do not let unexported x leak
	// If v is an interface, assignTo may need scratch space to build
	// the interface header in place.
	var target unsafe.Pointer
	if v.kind() == Interface {
		target = v.ptr
	}
	x = x.assignTo("reflect.Set", v.typ(), target)
	if x.flag&flagIndir != 0 {
		if x.ptr == unsafe.Pointer(&zeroVal[0]) {
			// Assigning the shared zero value: clear instead of copying.
			typedmemclr(v.typ(), v.ptr)
		} else {
			typedmemmove(v.typ(), v.ptr, x.ptr)
		}
	} else {
		// Pointer-shaped value: a single word store suffices.
		*(*unsafe.Pointer)(v.ptr) = x.ptr
	}
}
| |
|
| | |
| | |
// SetBool sets v's underlying value.
// It panics if v's Kind is not Bool or if [Value.CanSet] returns false.
func (v Value) SetBool(x bool) {
	v.mustBeAssignable()
	v.mustBe(Bool)
	*(*bool)(v.ptr) = x
}
| |
|
| | |
| | |
| | |
// SetBytes sets v's underlying value.
// It panics if v's underlying value is not a slice of bytes
// or if [Value.CanSet] returns false.
func (v Value) SetBytes(x []byte) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if toRType(v.typ()).Elem().Kind() != Uint8 {
		panic("reflect.Value.SetBytes of non-byte slice")
	}
	*(*[]byte)(v.ptr) = x
}
| |
|
| | |
| | |
| | |
// setRunes sets v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s)
// or if [Value.CanSet] returns false.
func (v Value) setRunes(x []rune) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ().Elem().Kind() != abi.Int32 {
		panic("reflect.Value.setRunes of non-rune slice")
	}
	*(*[]rune)(v.ptr) = x
}
| |
|
| | |
| | |
| | |
| | func (v Value) SetComplex(x complex128) { |
| | v.mustBeAssignable() |
| | switch k := v.kind(); k { |
| | default: |
| | panic(&ValueError{"reflect.Value.SetComplex", v.kind()}) |
| | case Complex64: |
| | *(*complex64)(v.ptr) = complex64(x) |
| | case Complex128: |
| | *(*complex128)(v.ptr) = x |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | func (v Value) SetFloat(x float64) { |
| | v.mustBeAssignable() |
| | switch k := v.kind(); k { |
| | default: |
| | panic(&ValueError{"reflect.Value.SetFloat", v.kind()}) |
| | case Float32: |
| | *(*float32)(v.ptr) = float32(x) |
| | case Float64: |
| | *(*float64)(v.ptr) = x |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | func (v Value) SetInt(x int64) { |
| | v.mustBeAssignable() |
| | switch k := v.kind(); k { |
| | default: |
| | panic(&ValueError{"reflect.Value.SetInt", v.kind()}) |
| | case Int: |
| | *(*int)(v.ptr) = int(x) |
| | case Int8: |
| | *(*int8)(v.ptr) = int8(x) |
| | case Int16: |
| | *(*int16)(v.ptr) = int16(x) |
| | case Int32: |
| | *(*int32)(v.ptr) = int32(x) |
| | case Int64: |
| | *(*int64)(v.ptr) = x |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | |
// SetLen sets v's length to n.
// It panics if v's Kind is not Slice or if n is negative or
// greater than the capacity of the slice.
func (v Value) SetLen(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	// The unsigned compare also rejects negative n.
	if uint(n) > uint(s.Cap) {
		panic("reflect: slice length out of range in SetLen")
	}
	s.Len = n
}
| |
|
| | |
| | |
| | |
| | |
// SetCap sets v's capacity to n.
// It panics if v's Kind is not Slice or if n is smaller than the
// length or greater than the capacity of the slice.
func (v Value) SetCap(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	if n < s.Len || n > s.Cap {
		panic("reflect: slice capacity out of range in SetCap")
	}
	s.Cap = n
}
| |
|
| | |
| | |
| | |
| | func (v Value) SetUint(x uint64) { |
| | v.mustBeAssignable() |
| | switch k := v.kind(); k { |
| | default: |
| | panic(&ValueError{"reflect.Value.SetUint", v.kind()}) |
| | case Uint: |
| | *(*uint)(v.ptr) = uint(x) |
| | case Uint8: |
| | *(*uint8)(v.ptr) = uint8(x) |
| | case Uint16: |
| | *(*uint16)(v.ptr) = uint16(x) |
| | case Uint32: |
| | *(*uint32)(v.ptr) = uint32(x) |
| | case Uint64: |
| | *(*uint64)(v.ptr) = x |
| | case Uintptr: |
| | *(*uintptr)(v.ptr) = uintptr(x) |
| | } |
| | } |
| |
|
| | |
| | |
| | |
// SetPointer sets the [unsafe.Pointer] value v to x.
// It panics if v's Kind is not UnsafePointer.
func (v Value) SetPointer(x unsafe.Pointer) {
	v.mustBeAssignable()
	v.mustBe(UnsafePointer)
	*(*unsafe.Pointer)(v.ptr) = x
}
| |
|
| | |
| | |
// SetString sets v's underlying value to x.
// It panics if v's Kind is not String or if [Value.CanSet] returns false.
func (v Value) SetString(x string) {
	v.mustBeAssignable()
	v.mustBe(String)
	*(*string)(v.ptr) = x
}
| |
|
| | |
| | |
| | |
// Slice returns v[i:j].
// It panics if v's Kind is not Array, Slice or String, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice(i, j int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		cap = int(tt.Len)
		typ = (*sliceType)(unsafe.Pointer(tt.Slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ()))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap

	case String:
		// Strings are handled entirely here: string slicing never
		// reaches the shared slice-building code below.
		s := (*unsafeheader.String)(v.ptr)
		if i < 0 || j < i || j > s.Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
		var t unsafeheader.String
		if i < s.Len {
			t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
		}
		return Value{v.typ(), unsafe.Pointer(&t), v.flag}
	}

	if i < 0 || j < i || j > cap {
		panic("reflect.Value.Slice: slice index out of bounds")
	}

	// Declare slice so that gc can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = cap - i
	if cap-i > 0 {
		s.Data = arrayAt(base, i, typ.Elem.Size(), "i < cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.Common(), unsafe.Pointer(&x), fl}
}
| |
|
| | |
| | |
| | |
// Slice3 is the 3-index form of the slice operation: it returns v[i:j:k].
// It panics if v's Kind is not Array or Slice, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice3(i, j, k int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice3", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		cap = int(tt.Len)
		typ = (*sliceType)(unsafe.Pointer(tt.Slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ()))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap
	}

	if i < 0 || j < i || k < j || k > cap {
		panic("reflect.Value.Slice3: slice index out of bounds")
	}

	// Declare slice so that the garbage collector
	// can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = k - i
	if k-i > 0 {
		s.Data = arrayAt(base, i, typ.Elem.Size(), "i < k <= cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.Common(), unsafe.Pointer(&x), fl}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// String returns the string v's underlying value, as a string.
// String is a special case because of Go's String method convention:
// unlike the other getters, it does not panic if v's Kind is not String;
// instead, it returns a string of the form "<T value>" where T is v's type.
func (v Value) String() string {
	// stringNonString is split out to keep String inlineable for string kinds.
	if v.kind() == String {
		return *(*string)(v.ptr)
	}
	return v.stringNonString()
}
| |
|
| | func (v Value) stringNonString() string { |
| | if v.kind() == Invalid { |
| | return "<invalid Value>" |
| | } |
| | |
| | |
| | return "<" + v.Type().String() + " Value>" |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
// TryRecv attempts to receive a value from the channel v but will not block.
// It panics if v's Kind is not Chan.
// If the receive delivers a value, x is the transferred value and ok is true.
// If the receive cannot finish without blocking, x is the zero Value and
// ok is false. If the channel is closed, x is the zero value for the
// channel's element type and ok is false.
func (v Value) TryRecv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(true)
}
| |
|
| | |
| | |
| | |
| | |
// TrySend attempts to send x on the channel v but will not block.
// It panics if v's Kind is not Chan.
// It reports whether the value was sent.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) TrySend(x Value) bool {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.send(x, true)
}
| |
|
| | |
// Type returns v's type.
func (v Value) Type() Type {
	// Fast inlineable path for ordinary (non-method) values.
	if v.flag != 0 && v.flag&flagMethod == 0 {
		return (*rtype)(abi.NoEscape(unsafe.Pointer(v.typ_)))
	}
	return v.typeSlow()
}
| |
|
| | |
// typeSlow handles Type for invalid values and method values.
func (v Value) typeSlow() Type {
	return toRType(v.abiTypeSlow())
}
| |
|
// abiType returns v's *abi.Type, resolving method values to the
// method's function type when necessary.
func (v Value) abiType() *abi.Type {
	if v.flag != 0 && v.flag&flagMethod == 0 {
		return v.typ()
	}
	return v.abiTypeSlow()
}
| |
|
// abiTypeSlow computes the *abi.Type for invalid values and method values.
func (v Value) abiTypeSlow() *abi.Type {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Type", Invalid})
	}

	typ := v.typ()
	if v.flag&flagMethod == 0 {
		return v.typ()
	}

	// Method value: the flag word encodes the method index above
	// flagMethodShift; look up the method's function type.
	i := int(v.flag) >> flagMethodShift
	if v.typ().Kind() == abi.Interface {
		// Interface method: use the method table of the interface type.
		tt := (*interfaceType)(unsafe.Pointer(typ))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		return typeOffFor(typ, m.Typ)
	}
	// Concrete method: index into the type's exported method list.
	ms := typ.ExportedMethods()
	if uint(i) >= uint(len(ms)) {
		panic("reflect: internal error: invalid method index")
	}
	m := ms[i]
	return typeOffFor(typ, m.Mtyp)
}
| |
|
| | |
| | func (v Value) CanUint() bool { |
| | switch v.kind() { |
| | case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr: |
| | return true |
| | default: |
| | return false |
| | } |
| | } |
| |
|
| | |
| | |
| | func (v Value) Uint() uint64 { |
| | k := v.kind() |
| | p := v.ptr |
| | switch k { |
| | case Uint: |
| | return uint64(*(*uint)(p)) |
| | case Uint8: |
| | return uint64(*(*uint8)(p)) |
| | case Uint16: |
| | return uint64(*(*uint16)(p)) |
| | case Uint32: |
| | return uint64(*(*uint32)(p)) |
| | case Uint64: |
| | return *(*uint64)(p) |
| | case Uintptr: |
| | return uint64(*(*uintptr)(p)) |
| | } |
| | panic(&ValueError{"reflect.Value.Uint", v.kind()}) |
| | } |
| |
|
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | |
// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It panics if v is not addressable.
//
// Prefer [Value.Addr] and [Value.UnsafePointer]: a uintptr is not
// tracked by the garbage collector.
func (v Value) UnsafeAddr() uintptr {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
	// The returned uintptr is untracked, so force the data to the heap.
	escapes(v.ptr)
	return uintptr(v.ptr)
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// UnsafePointer returns v's value as an [unsafe.Pointer].
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice,
// String, or UnsafePointer.
func (v Value) UnsafePointer() unsafe.Pointer {
	k := v.kind()
	switch k {
	case Pointer:
		if !v.typ().Pointers() {
			// Pointer to a notinheap type: the GC does not scan it,
			// so sanity-check the raw value before handing it out.
			if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
				panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
			}
			return *(*unsafe.Pointer)(v.ptr)
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return v.pointer()
	case Func:
		if v.flag&flagMethod != 0 {
			// Method values are all backed by the same trampoline,
			// so return that shared code pointer.
			code := methodValueCallCodePtr()
			return *(*unsafe.Pointer)(unsafe.Pointer(&code))
		}
		p := v.pointer()
		// A func value is a pointer to a closure; the code pointer
		// is the closure's first word.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return p
	case Slice:
		return (*unsafeheader.Slice)(v.ptr).Data
	case String:
		return (*unsafeheader.String)(v.ptr).Data
	}
	panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | func (v Value) Fields() iter.Seq2[StructField, Value] { |
| | t := v.Type() |
| | if t.Kind() != Struct { |
| | panic("reflect: Fields of non-struct type " + t.String()) |
| | } |
| | return func(yield func(StructField, Value) bool) { |
| | for i := range v.NumField() { |
| | if !yield(t.Field(i), v.Field(i)) { |
| | return |
| | } |
| | } |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | func (v Value) Methods() iter.Seq2[Method, Value] { |
| | return func(yield func(Method, Value) bool) { |
| | rtype := v.Type() |
| | for i := range v.NumMethod() { |
| | if !yield(rtype.Method(i), v.Method(i)) { |
| | return |
| | } |
| | } |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release. Moreover, the Data field is not
// sufficient to guarantee the data it references will not be
// garbage collected, so programs must keep a separate, correctly
// typed pointer to the underlying data.
//
// Deprecated: Use unsafe.String or unsafe.StringData instead.
type StringHeader struct {
	Data uintptr
	Len  int
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release. Moreover, the Data field is not
// sufficient to guarantee the data it references will not be
// garbage collected, so programs must keep a separate, correctly
// typed pointer to the underlying data.
//
// Deprecated: Use unsafe.Slice or unsafe.SliceData instead.
type SliceHeader struct {
	Data uintptr
	Len  int
	Cap  int
}
| |
|
// typesMustMatch panics, with the given description, unless t1 and t2
// are identical types.
func typesMustMatch(what string, t1, t2 Type) {
	if t1 != t2 {
		panic(what + ": " + t1.String() + " != " + t2.String())
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// arrayAt returns the i'th element of p,
// an array whose elements are eltSize bytes wide.
// The array pointed at by p must have at least i+1 elements:
// it is invalid (but impossible to check here) to pass i >= len,
// because then the result will point outside the array.
// whySafe must explain why i < len. (Passing "i < len" is fine;
// the benefit is to surface this assumption at the call site.)
// The parameter exists only to document callers; it is not used.
func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
	return add(p, uintptr(i)*eltSize, "i < len")
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// Grow increases the slice's capacity, if necessary, to guarantee space for
// another n elements. After Grow(n), at least n elements can be appended
// to the slice without another allocation.
//
// It panics if v's Kind is not a Slice, or if n is negative or too large to
// allocate the memory.
func (v Value) Grow(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	v.grow(n)
}
| |
|
| | |
// grow is identical to Grow but does not check for assignability.
func (v Value) grow(n int) {
	p := (*unsafeheader.Slice)(v.ptr)
	switch {
	case n < 0:
		panic("reflect.Value.Grow: negative len")
	case p.Len+n < 0:
		// Len+n wrapped around int.
		panic("reflect.Value.Grow: slice overflow")
	case p.Len+n > p.Cap:
		// Only reallocate when the current capacity is insufficient.
		t := v.typ().Elem()
		*p = growslice(t, *p, n)
	}
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
// extendSlice extends a slice by n elements.
//
// Unlike Grow, which modifies the slice in place and
// does not change the length of the slice in place,
// extendSlice returns a new slice value with the length
// incremented by the number of specified elements.
func (v Value) extendSlice(n int) Value {
	v.mustBeExported()
	v.mustBe(Slice)

	// Shallow copy the slice header so the original Value is untouched.
	sh := *(*unsafeheader.Slice)(v.ptr)
	s := &sh
	v.ptr = unsafe.Pointer(s)
	// The copy lives in local storage, so it is indirect but not addressable.
	v.flag = flagIndir | flag(Slice)

	v.grow(n) // fine to treat as assignable since we allocate a new slice header
	s.Len += n
	return v
}
| |
|
| | |
| | |
| | |
// Clear clears the contents of a map or zeroes the contents of a slice.
//
// It panics if v's Kind is not Map or Slice.
func (v Value) Clear() {
	switch v.Kind() {
	case Slice:
		// Zero every element in place, preserving len and cap.
		sh := *(*unsafeheader.Slice)(v.ptr)
		st := (*sliceType)(unsafe.Pointer(v.typ()))
		typedarrayclear(st.Elem, sh.Data, sh.Len)
	case Map:
		mapclear(v.typ(), v.pointer())
	default:
		panic(&ValueError{"reflect.Value.Clear", v.Kind()})
	}
}
| |
|
| | |
| | |
| | func Append(s Value, x ...Value) Value { |
| | s.mustBe(Slice) |
| | n := s.Len() |
| | s = s.extendSlice(len(x)) |
| | for i, v := range x { |
| | s.Index(n + i).Set(v) |
| | } |
| | return s |
| | } |
| |
|
| | |
| | |
// AppendSlice appends a slice t to a slice s and returns the resulting slice.
// The slices s and t must have the same element type.
func AppendSlice(s, t Value) Value {
	s.mustBe(Slice)
	t.mustBe(Slice)
	typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
	ns := s.Len()
	nt := t.Len()
	// Grow s by t's length, then bulk-copy t into the new tail.
	s = s.extendSlice(nt)
	Copy(s.Slice(ns, ns+nt), t)
	return s
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Copy copies the contents of src into dst until either
// dst has been filled or src has been exhausted.
// It returns the number of elements copied.
// Dst and src each must have kind Slice or Array, and
// dst and src must have the same element type.
//
// As a special case, src can have kind String if dst's element type is Uint8.
func Copy(dst, src Value) int {
	dk := dst.kind()
	if dk != Array && dk != Slice {
		panic(&ValueError{"reflect.Copy", dk})
	}
	if dk == Array {
		// Copying into an array mutates it in place.
		dst.mustBeAssignable()
	}
	dst.mustBeExported()

	sk := src.kind()
	var stringCopy bool
	if sk != Array && sk != Slice {
		// Special case: string source into a byte slice/array.
		stringCopy = sk == String && dst.typ().Elem().Kind() == abi.Uint8
		if !stringCopy {
			panic(&ValueError{"reflect.Copy", sk})
		}
	}
	src.mustBeExported()

	de := dst.typ().Elem()
	if !stringCopy {
		se := src.typ().Elem()
		typesMustMatch("reflect.Copy", toType(de), toType(se))
	}

	// Normalize both sides into slice headers so a single
	// typedslicecopy handles every kind combination.
	var ds, ss unsafeheader.Slice
	if dk == Array {
		ds.Data = dst.ptr
		ds.Len = dst.Len()
		ds.Cap = ds.Len
	} else {
		ds = *(*unsafeheader.Slice)(dst.ptr)
	}
	if sk == Array {
		ss.Data = src.ptr
		ss.Len = src.Len()
		ss.Cap = ss.Len
	} else if sk == Slice {
		ss = *(*unsafeheader.Slice)(src.ptr)
	} else {
		// String source: borrow its bytes as a read-only slice header.
		sh := *(*unsafeheader.String)(src.ptr)
		ss.Data = sh.Data
		ss.Len = sh.Len
		ss.Cap = sh.Len
	}

	return typedslicecopy(de.Common(), ds, ss)
}
| |
|
| | |
| | |
// A runtimeSelect is a single case passed to rselect.
// This must match ../runtime/select.go:/runtimeSelect
type runtimeSelect struct {
	dir SelectDir      // SelectSend, SelectRecv or SelectDefault
	typ *rtype         // channel type
	ch  unsafe.Pointer // channel
	val unsafe.Pointer // ptr to data (SendDir) or ptr to receive buffer (RecvDir)
}

// rselect runs a select. It returns the index of the chosen case.
// If the case was a receive, val is filled in with the received value.
// The conventional OK bool indicates whether the receive corresponds
// to a sent value.
//
// rselect is implemented in the runtime.
//
//go:noescape
func rselect([]runtimeSelect) (chosen int, recvOK bool)
| |
|
| | |
// A SelectDir describes the communication direction of a select case.
type SelectDir int

// NOTE: These values must match ../runtime/select.go:/selectDir.

const (
	_             SelectDir = iota
	SelectSend              // case Chan <- Send
	SelectRecv              // case <-Chan:
	SelectDefault           // default
)
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// A SelectCase describes a single case in a select operation.
// The kind of case depends on Dir, the communication direction.
//
// If Dir is SelectDefault, the case represents a default case.
// Chan and Send must be zero Values.
//
// If Dir is SelectSend, the case represents a send operation.
// Normally Chan's underlying value must be a channel and Send's
// underlying value must be assignable to the channel's element type.
// As a special case, if Chan is a zero Value, then the case is
// ignored, and the field Send will also be ignored and may be either
// zero or non-zero.
//
// If Dir is SelectRecv, the case represents a receive operation.
// Normally Chan's underlying value must be a channel and Send must be
// a zero Value. If Chan is a zero Value, then the case is ignored,
// but Send must still be a zero Value. When a receive operation is
// selected, the received Value is returned by Select.
type SelectCase struct {
	Dir  SelectDir // direction of case
	Chan Value     // channel to use (for send or receive)
	Send Value     // value to send (for send)
}
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Select executes a select operation described by the list of cases.
// Like the Go select statement, it blocks until at least one of the cases
// can proceed, makes a uniform pseudo-random choice,
// and then executes that case. It returns the index of the chosen case
// and, if that case was a receive operation, the value received and a
// boolean indicating whether the value corresponds to a send on the channel
// (as opposed to a zero value received because the channel is closed).
// Select supports a maximum of 65536 cases.
func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
	if len(cases) > 65536 {
		panic("reflect.Select: too many cases (max 65536)")
	}
	// NOTE: Do not trust that caller is not modifying cases data underfoot.
	// The range is safe because the caller cannot modify our copy of the len
	// and each iteration makes its own copy of the value c.
	var runcases []runtimeSelect
	if len(cases) > 4 {
		// Slice is heap allocated due to runtime dependent capacity.
		runcases = make([]runtimeSelect, len(cases))
	} else {
		// Slice can be stack allocated due to constant capacity.
		runcases = make([]runtimeSelect, len(cases), 4)
	}

	haveDefault := false
	for i, c := range cases {
		rc := &runcases[i]
		rc.dir = c.Dir
		switch c.Dir {
		default:
			panic("reflect.Select: invalid Dir")

		case SelectDefault: // default
			if haveDefault {
				panic("reflect.Select: multiple default cases")
			}
			haveDefault = true
			if c.Chan.IsValid() {
				panic("reflect.Select: default case has Chan value")
			}
			if c.Send.IsValid() {
				panic("reflect.Select: default case has Send value")
			}

		case SelectSend:
			ch := c.Chan
			if !ch.IsValid() {
				// A zero Chan means this case is ignored.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ()))
			if ChanDir(tt.Dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = toRType(&tt.Type)
			v := c.Send
			if !v.IsValid() {
				panic("reflect.Select: SendDir case missing Send value")
			}
			v.mustBeExported()
			v = v.assignTo("reflect.Select", tt.Elem, nil)
			if v.flag&flagIndir != 0 {
				rc.val = v.ptr
			} else {
				rc.val = unsafe.Pointer(&v.ptr)
			}
			// The value to send needs to escape. See the comment at rc.val in
			// the SelectRecv case: rselect does not track these pointers.
			escapes(rc.val)

		case SelectRecv:
			if c.Send.IsValid() {
				panic("reflect.Select: RecvDir case has Send value")
			}
			ch := c.Chan
			if !ch.IsValid() {
				// A zero Chan means this case is ignored.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ()))
			if ChanDir(tt.Dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = toRType(&tt.Type)
			// Allocate the receive buffer up front.
			rc.val = unsafe_New(tt.Elem)
		}
	}

	chosen, recvOK = rselect(runcases)
	if runcases[chosen].dir == SelectRecv {
		// Wrap the received data back up as a Value of the element type.
		tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
		t := tt.Elem
		p := runcases[chosen].val
		fl := flag(t.Kind())
		if !t.IsDirectIface() {
			recv = Value{t, p, fl | flagIndir}
		} else {
			recv = Value{t, *(*unsafe.Pointer)(p), fl}
		}
	}
	return chosen, recv, recvOK
}
| |
|
| | |
| | |
| | |
| |
|
| | |
| |
|
| | |
// unsafe_New allocates zeroed memory for one value of the given type.
// No body here: the implementation is provided by the runtime.
func unsafe_New(*abi.Type) unsafe.Pointer
| |
|
| | |
// unsafe_NewArray allocates zeroed memory for n contiguous values of the
// given element type. No body here: provided by the runtime.
func unsafe_NewArray(*abi.Type, int) unsafe.Pointer
| |
|
| | |
| | |
// MakeSlice creates a new zero-initialized slice value
// for the specified slice type, length, and capacity.
func MakeSlice(typ Type, len, cap int) Value {
	if typ.Kind() != Slice {
		panic("reflect.MakeSlice of non-slice type")
	}
	if len < 0 {
		panic("reflect.MakeSlice: negative len")
	}
	if cap < 0 {
		panic("reflect.MakeSlice: negative cap")
	}
	if len > cap {
		panic("reflect.MakeSlice: len > cap")
	}

	// Allocate the backing array and wrap it in a slice header; the
	// Value stores a pointer to that header, hence flagIndir.
	s := unsafeheader.Slice{Data: unsafe_NewArray(&(typ.Elem().(*rtype).t), cap), Len: len, Cap: cap}
	return Value{&typ.(*rtype).t, unsafe.Pointer(&s), flagIndir | flag(Slice)}
}
| |
|
| | |
| | |
| | |
| | |
// SliceAt returns a Value representing a slice whose underlying data
// starts at p and whose length and capacity are both n, analogous to
// the built-in unsafe.Slice.
func SliceAt(typ Type, p unsafe.Pointer, n int) Value {
	// Validate the (ptr, len) pair via the runtime, as unsafe.Slice would.
	unsafeslice(typ.common(), p, n)
	s := unsafeheader.Slice{Data: p, Len: n, Cap: n}
	return Value{SliceOf(typ).common(), unsafe.Pointer(&s), flagIndir | flag(Slice)}
}
| |
|
| | |
| | func MakeChan(typ Type, buffer int) Value { |
| | if typ.Kind() != Chan { |
| | panic("reflect.MakeChan of non-chan type") |
| | } |
| | if buffer < 0 { |
| | panic("reflect.MakeChan: negative buffer size") |
| | } |
| | if typ.ChanDir() != BothDir { |
| | panic("reflect.MakeChan: unidirectional channel type") |
| | } |
| | t := typ.common() |
| | ch := makechan(t, buffer) |
| | return Value{t, ch, flag(Chan)} |
| | } |
| |
|
| | |
// MakeMap creates a new map with the specified type.
func MakeMap(typ Type) Value {
	return MakeMapWithSize(typ, 0)
}
| |
|
| | |
| | |
| | func MakeMapWithSize(typ Type, n int) Value { |
| | if typ.Kind() != Map { |
| | panic("reflect.MakeMapWithSize of non-map type") |
| | } |
| | t := typ.common() |
| | m := makemap(t, n) |
| | return Value{t, m, flag(Map)} |
| | } |
| |
|
| | |
| | |
| | |
| | func Indirect(v Value) Value { |
| | if v.Kind() != Pointer { |
| | return v |
| | } |
| | return v.Elem() |
| | } |
| |
|
| | |
| | |
| | func ValueOf(i any) Value { |
| | if i == nil { |
| | return Value{} |
| | } |
| | return unpackEface(i) |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
// Zero returns a Value representing the zero value for the specified
// type. The result is different from the zero Value, which represents
// no value at all. The returned value is neither addressable nor
// settable.
func Zero(typ Type) Value {
	if typ == nil {
		panic("reflect: Zero(nil)")
	}
	t := &typ.(*rtype).t
	fl := flag(t.Kind())
	if !t.IsDirectIface() {
		var p unsafe.Pointer
		if t.Size() <= abi.ZeroValSize {
			// Small types share the zeroVal buffer as backing storage;
			// this is safe only because the result is not addressable,
			// so it must never be written through.
			p = unsafe.Pointer(&zeroVal[0])
		} else {
			p = unsafe_New(t)
		}
		return Value{t, p, fl | flagIndir}
	}
	// Direct-interface types: a nil data word is the zero value.
	return Value{t, nil, fl}
}
| |
|
| | |
// zeroVal is a shared buffer of zero bytes used by Zero as backing
// storage for any type whose size is at most abi.ZeroValSize. It must
// never be written to (Zero's result is not addressable).
var zeroVal [abi.ZeroValSize]byte
| |
|
| | |
| | |
// New returns a Value representing a pointer to a new zero value for
// the specified type. That is, the returned Value's Type is the pointer
// type to typ.
func New(typ Type) Value {
	if typ == nil {
		panic("reflect: New(nil)")
	}
	t := &typ.(*rtype).t
	pt := ptrTo(t)
	if !pt.IsDirectIface() {
		// A pointer type that is not pointer-shaped in an interface
		// indicates a not-in-heap type that cannot be allocated here.
		panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
	}
	ptr := unsafe_New(t)
	fl := flag(Pointer)
	return Value{pt, ptr, fl}
}
| |
|
| | |
| | |
| | func NewAt(typ Type, p unsafe.Pointer) Value { |
| | fl := flag(Pointer) |
| | t := typ.(*rtype) |
| | return Value{t.ptrTo(), p, fl} |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
// assignTo returns a value v that can be assigned directly to dst.
// It panics if v is not assignable to dst.
// For a conversion to an interface type, target, if not nil, is a
// suggested scratch space to use.
func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue(context, v)
	}

	switch {
	case directlyAssignable(dst, v.typ()):
		// Overwrite type so that they match; same memory layout, so the
		// data word can be reused as-is.
		fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
		fl |= flag(dst.Kind())
		return Value{dst, v.ptr, fl}

	case implements(dst, v.typ()):
		if v.Kind() == Interface && v.IsNil() {
			// A nil value of one interface type is assignable to any
			// interface it implements; return an explicit nil dst value
			// rather than going through ifaceE2I below.
			return Value{dst, nil, flag(Interface)}
		}
		x := valueInterface(v, false)
		if target == nil {
			target = unsafe_New(dst)
		}
		if dst.NumMethod() == 0 {
			// Empty interface: store the (type, data) pair directly.
			*(*any)(target) = x
		} else {
			// Non-empty interface: the runtime builds the itab.
			ifaceE2I(dst, x, target)
		}
		return Value{dst, target, flagIndir | flag(Interface)}
	}

	// Failed.
	panic(context + ": value of type " + stringFor(v.typ()) + " is not assignable to type " + stringFor(dst))
}
| |
|
| | |
| | |
| | |
| | func (v Value) Convert(t Type) Value { |
| | if v.flag&flagMethod != 0 { |
| | v = makeMethodValue("Convert", v) |
| | } |
| | op := convertOp(t.common(), v.typ()) |
| | if op == nil { |
| | panic("reflect.Value.Convert: value of type " + stringFor(v.typ()) + " cannot be converted to type " + t.String()) |
| | } |
| | return op(v, t) |
| | } |
| |
|
| | |
| | |
| | func (v Value) CanConvert(t Type) bool { |
| | vt := v.Type() |
| | if !vt.ConvertibleTo(t) { |
| | return false |
| | } |
| | |
| | |
| | switch { |
| | case vt.Kind() == Slice && t.Kind() == Array: |
| | if t.Len() > v.Len() { |
| | return false |
| | } |
| | case vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array: |
| | n := t.Elem().Len() |
| | if n > v.Len() { |
| | return false |
| | } |
| | } |
| | return true |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | func (v Value) Comparable() bool { |
| | k := v.Kind() |
| | switch k { |
| | case Invalid: |
| | return false |
| |
|
| | case Array: |
| | switch v.Type().Elem().Kind() { |
| | case Interface, Array, Struct: |
| | for i := 0; i < v.Type().Len(); i++ { |
| | if !v.Index(i).Comparable() { |
| | return false |
| | } |
| | } |
| | return true |
| | } |
| | return v.Type().Comparable() |
| |
|
| | case Interface: |
| | return v.IsNil() || v.Elem().Comparable() |
| |
|
| | case Struct: |
| | for _, value := range v.Fields() { |
| | if !value.Comparable() { |
| | return false |
| | } |
| | } |
| | return true |
| |
|
| | default: |
| | return v.Type().Comparable() |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Equal reports true if v is equal to u.
// For two invalid values, it reports true.
// For an interface value, it compares the dynamic value.
// Values of distinct types are never equal.
// It panics if the values are of an incomparable type
// (func, map, slice, or an aggregate containing one).
func (v Value) Equal(u Value) bool {
	if v.Kind() == Interface {
		v = v.Elem()
	}
	if u.Kind() == Interface {
		u = u.Elem()
	}

	// Two invalid values are equal; an invalid and a valid one are not.
	if !v.IsValid() || !u.IsValid() {
		return v.IsValid() == u.IsValid()
	}

	if v.Kind() != u.Kind() || v.Type() != u.Type() {
		return false
	}

	// Handle each Kind directly rather than calling valueInterface
	// to avoid allocating.
	switch v.Kind() {
	default:
		panic("reflect.Value.Equal: invalid Kind")
	case Bool:
		return v.Bool() == u.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == u.Int()
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == u.Uint()
	case Float32, Float64:
		return v.Float() == u.Float()
	case Complex64, Complex128:
		return v.Complex() == u.Complex()
	case String:
		return v.String() == u.String()
	case Chan, Pointer, UnsafePointer:
		return v.Pointer() == u.Pointer()
	case Array:
		// u and v have the same type, so they have the same length.
		vl := v.Len()
		if vl == 0 {
			// A zero-length array of an incomparable element type is
			// still incomparable; fall through to the panic below.
			if !v.Type().Elem().Comparable() {
				break
			}
			return true
		}
		for i := 0; i < vl; i++ {
			if !v.Index(i).Equal(u.Index(i)) {
				return false
			}
		}
		return true
	case Struct:
		// u and v have the same type, so they have the same fields.
		nf := v.NumField()
		for i := 0; i < nf; i++ {
			if !v.Field(i).Equal(u.Field(i)) {
				return false
			}
		}
		return true
	case Func, Map, Slice:
		// Incomparable kinds: fall through to the panic below.
		break
	}
	panic("reflect.Value.Equal: values of type " + v.Type().String() + " are not comparable")
}
| |
|
| | |
| | |
// convertOp returns the function to convert a value of type src to a
// value of type dst. If the conversion is illegal, convertOp returns nil.
func convertOp(dst, src *abi.Type) func(Value, Type) Value {
	switch Kind(src.Kind()) {
	case Int, Int8, Int16, Int32, Int64:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtInt
		case Float32, Float64:
			return cvtIntFloat
		case String:
			return cvtIntString
		}

	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtUint
		case Float32, Float64:
			return cvtUintFloat
		case String:
			return cvtUintString
		}

	case Float32, Float64:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64:
			return cvtFloatInt
		case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtFloatUint
		case Float32, Float64:
			return cvtFloat
		}

	case Complex64, Complex128:
		switch Kind(dst.Kind()) {
		case Complex64, Complex128:
			return cvtComplex
		}

	case String:
		// string -> []byte / []rune, but only for unnamed element types.
		if dst.Kind() == abi.Slice && pkgPathFor(dst.Elem()) == "" {
			switch Kind(dst.Elem().Kind()) {
			case Uint8:
				return cvtStringBytes
			case Int32:
				return cvtStringRunes
			}
		}

	case Slice:
		// []byte / []rune -> string, but only for unnamed element types.
		if dst.Kind() == abi.String && pkgPathFor(src.Elem()) == "" {
			switch Kind(src.Elem().Kind()) {
			case Uint8:
				return cvtBytesString
			case Int32:
				return cvtRunesString
			}
		}
		// []T -> *[N]T when element types are identical.
		if dst.Kind() == abi.Pointer && dst.Elem().Kind() == abi.Array && src.Elem() == dst.Elem().Elem() {
			return cvtSliceArrayPtr
		}
		// []T -> [N]T when element types are identical.
		if dst.Kind() == abi.Array && src.Elem() == dst.Elem() {
			return cvtSliceArray
		}

	case Chan:
		if dst.Kind() == abi.Chan && specialChannelAssignability(dst, src) {
			return cvtDirect
		}
	}

	// dst and src have the same underlying type.
	if haveIdenticalUnderlyingType(dst, src, false) {
		return cvtDirect
	}

	// dst and src are unnamed pointer types with the same underlying
	// base type.
	if dst.Kind() == abi.Pointer && nameFor(dst) == "" &&
		src.Kind() == abi.Pointer && nameFor(src) == "" &&
		haveIdenticalUnderlyingType(elem(dst), elem(src), false) {
		return cvtDirect
	}

	if implements(dst, src) {
		if src.Kind() == abi.Interface {
			return cvtI2I
		}
		return cvtT2I
	}

	return nil
}
| |
|
| | |
| | |
| | func makeInt(f flag, bits uint64, t Type) Value { |
| | typ := t.common() |
| | ptr := unsafe_New(typ) |
| | switch typ.Size() { |
| | case 1: |
| | *(*uint8)(ptr) = uint8(bits) |
| | case 2: |
| | *(*uint16)(ptr) = uint16(bits) |
| | case 4: |
| | *(*uint32)(ptr) = uint32(bits) |
| | case 8: |
| | *(*uint64)(ptr) = bits |
| | } |
| | return Value{typ, ptr, f | flagIndir | flag(typ.Kind())} |
| | } |
| |
|
| | |
| | |
| | func makeFloat(f flag, v float64, t Type) Value { |
| | typ := t.common() |
| | ptr := unsafe_New(typ) |
| | switch typ.Size() { |
| | case 4: |
| | *(*float32)(ptr) = float32(v) |
| | case 8: |
| | *(*float64)(ptr) = v |
| | } |
| | return Value{typ, ptr, f | flagIndir | flag(typ.Kind())} |
| | } |
| |
|
| | |
| | func makeFloat32(f flag, v float32, t Type) Value { |
| | typ := t.common() |
| | ptr := unsafe_New(typ) |
| | *(*float32)(ptr) = v |
| | return Value{typ, ptr, f | flagIndir | flag(typ.Kind())} |
| | } |
| |
|
| | |
| | |
| | func makeComplex(f flag, v complex128, t Type) Value { |
| | typ := t.common() |
| | ptr := unsafe_New(typ) |
| | switch typ.Size() { |
| | case 8: |
| | *(*complex64)(ptr) = complex64(v) |
| | case 16: |
| | *(*complex128)(ptr) = v |
| | } |
| | return Value{typ, ptr, f | flagIndir | flag(typ.Kind())} |
| | } |
| |
|
| | func makeString(f flag, v string, t Type) Value { |
| | ret := New(t).Elem() |
| | ret.SetString(v) |
| | ret.flag = ret.flag&^flagAddr | f |
| | return ret |
| | } |
| |
|
| | func makeBytes(f flag, v []byte, t Type) Value { |
| | ret := New(t).Elem() |
| | ret.SetBytes(v) |
| | ret.flag = ret.flag&^flagAddr | f |
| | return ret |
| | } |
| |
|
| | func makeRunes(f flag, v []rune, t Type) Value { |
| | ret := New(t).Elem() |
| | ret.setRunes(v) |
| | ret.flag = ret.flag&^flagAddr | f |
| | return ret |
| | } |
| |
|
| | |
| | |
| | |
| | |
| |
|
| | |
| | func cvtInt(v Value, t Type) Value { |
| | return makeInt(v.flag.ro(), uint64(v.Int()), t) |
| | } |
| |
|
| | |
| | func cvtUint(v Value, t Type) Value { |
| | return makeInt(v.flag.ro(), v.Uint(), t) |
| | } |
| |
|
| | |
| | func cvtFloatInt(v Value, t Type) Value { |
| | return makeInt(v.flag.ro(), uint64(int64(v.Float())), t) |
| | } |
| |
|
| | |
| | func cvtFloatUint(v Value, t Type) Value { |
| | return makeInt(v.flag.ro(), uint64(v.Float()), t) |
| | } |
| |
|
| | |
| | func cvtIntFloat(v Value, t Type) Value { |
| | return makeFloat(v.flag.ro(), float64(v.Int()), t) |
| | } |
| |
|
| | |
| | func cvtUintFloat(v Value, t Type) Value { |
| | return makeFloat(v.flag.ro(), float64(v.Uint()), t) |
| | } |
| |
|
| | |
// convertOp: floatXX -> floatXX
func cvtFloat(v Value, t Type) Value {
	if v.Type().Kind() == Float32 && t.Kind() == Float32 {
		// Don't route float32-to-float32 through float64 and back:
		// copying the raw bits preserves the exact float32 bit pattern
		// (notably signaling NaNs, which the round trip would quiet).
		return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
	}
	return makeFloat(v.flag.ro(), v.Float(), t)
}
| |
|
| | |
| | func cvtComplex(v Value, t Type) Value { |
| | return makeComplex(v.flag.ro(), v.Complex(), t) |
| | } |
| |
|
| | |
| | func cvtIntString(v Value, t Type) Value { |
| | s := "\uFFFD" |
| | if x := v.Int(); int64(rune(x)) == x { |
| | s = string(rune(x)) |
| | } |
| | return makeString(v.flag.ro(), s, t) |
| | } |
| |
|
| | |
| | func cvtUintString(v Value, t Type) Value { |
| | s := "\uFFFD" |
| | if x := v.Uint(); uint64(rune(x)) == x { |
| | s = string(rune(x)) |
| | } |
| | return makeString(v.flag.ro(), s, t) |
| | } |
| |
|
| | |
| | func cvtBytesString(v Value, t Type) Value { |
| | return makeString(v.flag.ro(), string(v.Bytes()), t) |
| | } |
| |
|
| | |
| | func cvtStringBytes(v Value, t Type) Value { |
| | return makeBytes(v.flag.ro(), []byte(v.String()), t) |
| | } |
| |
|
| | |
| | func cvtRunesString(v Value, t Type) Value { |
| | return makeString(v.flag.ro(), string(v.runes()), t) |
| | } |
| |
|
| | |
| | func cvtStringRunes(v Value, t Type) Value { |
| | return makeRunes(v.flag.ro(), []rune(v.String()), t) |
| | } |
| |
|
| | |
// convertOp: []T -> *[N]T
// Panics if the slice is shorter than the target array length; the
// result aliases the slice's backing array (no copy).
func cvtSliceArrayPtr(v Value, t Type) Value {
	n := t.Elem().Len()
	if n > v.Len() {
		panic("reflect: cannot convert slice with length " + strconv.Itoa(v.Len()) + " to pointer to array with length " + strconv.Itoa(n))
	}
	h := (*unsafeheader.Slice)(v.ptr)
	// Reuse the slice's data pointer; clear indirection/addressability
	// and swap the kind bits to Pointer.
	return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
}
| |
|
| | |
// convertOp: []T -> [N]T
// Panics if the slice is shorter than the target array length; the
// array contents are copied out of the slice's backing array.
func cvtSliceArray(v Value, t Type) Value {
	n := t.Len()
	if n > v.Len() {
		panic("reflect: cannot convert slice with length " + strconv.Itoa(v.Len()) + " to array with length " + strconv.Itoa(n))
	}
	h := (*unsafeheader.Slice)(v.ptr)
	typ := t.common()
	ptr := h.Data
	// Copy into fresh storage so the result does not alias the slice.
	c := unsafe_New(typ)
	typedmemmove(typ, c, ptr)
	ptr = c

	return Value{typ, ptr, v.flag&^(flagAddr|flagKindMask) | flag(Array)}
}
| |
|
| | |
// convertOp: direct copy (same memory layout, only the type changes)
func cvtDirect(v Value, typ Type) Value {
	f := v.flag
	t := typ.common()
	ptr := v.ptr
	if f&flagAddr != 0 {
		// The value is addressable (mutable through v); snapshot it so
		// the converted result cannot change underfoot.
		c := unsafe_New(t)
		typedmemmove(t, c, ptr)
		ptr = c
		f &^= flagAddr
	}
	return Value{t, ptr, v.flag.ro() | f}
}
| |
|
| | |
// convertOp: concrete -> interface
func cvtT2I(v Value, typ Type) Value {
	target := unsafe_New(typ.common())
	x := valueInterface(v, false)
	if typ.NumMethod() == 0 {
		// Empty interface: store the (type, data) pair directly.
		*(*any)(target) = x
	} else {
		// Non-empty interface: the runtime builds the itab.
		ifaceE2I(typ.common(), x, target)
	}
	return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
}
| |
|
| | |
| | func cvtI2I(v Value, typ Type) Value { |
| | if v.IsNil() { |
| | ret := Zero(typ) |
| | ret.flag |= v.flag.ro() |
| | return ret |
| | } |
| | return cvtT2I(v.Elem(), typ) |
| | } |
| |
|
| | |
| | |
| | |
// chancap returns the capacity of channel ch. No body here: provided by
// the runtime.
func chancap(ch unsafe.Pointer) int

// chanclose closes channel ch. Provided by the runtime.
func chanclose(ch unsafe.Pointer)

// chanlen returns the number of elements queued in channel ch.
// Provided by the runtime.
func chanlen(ch unsafe.Pointer) int
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
// chanrecv receives from channel ch into val; nb selects non-blocking
// mode. Provided by the runtime; result meanings match the runtime's
// reflect channel-receive helper (selected: operation completed;
// received: a sent value, not a closed-channel zero, was obtained).
func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)

// chansend0 is the runtime implementation of channel send; callers in
// this file go through the chansend wrapper, which marks val's content
// as escaping first.
func chansend0(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
| |
|
// chansend sends val on channel ch (non-blocking if nb), first marking
// the pointed-to content as escaping since the runtime may retain it
// beyond this call.
func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool {
	contentEscapes(val)
	return chansend0(ch, val, nb)
}
| |
|
// makechan creates a new channel of type typ with buffer capacity size.
// Provided by the runtime.
func makechan(typ *abi.Type, size int) (ch unsafe.Pointer)

// makemap creates a new map of type t with a size hint of cap.
// Provided by the runtime.
func makemap(t *abi.Type, cap int) (m unsafe.Pointer)

// mapaccess looks up key in map m and returns a pointer to the
// corresponding value (nil when absent, per the runtime's reflect map
// helpers). Provided by the runtime.
func mapaccess(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)

// mapaccess_faststr is mapaccess specialized for string keys.
func mapaccess_faststr(t *abi.Type, m unsafe.Pointer, key string) (val unsafe.Pointer)

// mapassign0 is the runtime implementation of map assignment; callers
// in this file go through the mapassign wrapper, which handles escape
// marking.
func mapassign0(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer)
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// mapassign stores val under key in map m, first marking the content of
// key and val as escaping: the map retains references to them beyond
// this call.
func mapassign(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer) {
	contentEscapes(key)
	contentEscapes(val)
	mapassign0(t, m, key, val)
}
| |
|
| | |
// mapassign_faststr0 is the runtime implementation of string-keyed map
// assignment; callers in this file go through the mapassign_faststr
// wrapper, which handles escape marking.
func mapassign_faststr0(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer)

// mapassign_faststr stores val under string key in map m, first marking
// the key's backing bytes and val's content as escaping.
func mapassign_faststr(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer) {
	contentEscapes((*unsafeheader.String)(unsafe.Pointer(&key)).Data)
	contentEscapes(val)
	mapassign_faststr0(t, m, key, val)
}
| |
|
| | |
// mapdelete removes the entry for key from map m. Provided by the runtime.
func mapdelete(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer)

// mapdelete_faststr is mapdelete specialized for string keys.
func mapdelete_faststr(t *abi.Type, m unsafe.Pointer, key string)

// maplen returns the number of entries in map m. Provided by the runtime.
func maplen(m unsafe.Pointer) int

// mapclear deletes all entries from map m. Provided by the runtime.
func mapclear(t *abi.Type, m unsafe.Pointer)
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// call invokes the function f with the stack-based arguments described
// by stackArgsType at stackArgs and register-based arguments in regArgs;
// stackArgsSize, stackRetOffset, and frameSize describe the frame
// layout. Provided by the runtime.
func call(stackArgsType *abi.Type, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)
| |
|
// ifaceE2I converts the empty-interface value src to the non-empty
// interface type t, writing the result to dst. Provided by the runtime.
func ifaceE2I(t *abi.Type, src any, dst unsafe.Pointer)

// memmove copies size bytes from src to dst, with no type information
// and hence no write barriers. Provided by the runtime.
func memmove(dst, src unsafe.Pointer, size uintptr)
| |
|
| | |
| | |
| | |
// typedmemmove copies a value of type t from src to dst, with the
// GC-aware handling a typed copy requires. Provided by the runtime.
func typedmemmove(t *abi.Type, dst, src unsafe.Pointer)

// typedmemclr zeroes the value of type t at ptr. Provided by the runtime.
func typedmemclr(t *abi.Type, ptr unsafe.Pointer)

// typedmemclrpartial zeroes size bytes at offset off within the value
// of type t at ptr. Provided by the runtime.
func typedmemclrpartial(t *abi.Type, ptr unsafe.Pointer, off, size uintptr)

// typedslicecopy copies elements of type t from the src slice to the
// dst slice, returning the number of elements copied. Provided by the
// runtime.
func typedslicecopy(t *abi.Type, dst, src unsafeheader.Slice) int

// typedarrayclear zeroes len contiguous elements of elemType starting
// at ptr. Provided by the runtime.
func typedarrayclear(elemType *abi.Type, ptr unsafe.Pointer, len int)

// typehash computes the hash of the value of type t at p, seeded by h.
// Provided by the runtime.
func typehash(t *abi.Type, p unsafe.Pointer, h uintptr) uintptr

// verifyNotInHeapPtr checks a pointer being stored into a not-in-heap
// location. Provided by the runtime.
func verifyNotInHeapPtr(p uintptr) bool

// growslice grows the slice old of element type t to hold at least num
// more elements. Provided by the runtime.
func growslice(t *abi.Type, old unsafeheader.Slice, num int) unsafeheader.Slice

// unsafeslice validates the (ptr, len) pair as unsafe.Slice would.
// Provided by the runtime.
func unsafeslice(t *abi.Type, ptr unsafe.Pointer, len int)
| |
|
| | |
| | |
| | |
// escapes is a dummy annotation marking that x escapes: assigning x to
// the package-level dummy.x forces the compiler's escape analysis to
// heap-allocate it, for cases where the reflect code is too clever for
// the compiler to follow. dummy.b is never observed true at runtime.
func escapes(x any) {
	if dummy.b {
		dummy.x = x
	}
}
| |
|
// dummy is the package-level sink used by escapes/contentEscapes to
// defeat escape analysis. Do not restructure: the write to dummy.x is
// what makes arguments escape.
var dummy struct {
	b bool
	x any
}
| |
|
| | |
| | |
| | |
| | |
// contentEscapes is a dummy annotation marking that the content pointed
// to by x escapes (roughly modeling heap = *x), for cases where the
// reflect code is too clever for the compiler's escape analysis.
func contentEscapes(x unsafe.Pointer) {
	if dummy.b {
		// The dereference may not always be safe, but it is never executed.
		escapes(*(*any)(x))
	}
}
| |
|