| |
| |
| |
|
|
| package walk |
|
|
| import ( |
| "go/constant" |
| "internal/abi" |
|
|
| "cmd/compile/internal/base" |
| "cmd/compile/internal/ir" |
| "cmd/compile/internal/reflectdata" |
| "cmd/compile/internal/typecheck" |
| "cmd/compile/internal/types" |
| "cmd/internal/src" |
| ) |
|
|
| |
// walkAssign walks an OAS (assignment) or OASOP (op-assignment, x op= y) node,
// lowering it toward back-end-ready form. It may return a replacement
// statement (e.g. a runtime call for channel receives).
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so that the walked LHS map index
	// can be shared with the append's first argument below.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		// Reuse the walked, safe LHS as the append destination.
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	if oaslit(as, init) {
		// Assignment fully handled as a composite-literal store.
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// Declaration with implicit zeroing; nothing more to do here.
		return as
	}

	// Without instrumentation, assignments of a zero value need no
	// further rewriting.
	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c. Rewrite as a chanrecv1(c, &x) runtime call.
		// NOTE(review): this takes the address of as.X directly, so an
		// earlier phase presumably made x addressable — confirm in order.go.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, l)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			// x = append(y, z...)
			r = appendSlice(call, init)
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// Left in place for the back end; set up the element RType
			// argument it needs. Do not fall through to convas, which
			// could add a write barrier.
			r.Fun = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise the append was fully lowered above;
		// treat as an ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}
|
|
| |
| func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node { |
| walkExprListSafe(n.Lhs, init) |
| n.Rhs[0] = walkExpr(n.Rhs[0], init) |
| return n |
| } |
|
|
| |
// walkAssignFunc walks an OAS2FUNC node (a, b = f()), rewriting it into
// per-result assignments from the call's result slots, except for
// intrinsic calls which are left in multi-assign form for the back end.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	// Walk the LHS before the call so their side effects run first.
	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
		// Keep the multi-value form; the back end handles intrinsics directly.
		n.Rhs = []ir.Node{r}
		return n
	}
	// Emit the call itself, then assign its results individually.
	init.Append(r)

	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}
|
|
| |
| func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { |
| init.Append(ir.TakeInit(n)...) |
| return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs)) |
| } |
|
|
| |
// walkAssignMapRead walks an OAS2MAPR node (a, ok = m[k]), rewriting it
// into a mapaccess2* runtime call plus a dereference of the returned
// element pointer.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type()

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index, false)

	// Rewrite:
	//	a, ok = m[k]
	// into:
	//	var, ok = mapaccess2*(t, m, k)
	//	a = *var
	a := n.Lhs[0]

	var call *ir.CallExpr
	if w := t.Elem().Size(); w <= abi.ZeroValSize {
		// Small elements: the runtime can return a pointer into the
		// shared zero value for missing keys.
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key)
	} else {
		// Large ("fat") elements need an explicit zero-value address.
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key, z)
	}

	// If the ok result is assigned to a non-blank boolean variable,
	// give the call's second result that variable's exact type.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// Don't generate a = *var if a is blank.
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}
|
|
| |
| func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { |
| init.Append(ir.TakeInit(n)...) |
|
|
| r := n.Rhs[0].(*ir.UnaryExpr) |
| walkExprListSafe(n.Lhs, init) |
| r.X = walkExpr(r.X, init) |
| var n1 ir.Node |
| if ir.IsBlank(n.Lhs[0]) { |
| n1 = typecheck.NodNil() |
| } else { |
| n1 = typecheck.NodAddr(n.Lhs[0]) |
| } |
| fn := chanfn("chanrecv2", 2, r.X.Type()) |
| ok := n.Lhs[1] |
| call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1) |
| return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call)) |
| } |
|
|
| |
| func walkReturn(n *ir.ReturnStmt) ir.Node { |
| fn := ir.CurFunc |
|
|
| fn.NumReturns++ |
| if len(n.Results) == 0 { |
| return n |
| } |
|
|
| results := fn.Type().Results() |
| dsts := make([]ir.Node, len(results)) |
| for i, v := range results { |
| |
| dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name)) |
| } |
|
|
| n.Results = ascompatee(n.Op(), dsts, n.Results) |
| return n |
| } |
|
|
| |
| |
| |
| |
| func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node { |
| if len(nl) != nr.NumFields() { |
| base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields()) |
| } |
|
|
| var nn ir.Nodes |
| for i, l := range nl { |
| if ir.IsBlank(l) { |
| continue |
| } |
| r := nr.Field(i) |
|
|
| |
| |
| if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) { |
| base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l) |
| } |
|
|
| res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH) |
| res.Index = int64(i) |
| res.SetType(r.Type) |
| res.SetTypecheck(1) |
|
|
| nn.Append(ir.NewAssignStmt(base.Pos, l, res)) |
| } |
| return nn |
| } |
|
|
| |
| |
| |
| |
// ascompatee lowers a parallel assignment expr-list = expr-list into a
// sequence of single assignments that preserves the parallel semantics:
// values that a later assignment needs are copied to temporaries ("early")
// before any assignment ("late") that could clobber them runs.
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// Cannot happen: should have been rejected during type checking.
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	// assigned tracks local, non-addrtaken variables written so far;
	// memWrite records that some write may have gone through memory;
	// deferResultWrite records a write to a result parameter in a
	// function with defers (visible early if a later expression panics).
	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could observe any of the
	// assignments emitted so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an earlier assignment,
	// make an early copy of that expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return (common for named results).
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on the left side, drilling through
		// wrappers (parens, array index, non-pointer field select) to find
		// the base lvalue.
		for {
			// Init statements of l must be evaluated before any of its
			// saved sub-operands.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					// The index is evaluated before the assignment; the
					// array itself is the lvalue being drilled into.
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		// name is non-nil iff the destination is a plain variable.
		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save the right-hand side if it could be clobbered.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Record how this assignment can affect later expressions.

		if name == nil {
			// Not a direct assignment to a declared variable;
			// conservatively treat it as a write through memory.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// A result-parameter write in a function with defers may be
			// observed early if a later expression panics.
			deferResultWrite = true
			continue
		}

		if ir.IsBlank(name) {
			// Writes to blank cannot be observed by later expressions.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Address-taken or heap/global variable: writes may alias
			// arbitrary memory reads.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable: later assignments can only
		// conflict via direct uses of this exact variable.
		assigned.Add(name)
	}

	// Temporaries first, then the actual assignments.
	early.Append(late.Take()...)
	return early
}
|
|
| |
| |
| func readsMemory(n ir.Node) bool { |
| switch n.Op() { |
| case ir.ONAME: |
| n := n.(*ir.Name) |
| if n.Class == ir.PFUNC { |
| return false |
| } |
| return n.Addrtaken() || !n.OnStack() |
|
|
| case ir.OADD, |
| ir.OAND, |
| ir.OANDAND, |
| ir.OANDNOT, |
| ir.OBITNOT, |
| ir.OCONV, |
| ir.OCONVIFACE, |
| ir.OCONVNOP, |
| ir.ODIV, |
| ir.ODOT, |
| ir.ODOTTYPE, |
| ir.OLITERAL, |
| ir.OLSH, |
| ir.OMOD, |
| ir.OMUL, |
| ir.ONEG, |
| ir.ONIL, |
| ir.OOR, |
| ir.OOROR, |
| ir.OPAREN, |
| ir.OPLUS, |
| ir.ORSH, |
| ir.OSUB, |
| ir.OXOR: |
| return false |
| } |
|
|
| |
| return true |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
// appendSlice lowers append(l1, l2...) into explicit IR:
//
//	s := l1
//	newLen := len(s) + len(l2)
//	if uint(newLen) <= uint(cap(s)) {   // likely
//		s = s[:newLen]
//	} else {
//		s = growslice(...)
//	}
//	copy s[newLen-len(l2):] from l2     // typedslicecopy / slicecopy / memmove
//
// and returns the temporary s holding the result.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	// s = l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1))

	elemtype := s.Type().Elem()

	// Components of the original slice, captured before any growth.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements we are adding.
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap)
	// (unsigned comparison so a negative/overflowed newLen takes the grow path)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// else { s = growslice(oldPtr, newLen, oldCap, num, T) }
	call := walkGrowslice(s, nif.PtrInit(), oldPtr, newLen, oldCap, num)
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index of the first destination element: newLen - len(l2).
	// Recompute len(l2) rather than reusing num so the expression stays
	// independent of the nodes above.
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// Pointer elements need write barriers: use typedslicecopy.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy", l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// Under instrumentation (race/msan/asan), call slicecopy so the
		// accesses are visible to the instrumentation runtime.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy", ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
	} else {
		// Plain data: memmove(&s[idx], l2.ptr, len(l2)*sizeof(T)).
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove", elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}
|
|
| |
| |
// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...),
// the pattern that extendSlice can lower specially. It requires n to have
// been typechecked, and is disabled with -N or under instrumentation.
func isAppendOfMake(n ir.Node) bool {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}

	// Must be append(x, y...) with exactly one spread argument that is a
	// make-slice expression.
	if n.Op() != ir.OAPPEND {
		return false
	}
	call := n.(*ir.CallExpr)
	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
		return false
	}

	// Only make([]T, y) without an explicit capacity qualifies.
	mk := call.Args[1].(*ir.MakeExpr)
	if mk.Cap != nil {
		return false
	}

	// y must either be an integer constant, or have a type no wider than
	// uint, so that every possible positive value of y fits into a uint.
	// (extendSlice compares via a uint conversion; typecheck already
	// rejected negative or int-overflowing constants, and a runtime
	// y < 0 check is generated by extendSlice.)
	y := mk.Len
	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
		return false
	}

	return true
}
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
// extendSlice lowers append(l1, make([]T, l2)...) into explicit IR:
//
//	if l2 < 0 { panicmakeslicelen() }
//	s := l1
//	if l2 != 0 {
//		n := len(s) + l2
//		if uint(n) <= uint(cap(s)) {   // likely
//			s = s[:n]
//		} else {
//			s = growslice(...)
//		}
//		clear the l2 new elements      // (growslice already cleared them)
//	}
//
// and returns the temporary s holding the result.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into
	// a uint, so converting the make length to int here cannot lose a
	// positive value; negative values are caught by the l2 < 0 runtime
	// check generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.Args[1]

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as walkAppendArgs may have updated it

	var nodes []ir.Node

	// if l2 >= 0 (likely), do nothing; else panicmakeslicelen()
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifneg.Likely = true

	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	// if l2 != 0 { ... }
	// Skip all work when appending zero elements, avoiding a needless
	// growslice call.
	nifnz := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifnz.Likely = true
	nodes = append(nodes, nifnz)

	elemtype := s.Type().Elem()

	// n := len(s) + l2
	nn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nifnz.Body = append(nifnz.Body, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) <= uint(cap(s))
	// (unsigned comparison so an overflowed n takes the grow path)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then { s = s[:n] }
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// else { s = growslice(s.ptr, n, s.cap, l2, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2)),
	}

	nifnz.Body = append(nifnz.Body, nif)

	// hp := &s[len(s)-l2]
	// Address of the first of the newly appended elements.
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(T)
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// For pointer elements, growslice already zeroed the new entries,
		// so only the in-place (no-grow) branch needs the clear.
		nif.Body = append(nif.Body, clr...)
	} else {
		nifnz.Body = append(nifnz.Body, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}
|
|