| |
| |
| |
|
|
| package walk |
|
|
| import ( |
| "encoding/binary" |
| "fmt" |
| "go/constant" |
| "hash/fnv" |
| "io" |
|
|
| "cmd/compile/internal/base" |
| "cmd/compile/internal/compare" |
| "cmd/compile/internal/ir" |
| "cmd/compile/internal/reflectdata" |
| "cmd/compile/internal/ssagen" |
| "cmd/compile/internal/typecheck" |
| "cmd/compile/internal/types" |
| ) |
|
|
| func fakePC(n ir.Node) ir.Node { |
| |
| |
| hash := fnv.New32() |
| |
| io.WriteString(hash, base.Ctxt.Pkgpath) |
| io.WriteString(hash, base.Ctxt.PosTable.Pos(n.Pos()).AbsFilename()) |
| binary.Write(hash, binary.LittleEndian, int64(n.Pos().Line())) |
| binary.Write(hash, binary.LittleEndian, int64(n.Pos().Col())) |
| |
| |
| io.WriteString(hash, fmt.Sprintf("%v", n)) |
|
|
| return ir.NewInt(base.Pos, int64(hash.Sum32())) |
| } |
|
|
| |
| |
| |
// walkCompare walks a comparison expression n (==, !=, <, <=, >, >=),
// rewriting it into operations the back end can handle. Interface-vs-
// interface and string-vs-string comparisons are delegated to
// walkCompareInterface and walkCompareString respectively.
func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	if n.X.Type().IsInterface() && n.Y.Type().IsInterface() && n.X.Op() != ir.ONIL && n.Y.Op() != ir.ONIL {
		return walkCompareInterface(n, init)
	}

	if n.X.Type().IsString() && n.Y.Type().IsString() {
		return walkCompareString(n, init)
	}

	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// Given mixed interface/concrete comparison,
	// rewrite into types-equal && data-equal.
	// This avoids a runtime call and lets the type check
	// short-circuit the data comparison.
	if n.X.Type().IsInterface() != n.Y.Type().IsInterface() {
		// Spill operands first so that short-circuit evaluation below
		// cannot duplicate their side effects.
		l := cheapExpr(n.X, init)
		r := cheapExpr(n.Y, init)
		// Arrange for l to be the interface-typed operand.
		if n.Y.Type().IsInterface() {
			l, r = r, l
		}

		// andor joins the type check and the data check:
		// && for ==, || for !=.
		eq := n.Op()
		andor := ir.OOROR
		if eq == ir.OEQ {
			andor = ir.OANDAND
		}

		// eqtype tests that l's dynamic type matches r's static type:
		//   empty interface:     l.tab == type(r)
		//   non-empty interface: l.tab != nil && l.tab's type == type(r)
		// (with the comparisons complemented for !=).
		var eqtype ir.Node
		tab := ir.NewUnaryExpr(base.Pos, ir.OITAB, l)
		rtyp := reflectdata.CompareRType(base.Pos, n)
		if l.Type().IsEmptyInterface() {
			tab.SetType(types.NewPtr(types.Types[types.TUINT8]))
			tab.SetTypecheck(1)
			eqtype = ir.NewBinaryExpr(base.Pos, eq, tab, rtyp)
		} else {
			nonnil := ir.NewBinaryExpr(base.Pos, brcom(eq), typecheck.NodNil(), tab)
			match := ir.NewBinaryExpr(base.Pos, eq, itabType(tab), rtyp)
			eqtype = ir.NewLogicalExpr(base.Pos, andor, nonnil, match)
		}
		// Check the data word (viewed as r's type) against r.
		eqdata := ir.NewBinaryExpr(base.Pos, eq, ifaceData(n.Pos(), l, r.Type()), r)
		// Put it all together.
		expr := ir.NewLogicalExpr(base.Pos, andor, eqtype, eqdata)
		return finishCompare(n, expr, init)
	}

	// Must be comparison of array or struct.
	// Otherwise back end handles it.
	// While we're here, decide whether to
	// inline or call an eq alg.
	t := n.X.Type()
	var inline bool

	maxcmpsize := int64(4)
	unalignedLoad := ssagen.Arch.LinkArch.CanMergeLoads
	if unalignedLoad {
		// The back end can merge adjacent byte loads into wider loads,
		// so larger inline comparisons stay profitable. Keep this low
		// enough that the inline code is smaller than a call.
		maxcmpsize = 2 * int64(ssagen.Arch.LinkArch.RegSize)
	}

	switch t.Kind() {
	default:
		// Neither array nor struct: the back end handles the compare.
		// Under -d=libfuzzer, first insert comparison-tracing hooks for
		// integer operands (skipping libfuzzer's own 8-bit counters).
		if base.Debug.Libfuzzer != 0 && t.IsInteger() && (n.X.Name() == nil || !n.X.Name().Libfuzzer8BitCounter()) {
			n.X = cheapExpr(n.X, init)
			n.Y = cheapExpr(n.Y, init)

			// If exactly one comparison operand is
			// constant, invoke the constcmp functions
			// instead, and arrange for that operand to
			// be the first argument.
			l, r := n.X, n.Y
			if r.Op() == ir.OLITERAL {
				l, r = r, l
			}
			constcmp := l.Op() == ir.OLITERAL && r.Op() != ir.OLITERAL

			// Pick the runtime hook matching the operand size.
			var fn string
			var paramType *types.Type
			switch t.Size() {
			case 1:
				fn = "libfuzzerTraceCmp1"
				if constcmp {
					fn = "libfuzzerTraceConstCmp1"
				}
				paramType = types.Types[types.TUINT8]
			case 2:
				fn = "libfuzzerTraceCmp2"
				if constcmp {
					fn = "libfuzzerTraceConstCmp2"
				}
				paramType = types.Types[types.TUINT16]
			case 4:
				fn = "libfuzzerTraceCmp4"
				if constcmp {
					fn = "libfuzzerTraceConstCmp4"
				}
				paramType = types.Types[types.TUINT32]
			case 8:
				fn = "libfuzzerTraceCmp8"
				if constcmp {
					fn = "libfuzzerTraceConstCmp8"
				}
				paramType = types.Types[types.TUINT64]
			default:
				base.Fatalf("unexpected integer size %d for %v", t.Size(), t)
			}
			init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init), fakePC(n)))
		}
		return n
	case types.TARRAY:
		// We can compare several elements at once with 2/4/8 byte integer compares.
		inline = t.NumElem() <= 1 || (types.IsSimple[t.Elem().Kind()] && (t.NumElem() <= 4 || t.Elem().Size()*t.NumElem() <= maxcmpsize))
	case types.TSTRUCT:
		inline = compare.EqStructCost(t) <= 4
	}

	// Strip no-op conversions so the addressability check and the
	// element indexing below see the underlying values.
	cmpl := n.X
	for cmpl != nil && cmpl.Op() == ir.OCONVNOP {
		cmpl = cmpl.(*ir.ConvExpr).X
	}
	cmpr := n.Y
	for cmpr != nil && cmpr.Op() == ir.OCONVNOP {
		cmpr = cmpr.(*ir.ConvExpr).X
	}

	// Chose not to inline: call an eq alg instead.
	if !inline {
		// Eq algs take pointers, so the operands must be addressable.
		if !ir.IsAddressable(cmpl) || !ir.IsAddressable(cmpr) {
			base.Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
		}

		// Build eq(&cmpl, &cmpr[, size]).
		fn, needsLength := reflectdata.EqFor(t)
		call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
		addrCmpl := typecheck.NodAddr(cmpl)
		addrCmpR := typecheck.NodAddr(cmpr)
		if !types.IsNoRacePkg(types.LocalPkg) && base.Flag.Race {
			// Under the race detector, record reads of both operands'
			// full memory ranges before the eq call runs.
			ptrL := typecheck.Conv(typecheck.Conv(addrCmpl, types.Types[types.TUNSAFEPTR]), types.Types[types.TUINTPTR])
			ptrR := typecheck.Conv(typecheck.Conv(addrCmpR, types.Types[types.TUNSAFEPTR]), types.Types[types.TUINTPTR])
			raceFn := typecheck.LookupRuntime("racereadrange")
			size := ir.NewInt(base.Pos, t.Size())
			call.PtrInit().Append(mkcall1(raceFn, nil, init, ptrL, size))
			call.PtrInit().Append(mkcall1(raceFn, nil, init, ptrR, size))
		}
		call.Args.Append(addrCmpl)
		call.Args.Append(addrCmpR)
		if needsLength {
			call.Args.Append(ir.NewInt(base.Pos, t.Size()))
		}
		res := ir.Node(call)
		if n.Op() != ir.OEQ {
			res = ir.NewUnaryExpr(base.Pos, ir.ONOT, res)
		}
		return finishCompare(n, res, init)
	}

	// Inline the comparison element by element / field by field,
	// joining with && for == and || for !=.
	andor := ir.OANDAND
	if n.Op() == ir.ONE {
		andor = ir.OOROR
	}
	var expr ir.Node
	// comp appends the comparison "el n.Op() er" onto expr.
	comp := func(el, er ir.Node) {
		a := ir.NewBinaryExpr(base.Pos, n.Op(), el, er)
		if expr == nil {
			expr = a
		} else {
			expr = ir.NewLogicalExpr(base.Pos, andor, expr, a)
		}
	}
	// and appends an already-built condition onto expr.
	and := func(cond ir.Node) {
		if expr == nil {
			expr = cond
		} else {
			expr = ir.NewLogicalExpr(base.Pos, andor, expr, cond)
		}
	}
	// Both operands are indexed/selected repeatedly below;
	// make them side-effect free first.
	cmpl = safeExpr(cmpl, init)
	cmpr = safeExpr(cmpr, init)
	if t.IsStruct() {
		// Field-wise equality conditions; each one is negated for !=
		// so they combine correctly with ||.
		conds, _ := compare.EqStruct(t, cmpl, cmpr)
		if n.Op() == ir.OEQ {
			for _, cond := range conds {
				and(cond)
			}
		} else {
			for _, cond := range conds {
				notCond := ir.NewUnaryExpr(base.Pos, ir.ONOT, cond)
				and(notCond)
			}
		}
	} else {
		// Array: compare element by element, merging several small
		// integer elements into one wider compare where the target
		// supports merged loads.
		step := int64(1)
		remains := t.NumElem() * t.Elem().Size()
		combine64bit := unalignedLoad && types.RegSize == 8 && t.Elem().Size() <= 4 && t.Elem().IsInteger()
		combine32bit := unalignedLoad && t.Elem().Size() <= 2 && t.Elem().IsInteger()
		combine16bit := unalignedLoad && t.Elem().Size() == 1 && t.Elem().IsInteger()
		for i := int64(0); remains > 0; {
			// Pick the widest combine that still fits in the remaining bytes.
			var convType *types.Type
			switch {
			case remains >= 8 && combine64bit:
				convType = types.Types[types.TINT64]
				step = 8 / t.Elem().Size()
			case remains >= 4 && combine32bit:
				convType = types.Types[types.TUINT32]
				step = 4 / t.Elem().Size()
			case remains >= 2 && combine16bit:
				convType = types.Types[types.TUINT16]
				step = 2 / t.Elem().Size()
			default:
				step = 1
			}
			if step == 1 {
				// Single-element compare.
				comp(
					ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)),
					ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)),
				)
				i++
				remains -= t.Elem().Size()
			} else {
				// Assemble `step` consecutive elements into one convType
				// value: conv(x[i]) | conv(x[i+1])<<s | conv(x[i+2])<<2s ...
				// The elements go through the unsigned element type first
				// to avoid sign extension in the widening conversion.
				elemType := t.Elem().ToUnsigned()
				cmplw := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)))
				cmplw = typecheck.Conv(cmplw, elemType) // convert to unsigned
				cmplw = typecheck.Conv(cmplw, convType) // widen
				cmprw := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)))
				cmprw = typecheck.Conv(cmprw, elemType)
				cmprw = typecheck.Conv(cmprw, convType)
				// OR in the remaining elements at their byte offsets; the
				// back end can merge these loads (CanMergeLoads above).
				for offset := int64(1); offset < step; offset++ {
					lb := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i+offset)))
					lb = typecheck.Conv(lb, elemType)
					lb = typecheck.Conv(lb, convType)
					lb = ir.NewBinaryExpr(base.Pos, ir.OLSH, lb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
					cmplw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmplw, lb)
					rb := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i+offset)))
					rb = typecheck.Conv(rb, elemType)
					rb = typecheck.Conv(rb, convType)
					rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
					cmprw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmprw, rb)
				}
				comp(cmplw, cmprw)
				i += step
				remains -= step * t.Elem().Size()
			}
		}
	}
	if expr == nil {
		// No comparisons were emitted (zero-size type): the result is a
		// constant, but still evaluate both operands for side effects.
		expr = ir.NewBool(base.Pos, n.Op() == ir.OEQ)
		a1 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpl))
		a2 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpr))
		init.Append(a1, a2)
	}
	return finishCompare(n, expr, init)
}
|
|
| func walkCompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node { |
| swap := n.X.Op() != ir.OCONVIFACE && n.Y.Op() == ir.OCONVIFACE |
| n.Y = cheapExpr(n.Y, init) |
| n.X = cheapExpr(n.X, init) |
| if swap { |
| |
| |
| |
| |
| n.X, n.Y = n.Y, n.X |
| } |
|
|
| eqtab, eqdata := compare.EqInterface(n.X, n.Y) |
| var cmp ir.Node |
| if n.Op() == ir.OEQ { |
| cmp = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqtab, eqdata) |
| } else { |
| eqtab.SetOp(ir.ONE) |
| cmp = ir.NewLogicalExpr(base.Pos, ir.OOROR, eqtab, ir.NewUnaryExpr(base.Pos, ir.ONOT, eqdata)) |
| } |
| return finishCompare(n, cmp, init) |
| } |
|
|
// walkCompareString walks a comparison where both operands are strings.
// Comparisons against short constant strings are unrolled into a length
// check plus byte/word compares; other == / != use the runtime string
// equality helpers, and ordered comparisons call cmpstring.
func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	if base.Debug.Libfuzzer != 0 {
		// Insert a libfuzzer string-comparison hook, unless both
		// operands are constants.
		if !ir.IsConst(n.X, constant.String) || !ir.IsConst(n.Y, constant.String) {
			fn := "libfuzzerHookStrCmp"
			n.X = cheapExpr(n.X, init)
			n.Y = cheapExpr(n.Y, init)
			paramType := types.Types[types.TSTRING]
			init.Append(mkcall(fn, nil, init, tracecmpArg(n.X, paramType, init), tracecmpArg(n.Y, paramType, init), fakePC(n)))
		}
	}
	// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
	var cs, ncs ir.Node // const string, non-const string
	switch {
	case ir.IsConst(n.X, constant.String) && ir.IsConst(n.Y, constant.String):
		// Both constant: leave cs nil; no unrolling needed.
	case ir.IsConst(n.X, constant.String):
		cs = n.X
		ncs = n.Y
	case ir.IsConst(n.Y, constant.String):
		cs = n.Y
		ncs = n.X
	}
	if cs != nil {
		cmp := n.Op()
		// The unrolled comparison below indexes ncs, i.e. it assumes the
		// non-constant string is on the left; if the constant is on the
		// left, reverse the operator instead of swapping operands.
		if ir.IsConst(n.X, constant.String) {
			cmp = brrev(cmp)
		}

		// Only unroll constants up to maxRewriteLen bytes.
		maxRewriteLen := 6
		// Some architectures can load an unaligned byte sequence as one
		// word, so longer strings can be covered with the same amount of
		// code; keep this low enough to beat a function call.
		canCombineLoads := ssagen.Arch.LinkArch.CanMergeLoads
		combine64bit := false
		if canCombineLoads {
			maxRewriteLen = 2 * ssagen.Arch.LinkArch.RegSize
			combine64bit = ssagen.Arch.LinkArch.RegSize >= 8
		}

		// and joins the per-chunk comparisons: && for ==, || for !=.
		var and ir.Op
		switch cmp {
		case ir.OEQ:
			and = ir.OANDAND
		case ir.ONE:
			and = ir.OOROR
		default:
			// Ordered comparisons (<, <=, >, >=) are not unrolled
			// byte-wise; disable the rewrite entirely.
			maxRewriteLen = 0
		}
		if s := ir.StringVal(cs); len(s) <= maxRewriteLen {
			if len(s) > 0 {
				// ncs is indexed repeatedly below; make it
				// side-effect free first.
				ncs = safeExpr(ncs, init)
			}
			// Start from the length check: len(ncs) cmp len(s).
			r := ir.Node(ir.NewBinaryExpr(base.Pos, cmp, ir.NewUnaryExpr(base.Pos, ir.OLEN, ncs), ir.NewInt(base.Pos, int64(len(s)))))
			remains := len(s)
			for i := 0; remains > 0; {
				// One byte at a time when loads cannot be merged or
				// only a single byte remains.
				if remains == 1 || !canCombineLoads {
					cb := ir.NewInt(base.Pos, int64(s[i]))
					ncb := ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i)))
					r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, ncb, cb))
					remains--
					i++
					continue
				}
				// Otherwise compare 2, 4, or 8 bytes at once.
				var step int
				var convType *types.Type
				switch {
				case remains >= 8 && combine64bit:
					convType = types.Types[types.TINT64]
					step = 8
				case remains >= 4:
					convType = types.Types[types.TUINT32]
					step = 4
				case remains >= 2:
					convType = types.Types[types.TUINT16]
					step = 2
				}
				ncsubstr := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i))), convType)
				csubstr := int64(s[i])
				// Assemble both sides as shifted ORs of consecutive bytes:
				//   conv(x[i]) | conv(x[i+1])<<8 | conv(x[i+2])<<16 ...
				// so the back end can merge the loads (CanMergeLoads).
				for offset := 1; offset < step; offset++ {
					b := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i+offset))), convType)
					b = ir.NewBinaryExpr(base.Pos, ir.OLSH, b, ir.NewInt(base.Pos, int64(8*offset)))
					ncsubstr = ir.NewBinaryExpr(base.Pos, ir.OOR, ncsubstr, b)
					csubstr |= int64(s[i+offset]) << uint8(8*offset)
				}
				csubstrPart := ir.NewInt(base.Pos, csubstr)
				// Compare "step" bytes at once.
				r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, csubstrPart, ncsubstr))
				remains -= step
				i += step
			}
			return finishCompare(n, r, init)
		}
	}

	var r ir.Node
	if n.Op() == ir.OEQ || n.Op() == ir.ONE {
		// Spill operands before splitting the comparison in two.
		n.X = cheapExpr(n.X, init)
		n.Y = cheapExpr(n.Y, init)
		eqlen, eqmem := compare.EqString(n.X, n.Y)
		if n.Op() == ir.OEQ {
			// len(x) == len(y) && memory equal
			r = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqlen, eqmem)
		} else {
			// len(x) != len(y) || !(memory equal)
			eqlen.SetOp(ir.ONE)
			r = ir.NewLogicalExpr(base.Pos, ir.OOROR, eqlen, ir.NewUnaryExpr(base.Pos, ir.ONOT, eqmem))
		}
	} else {
		// Ordered comparison: cmpstring(x, y) n.Op() 0.
		r = mkcall("cmpstring", types.Types[types.TINT], init, typecheck.Conv(n.X, types.Types[types.TSTRING]), typecheck.Conv(n.Y, types.Types[types.TSTRING]))
		r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(base.Pos, 0))
	}

	return finishCompare(n, r, init)
}
|
|
| |
| |
| |
| func finishCompare(n *ir.BinaryExpr, r ir.Node, init *ir.Nodes) ir.Node { |
| r = typecheck.Expr(r) |
| r = typecheck.Conv(r, n.Type()) |
| r = walkExpr(r, init) |
| return r |
| } |
|
|
| |
| |
| func brcom(op ir.Op) ir.Op { |
| switch op { |
| case ir.OEQ: |
| return ir.ONE |
| case ir.ONE: |
| return ir.OEQ |
| case ir.OLT: |
| return ir.OGE |
| case ir.OGT: |
| return ir.OLE |
| case ir.OLE: |
| return ir.OGT |
| case ir.OGE: |
| return ir.OLT |
| } |
| base.Fatalf("brcom: no com for %v\n", op) |
| return op |
| } |
|
|
| |
| |
| func brrev(op ir.Op) ir.Op { |
| switch op { |
| case ir.OEQ: |
| return ir.OEQ |
| case ir.ONE: |
| return ir.ONE |
| case ir.OLT: |
| return ir.OGT |
| case ir.OGT: |
| return ir.OLT |
| case ir.OLE: |
| return ir.OGE |
| case ir.OGE: |
| return ir.OLE |
| } |
| base.Fatalf("brrev: no rev for %v\n", op) |
| return op |
| } |
|
|
| func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node { |
| |
| if n.Op() == ir.OLITERAL && n.Type().IsSigned() && ir.Int64Val(n) < 0 { |
| n = copyExpr(n, n.Type(), init) |
| } |
|
|
| return typecheck.Conv(n, t) |
| } |
|
|