Skip to content

Commit 775f8eb

Browse files
committed
libfuzzer: Address review
1 parent 949dfe2 commit 775f8eb

File tree

7 files changed

+84
-41
lines changed

7 files changed

+84
-41
lines changed

src/cmd/compile/internal/typecheck/builtin.go

Lines changed: 7 additions & 5 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/cmd/compile/internal/typecheck/builtin/runtime.go

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -265,8 +265,11 @@ func libfuzzerTraceConstCmp1(uint8, uint8, int)
265265
func libfuzzerTraceConstCmp2(uint16, uint16, int)
266266
func libfuzzerTraceConstCmp4(uint32, uint32, int)
267267
func libfuzzerTraceConstCmp8(uint64, uint64, int)
268-
func libfuzzerHookStrCmp(string, string, int, int)
268+
func libfuzzerHookStrCmp(string, string, bool, int)
269+
func libfuzzerHookEqualFold(string, string, int)
269270
func libfuzzerIncrementCounter(*uint8)
271+
272+
// This function should be called by the fuzz target on start to register the 8bit counters with libfuzzer
270273
func LibfuzzerInitializeCounters()
271274

272275
// architecture variants

src/cmd/compile/internal/walk/compare.go

Lines changed: 23 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,9 @@
55
package walk
66

77
import (
8+
"crypto/sha1"
89
"go/constant"
9-
"math/rand"
10+
"strconv"
1011

1112
"cmd/compile/internal/base"
1213
"cmd/compile/internal/ir"
@@ -16,8 +17,15 @@ import (
1617
"cmd/compile/internal/types"
1718
)
1819

19-
func fakePC() ir.Node {
20-
return ir.NewInt(int64(rand.Uint32()))
20+
func fakePC(n ir.Node) ir.Node {
21+
// In order to get deterministic IDs, we include the package path, file index, line number, column number
22+
// in the calculation of the fakePC for the IR node
23+
data := base.Ctxt.Pkgpath
24+
data += strconv.FormatInt(int64(n.Pos().FileIndex()), 10)
25+
data += n.Pos().LineNumber()
26+
data += strconv.FormatUint(uint64(n.Pos().Col()), 10)
27+
hash := sha1.Sum([]byte(data))
28+
return ir.NewInt(int64(uint32(hash[0]) | uint32(hash[1])<<8 | uint32(hash[2])<<16 | uint32(hash[3])<<24))
2129
}
2230

2331
// The result of walkCompare MUST be assigned back to n, e.g.
@@ -136,7 +144,8 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
136144
default:
137145
base.Fatalf("unexpected integer size %d for %v", t.Size(), t)
138146
}
139-
init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init), fakePC()))
147+
148+
init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init), fakePC(n)))
140149
}
141150
return n
142151
case types.TARRAY:
@@ -286,14 +295,6 @@ func walkCompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
286295
}
287296

288297
func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
289-
if base.Debug.Libfuzzer != 0 {
290-
fn := "libfuzzerHookStrCmp"
291-
l := cheapExpr(n.X, init)
292-
r := cheapExpr(n.Y, init)
293-
paramType := types.Types[types.TSTRING]
294-
init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init), ir.NewInt(1), fakePC()))
295-
}
296-
297298
// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
298299
var cs, ncs ir.Node // const string, non-const string
299300
switch {
@@ -412,8 +413,16 @@ func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
412413
r = mkcall("cmpstring", types.Types[types.TINT], init, typecheck.Conv(n.X, types.Types[types.TSTRING]), typecheck.Conv(n.Y, types.Types[types.TSTRING]))
413414
r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(0))
414415
}
415-
416-
return finishCompare(n, r, init)
416+
result := finishCompare(n, r, init)
417+
if base.Debug.Libfuzzer != 0 {
418+
fn := "libfuzzerHookStrCmp"
419+
x := cheapExpr(n.X, init)
420+
y := cheapExpr(n.Y, init)
421+
z := cheapExpr(result, init)
422+
paramType := types.Types[types.TSTRING]
423+
init.Append(mkcall(fn, nil, init, tracecmpArg(x, paramType, init), tracecmpArg(y, paramType, init), tracecmpArg(z, n.Type(), init), fakePC(n)))
424+
}
425+
return result
417426
}
418427

419428
// The result of finishCompare MUST be assigned back to n, e.g.

src/cmd/compile/internal/walk/expr.go

Lines changed: 18 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -495,6 +495,16 @@ func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
495495
return r1
496496
}
497497

498+
type hookInfo struct {
499+
paramType types.Kind
500+
argsNum int
501+
runtimeFunc string
502+
}
503+
504+
var hooks = map[string]hookInfo{
505+
"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
506+
}
507+
498508
// walkCall walks an OCALLFUNC or OCALLINTER node.
499509
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
500510
if n.Op() == ir.OCALLMETH {
@@ -590,13 +600,15 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
590600
}
591601

592602
n.Args = args
593-
if base.Debug.Libfuzzer != 0 && n.X.Sym() != nil {
594-
switch n.X.Sym().Pkg.Path {
595-
case "strings":
596-
if n.X.Sym().Name == "EqualFold" && len(args) == 2 {
597-
paramType := types.Types[types.TSTRING]
598-
init.Append(mkcall("libfuzzerHookStrCmp", nil, init, tracecmpArg(args[0], paramType, init), tracecmpArg(args[1], paramType, init), ir.NewInt(1), fakePC()))
603+
funSym := n.X.Sym()
604+
if base.Debug.Libfuzzer != 0 && funSym != nil {
605+
if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found && len(args) == hook.argsNum {
606+
var hookArgs []ir.Node
607+
for _, arg := range args {
608+
hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
599609
}
610+
hookArgs = append(hookArgs, fakePC(n))
611+
init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
600612
}
601613
}
602614
}

src/internal/fuzz/trace.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ func libfuzzerTraceConstCmp2(arg0, arg1 uint16, fakePC int) {}
3232
func libfuzzerTraceConstCmp4(arg0, arg1 uint32, fakePC int) {}
3333
func libfuzzerTraceConstCmp8(arg0, arg1 uint64, fakePC int) {}
3434

35-
func libfuzzerHookStrCmp(arg0, arg1 string, result, fakePC int) {}
35+
func libfuzzerHookStrCmp(arg0, arg1 string, result bool, fakePC int) {}
36+
func libfuzzerHookEqualFold(arg0, arg1 string, fakePC int) {}
3637

3738
func libfuzzerIncrementCounter(counter *uint8) {}

src/runtime/libfuzzer.go

Lines changed: 17 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ package runtime
88

99
import "unsafe" // for go:linkname
1010

11+
// Keep in sync with the definition of ret_sled in src/runtime/libfuzzer_amd64.s
1112
const retSledSize = 512
1213

1314
func libfuzzerCallTraceIntCmp(fn *byte, arg0, arg1, fakePC uintptr)
@@ -54,31 +55,35 @@ func libfuzzerTraceConstCmp8(arg0, arg1 uint64, fakePC int) {
5455
libfuzzerCallTraceIntCmp(&__sanitizer_cov_trace_const_cmp8, uintptr(arg0), uintptr(arg1), uintptr(fakePC))
5556
}
5657

57-
func libfuzzerHookStrCmp(s1, s2 string, result, fakePC int) {
58-
libfuzzerCallHookStrCmp(&__sanitizer_weak_hook_strcmp, uintptr(fakePC), cstring(s1), cstring(s2), uintptr(result))
58+
func libfuzzerHookStrCmp(s1, s2 string, equal bool, fakePC int) {
59+
if !equal {
60+
libfuzzerCallHookStrCmp(&__sanitizer_weak_hook_strcmp, uintptr(fakePC), cstring(s1), cstring(s2), uintptr(1))
61+
}
62+
}
63+
64+
func libfuzzerHookEqualFold(s1, s2 string, fakePC int) {
65+
libfuzzerCallHookStrCmp(&__sanitizer_weak_hook_strcmp, uintptr(fakePC), cstring(s1), cstring(s2), uintptr(1))
5966
}
6067

6168
var pcTables []byte
6269

6370
func LibfuzzerInitializeCounters() {
6471
libfuzzerCallTraceInit(&__sanitizer_cov_8bit_counters_init, &__start___sancov_cntrs, &__stop___sancov_cntrs)
65-
var offset uintptr = 0
6672
start := unsafe.Pointer(&__start___sancov_cntrs)
6773
end := unsafe.Pointer(&__stop___sancov_cntrs)
6874

69-
cur := start
70-
for cur != end {
71-
offset++
72-
cur = unsafe.Pointer(uintptr(start) + offset)
73-
}
74-
75-
size := (offset + 1) * unsafe.Sizeof(uintptr(0)) * 2
75+
// PC tables are arrays of ptr-sized integers representing pairs [PC,PCFlags] for every instrumented block.
76+
// The number of PCs and PCFlags is the same as the number of 8-bit counters. Each PC table entry has
77+
// the size of two ptr-sized integers. We allocate one more byte than we actually need so that we can
78+
// get a pointer representing the end of the PC table array.
79+
size := (uintptr(end)-uintptr(start))*unsafe.Sizeof(uintptr(0))*2 + 1
7680
pcTables = make([]byte, size)
7781
libfuzzerCallTraceInit(&__sanitizer_cov_pcs_init, &pcTables[0], &pcTables[size-1])
7882
}
7983

80-
// libfuzzerIncrementCounter guarantees that the counter never becomes zero again once
81-
// it is incremented. It implements the NeverZero optimization presented by the paper:
84+
// libfuzzerIncrementCounter guarantees that the counter never becomes zero
85+
// again once it has been incremented once. It implements the NeverZero
86+
// optimization presented by the paper:
8287
// "AFL++: Combining Incremental Steps of Fuzzing Research"
8388
func libfuzzerIncrementCounter(counter *uint8) {
8489
if *counter == 0xff {

src/runtime/libfuzzer_amd64.s

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ TEXT runtime·libfuzzerCallHookStrCmp(SB), NOSPLIT, $0-40
2828
MOVQ fn+0(FP), AX
2929
MOVQ hookId+8(FP), RARG0
3030
MOVQ s1+16(FP), RARG1
31-
MOVQ s2+24(FP), RARG2
31+
MOVQ s2+24(FP), RARG2
3232
MOVQ result+32(FP), RARG3
3333

3434
get_tls(R12)
@@ -116,13 +116,24 @@ TEXT runtime·libfuzzerCallTraceIntCmp(SB), NOSPLIT, $0-32
116116
MOVQ (g_sched+gobuf_sp)(R10), SP
117117
call:
118118
ANDQ $~15, SP // alignment for gcc ABI
119+
// Load the address of the end of the function and push it onto the stack.
120+
// This address will be jumped to after executing the return instruction
121+
// from the return sled. There we reset the stack pointer and return.
119122
MOVQ $end_of_function(SB), BX
120123
PUSHQ BX
124+
// Load the starting address of the return sled into BX.
121125
MOVQ $ret_sled(SB), BX
126+
// Load the address of the i'th return instruction from the return sled.
127+
// The index is given in the fakePC argument.
122128
ADDQ RARG2, BX
123129
PUSHQ BX
130+
// Call the original function with the fakePC return address on the stack.
131+
// Function arguments arg0 and arg1 are passed unchanged in the registers
132+
// RDI and RSI as specified by the x64 calling convention.
124133
JMP AX
125-
non_reachable:
134+
// This code will not be executed and is only there to satisfy the assembler's
135+
// check of a balanced stack.
136+
not_reachable:
126137
POPQ BX
127138
POPQ BX
128139
RET

0 commit comments

Comments
 (0)