Commit 074c9dd

cmd/compile: inline calls to libfuzzerIncrementCounter
1 parent fce40f8 · commit 074c9dd

File tree

6 files changed: +16, -27 lines

src/cmd/compile/internal/typecheck/builtin.go (+1 -4)

(Generated file; diff not rendered by default.)

src/cmd/compile/internal/typecheck/builtin/runtime.go (-1)

@@ -265,7 +265,6 @@ func libfuzzerTraceConstCmp1(uint8, uint8)
 func libfuzzerTraceConstCmp2(uint16, uint16)
 func libfuzzerTraceConstCmp4(uint32, uint32)
 func libfuzzerTraceConstCmp8(uint64, uint64)
-func libfuzzerIncrementCounter(*uint8)
 
 // This function should be called by the fuzz target on start to register the 8bit counters with libfuzzer
 func LibfuzzerInitializeCounters()

src/cmd/compile/internal/walk/order.go (+15 -5)

@@ -451,11 +451,21 @@ func (o *orderState) edge() {
 	// is still necessary.
 	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER
 
-	var init ir.Nodes
-	init.Append(mkcall("libfuzzerIncrementCounter", nil, &init, ir.NewAddrExpr(base.Pos, counter)))
-	for _, n := range init.Take() {
-		o.append(n)
-	}
+	// We guarantee that the counter never becomes zero again once it has been
+	// incremented once. This implementation follows the NeverZero optimization
+	// presented by the paper:
+	// "AFL++: Combining Incremental Steps of Fuzzing Research"
+	// The NeverZero policy avoids the overflow to 0 by always adding the carry flag
+	// to the counter and so, if an edge is executed at least one time, the entry is
+	// never 0. The Saturated Counters policy freezes the counter when it reaches
+	// the value of 255. In a range of experiments performed, it is observed that
+	// NeverZero is very effective and improves performance in terms of coverage and
+	// speed (the seed selection now takes into account edges that were hidden before).
+	// Saturated Counters, however, decreases the overall performance.
+	o.append(ir.NewIfStmt(base.Pos,
+		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(0xff)),
+		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(1))},
+		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(1))}))
 }
 
 // orderBlock orders the block of statements in n into a new slice,
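
For readers not fluent in the compiler's IR constructors, the ir.NewIfStmt built above is the tree form of the branch below. This is a minimal, self-contained sketch of the NeverZero update that is now emitted inline at every coverage edge; the helper name neverZeroIncrement and the local counter are illustrative only — the real counters are anonymous uint8 statics marked via objabi.SLIBFUZZER_8BIT_COUNTER, as shown a few lines above in the diff.

    package main

    import "fmt"

    // neverZeroIncrement mirrors the inlined statement:
    // if counter == 0xff { counter = 1 } else { counter++ }
    // Once a counter has been incremented, it can never wrap back to 0,
    // so an edge that was hit at least once stays visible to libFuzzer.
    func neverZeroIncrement(counter *uint8) {
        if *counter == 0xff {
            *counter = 1 // would overflow to 0; pin to 1 instead
        } else {
            *counter++
        }
    }

    func main() {
        var c uint8
        for i := 0; i < 512; i++ {
            neverZeroIncrement(&c)
        }
        // A plain wrapping counter would read 0 after 512 hits (512 % 256 == 0);
        // the NeverZero counter stays nonzero.
        fmt.Println(c) // prints 2
    }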

src/cmd/internal/goobj/builtinlist.go (-1)

(Generated file; diff not rendered by default.)

src/internal/fuzz/trace.go (-4)

@@ -18,8 +18,6 @@ import _ "unsafe" // for go:linkname
 //go:linkname libfuzzerTraceConstCmp4 runtime.libfuzzerTraceConstCmp4
 //go:linkname libfuzzerTraceConstCmp8 runtime.libfuzzerTraceConstCmp8
 
-//go:linkname libfuzzerIncrementCounter runtime.libfuzzerIncrementCounter
-
 func libfuzzerTraceCmp1(arg0, arg1 uint8) {}
 func libfuzzerTraceCmp2(arg0, arg1 uint16) {}
 func libfuzzerTraceCmp4(arg0, arg1 uint32) {}
@@ -29,5 +27,3 @@ func libfuzzerTraceConstCmp1(arg0, arg1 uint8) {}
 func libfuzzerTraceConstCmp2(arg0, arg1 uint16) {}
 func libfuzzerTraceConstCmp4(arg0, arg1 uint32) {}
 func libfuzzerTraceConstCmp8(arg0, arg1 uint64) {}
-
-func libfuzzerIncrementCounter(counter *uint8) {}

src/runtime/libfuzzer.go (-12)

@@ -59,18 +59,6 @@ func LibfuzzerInitializeCounters() {
 	libfuzzerCallTraceInit(&__sanitizer_cov_pcs_init, &pcTables[0], &pcTables[size-1])
 }
 
-// libfuzzerIncrementCounter guarantees that the counter never becomes zero
-// again once it has been incremented once. It implements the NeverZero
-// optimization presented by the paper:
-// "AFL++: Combining Incremental Steps of Fuzzing Research"
-func libfuzzerIncrementCounter(counter *uint8) {
-	if *counter == 0xff {
-		*counter = 1
-	} else {
-		*counter++
-	}
-}
-
 //go:linkname __sanitizer_cov_trace_cmp1 __sanitizer_cov_trace_cmp1
 //go:cgo_import_static __sanitizer_cov_trace_cmp1
 var __sanitizer_cov_trace_cmp1 byte
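
The comment removed above claims that a counter never becomes zero again once it has been incremented. Because the state space is only 256 values, that claim can be checked exhaustively; the sketch below is an illustrative re-statement of the removed function (whose logic the compiler now emits inline), not code from this commit.

    package main

    import "fmt"

    // neverZero re-states the logic of the removed libfuzzerIncrementCounter:
    // a counter at 0xff is pinned to 1 instead of overflowing to 0.
    func neverZero(c uint8) uint8 {
        if c == 0xff {
            return 1
        }
        return c + 1
    }

    func main() {
        // From every possible counter state, one more hit never yields 0,
        // so an edge observed once can never look unvisited again.
        for s := 0; s < 256; s++ {
            if next := neverZero(uint8(s)); next == 0 {
                panic(fmt.Sprintf("counter wrapped to 0 from state %d", s))
            }
        }
        fmt.Println("NeverZero invariant holds for all 256 counter states")
    }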
