author     Fabian Meumertzheim <meumertzheim@code-intelligence.com>   2021-02-22 08:12:56 +0100
committer  Fabian Meumertzheim <fabian@meumertzhe.im>                 2021-02-22 14:33:55 +0100
commit     4fb408bdcbfb32b207c0b92cc98bc3e95c9f7665 (patch)
tree       feb9831b957c20c6d39b9e6a45202bd0b9a01106 /agent
parent     615d76fdaf6f0f055b2d7970c7f4a3ea126885a6 (diff)
download   jazzer-api-4fb408bdcbfb32b207c0b92cc98bc3e95c9f7665.tar.gz
Use NeverZero instead of Saturated Counters for coverage
According to https://www.usenix.org/system/files/woot20-paper-fioraldi.pdf, letting coverage 8-bit counters wrap from 255 to 1 on increment performs better than having them stay at 255. In fact, the latter has been observed to hurt overall coverage.
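To make the comparison concrete, here is a minimal Kotlin sketch of the two counter-update policies the message contrasts. It models the 8-bit arithmetic only; it is not the bytecode the instrumentor emits, and both function names are illustrative rather than part of Jazzer.

// Sketch only: the NeverZero update versus the previous saturating update.
fun neverZeroIncrement(counter: UByte): UByte {
    val incremented = counter + 1u                 // 0..256 as UInt
    val overflow = incremented shr 8               // 1 only if the counter wrapped past 255
    return (incremented + overflow).toUByte()      // 255 wraps to 1, never to 0
}

fun saturatingIncrement(counter: UByte): UByte =   // the previous behaviour
    if (counter == UByte.MAX_VALUE) counter else (counter + 1u).toUByte()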
Diffstat (limited to 'agent')
-rw-r--r--  agent/src/main/java/com/code_intelligence/jazzer/instrumentor/EdgeCoverageInstrumentor.kt    10
-rw-r--r--  agent/src/test/java/com/code_intelligence/jazzer/instrumentor/CoverageInstrumentationTest.kt  9
2 files changed, 13 insertions, 6 deletions
diff --git a/agent/src/main/java/com/code_intelligence/jazzer/instrumentor/EdgeCoverageInstrumentor.kt b/agent/src/main/java/com/code_intelligence/jazzer/instrumentor/EdgeCoverageInstrumentor.kt
index 1aa9597c..f60bc667 100644
--- a/agent/src/main/java/com/code_intelligence/jazzer/instrumentor/EdgeCoverageInstrumentor.kt
+++ b/agent/src/main/java/com/code_intelligence/jazzer/instrumentor/EdgeCoverageInstrumentor.kt
@@ -110,6 +110,10 @@ private fun instrumentControlFlowEdge(mv: MethodVisitor, edgeId: Int, variable:
visitInsn(Opcodes.DUP2)
// Stack: mem | edgeId | mem | edgeId
visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/nio/ByteBuffer", "get", "(I)B", false)
+ // Increment the counter, but ensure that it never stays at 0 after an overflow by incrementing it again in that
+ // case.
+ // This approach performs better than saturating the counter at 255 (see Section 3.3 of
+ // https://www.usenix.org/system/files/woot20-paper-fioraldi.pdf)
// Stack: mem | edgeId | counter (sign-extended to int)
push(0xff)
// Stack: mem | edgeId | counter (sign-extended to int) | 0x000000ff
@@ -124,9 +128,9 @@ private fun instrumentControlFlowEdge(mv: MethodVisitor, edgeId: Int, variable:
push(8)
// Stack: mem | edgeId | counter + 1 | counter + 1 | 8 (maxStack: +5)
visitInsn(Opcodes.ISHR)
- // Stack: mem | edgeId | counter + 1 | 1 if the increment overflowed, 0 otherwise
- visitInsn(Opcodes.ISUB)
- // Stack: mem | edgeId | counter if the increment overflowed, counter + 1 otherwise
+ // Stack: mem | edgeId | counter + 1 | 1 if the increment overflowed to 0, 0 otherwise
+ visitInsn(Opcodes.IADD)
+ // Stack: mem | edgeId | counter + 2 if the increment overflowed, counter + 1 otherwise
visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/nio/ByteBuffer", "put", "(IB)Ljava/nio/ByteBuffer;", false)
// Stack: mem
visitInsn(Opcodes.POP)
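For readers who do not want to trace the stack comments opcode by opcode, the arithmetic the instrumented sequence performs per control-flow edge can be sketched in plain Kotlin as follows. The function name updateCounter is illustrative and does not exist in Jazzer; the real code emits these steps through ASM visitor calls on the coverage-map ByteBuffer.

import java.nio.ByteBuffer

// Sketch of the per-edge counter update performed by the emitted bytecode.
fun updateCounter(mem: ByteBuffer, edgeId: Int) {
    val counter = mem.get(edgeId).toInt() and 0xff     // load the byte counter, zero-extended
    val incremented = counter + 1                      // 1..256
    val overflow = incremented shr 8                   // 1 only if 255 wrapped to 256
    mem.put(edgeId, (incremented + overflow).toByte()) // 257 truncates to 1, so the byte is never 0
}

Subtracting the overflow bit (the old ISUB) saturated the counter at 255; adding it (the new IADD) makes 255 wrap to 1 instead, which is the NeverZero scheme from the referenced paper.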
diff --git a/agent/src/test/java/com/code_intelligence/jazzer/instrumentor/CoverageInstrumentationTest.kt b/agent/src/test/java/com/code_intelligence/jazzer/instrumentor/CoverageInstrumentationTest.kt
index 31f40575..e52231cd 100644
--- a/agent/src/test/java/com/code_intelligence/jazzer/instrumentor/CoverageInstrumentationTest.kt
+++ b/agent/src/test/java/com/code_intelligence/jazzer/instrumentor/CoverageInstrumentationTest.kt
@@ -109,11 +109,14 @@ class CoverageInstrumentationTest {
// Control flows through the first if branch once per run.
val takenOnEveryRunEdge = ifFirstBranch
- for (i in 1..300) {
+ var lastCounter = 0.toUByte()
+ for (i in 1..600) {
assertSelfCheck(target)
assertEquals(1, MockCoverageMap.mem[takenOnceEdge])
- // Verify that the counter does not overflow.
- val expectedCounter = i.coerceAtMost(255).toUByte()
+ // Verify that the counter increments, but is never zero.
+ val expectedCounter = (lastCounter + 1U).toUByte().takeUnless { it == 0.toUByte() }
+ ?: (lastCounter + 2U).toUByte()
+ lastCounter = expectedCounter
val actualCounter = MockCoverageMap.mem[takenOnEveryRunEdge].toUByte()
assertEquals(expectedCounter, actualCounter, "After $i runs:")
}
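The property the rewritten test loop asserts can also be checked in isolation. The following self-contained sketch, which is not part of CoverageInstrumentationTest, simulates 600 NeverZero increments and confirms the counter never reads zero.

// Standalone sketch mirroring the test's expectation.
fun main() {
    var counter = 0
    for (i in 1..600) {
        val incremented = counter + 1
        counter = (incremented + (incremented shr 8)) and 0xff // NeverZero update on the low byte
        check(counter != 0) { "counter must never read 0 (run $i)" }
    }
    check(counter == 90) // the counter cycles through 1..255, and 600 % 255 == 90
}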