author	Chris Peterson <cpeterson@mozilla.com>	2016-03-27 23:28:39 -0700
committer	Jason Evans <je@fb.com>	2016-03-31 11:23:29 -0700
commit	a82070ef5fc3aa81fda43086cdcc22bfa826b894 (patch)
tree	18b435a9a510f17fba79d20ddcfe9f84998210e1
parent	f86bc081d6190be14c64aeaae9d02863b440bfb3 (diff)
download	jemalloc-a82070ef5fc3aa81fda43086cdcc22bfa826b894.tar.gz
Add JEMALLOC_ALLOC_JUNK and JEMALLOC_FREE_JUNK macros
Replace hardcoded 0xa5 and 0x5a junk values with JEMALLOC_ALLOC_JUNK and JEMALLOC_FREE_JUNK macros, respectively.
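
For reference, a minimal standalone sketch of the junk-fill convention these macros name: 0xa5 marks bytes of freshly allocated memory, 0x5a marks bytes of freed memory. The demo_alloc()/demo_free() wrappers below are hypothetical illustrations only, not jemalloc code paths; the real call sites are the memset() calls changed in the hunks that follow, which jemalloc guards with config_fill and opt_junk_alloc / opt_junk_free.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Macro values match the definitions added to util.h in this commit. */
#define JEMALLOC_ALLOC_JUNK 0xa5	/* fill for newly allocated memory */
#define JEMALLOC_FREE_JUNK  0x5a	/* fill for memory being freed */

/* Hypothetical wrapper: junk-fill an allocation before handing it out. */
static void *
demo_alloc(size_t usize)
{
	void *ret = malloc(usize);

	if (ret != NULL)
		memset(ret, JEMALLOC_ALLOC_JUNK, usize);
	return (ret);
}

/* Hypothetical wrapper: junk-fill a region before releasing it. */
static void
demo_free(void *ptr, size_t usize)
{
	memset(ptr, JEMALLOC_FREE_JUNK, usize);
	free(ptr);
}

int
main(void)
{
	uint8_t *p = demo_alloc(16);

	/* Every byte of a junk-filled allocation reads back as 0xa5. */
	if (p != NULL && p[0] == JEMALLOC_ALLOC_JUNK)
		demo_free(p, 16);
	return (0);
}
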
-rw-r--r--	include/jemalloc/internal/tcache.h	7
-rw-r--r--	include/jemalloc/internal/util.h	4
-rw-r--r--	src/arena.c	36
-rw-r--r--	src/ckh.c	2
-rw-r--r--	src/huge.c	15
-rw-r--r--	src/quarantine.c	2
-rw-r--r--	test/unit/junk.c	6
7 files changed, 40 insertions, 32 deletions
diff --git a/include/jemalloc/internal/tcache.h b/include/jemalloc/internal/tcache.h
index 8357820..1edd39f 100644
--- a/include/jemalloc/internal/tcache.h
+++ b/include/jemalloc/internal/tcache.h
@@ -381,9 +381,10 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
}
if (likely(!zero)) {
if (slow_path && config_fill) {
- if (unlikely(opt_junk_alloc))
- memset(ret, 0xa5, usize);
- else if (unlikely(opt_zero))
+ if (unlikely(opt_junk_alloc)) {
+ memset(ret, JEMALLOC_ALLOC_JUNK,
+ usize);
+ } else if (unlikely(opt_zero))
memset(ret, 0, usize);
}
} else
diff --git a/include/jemalloc/internal/util.h b/include/jemalloc/internal/util.h
index 228584a..949a0e0 100644
--- a/include/jemalloc/internal/util.h
+++ b/include/jemalloc/internal/util.h
@@ -40,6 +40,10 @@
*/
#define MALLOC_PRINTF_BUFSIZE 4096
+/* Junk fill patterns. */
+#define JEMALLOC_ALLOC_JUNK 0xa5
+#define JEMALLOC_FREE_JUNK 0x5a
+
/*
* Wrap a cpp argument that contains commas such that it isn't broken up into
* multiple arguments.
diff --git a/src/arena.c b/src/arena.c
index 38a1ce3..1d30de5 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -2249,15 +2249,16 @@ void
arena_alloc_junk_small(void *ptr, arena_bin_info_t *bin_info, bool zero)
{
+ size_t redzone_size = bin_info->redzone_size;
+
if (zero) {
- size_t redzone_size = bin_info->redzone_size;
- memset((void *)((uintptr_t)ptr - redzone_size), 0xa5,
- redzone_size);
- memset((void *)((uintptr_t)ptr + bin_info->reg_size), 0xa5,
- redzone_size);
+ memset((void *)((uintptr_t)ptr - redzone_size),
+ JEMALLOC_ALLOC_JUNK, redzone_size);
+ memset((void *)((uintptr_t)ptr + bin_info->reg_size),
+ JEMALLOC_ALLOC_JUNK, redzone_size);
} else {
- memset((void *)((uintptr_t)ptr - bin_info->redzone_size), 0xa5,
- bin_info->reg_interval);
+ memset((void *)((uintptr_t)ptr - redzone_size),
+ JEMALLOC_ALLOC_JUNK, bin_info->reg_interval);
}
}
@@ -2293,22 +2294,22 @@ arena_redzones_validate(void *ptr, arena_bin_info_t *bin_info, bool reset)
for (i = 1; i <= redzone_size; i++) {
uint8_t *byte = (uint8_t *)((uintptr_t)ptr - i);
- if (*byte != 0xa5) {
+ if (*byte != JEMALLOC_ALLOC_JUNK) {
error = true;
arena_redzone_corruption(ptr, size, false, i,
*byte);
if (reset)
- *byte = 0xa5;
+ *byte = JEMALLOC_ALLOC_JUNK;
}
}
for (i = 0; i < redzone_size; i++) {
uint8_t *byte = (uint8_t *)((uintptr_t)ptr + size + i);
- if (*byte != 0xa5) {
+ if (*byte != JEMALLOC_ALLOC_JUNK) {
error = true;
arena_redzone_corruption(ptr, size, true, i,
*byte);
if (reset)
- *byte = 0xa5;
+ *byte = JEMALLOC_ALLOC_JUNK;
}
}
}
@@ -2327,7 +2328,7 @@ arena_dalloc_junk_small(void *ptr, arena_bin_info_t *bin_info)
size_t redzone_size = bin_info->redzone_size;
arena_redzones_validate(ptr, bin_info, false);
- memset((void *)((uintptr_t)ptr - redzone_size), 0x5a,
+ memset((void *)((uintptr_t)ptr - redzone_size), JEMALLOC_FREE_JUNK,
bin_info->reg_interval);
}
#ifdef JEMALLOC_JET
@@ -2458,7 +2459,7 @@ arena_malloc_large(tsd_t *tsd, arena_t *arena, szind_t binind, bool zero)
if (!zero) {
if (config_fill) {
if (unlikely(opt_junk_alloc))
- memset(ret, 0xa5, usize);
+ memset(ret, JEMALLOC_ALLOC_JUNK, usize);
else if (unlikely(opt_zero))
memset(ret, 0, usize);
}
@@ -2563,7 +2564,7 @@ arena_palloc_large(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
if (config_fill && !zero) {
if (unlikely(opt_junk_alloc))
- memset(ret, 0xa5, usize);
+ memset(ret, JEMALLOC_ALLOC_JUNK, usize);
else if (unlikely(opt_zero))
memset(ret, 0, usize);
}
@@ -2776,7 +2777,7 @@ arena_dalloc_junk_large(void *ptr, size_t usize)
{
if (config_fill && unlikely(opt_junk_free))
- memset(ptr, 0x5a, usize);
+ memset(ptr, JEMALLOC_FREE_JUNK, usize);
}
#ifdef JEMALLOC_JET
#undef arena_dalloc_junk_large
@@ -2977,7 +2978,7 @@ arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
{
if (config_fill && unlikely(opt_junk_free)) {
- memset((void *)((uintptr_t)ptr + usize), 0x5a,
+ memset((void *)((uintptr_t)ptr + usize), JEMALLOC_FREE_JUNK,
old_usize - usize);
}
}
@@ -3012,7 +3013,8 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t usize_min,
usize_min, usize_max, zero);
if (config_fill && !ret && !zero) {
if (unlikely(opt_junk_alloc)) {
- memset((void *)((uintptr_t)ptr + oldsize), 0xa5,
+ memset((void *)((uintptr_t)ptr + oldsize),
+ JEMALLOC_ALLOC_JUNK,
isalloc(ptr, config_prof) - oldsize);
} else if (unlikely(opt_zero)) {
memset((void *)((uintptr_t)ptr + oldsize), 0,
diff --git a/src/ckh.c b/src/ckh.c
index 3b423aa..07b49dd 100644
--- a/src/ckh.c
+++ b/src/ckh.c
@@ -423,7 +423,7 @@ ckh_delete(tsd_t *tsd, ckh_t *ckh)
idalloctm(tsd, ckh->tab, tcache_get(tsd, false), true, true);
if (config_debug)
- memset(ckh, 0x5a, sizeof(ckh_t));
+ memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
}
size_t
diff --git a/src/huge.c b/src/huge.c
index 5f7ceaf..a63c825 100644
--- a/src/huge.c
+++ b/src/huge.c
@@ -92,7 +92,7 @@ huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
if (!is_zeroed)
memset(ret, 0, usize);
} else if (config_fill && unlikely(opt_junk_alloc))
- memset(ret, 0xa5, usize);
+ memset(ret, JEMALLOC_ALLOC_JUNK, usize);
arena_decay_tick(tsd, arena);
return (ret);
@@ -112,7 +112,7 @@ huge_dalloc_junk(void *ptr, size_t usize)
* unmapped.
*/
if (!config_munmap || (have_dss && chunk_in_dss(ptr)))
- memset(ptr, 0x5a, usize);
+ memset(ptr, JEMALLOC_FREE_JUNK, usize);
}
}
#ifdef JEMALLOC_JET
@@ -147,7 +147,8 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
if (oldsize > usize) {
size_t sdiff = oldsize - usize;
if (config_fill && unlikely(opt_junk_free)) {
- memset((void *)((uintptr_t)ptr + usize), 0x5a, sdiff);
+ memset((void *)((uintptr_t)ptr + usize),
+ JEMALLOC_FREE_JUNK, sdiff);
post_zeroed = false;
} else {
post_zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
@@ -174,8 +175,8 @@ huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
usize - oldsize);
}
} else if (config_fill && unlikely(opt_junk_alloc)) {
- memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
- oldsize);
+ memset((void *)((uintptr_t)ptr + oldsize),
+ JEMALLOC_ALLOC_JUNK, usize - oldsize);
}
}
}
@@ -268,8 +269,8 @@ huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t usize, bool zero) {
CHUNK_CEILING(oldsize));
}
} else if (config_fill && unlikely(opt_junk_alloc)) {
- memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
- oldsize);
+ memset((void *)((uintptr_t)ptr + oldsize), JEMALLOC_ALLOC_JUNK,
+ usize - oldsize);
}
return (false);
diff --git a/src/quarantine.c b/src/quarantine.c
index ff8801c..c024dea 100644
--- a/src/quarantine.c
+++ b/src/quarantine.c
@@ -160,7 +160,7 @@ quarantine(tsd_t *tsd, void *ptr)
&& usize <= SMALL_MAXCLASS)
arena_quarantine_junk_small(ptr, usize);
else
- memset(ptr, 0x5a, usize);
+ memset(ptr, JEMALLOC_FREE_JUNK, usize);
}
} else {
assert(quarantine->curbytes == 0);
diff --git a/test/unit/junk.c b/test/unit/junk.c
index b23dd1e..f4e6226 100644
--- a/test/unit/junk.c
+++ b/test/unit/junk.c
@@ -29,7 +29,7 @@ arena_dalloc_junk_small_intercept(void *ptr, arena_bin_info_t *bin_info)
arena_dalloc_junk_small_orig(ptr, bin_info);
for (i = 0; i < bin_info->reg_size; i++) {
- assert_c_eq(((char *)ptr)[i], 0x5a,
+ assert_c_eq(((char *)ptr)[i], JEMALLOC_FREE_JUNK,
"Missing junk fill for byte %zu/%zu of deallocated region",
i, bin_info->reg_size);
}
@@ -44,7 +44,7 @@ arena_dalloc_junk_large_intercept(void *ptr, size_t usize)
arena_dalloc_junk_large_orig(ptr, usize);
for (i = 0; i < usize; i++) {
- assert_c_eq(((char *)ptr)[i], 0x5a,
+ assert_c_eq(((char *)ptr)[i], JEMALLOC_FREE_JUNK,
"Missing junk fill for byte %zu/%zu of deallocated region",
i, usize);
}
@@ -98,7 +98,7 @@ test_junk(size_t sz_min, size_t sz_max)
for (i = sz_prev; i < sz; i++) {
if (opt_junk_alloc) {
- assert_c_eq(s[i], 0xa5,
+ assert_c_eq(s[i], JEMALLOC_ALLOC_JUNK,
"Newly allocated byte %zu/%zu isn't "
"junk-filled", i, sz);
}