author     Jason Evans <jasone@canonware.com>  2015-05-19 17:42:31 -0700
committer  Jason Evans <jasone@canonware.com>  2015-05-19 17:42:31 -0700
commit     5154175cf1e6e7b1a2ed0295c232e60384944b3f (patch)
tree       c3d3cc203a5ee4d27d350b46def01c33f863bde6 /src
parent     5aa50a2834fb09c5338f0e7b9db49cc0edd1a38a (diff)
download   jemalloc-5154175cf1e6e7b1a2ed0295c232e60384944b3f.tar.gz
Fix performance regression in arena_palloc().
Pass large allocation requests to arena_malloc() when possible. This regression was introduced by 155bfa7da18cab0d21d87aa2dce4554166836f5d (Normalize size classes.).
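For illustration, a minimal standalone sketch of the page-base recovery performed in the new large-allocation branch is shown below, assuming a 4 KiB page; PAGE, PAGE_MASK, and run_base() are stand-ins defined here for the example, not jemalloc's internal definitions.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for jemalloc's internal page constants. */
#define PAGE      ((uintptr_t)4096)
#define PAGE_MASK (PAGE - 1)

/*
 * Given a pointer that may sit at a random (cache-oblivious) offset
 * within a page-aligned run, clear the low bits to recover the base.
 */
static void *
run_base(void *ptr)
{
	return ((void *)((uintptr_t)ptr & ~PAGE_MASK));
}

int
main(void)
{
	/* Hypothetical pointer value, used only to demonstrate the mask. */
	void *p = (void *)(uintptr_t)0x7f0000012340;

	printf("ptr=%p base=%p\n", p, run_base(p));
	return (0);
}

This is the same bit manipulation applied in the config_cache_oblivious case of the diff below, where the result of arena_malloc() is masked back to its page-aligned run base.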
Diffstat (limited to 'src')
-rw-r--r--  src/arena.c  15
1 file changed, 13 insertions, 2 deletions
diff --git a/src/arena.c b/src/arena.c
index a053adf..a3f36b3 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -2175,9 +2175,20 @@ arena_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
void *ret;
if (usize <= SMALL_MAXCLASS && (alignment < PAGE || (alignment == PAGE
- && (usize & PAGE_MASK) == 0)))
+ && (usize & PAGE_MASK) == 0))) {
+ /* Small; alignment doesn't require special run placement. */
ret = arena_malloc(tsd, arena, usize, zero, tcache);
- else {
+ } else if (usize <= arena_maxclass && alignment <= PAGE) {
+ /*
+ * Large; alignment doesn't require special run placement.
+ * However, the cached pointer may be at a random offset from
+ * the base of the run, so do some bit manipulation to retrieve
+ * the base.
+ */
+ ret = arena_malloc(tsd, arena, usize, zero, tcache);
+ if (config_cache_oblivious)
+ ret = (void *)((uintptr_t)ret & ~PAGE_MASK);
+ } else {
if (likely(usize <= arena_maxclass)) {
ret = arena_palloc_large(tsd, arena, usize, alignment,
zero);