author     Jason Evans <jasone@canonware.com>    2015-07-24 18:21:42 -0700
committer  Jason Evans <jasone@canonware.com>    2015-07-24 18:39:14 -0700
commit     d059b9d6a1ac3e7f834260ba001bf0d1599fb0bf (patch)
tree       68780f70e4f9037966e80b388de301ccf2550e13 /src
parent     40cbd30d508b0d4e6462f5c36ffdbf6c1f29da22 (diff)
Implement support for non-coalescing maps on MinGW.
- Do not reallocate huge objects in place if the number of backing chunks
  would change.
- Do not cache multi-chunk mappings.

This resolves #213.
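The rule behind the first bullet can be sketched in isolation as follows. This is an illustrative sketch, not code from the patch: CHUNKSIZE, the CHUNK_CEILING() definition, and the helper name are stand-ins for jemalloc's internal chunksize variable and CHUNK_CEILING() macro, and the 2 MiB value is chosen only for the example.

#include <stdbool.h>
#include <stddef.h>

/* Stand-in for jemalloc's chunk size; 2 MiB is illustrative only. */
#define CHUNKSIZE	((size_t)1 << 21)
/* Round sz up to a multiple of CHUNKSIZE (mirrors jemalloc's CHUNK_CEILING). */
#define CHUNK_CEILING(sz)	(((sz) + CHUNKSIZE - 1) & ~(CHUNKSIZE - 1))

/*
 * Hypothetical helper: when the OS never coalesces adjacent mappings
 * (maps_coalesce is false, as on MinGW), a huge object may be resized in
 * place only if the number of chunks backing it stays the same; growing
 * or shrinking across a chunk boundary would require splitting or
 * merging maps, which such platforms cannot do.
 */
static bool
huge_resize_in_place_ok(bool maps_coalesce, size_t oldsize, size_t newsize)
{
	if (maps_coalesce)
		return (true);
	return (CHUNK_CEILING(oldsize) == CHUNK_CEILING(newsize));
}

With these definitions, huge_resize_in_place_ok(false, 3 << 20, 5 << 20) is false (two backing chunks versus three), while huge_resize_in_place_ok(false, 3 << 20, 4 << 20) is true (both sizes fit in two 2 MiB chunks).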
Diffstat (limited to 'src')
-rw-r--r--   src/chunk.c   6 ++++++
-rw-r--r--   src/huge.c    3 +++
2 files changed, 9 insertions, 0 deletions
diff --git a/src/chunk.c b/src/chunk.c
index 5945482..7a4ede8 100644
--- a/src/chunk.c
+++ b/src/chunk.c
@@ -337,6 +337,7 @@ chunk_record(arena_t *arena, extent_tree_t *chunks_szad,
 	extent_node_t *node, *prev;
 	extent_node_t key;
 
+	assert(maps_coalesce || size == chunksize);
 	assert(!cache || !zeroed);
 	unzeroed = cache || !zeroed;
 	JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(chunk, size);
@@ -421,6 +422,11 @@ chunk_dalloc_cache(arena_t *arena, void *chunk, size_t size)
 	assert(size != 0);
 	assert((size & chunksize_mask) == 0);
 
+	if (!maps_coalesce && size != chunksize) {
+		chunk_dalloc_arena(arena, chunk, size, false);
+		return;
+	}
+
 	chunk_record(arena, &arena->chunks_szad_cache, &arena->chunks_ad_cache,
 	    true, chunk, size, false);
 	arena_maybe_purge(arena);
diff --git a/src/huge.c b/src/huge.c
index a7993f8..7cd0d7d 100644
--- a/src/huge.c
+++ b/src/huge.c
@@ -304,6 +304,9 @@ huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra,
 		return (false);
 	}
 
+	if (!maps_coalesce)
+		return (true);
+
 	/* Shrink the allocation in-place. */
 	if (CHUNK_CEILING(oldsize) >= CHUNK_CEILING(usize)) {
 		huge_ralloc_no_move_shrink(ptr, oldsize, usize);
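Taken together, the two hunks guarantee that on non-coalescing platforms every cached mapping is exactly one chunk long. As a standalone illustration (again not code from the patch: the stub functions, their bodies, and the constant values are hypothetical; only the guard condition is taken from the chunk.c hunk above), the deallocation side can be read as:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

typedef struct arena_s arena_t;			/* opaque stand-in */

static const bool maps_coalesce = false;	/* false on MinGW per this patch */
static const size_t chunksize = (size_t)1 << 21;	/* illustrative value */

/* Hypothetical stubs for the internals the guard dispatches to. */
static void
chunk_dalloc_arena(arena_t *arena, void *chunk, size_t size, bool zeroed)
{
	(void)arena; (void)zeroed;
	printf("unmap %p (%zu bytes) immediately\n", chunk, size);
}

static void
chunk_record_cached(arena_t *arena, void *chunk, size_t size)
{
	(void)arena;
	printf("cache %p (%zu bytes) for reuse\n", chunk, size);
}

/*
 * Sketch of the chunk.c guard: a multi-chunk mapping could never be
 * split into chunk-sized pieces later (and neighbors never merge), so
 * it bypasses the cache and goes straight back to the OS; single chunks
 * are still cached.  The real chunk_dalloc_cache() also calls
 * arena_maybe_purge() after caching, as the diff above shows.
 */
static void
chunk_dalloc_cache_sketch(arena_t *arena, void *chunk, size_t size)
{
	if (!maps_coalesce && size != chunksize) {
		chunk_dalloc_arena(arena, chunk, size, false);
		return;
	}
	chunk_record_cached(arena, chunk, size);
}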