author | jackpoz <giacomopoz@gmail.com> | 2014-06-19 21:59:49 +0200
---|---|---
committer | jackpoz <giacomopoz@gmail.com> | 2014-06-19 22:20:55 +0200
commit | 57069b393d7698bc0f860ad1b2d0b16785f9e8e0 (patch) |
tree | 9426f07459164e6cbe3f9d1486b64a0dbb1b7cd3 |
parent | dc96fb1cee4a91e9828d085717b50fc163393bc6 (diff) |
Core/Dependencies: Upgrade to jemalloc-3.6.0
Mode | Path | Lines changed
---|---|---
-rw-r--r-- | dep/PackageList.txt | 2
-rw-r--r-- | dep/jemalloc/ChangeLog | 24
-rw-r--r-- | dep/jemalloc/VERSION | 2
-rw-r--r-- | dep/jemalloc/include/jemalloc/internal/hash.h | 2
-rw-r--r-- | dep/jemalloc/include/jemalloc/internal/huge.h | 8
-rw-r--r-- | dep/jemalloc/include/jemalloc/internal/jemalloc_internal.h | 10
-rw-r--r-- | dep/jemalloc/include/jemalloc/internal/private_namespace.h | 1
-rw-r--r-- | dep/jemalloc/include/jemalloc/jemalloc.h | 8
-rw-r--r-- | dep/jemalloc/src/arena.c | 5
-rw-r--r-- | dep/jemalloc/src/huge.c | 26
-rw-r--r-- | dep/jemalloc/src/jemalloc.c | 2
-rw-r--r-- | dep/jemalloc/src/prof.c | 5
12 files changed, 67 insertions, 28 deletions
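
The jemalloc.h hunk below bumps the version macros reported by the bundled copy (JEMALLOC_VERSION_MAJOR/MINOR/BUGFIX go from 3/5/1 to 3/6/0). A consumer of the dependency can gate on those macros at build time; the sketch below is illustrative only and assumes the dep/jemalloc headers are reachable as "jemalloc/jemalloc.h" on the include path, which is not something this commit sets up.

```c
/*
 * Minimal sketch (not part of this commit): fail the build if the bundled
 * jemalloc is older than 3.6.0, using the version macros changed in
 * dep/jemalloc/include/jemalloc/jemalloc.h. The include path is assumed.
 */
#include <stdio.h>
#include "jemalloc/jemalloc.h"

#if (JEMALLOC_VERSION_MAJOR < 3) || \
    (JEMALLOC_VERSION_MAJOR == 3 && JEMALLOC_VERSION_MINOR < 6)
#error "bundled jemalloc is older than 3.6.0"
#endif

int main(void)
{
    /* JEMALLOC_VERSION carries the full git-described version string. */
    printf("jemalloc %s\n", JEMALLOC_VERSION);
    return 0;
}
```

Note that JEMALLOC_VERSION_BUGFIX drops back to 0 with this release, so a version check has to compare major, minor, and bugfix in order rather than any single component.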
diff --git a/dep/PackageList.txt b/dep/PackageList.txt
index 602e9f12bb8..e4ecd80d08f 100644
--- a/dep/PackageList.txt
+++ b/dep/PackageList.txt
@@ -14,7 +14,7 @@ G3D (a commercial-grade C++ 3D engine available as Open Source (BSD License)
 jemalloc (a general-purpose scalable concurrent malloc-implementation)
   http://www.canonware.com/jemalloc/
-  Version: 3.5.1
+  Version: 3.6.0
 
 libMPQ (a library for reading MPQ files)
   https://libmpq.org/
diff --git a/dep/jemalloc/ChangeLog b/dep/jemalloc/ChangeLog
index c0ca338b076..d56ee999e69 100644
--- a/dep/jemalloc/ChangeLog
+++ b/dep/jemalloc/ChangeLog
@@ -5,6 +5,30 @@ found in the git revision history:
 
     https://github.com/jemalloc/jemalloc
 
+* 3.6.0 (March 31, 2014)
+
+  This version contains a critical bug fix for a regression present in 3.5.0 and
+  3.5.1.
+
+  Bug fixes:
+  - Fix a regression in arena_chunk_alloc() that caused crashes during
+    small/large allocation if chunk allocation failed. In the absence of this
+    bug, chunk allocation failure would result in allocation failure, e.g. NULL
+    return from malloc(). This regression was introduced in 3.5.0.
+  - Fix backtracing for gcc intrinsics-based backtracing by specifying
+    -fno-omit-frame-pointer to gcc. Note that the application (and all the
+    libraries it links to) must also be compiled with this option for
+    backtracing to be reliable.
+  - Use dss allocation precedence for huge allocations as well as small/large
+    allocations.
+  - Fix test assertion failure message formatting. This bug did not manifect on
+    x86_64 systems because of implementation subtleties in va_list.
+  - Fix inconsequential test failures for hash and SFMT code.
+
+  New features:
+  - Support heap profiling on FreeBSD. This feature depends on the proc
+    filesystem being mounted during heap profile dumping.
+
 * 3.5.1 (February 25, 2014)
 
   This version primarily addresses minor bugs in test code.
diff --git a/dep/jemalloc/VERSION b/dep/jemalloc/VERSION
index 24870eef790..dace31ba7b6 100644
--- a/dep/jemalloc/VERSION
+++ b/dep/jemalloc/VERSION
@@ -1 +1 @@
-3.5.1-0-g7709a64c59daf0b1f938be49472fcc499e1bd136
+3.6.0-0-g46c0af68bd248b04df75e4f92d5fb804c3d75340
diff --git a/dep/jemalloc/include/jemalloc/internal/hash.h b/dep/jemalloc/include/jemalloc/internal/hash.h
index 09b69df515b..c7183ede82d 100644
--- a/dep/jemalloc/include/jemalloc/internal/hash.h
+++ b/dep/jemalloc/include/jemalloc/internal/hash.h
@@ -320,7 +320,7 @@ hash_x64_128(const void *key, const int len, const uint32_t seed,
 JEMALLOC_INLINE void
 hash(const void *key, size_t len, const uint32_t seed, size_t r_hash[2])
 {
-#if (LG_SIZEOF_PTR == 3)
+#if (LG_SIZEOF_PTR == 3 && !defined(JEMALLOC_BIG_ENDIAN))
     hash_x64_128(key, len, seed, (uint64_t *)r_hash);
 #else
     uint64_t hashes[2];
diff --git a/dep/jemalloc/include/jemalloc/internal/huge.h b/dep/jemalloc/include/jemalloc/internal/huge.h
index ddf13138ad7..a2b9c779191 100644
--- a/dep/jemalloc/include/jemalloc/internal/huge.h
+++ b/dep/jemalloc/include/jemalloc/internal/huge.h
@@ -17,18 +17,20 @@ extern size_t huge_allocated;
 /* Protects chunk-related data structures. */
 extern malloc_mutex_t huge_mtx;
 
-void *huge_malloc(size_t size, bool zero);
-void *huge_palloc(size_t size, size_t alignment, bool zero);
+void *huge_malloc(size_t size, bool zero, dss_prec_t dss_prec);
+void *huge_palloc(size_t size, size_t alignment, bool zero,
+    dss_prec_t dss_prec);
 bool huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra);
 void *huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero, bool try_tcache_dalloc);
+    size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec);
 #ifdef JEMALLOC_JET
 typedef void (huge_dalloc_junk_t)(void *, size_t);
 extern huge_dalloc_junk_t *huge_dalloc_junk;
 #endif
 void huge_dalloc(void *ptr, bool unmap);
 size_t huge_salloc(const void *ptr);
+dss_prec_t huge_dss_prec_get(arena_t *arena);
 prof_ctx_t *huge_prof_ctx_get(const void *ptr);
 void huge_prof_ctx_set(const void *ptr, prof_ctx_t *ctx);
 bool huge_boot(void);
diff --git a/dep/jemalloc/include/jemalloc/internal/jemalloc_internal.h b/dep/jemalloc/include/jemalloc/internal/jemalloc_internal.h
index b64cc4bed87..cf171326c29 100644
--- a/dep/jemalloc/include/jemalloc/internal/jemalloc_internal.h
+++ b/dep/jemalloc/include/jemalloc/internal/jemalloc_internal.h
@@ -801,7 +801,7 @@ imalloct(size_t size, bool try_tcache, arena_t *arena)
     if (size <= arena_maxclass)
         return (arena_malloc(arena, size, false, try_tcache));
     else
-        return (huge_malloc(size, false));
+        return (huge_malloc(size, false, huge_dss_prec_get(arena)));
 }
 
 JEMALLOC_ALWAYS_INLINE void *
@@ -818,7 +818,7 @@ icalloct(size_t size, bool try_tcache, arena_t *arena)
     if (size <= arena_maxclass)
         return (arena_malloc(arena, size, true, try_tcache));
     else
-        return (huge_malloc(size, true));
+        return (huge_malloc(size, true, huge_dss_prec_get(arena)));
 }
 
 JEMALLOC_ALWAYS_INLINE void *
@@ -844,9 +844,9 @@ ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
             ret = arena_palloc(choose_arena(arena), usize,
                 alignment, zero);
         } else if (alignment <= chunksize)
-            ret = huge_malloc(usize, zero);
+            ret = huge_malloc(usize, zero, huge_dss_prec_get(arena));
         else
-            ret = huge_palloc(usize, alignment, zero);
+            ret = huge_palloc(usize, alignment, zero, huge_dss_prec_get(arena));
     }
 
     assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
@@ -1015,7 +1015,7 @@ iralloct(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
             try_tcache_dalloc));
     } else {
         return (huge_ralloc(ptr, oldsize, size, extra,
-            alignment, zero, try_tcache_dalloc));
+            alignment, zero, try_tcache_dalloc, huge_dss_prec_get(arena)));
     }
 }
diff --git a/dep/jemalloc/include/jemalloc/internal/private_namespace.h b/dep/jemalloc/include/jemalloc/internal/private_namespace.h
index a99bf7293ac..35c3b0c6c74 100644
--- a/dep/jemalloc/include/jemalloc/internal/private_namespace.h
+++ b/dep/jemalloc/include/jemalloc/internal/private_namespace.h
@@ -197,6 +197,7 @@
 #define huge_boot JEMALLOC_N(huge_boot)
 #define huge_dalloc JEMALLOC_N(huge_dalloc)
 #define huge_dalloc_junk JEMALLOC_N(huge_dalloc_junk)
+#define huge_dss_prec_get JEMALLOC_N(huge_dss_prec_get)
 #define huge_malloc JEMALLOC_N(huge_malloc)
 #define huge_mtx JEMALLOC_N(huge_mtx)
 #define huge_ndalloc JEMALLOC_N(huge_ndalloc)
diff --git a/dep/jemalloc/include/jemalloc/jemalloc.h b/dep/jemalloc/include/jemalloc/jemalloc.h
index 84e2e7294d1..b8ea851e525 100644
--- a/dep/jemalloc/include/jemalloc/jemalloc.h
+++ b/dep/jemalloc/include/jemalloc/jemalloc.h
@@ -7,12 +7,12 @@ extern "C" {
 #include <limits.h>
 #include <strings.h>
 
-#define JEMALLOC_VERSION "3.5.1-0-g7709a64c59daf0b1f938be49472fcc499e1bd136"
+#define JEMALLOC_VERSION "3.6.0-0-g46c0af68bd248b04df75e4f92d5fb804c3d75340"
 #define JEMALLOC_VERSION_MAJOR 3
-#define JEMALLOC_VERSION_MINOR 5
-#define JEMALLOC_VERSION_BUGFIX 1
+#define JEMALLOC_VERSION_MINOR 6
+#define JEMALLOC_VERSION_BUGFIX 0
 #define JEMALLOC_VERSION_NREV 0
-#define JEMALLOC_VERSION_GID "7709a64c59daf0b1f938be49472fcc499e1bd136"
+#define JEMALLOC_VERSION_GID "46c0af68bd248b04df75e4f92d5fb804c3d75340"
 
 #  define MALLOCX_LG_ALIGN(la) (la)
 #  if LG_SIZEOF_PTR == 2
diff --git a/dep/jemalloc/src/arena.c b/dep/jemalloc/src/arena.c
index 390ab0f8230..dad707b63d0 100644
--- a/dep/jemalloc/src/arena.c
+++ b/dep/jemalloc/src/arena.c
@@ -614,8 +614,11 @@ arena_chunk_alloc(arena_t *arena)
 
     if (arena->spare != NULL)
         chunk = arena_chunk_init_spare(arena);
-    else
+    else {
         chunk = arena_chunk_init_hard(arena);
+        if (chunk == NULL)
+            return (NULL);
+    }
 
     /* Insert the run into the runs_avail tree. */
     arena_avail_insert(arena, chunk, map_bias, chunk_npages-map_bias,
diff --git a/dep/jemalloc/src/huge.c b/dep/jemalloc/src/huge.c
index 6d86aed881b..d72f2135702 100644
--- a/dep/jemalloc/src/huge.c
+++ b/dep/jemalloc/src/huge.c
@@ -16,14 +16,14 @@ malloc_mutex_t huge_mtx;
 static extent_tree_t huge;
 
 void *
-huge_malloc(size_t size, bool zero)
+huge_malloc(size_t size, bool zero, dss_prec_t dss_prec)
 {
 
-    return (huge_palloc(size, chunksize, zero));
+    return (huge_palloc(size, chunksize, zero, dss_prec));
 }
 
 void *
-huge_palloc(size_t size, size_t alignment, bool zero)
+huge_palloc(size_t size, size_t alignment, bool zero, dss_prec_t dss_prec)
 {
     void *ret;
     size_t csize;
@@ -48,8 +48,7 @@ huge_palloc(size_t size, size_t alignment, bool zero)
      * it is possible to make correct junk/zero fill decisions below.
      */
     is_zeroed = zero;
-    ret = chunk_alloc(csize, alignment, false, &is_zeroed,
-        chunk_dss_prec_get());
+    ret = chunk_alloc(csize, alignment, false, &is_zeroed, dss_prec);
     if (ret == NULL) {
         base_node_dealloc(node);
         return (NULL);
     }
@@ -98,7 +97,7 @@ huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size, size_t extra)
 
 void *
 huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero, bool try_tcache_dalloc)
+    size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec)
 {
     void *ret;
     size_t copysize;
@@ -113,18 +112,18 @@ huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
      * space and copying.
      */
     if (alignment > chunksize)
-        ret = huge_palloc(size + extra, alignment, zero);
+        ret = huge_palloc(size + extra, alignment, zero, dss_prec);
     else
-        ret = huge_malloc(size + extra, zero);
+        ret = huge_malloc(size + extra, zero, dss_prec);
 
     if (ret == NULL) {
         if (extra == 0)
             return (NULL);
 
         /* Try again, this time without extra. */
         if (alignment > chunksize)
-            ret = huge_palloc(size, alignment, zero);
+            ret = huge_palloc(size, alignment, zero, dss_prec);
         else
-            ret = huge_malloc(size, zero);
+            ret = huge_malloc(size, zero, dss_prec);
 
         if (ret == NULL)
             return (NULL);
@@ -264,6 +263,13 @@ huge_salloc(const void *ptr)
     return (ret);
 }
 
+dss_prec_t
+huge_dss_prec_get(arena_t *arena)
+{
+
+    return (arena_dss_prec_get(choose_arena(arena)));
+}
+
 prof_ctx_t *
 huge_prof_ctx_get(const void *ptr)
 {
diff --git a/dep/jemalloc/src/jemalloc.c b/dep/jemalloc/src/jemalloc.c
index 563d99f8816..204778bc89d 100644
--- a/dep/jemalloc/src/jemalloc.c
+++ b/dep/jemalloc/src/jemalloc.c
@@ -2076,7 +2076,7 @@ a0alloc(size_t size, bool zero)
     if (size <= arena_maxclass)
         return (arena_malloc(arenas[0], size, zero, false));
     else
-        return (huge_malloc(size, zero));
+        return (huge_malloc(size, zero, huge_dss_prec_get(arenas[0])));
 }
 
 void *
diff --git a/dep/jemalloc/src/prof.c b/dep/jemalloc/src/prof.c
index 1d8ccbd60ae..7722b7b4373 100644
--- a/dep/jemalloc/src/prof.c
+++ b/dep/jemalloc/src/prof.c
@@ -935,9 +935,12 @@ prof_dump_maps(bool propagate_err)
     char filename[PATH_MAX + 1];
 
     cassert(config_prof);
-
+#ifdef __FreeBSD__
+    malloc_snprintf(filename, sizeof(filename), "/proc/curproc/map");
+#else
     malloc_snprintf(filename, sizeof(filename), "/proc/%d/maps",
         (int)getpid());
+#endif
     mfd = open(filename, O_RDONLY);
     if (mfd != -1) {
         ssize_t nread;
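
The dss_prec_t parameter threaded through huge_malloc/huge_palloc/huge_ralloc, together with the new huge_dss_prec_get() helper, is what makes per-arena dss ("sbrk") precedence apply to huge allocations, as described in the ChangeLog hunk above. From application code this surfaces through jemalloc's mallctl interface. The sketch below is illustrative only, not part of the commit: it assumes the bundled library exports an unprefixed mallctl and that the "arena.0.dss" control is available on the target platform.

```c
/*
 * Illustrative only, not part of this commit: with jemalloc 3.6.0 the dss
 * precedence configured for an arena also governs huge (> chunk-sized)
 * allocations. Assumes an unprefixed mallctl() and "arena.0.dss" support.
 */
#include <stdio.h>
#include <stdlib.h>
#include "jemalloc/jemalloc.h"

int main(void)
{
    const char *dss = "primary"; /* prefer sbrk over mmap for arena 0 */

    if (mallctl("arena.0.dss", NULL, NULL, (void *)&dss, sizeof(dss)) != 0) {
        fprintf(stderr, "arena.0.dss not supported on this platform\n");
        return 1;
    }

    /* A request above the chunk size (4 MiB by default in the 3.x series)
     * takes the huge-allocation path and now honors the setting above. */
    void *huge = malloc(8 * 1024 * 1024);
    free(huge);
    return 0;
}
```

Whether memory actually comes from dss still depends on sbrk() availability on the platform; the precedence only orders which source is tried first.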