// (!!). http://goo.gl/xpmctm
if (mallocx == nullptr || rallocx == nullptr || xallocx == nullptr
|| sallocx == nullptr || dallocx == nullptr || nallocx == nullptr
- || mallctl == nullptr) {
+ || mallctl == nullptr || mallctlnametomib == nullptr
+ || mallctlbymib == nullptr) {
return false;
}
#pragma GCC system_header
/**
- * Declare *allocx() and mallctl() as weak symbols. These will be provided by
+ * Declare *allocx() and mallctl*() as weak symbols. These will be provided by
* jemalloc if we are using jemalloc, or will be NULL if we are using another
* malloc implementation.
*/
__attribute__((__weak__));
extern "C" int mallctl(const char*, void*, size_t*, void*, size_t)
__attribute__((__weak__));
+extern "C" int mallctlnametomib(const char*, size_t*, size_t*)
+__attribute__((__weak__));
+extern "C" int mallctlbymib(const size_t*, size_t, void*, size_t*, void*,
+ size_t)
+__attribute__((__weak__));
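// Illustrative sketch, not part of this change: with weak linkage the symbols
// above resolve to nullptr when jemalloc is not loaded, so every caller has
// to null-check before use. readOptNarenas is a hypothetical helper; the
// mallctl signature and the "opt.narenas" name follow jemalloc's documented
// API (the value is an unsigned in jemalloc 4.x).
static bool readOptNarenas(unsigned* out) {
  if (mallctl == nullptr) {
    return false;  // weak symbol is null: not running under jemalloc
  }
  size_t len = sizeof(*out);
  return mallctl("opt.narenas", out, &len, nullptr, 0) == 0;
}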
#include <bits/functexcept.h>
#define FOLLY_HAVE_MALLOC_H 1
size_t nallocx(size_t, int) __attribute__((__weak__));
int mallctl(const char*, void*, size_t*, void*, size_t)
__attribute__((__weak__));
+int mallctlnametomib(const char*, size_t*, size_t*) __attribute__((__weak__));
+int mallctlbymib(const size_t*, size_t, void*, size_t*, void*, size_t)
+ __attribute__((__weak__));
#else
extern void* (*mallocx)(size_t, int);
extern void* (*rallocx)(void*, size_t, int);
extern void (*dallocx)(void*, int);
extern size_t (*nallocx)(size_t, int);
extern int (*mallctl)(const char*, void*, size_t*, void*, size_t);
+extern int (*mallctlnametomib)(const char*, size_t*, size_t*);
+extern int (*mallctlbymib)(const size_t*, size_t, void*, size_t*, void*,
+ size_t);
#endif
}
void (*dallocx)(void*, int) = nullptr;
size_t (*nallocx)(size_t, int) = nullptr;
int (*mallctl)(const char*, void*, size_t*, void*, size_t) = nullptr;
+int (*mallctlnametomib)(const char*, size_t*, size_t*) = nullptr;
+int (*mallctlbymib)(const size_t*, size_t, void*, size_t*, void*, size_t) =
+ nullptr;
#endif
}
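// Illustrative sketch, not part of this change: on a platform without weak
// symbols, fallback pointers like those above could be resolved at runtime
// via dlsym() and left as nullptr when jemalloc is absent. This is only one
// possible wiring; it assumes <dlfcn.h> with RTLD_DEFAULT (glibc requires
// _GNU_SOURCE) and linking with -ldl, and lookupMallctlNameToMib is a
// hypothetical name.
#include <cstddef>
#include <dlfcn.h>

using MallctlNameToMibFn = int (*)(const char*, size_t*, size_t*);

static MallctlNameToMibFn lookupMallctlNameToMib() {
  // dlsym returns nullptr if the symbol (and thus jemalloc) is not present.
  return reinterpret_cast<MallctlNameToMibFn>(
      dlsym(RTLD_DEFAULT, "mallctlnametomib"));
}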
void MemoryIdler::flushLocalMallocCaches() {
if (usingJEMalloc()) {
- if (!mallctl) {
- FB_LOG_EVERY_MS(ERROR, 10000) << "mallctl weak link failed";
+ if (!mallctl || !mallctlnametomib || !mallctlbymib) {
+ FB_LOG_EVERY_MS(ERROR, 10000) << "mallctl* weak link failed";
return;
}
// purging the arenas is counter-productive. We use the heuristic
// that if narenas <= 2 * num_cpus then we shouldn't do anything here,
// which detects when narenas has been reduced from the default.
- unsigned narenas;
- unsigned arenaForCurrent;
- if (mallctlWrapper("arenas.narenas", nullptr, &narenas) == 0 &&
+ unsigned narenas, arenaForCurrent;
+ size_t mib[3];
+ size_t miblen = 3;
+ if (mallctlWrapper("opt.narenas", nullptr, &narenas) == 0 &&
narenas > 2 * CacheLocality::system().numCpus &&
- mallctlWrapper("thread.arena", nullptr, &arenaForCurrent) == 0) {
- (void)mallctlWrapper("arenas.purge", &arenaForCurrent, nullptr);
+ mallctlWrapper("thread.arena", nullptr, &arenaForCurrent) == 0 &&
+ mallctlnametomib("arena.0.purge", mib, &miblen) == 0) {
+ mib[1] = size_t(arenaForCurrent);
+ mallctlbymib(mib, miblen, nullptr, nullptr, nullptr, 0);
}
}
}
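// Illustrative sketch, not part of this change: the same mallctlnametomib /
// mallctlbymib pattern works for any ctl a caller wants to hit repeatedly,
// e.g. translating "stats.allocated" to a MIB and reading it by MIB.
// readJemallocAllocated is a hypothetical helper; the ctl name and the
// signatures are jemalloc's documented ones.
static bool readJemallocAllocated(size_t* out) {
  if (!mallctlnametomib || !mallctlbymib) {
    return false;  // weak symbols are null when jemalloc is not loaded
  }
  size_t mib[2];
  size_t miblen = 2;
  if (mallctlnametomib("stats.allocated", mib, &miblen) != 0) {
    return false;
  }
  // The translated MIB could be cached and reused across calls.
  size_t len = sizeof(*out);
  return mallctlbymib(mib, miblen, out, &len, nullptr, 0) == 0;
}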
// Run this test with:
// MALLOC_CONF=prof_leak:true
-// LD_PRELOAD=${JEMALLOC_PATH}/lib/libjemalloc.so.1
+// LD_PRELOAD=${JEMALLOC_PATH}/lib/libjemalloc.so.2
// LD_PRELOAD="$LD_PRELOAD:"${UNWIND_PATH}/lib/libunwind.so.7
TEST(small_vector, leak_test) {
for (int j = 0; j < 1000; ++j) {