Mirror of https://gitlab.torproject.org/tpo/core/tor.git, synced 2025-02-23 14:40:51 +01:00
Remove tor_malloc_roundup().
This function never actually did us any good, and it added a little complexity. See the changes file for more info.
This commit is contained in:
parent 37d19fdfcd
commit f45cde05f9

6 changed files with 14 additions and 67 deletions
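For context: tor_malloc_roundup() asked the allocator how large a block it would really hand out for a given request (via malloc_good_size() where available), allocated that, and reported the larger size back so the caller could use the slack. A minimal sketch of the idea, not the removed Tor function itself — it drops the DMALLOC plumbing and the SIZE_T_CEILING assert visible in the util.c hunk below:

#include <stdlib.h>

#ifdef HAVE_MALLOC_GOOD_SIZE
#include <malloc/malloc.h>   /* OS X: malloc_good_size() */
#endif

/* Illustrative sketch: allocate at least *sizep bytes and report back how
 * many bytes the caller may actually use.  Without a platform hint we just
 * return the requested size unchanged. */
static void *
malloc_roundup_sketch(size_t *sizep)
{
#ifdef HAVE_MALLOC_GOOD_SIZE
  *sizep = malloc_good_size(*sizep);  /* size the allocator would reserve anyway */
#endif
  return malloc(*sizep);
}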
configure.in (31)

@@ -317,18 +317,6 @@ AC_CHECK_FUNCS(
 	vasprintf \
 )
 
-using_custom_malloc=no
-if test x$enable_openbsd_malloc = xyes ; then
-   AC_DEFINE(HAVE_MALLOC_GOOD_SIZE, 1, [Defined if we have the malloc_good_size function])
-   using_custom_malloc=yes
-fi
-if test x$tcmalloc = xyes ; then
-   using_custom_malloc=yes
-fi
-if test $using_custom_malloc = no ; then
-   AC_CHECK_FUNCS(mallinfo malloc_good_size malloc_usable_size)
-fi
-
 if test "$enable_threads" = "yes"; then
   AC_CHECK_HEADERS(pthread.h)
   AC_CHECK_FUNCS(pthread_create)

@@ -703,14 +691,6 @@ AC_CHECK_HEADERS(
 
 AC_CHECK_HEADERS(sys/param.h)
 
-TOR_CHECK_PROTOTYPE(malloc_good_size, HAVE_MALLOC_GOOD_SIZE_PROTOTYPE,
-  [#ifdef HAVE_MALLOC_H
-   #include <malloc.h>
-   #endif
-   #ifdef HAVE_MALLOC_MALLOC_H
-   #include <malloc/malloc.h>
-   #endif])
-
 AC_CHECK_HEADERS(net/if.h, net_if_found=1, net_if_found=0,
 [#ifdef HAVE_SYS_TYPES_H
 #include <sys/types.h>

@@ -1032,6 +1012,17 @@ if test x$tcmalloc = xyes ; then
    LDFLAGS="-ltcmalloc $LDFLAGS"
 fi
 
+using_custom_malloc=no
+if test x$enable_openbsd_malloc = xyes ; then
+   using_custom_malloc=yes
+fi
+if test x$tcmalloc = xyes ; then
+   using_custom_malloc=yes
+fi
+if test $using_custom_malloc = no ; then
+   AC_CHECK_FUNCS(mallinfo)
+fi
+
 # By default, we're going to assume we don't have mlockall()
 # bionic and other platforms have various broken mlockall subsystems.
 # Some systems don't have a working mlockall, some aren't linkable,
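With the custom-malloc block moved below the tcmalloc/openbsd-malloc decisions, only mallinfo is still probed. A hedged sketch of how such an autoconf result is typically consumed — Tor's actual reporting lives in tor_log_mallinfo() in util.c; the fields used here are the standard glibc struct mallinfo members, not Tor-specific names:

#include <stdio.h>

#ifdef HAVE_MALLINFO
#include <malloc.h>

/* Sketch: dump a couple of heap statistics when configure found mallinfo(). */
static void
dump_heap_stats(void)
{
  struct mallinfo mi = mallinfo();
  printf("arena=%d bytes, in use=%d bytes\n", mi.arena, mi.uordblks);
}
#else
static void
dump_heap_stats(void)
{
  puts("mallinfo() not available on this platform");
}
#endif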
src/common/memarea.c

@@ -118,7 +118,7 @@ alloc_chunk(size_t sz, int freelist_ok)
   size_t chunk_size = freelist_ok ? CHUNK_SIZE : sz;
   memarea_chunk_t *res;
   chunk_size += SENTINEL_LEN;
-  res = tor_malloc_roundup(&chunk_size);
+  res = tor_malloc(chunk_size);
   res->next_chunk = NULL;
   res->mem_size = chunk_size - CHUNK_HEADER_SIZE - SENTINEL_LEN;
   res->next_mem = res->u.mem;
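Effect of the memarea change, in rough numbers: mem_size is now always derived from the size the caller asked for, rather than from whatever larger block the allocator happened to hand back. A purely illustrative example with made-up constants (the real CHUNK_HEADER_SIZE and SENTINEL_LEN values live in memarea.c):

#include <stddef.h>
#include <stdio.h>

/* Made-up values for illustration only. */
#define CHUNK_SIZE        4096
#define CHUNK_HEADER_SIZE 16
#define SENTINEL_LEN      4

int
main(void)
{
  size_t chunk_size = CHUNK_SIZE + SENTINEL_LEN;
  /* Before: tor_malloc_roundup() could bump chunk_size up to the allocator's
   * bucket size, so mem_size varied by platform.  After: it is fixed. */
  size_t mem_size = chunk_size - CHUNK_HEADER_SIZE - SENTINEL_LEN;
  printf("usable bytes per memarea chunk: %zu\n", mem_size);
  return 0;
}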
src/common/mempool.c

@@ -70,7 +70,6 @@
 #define ASSERT(x) tor_assert(x)
 #undef ALLOC_CAN_RETURN_NULL
 #define TOR
-//#define ALLOC_ROUNDUP(p) tor_malloc_roundup(p)
 /* End Tor dependencies */
 #else
 /* If you're not building this as part of Tor, you'll want to define the

@@ -165,25 +164,16 @@ static mp_chunk_t *
 mp_chunk_new(mp_pool_t *pool)
 {
   size_t sz = pool->new_chunk_capacity * pool->item_alloc_size;
-#ifdef ALLOC_ROUNDUP
-  size_t alloc_size = CHUNK_OVERHEAD + sz;
-  mp_chunk_t *chunk = ALLOC_ROUNDUP(&alloc_size);
-#else
   mp_chunk_t *chunk = ALLOC(CHUNK_OVERHEAD + sz);
-#endif
+
 #ifdef MEMPOOL_STATS
   ++pool->total_chunks_allocated;
 #endif
   CHECK_ALLOC(chunk);
   memset(chunk, 0, sizeof(mp_chunk_t)); /* Doesn't clear the whole thing. */
   chunk->magic = MP_CHUNK_MAGIC;
-#ifdef ALLOC_ROUNDUP
-  chunk->mem_size = alloc_size - CHUNK_OVERHEAD;
-  chunk->capacity = chunk->mem_size / pool->item_alloc_size;
-#else
   chunk->capacity = pool->new_chunk_capacity;
   chunk->mem_size = sz;
-#endif
   chunk->next_mem = chunk->mem;
   chunk->pool = pool;
   return chunk;
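The mempool hunk deletes the one place that could have used ALLOC_ROUNDUP, and the commented-out define above shows it was never enabled under Tor anyway. With rounding, a chunk's item capacity had to be recomputed from however much memory actually came back; without it, capacity is simply the precomputed new_chunk_capacity. A hedged sketch of the two calculations — the parameter names follow the diff, but this is not the real mp_chunk_new():

#include <stddef.h>

/* Sketch: how many items fit in one pool chunk. */
static size_t
chunk_capacity(size_t item_alloc_size, size_t new_chunk_capacity,
               size_t rounded_mem_size /* 0 if the allocator gave no hint */)
{
  if (rounded_mem_size)                 /* old ALLOC_ROUNDUP branch */
    return rounded_mem_size / item_alloc_size;
  return new_chunk_capacity;            /* what mp_chunk_new() does now */
}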
src/common/util.c

@@ -290,37 +290,6 @@ _tor_free(void *mem)
   tor_free(mem);
 }
 
-#if defined(HAVE_MALLOC_GOOD_SIZE) && !defined(HAVE_MALLOC_GOOD_SIZE_PROTOTYPE)
-/* Some version of Mac OSX have malloc_good_size in their libc, but not
- * actually defined in malloc/malloc.h. We detect this and work around it by
- * prototyping.
- */
-extern size_t malloc_good_size(size_t size);
-#endif
-
-/** Allocate and return a chunk of memory of size at least *<b>size</b>, using
- * the same resources we would use to malloc *<b>sizep</b>. Set *<b>sizep</b>
- * to the number of usable bytes in the chunk of memory. */
-void *
-_tor_malloc_roundup(size_t *sizep DMALLOC_PARAMS)
-{
-#ifdef HAVE_MALLOC_GOOD_SIZE
-  tor_assert(*sizep < SIZE_T_CEILING);
-  *sizep = malloc_good_size(*sizep);
-  return _tor_malloc(*sizep DMALLOC_FN_ARGS);
-#elif 0 && defined(HAVE_MALLOC_USABLE_SIZE) && !defined(USE_DMALLOC)
-  /* Never use malloc_usable_size(); it makes valgrind really unhappy,
-   * and doesn't win much in terms of usable space where it exists. */
-  void *result;
-  tor_assert(*sizep < SIZE_T_CEILING);
-  result = _tor_malloc(*sizep DMALLOC_FN_ARGS);
-  *sizep = malloc_usable_size(result);
-  return result;
-#else
-  return _tor_malloc(*sizep DMALLOC_FN_ARGS);
-#endif
-}
-
 /** Call the platform malloc info function, and dump the results to the log at
  * level <b>severity</b>. If no such function exists, do nothing. */
 void
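The deleted helper shows the two platform hooks it weighed: malloc_good_size() predicts the allocator's bucket size before allocating, while malloc_usable_size() inspects a block after the fact (and, per the removed comment, makes valgrind unhappy). A standalone sketch of the distinction, assuming a glibc-style system that provides malloc_usable_size() — this is not Tor code:

#include <stdio.h>
#include <stdlib.h>
#include <malloc.h>          /* glibc: malloc_usable_size() */

int
main(void)
{
  size_t want = 100;
  void *p = malloc(want);
  if (!p)
    return 1;
  /* The allocator typically reserves a little more than requested; those
   * extra bytes are safe to use, which is all tor_malloc_roundup() exploited. */
  printf("asked for %zu bytes, got %zu usable\n", want, malloc_usable_size(p));
  free(p);
  return 0;
}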
src/common/util.h

@@ -76,7 +76,6 @@
 /* Memory management */
 void *_tor_malloc(size_t size DMALLOC_PARAMS) ATTR_MALLOC;
 void *_tor_malloc_zero(size_t size DMALLOC_PARAMS) ATTR_MALLOC;
-void *_tor_malloc_roundup(size_t *size DMALLOC_PARAMS) ATTR_MALLOC;
 void *_tor_calloc(size_t nmemb, size_t size DMALLOC_PARAMS) ATTR_MALLOC;
 void *_tor_realloc(void *ptr, size_t size DMALLOC_PARAMS);
 char *_tor_strdup(const char *s DMALLOC_PARAMS) ATTR_MALLOC ATTR_NONNULL((1));
src/or/buffers.c

@@ -192,8 +192,6 @@ chunk_new_with_alloc_size(size_t alloc)
       freelist->lowest_length = freelist->cur_length;
       ++freelist->n_hit;
     } else {
-      /* XXXX take advantage of tor_malloc_roundup, once we know how that
-       * affects freelists. */
       if (freelist)
         ++freelist->n_alloc;
       else

@@ -216,7 +214,7 @@ static INLINE chunk_t *
 chunk_new_with_alloc_size(size_t alloc)
 {
   chunk_t *ch;
-  ch = tor_malloc_roundup(&alloc);
+  ch = tor_malloc(alloc);
   ch->next = NULL;
   ch->datalen = 0;
   ch->memlen = CHUNK_SIZE_WITH_ALLOC(alloc);
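In buffers.c the chunk is now allocated at exactly the requested size, and CHUNK_SIZE_WITH_ALLOC() converts that allocation size back into payload bytes for memlen. A hedged sketch of that pair of conversions — the struct layout and macro bodies here are assumptions about the header/payload split, not copied from buffers.c:

#include <stddef.h>

typedef struct chunk_t {
  struct chunk_t *next;
  size_t datalen;
  size_t memlen;
  char mem[1];              /* payload follows the header */
} chunk_t;

/* Assumed definitions, for illustration: total allocation needed for a
 * payload of memlen bytes, and the inverse used by chunk_new_with_alloc_size(). */
#define CHUNK_HEADER_LEN offsetof(chunk_t, mem)
#define CHUNK_ALLOC_SIZE(memlen)      (CHUNK_HEADER_LEN + (memlen))
#define CHUNK_SIZE_WITH_ALLOC(alloc)  ((alloc) - CHUNK_HEADER_LEN)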