Skip to content

Commit

Permalink
realloc optimizations:
Browse files Browse the repository at this point in the history
- when both the old and the new allocation would be large:
if new_size <= old_size, do not reallocate;
return the original pointer
- when both the old and the new allocation would be small:
if they would belong to the same size class, do not reallocate;
return the original pointer
  • Loading branch information
Antonin Reitz committed Jun 27, 2024
1 parent 9d97f1e commit 9ee5b3d
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 13 deletions.
24 changes: 18 additions & 6 deletions dist/StarMalloc.c
Original file line number Diff line number Diff line change
Expand Up @@ -2215,10 +2215,11 @@ static bool slab_free(uint8_t *ptr)
return false;
}

// Size threshold (in bytes) separating small (slab-backed) allocations from
// large allocations. 4096 - 2: one page minus 2 bytes, presumably reserved
// for slab canaries (the F* source subtracts 2 only when
// enable_slab_canaries_malloc is set — see src/StarMalloc.fst `threshold`).
size_t StarMalloc_threshold = (size_t)4096U - (size_t)2U;

uint8_t *StarMalloc_malloc(size_t arena_id, size_t size)
{
size_t threshold = (size_t)4096U - (size_t)2U;
if (size <= threshold)
if (size <= StarMalloc_threshold)
{
uint8_t *ptr = slab_malloc(arena_id, (uint32_t)size);
if (ptr == NULL || false)
Expand Down Expand Up @@ -2249,13 +2250,11 @@ uint8_t *StarMalloc_malloc(size_t arena_id, size_t size)

uint8_t *StarMalloc_aligned_alloc(size_t arena_id, size_t alignment, size_t size)
{
size_t page_as_sz = (size_t)4096U;
bool check = alignment > (size_t)0U && (size_t)4096U % alignment == (size_t)0U;
if (check)
{
uint32_t alignment_as_u32 = (uint32_t)alignment;
size_t threshold = page_as_sz - (size_t)2U;
if (size <= threshold)
if (size <= StarMalloc_threshold)
{
uint8_t *ptr = slab_aligned_alloc(arena_id, alignment_as_u32, (uint32_t)size);
if (ptr == NULL || false)
Expand Down Expand Up @@ -2345,9 +2344,22 @@ uint8_t *StarMalloc_realloc(size_t arena_id, uint8_t *ptr, size_t new_size)
{
size_t old_size = StarMalloc_getsize(ptr);
bool old_allocation_is_small = old_size <= (size_t)4096U;
bool new_allocation_is_small = new_size <= StarMalloc_threshold;
bool same_case = old_allocation_is_small == new_allocation_is_small;
bool small_case_optim_condition;
if (old_allocation_is_small && same_case)
{
uint32_t r0 = SizeClassSelection_inv_impl((uint32_t)old_size);
uint32_t r = SizeClassSelection_inv_impl((uint32_t)new_size);
small_case_optim_condition = (size_t)r0 == (size_t)r;
}
else
small_case_optim_condition = false;
bool
large_case_optim_condition = !old_allocation_is_small && same_case && new_size <= old_size;
if (old_size == (size_t)0U)
return NULL;
else if (new_size <= old_size && !old_allocation_is_small)
else if (small_case_optim_condition || large_case_optim_condition)
return ptr;
else
{
Expand Down
2 changes: 2 additions & 0 deletions dist/StarMalloc.h
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

extern void StarMalloc_malloc_zeroing_die(uint8_t *ptr);

extern size_t StarMalloc_threshold;

uint8_t *StarMalloc_malloc(size_t arena_id, size_t size);

uint8_t *StarMalloc_aligned_alloc(size_t arena_id, size_t alignment, size_t size);
Expand Down
25 changes: 18 additions & 7 deletions src/StarMalloc.fst
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,14 @@ assume val malloc_zeroing_die (ptr: array U8.t)

module G = FStar.Ghost

//TODO: [@ CConst]
// Size threshold (bytes) below which an allocation request is served by the
// slab allocator (slab_malloc / slab_aligned_alloc) rather than the large
// allocator. Equal to one page, minus 2 bytes when slab canaries are enabled
// so that the canary bytes still fit within the page.
let threshold : US.t =
if enable_slab_canaries_malloc
then US.sub (US.uint32_to_sizet page_size) 2sz
else US.uint32_to_sizet page_size

#push-options "--fuel 1 --ifuel 1 --z3rlimit 30"
let malloc arena_id size =
let threshold = if enable_slab_canaries_malloc
then US.sub (US.uint32_to_sizet page_size) 2sz
else US.uint32_to_sizet page_size in
if (US.lte size threshold) then (
let ptr = slab_malloc arena_id (US.sizet_to_uint32 size) in
if (A.is_null ptr || not enable_zeroing_malloc) then (
Expand Down Expand Up @@ -130,9 +133,6 @@ let aligned_alloc arena_id alignment size
let check = US.gt alignment 0sz && US.rem (US.uint32_to_sizet page_size) alignment = 0sz in
if check then (
let alignment_as_u32 = US.sizet_to_uint32 alignment in
let threshold = if enable_slab_canaries_malloc
then US.sub page_as_sz 2sz
else page_as_sz in
if (US.lte size threshold) then (
let ptr = slab_aligned_alloc arena_id alignment_as_u32 (US.sizet_to_uint32 size) in
if (A.is_null ptr || not enable_zeroing_malloc) then (
Expand Down Expand Up @@ -403,6 +403,16 @@ let realloc arena_id ptr new_size
elim_live_null_or_varray ptr;
let old_size = getsize ptr in
let old_allocation_is_small : bool = US.lte old_size (u32_to_sz page_size) in
let new_allocation_is_small : bool = US.lte new_size threshold in
let same_case : bool = old_allocation_is_small = new_allocation_is_small in
let small_case_optim_condition = old_allocation_is_small && same_case && (
[@inline_let] let old_sc = sc_selection (US.sizet_to_uint32 old_size) in
[@inline_let] let new_sc = sc_selection (US.sizet_to_uint32 new_size) in
//assume (old_sc == new_sc ==> A.length ptr >= US.v new_size);
old_sc = new_sc) in
let large_case_optim_condition = (not old_allocation_is_small) && same_case && (
US.lte new_size old_size
) in
// not a valid pointer from the allocator point of view, fail
if (old_size = 0sz) then (
// 3) invalid pointer, fail
Expand All @@ -416,7 +426,8 @@ let realloc arena_id ptr new_size
return (A.null #U8.t, G.hide (0, A.null #U8.t))
) else (
// most common case
if (US.lte new_size old_size && not old_allocation_is_small) then (
if (small_case_optim_condition || large_case_optim_condition) then (
assume (A.length ptr >= US.v new_size);
// optimization
(**) intro_live_null_or_varray ptr;
(**) change_equal_slprop
Expand Down

0 comments on commit 9ee5b3d

Please sign in to comment.