Skip to content

Commit 9b5c98f

Browse files
committed
Auto merge of #77014 - tmiasko:arena, r=Mark-Simulacrum
DroplessArena: Allocate objects from the end of memory chunk. Allocating from the end of the memory chunk simplifies the alignment code and reduces the number of checked arithmetic operations.
2 parents e599b53 + c7e887c commit 9b5c98f

File tree

1 file changed

+16
-21
lines changed
  • compiler/rustc_arena/src

1 file changed

+16
-21
lines changed

compiler/rustc_arena/src/lib.rs

+16-21
Original file line number | Diff line number | Diff line change
@@ -299,11 +299,13 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
299299
unsafe impl<T: Send> Send for TypedArena<T> {}
300300

301301
pub struct DroplessArena {
302-
/// A pointer to the next object to be allocated.
303-
ptr: Cell<*mut u8>,
302+
/// A pointer to the start of the free space.
303+
start: Cell<*mut u8>,
304304

305-
/// A pointer to the end of the allocated area. When this pointer is
306-
/// reached, a new chunk is allocated.
305+
/// A pointer to the end of free space.
306+
///
307+
/// The allocation proceeds from the end of the chunk towards the start.
308+
/// When this pointer crosses the start pointer, a new chunk is allocated.
307309
end: Cell<*mut u8>,
308310

309311
/// A vector of arena chunks.
@@ -316,7 +318,7 @@ impl Default for DroplessArena {
316318
#[inline]
317319
fn default() -> DroplessArena {
318320
DroplessArena {
319-
ptr: Cell::new(ptr::null_mut()),
321+
start: Cell::new(ptr::null_mut()),
320322
end: Cell::new(ptr::null_mut()),
321323
chunks: Default::default(),
322324
}
@@ -348,7 +350,7 @@ impl DroplessArena {
348350
new_cap = cmp::max(additional, new_cap);
349351

350352
let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
351-
self.ptr.set(chunk.start());
353+
self.start.set(chunk.start());
352354
self.end.set(chunk.end());
353355
chunks.push(chunk);
354356
}
@@ -359,24 +361,17 @@ impl DroplessArena {
359361
/// request.
360362
#[inline]
361363
fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
362-
let ptr = self.ptr.get() as usize;
364+
let start = self.start.get() as usize;
363365
let end = self.end.get() as usize;
366+
364367
let align = layout.align();
365368
let bytes = layout.size();
366-
// The allocation request fits into the current chunk iff:
367-
//
368-
// let aligned = align_to(ptr, align);
369-
// ptr <= aligned && aligned + bytes <= end
370-
//
371-
// Except that we work with fixed width integers and need to be careful
372-
// about potential overflow in the calculation. If the overflow does
373-
// happen, then we definitely don't have enough free and need to grow
374-
// the arena.
375-
let aligned = ptr.checked_add(align - 1)? & !(align - 1);
376-
let new_ptr = aligned.checked_add(bytes)?;
377-
if new_ptr <= end {
378-
self.ptr.set(new_ptr as *mut u8);
379-
Some(aligned as *mut u8)
369+
370+
let new_end = end.checked_sub(bytes)? & !(align - 1);
371+
if start <= new_end {
372+
let new_end = new_end as *mut u8;
373+
self.end.set(new_end);
374+
Some(new_end)
380375
} else {
381376
None
382377
}

0 commit comments

Comments
 (0)