@@ -333,13 +333,6 @@ impl Default for DroplessArena {
 }

 impl DroplessArena {
-    #[inline]
-    fn align(&self, align: usize) {
-        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
-        self.ptr.set(final_address as *mut u8);
-        assert!(self.ptr <= self.end);
-    }
-
     #[inline(never)]
     #[cold]
     fn grow(&self, additional: usize) {
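The removed `align` helper rounded the bump pointer up with the usual power-of-two trick `(ptr + align - 1) & !(align - 1)` and only asserted afterwards that it had not run past `end`. A minimal standalone sketch of that rounding (hypothetical `align_up` function, not part of the diff):

```rust
/// Round `addr` up to the next multiple of `align` (a power of two); the same
/// computation the removed `align` method applied to the bump pointer.
fn align_up(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (addr + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_up(13, 8), 16); // rounds up to the next 8-byte boundary
    assert_eq!(align_up(16, 8), 16); // already-aligned addresses are unchanged
}
```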
@@ -370,30 +363,50 @@ impl DroplessArena {
         }
     }

+    /// Allocates a byte slice with specified size and alignment from the
+    /// current memory chunk. Returns `None` if there is no free space left to
+    /// satisfy the request.
     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        unsafe {
-            assert!(bytes != 0);
-
-            self.align(align);
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
+        let ptr = self.ptr.get() as usize;
+        let end = self.end.get() as usize;
+        // The allocation request fits into the current chunk iff:
+        //
+        // let aligned = align_to(ptr, align);
+        // ptr <= aligned && aligned + bytes <= end
+        //
+        // Except that we work with fixed width integers and need to be careful
+        // about potential overflow in the calculation. If the overflow does
+        // happen, then we definitely don't have enough free space and need to
+        // grow the arena.
+        let aligned = ptr.checked_add(align - 1)? & !(align - 1);
+        let new_ptr = aligned.checked_add(bytes)?;
+        if new_ptr <= end {
+            self.ptr.set(new_ptr as *mut u8);
+            Some(aligned as *mut u8)
+        } else {
+            None
+        }
+    }

-            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
-            if (future_end as *mut u8) > self.end.get() {
-                self.grow(bytes);
+    #[inline]
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
+        assert!(bytes != 0);
+        loop {
+            if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
+                break a;
             }
-
-            let ptr = self.ptr.get();
-            // Set the pointer past ourselves
-            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
-            slice::from_raw_parts_mut(ptr, bytes)
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow(bytes);
         }
     }

     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());

-        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;

         unsafe {
             // Write into uninitialized memory.
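The comment in `alloc_raw_without_grow` is about overflow at the top of the address space: computed naively, `ptr + align - 1` or `aligned + bytes` can wrap around and make an impossibly large request look like it fits. A standalone sketch (hypothetical `fits` function mirroring the checked arithmetic above, with made-up addresses) of how `checked_add` turns that wraparound into a plain "does not fit":

```rust
/// Overflow-checked "does the request fit" test, mirroring
/// `alloc_raw_without_grow`: returns the aligned start address on success.
fn fits(ptr: usize, end: usize, bytes: usize, align: usize) -> Option<usize> {
    let aligned = ptr.checked_add(align - 1)? & !(align - 1);
    let new_ptr = aligned.checked_add(bytes)?;
    if new_ptr <= end { Some(aligned) } else { None }
}

fn main() {
    // A 16-byte, 8-aligned request fits in a chunk with room to spare.
    assert_eq!(fits(0x1003, 0x1040, 16, 8), Some(0x1008));
    // Near usize::MAX the unchecked formula would wrap to a small address;
    // the checked version simply reports that the request does not fit.
    assert_eq!(fits(usize::MAX - 4, usize::MAX, 16, 8), None);
}
```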
@@ -418,13 +431,11 @@ impl DroplessArena {
         assert!(mem::size_of::<T>() != 0);
         assert!(!slice.is_empty());

-        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
-            as *mut T;
+        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;

         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
+            slice::from_raw_parts_mut(mem, slice.len())
         }
     }

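Besides dropping the double cast, this hunk reverses the order of operations: the new body copies through the raw pointer first and only then forms the `&mut [T]`, whereas the old code built the slice over still-uninitialized arena memory before `copy_from_slice`, which is presumably the motivation. A self-contained sketch of the copy-then-materialize order (a plain `MaybeUninit` buffer standing in for freshly bumped arena memory):

```rust
use std::mem::MaybeUninit;
use std::slice;

fn main() {
    let src = [1u32, 2, 3, 4];
    // Uninitialized destination, standing in for freshly bumped arena memory.
    let mut dst = [MaybeUninit::<u32>::uninit(); 4];
    let dst_ptr = dst.as_mut_ptr() as *mut u32;
    unsafe {
        // Copy through the raw pointer first, while no `&mut [u32]` to the
        // destination exists yet...
        dst_ptr.copy_from_nonoverlapping(src.as_ptr(), src.len());
        // ...and only then materialize a slice over the now-initialized data.
        let view = slice::from_raw_parts_mut(dst_ptr, src.len());
        assert_eq!(view, &[1, 2, 3, 4]);
    }
}
```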
@@ -467,7 +478,7 @@ impl DroplessArena {
                     return &mut [];
                 }
                 let size = len.checked_mul(mem::size_of::<T>()).unwrap();
-                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
                 unsafe { self.write_from_iter(iter, len, mem) }
             }
             (_, _) => {
@@ -482,7 +493,7 @@ impl DroplessArena {
                 let len = vec.len();
                 let start_ptr = self
                     .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
-                    as *mut _ as *mut T;
+                    as *mut T;
                 vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                 vec.set_len(0);
                 slice::from_raw_parts_mut(start_ptr, len)
@@ -526,8 +537,7 @@ pub struct DropArena {
 impl DropArena {
     #[inline]
     pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
-        let mem =
-            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
         // Write into uninitialized memory.
         ptr::write(mem, object);
         let result = &mut *mem;
@@ -550,7 +560,7 @@ impl DropArena {
         let start_ptr = self
             .arena
             .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
-            as *mut _ as *mut T;
+            as *mut T;

         let mut destructors = self.destructors.borrow_mut();
         // Reserve space for the destructors so we can't panic while adding them
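The remaining hunks are the mechanical fallout of the new return type: `alloc_raw` used to hand back a `&mut [u8]`, so call sites needed two casts to reach a typed pointer, while a `*mut u8` casts to `*mut T` in one step. A small sketch of the two call-site shapes (hypothetical `u64` element type, not from the diff):

```rust
// Old shape: the slice reference first becomes a raw slice pointer, and only
// then a thin typed pointer.
fn old_style(bytes: &mut [u8]) -> *mut u64 {
    bytes as *mut _ as *mut u64
}

// New shape: a thin byte pointer casts to the target type directly.
fn new_style(bytes: *mut u8) -> *mut u64 {
    bytes as *mut u64
}

fn main() {
    let mut buf = [0u8; 8];
    assert_eq!(old_style(&mut buf) as usize, new_style(buf.as_mut_ptr()) as usize);
}
```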