@@ -14,14 +14,15 @@ use core::hint;
 use core::intrinsics::abort;
 use core::iter;
 use core::marker::{PhantomData, Unpin, Unsize};
-use core::mem::{self, align_of_val_raw, size_of_val};
+use core::mem::{self, size_of_val};
 use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::slice::from_raw_parts_mut;
 use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 
+use crate::alloc::struct_alloc::StructAlloc;
 use crate::alloc::{
     box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw,
 };
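`struct_alloc::StructAlloc` is the new allocation helper this PR depends on; its definition is not part of this file. Judging only from how the diff uses it below (`StructAlloc<ArcInnerMetadata>` and `offset_of_data`), a minimal sketch of the shape it would need might look like the following. The struct body and method implementation here are assumptions for illustration, not the PR's actual code:

```rust
use core::alloc::Layout;
use core::marker::PhantomData;

/// Hypothetical sketch of an allocator adapter that prepends a fixed
/// header type `H` (here: the Arc counters) to every allocation.
struct StructAlloc<H>(PhantomData<H>);

impl<H> StructAlloc<H> {
    /// Offset of the payload inside a combined `(H, data)` allocation:
    /// the header size, rounded up to the payload's alignment.
    fn offset_of_data(data_layout: Layout) -> usize {
        let header = Layout::new::<H>();
        // `Layout::extend` returns the combined layout plus the offset
        // at which `data_layout` begins; only the offset matters here.
        let (_combined, offset) = header.extend(data_layout).expect("layout overflow");
        offset
    }
}
```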
@@ -296,18 +297,33 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
     }
 }
 
-// This is repr(C) to future-proof against possible field-reordering, which
-// would interfere with otherwise safe [into|from]_raw() of transmutable
-// inner types.
-#[repr(C)]
-struct ArcInner<T: ?Sized> {
+struct ArcInnerMetadata {
     strong: atomic::AtomicUsize,
 
     // the value usize::MAX acts as a sentinel for temporarily "locking" the
     // ability to upgrade weak pointers or downgrade strong ones; this is used
     // to avoid races in `make_mut` and `get_mut`.
     weak: atomic::AtomicUsize,
+}
+
+impl ArcInnerMetadata {
+    // Start the weak pointer count at 1: this is the weak pointer that is
+    // collectively held by all the strong pointers; see std/rc.rs for more info.
+    #[inline]
+    fn new_strong() -> Self {
+        Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) }
+    }
+
+    #[inline]
+    fn new_weak() -> Self {
+        Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) }
+    }
+}
 
+// This is repr(C) so the field order (`meta`, then `data`) is guaranteed, which StructAlloc relies on
+#[repr(C)]
+struct ArcInner<T: ?Sized> {
+    meta: ArcInnerMetadata,
     data: T,
 }
 
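Splitting the counters into `ArcInnerMetadata` gives them a name independent of `T`, which is what allows the `StructAlloc<ArcInnerMetadata>` alias near the end of the diff. The `new_strong`/`new_weak` constructors encode the usual `Arc` counting convention: all strong pointers collectively hold one implicit weak reference. That convention is observable through the stable API:

```rust
use std::sync::Arc;

fn main() {
    let a = Arc::new(42);
    // new_strong(): strong = 1, weak = 1. The implicit weak held by the
    // strong pointers is hidden, so weak_count reports 0.
    assert_eq!(Arc::strong_count(&a), 1);
    assert_eq!(Arc::weak_count(&a), 0);

    let w = Arc::downgrade(&a);
    assert_eq!(Arc::weak_count(&a), 1); // the internal weak field is now 2

    drop(a);
    // Dropping the last strong reference releases the implicit weak too;
    // only the explicit Weak remains, and it can no longer upgrade.
    assert!(w.upgrade().is_none());
}
```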
@@ -327,13 +343,7 @@ impl<T> Arc<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new(data: T) -> Arc<T> {
-        // Start the weak pointer count as 1 which is the weak pointer that's
-        // held by all the strong pointers (kinda), see std/rc.rs for more info
-        let x: Box<_> = box ArcInner {
-            strong: atomic::AtomicUsize::new(1),
-            weak: atomic::AtomicUsize::new(1),
-            data,
-        };
+        let x: Box<_> = box ArcInner { meta: ArcInnerMetadata::new_strong(), data };
         Self::from_inner(Box::leak(x).into())
     }
 
@@ -363,8 +373,7 @@ impl<T> Arc<T> {
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let uninit_ptr: NonNull<_> = Box::leak(box ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
+            meta: ArcInnerMetadata::new_weak(),
             data: mem::MaybeUninit::<T>::uninit(),
         })
         .into();
@@ -398,7 +407,7 @@ impl<T> Arc<T> {
             //
             // These side effects do not impact us in any way, and no other side effects are
             // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
+            let prev_value = (*inner).meta.strong.fetch_add(1, Release);
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
         }
 
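The two hunks above are the `new_cyclic` path: the inner starts in the "weak" state (`strong = 0` via `new_weak`) so the closure can capture a `Weak` to the not-yet-initialized value, and only after the closure returns is the strong count bumped from 0 to 1 (hence `debug_assert_eq!(prev_value, 0)`). A usage example against the public API:

```rust
use std::sync::{Arc, Weak};

struct Node {
    parent: Weak<Node>,
    value: u32,
}

fn main() {
    // Inside the closure the strong count is still 0, so upgrading `me`
    // there would yield None; storing the Weak for later use is fine.
    let node = Arc::new_cyclic(|me| Node { parent: me.clone(), value: 7 });

    // After construction the strong count is 1, so the self-referential
    // Weak upgrades normally.
    assert_eq!(node.parent.upgrade().unwrap().value, 7);
}
```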
@@ -494,13 +503,7 @@ impl<T> Arc<T> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new(data: T) -> Result<Arc<T>, AllocError> {
-        // Start the weak pointer count as 1 which is the weak pointer that's
-        // held by all the strong pointers (kinda), see std/rc.rs for more info
-        let x: Box<_> = Box::try_new(ArcInner {
-            strong: atomic::AtomicUsize::new(1),
-            weak: atomic::AtomicUsize::new(1),
-            data,
-        })?;
+        let x: Box<_> = Box::try_new(ArcInner { meta: ArcInnerMetadata::new_strong(), data })?;
         Ok(Self::from_inner(Box::leak(x).into()))
     }
 
@@ -593,11 +596,11 @@ impl<T> Arc<T> {
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
+        if this.inner().meta.strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
             return Err(this);
         }
 
-        acquire!(this.inner().strong);
+        acquire!(this.inner().meta.strong);
 
         unsafe {
             let elem = ptr::read(&this.ptr.as_ref().data);
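`try_unwrap`'s CAS of `strong` from 1 to 0 means only a caller holding the sole strong reference can move the value out. Its effect is easy to demonstrate with the stable API:

```rust
use std::sync::Arc;

fn main() {
    // Sole strong reference: the strong-counter CAS (1 -> 0) succeeds
    // and the value is moved out of the allocation.
    let x = Arc::new(3);
    assert_eq!(Arc::try_unwrap(x), Ok(3));

    // With a second strong reference the CAS fails and the original
    // Arc is handed back unchanged.
    let y = Arc::new(4);
    let y2 = Arc::clone(&y);
    assert!(Arc::try_unwrap(y).is_err());
    assert_eq!(*y2, 4);
}
```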
@@ -867,13 +870,13 @@ impl<T: ?Sized> Arc<T> {
     pub fn downgrade(this: &Self) -> Weak<T> {
         // This Relaxed is OK because we're checking the value in the CAS
         // below.
-        let mut cur = this.inner().weak.load(Relaxed);
+        let mut cur = this.inner().meta.weak.load(Relaxed);
 
         loop {
             // check if the weak counter is currently "locked"; if so, spin.
             if cur == usize::MAX {
                 hint::spin_loop();
-                cur = this.inner().weak.load(Relaxed);
+                cur = this.inner().meta.weak.load(Relaxed);
                 continue;
             }
 
@@ -884,7 +887,7 @@ impl<T: ?Sized> Arc<T> {
             // Unlike with Clone(), we need this to be an Acquire read to
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
-            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
+            match this.inner().meta.weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                 Ok(_) => {
                     // Make sure we do not create a dangling Weak
                     debug_assert!(!is_dangling(this.ptr.as_ptr()));
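For context on the two hunks above: `downgrade` treats `weak == usize::MAX` as a temporarily "locked" counter (the lock is taken by `is_unique`, further down in the diff) and spins until it is released, then increments with a CAS. A standalone model of this reader side, assuming the same sentinel convention:

```rust
use std::hint;
use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed}};

/// Increment a counter that may be temporarily "locked" at usize::MAX,
/// mirroring the spin-then-CAS loop in `downgrade`.
fn increment_lockable(counter: &AtomicUsize) -> usize {
    let mut cur = counter.load(Relaxed);
    loop {
        if cur == usize::MAX {
            hint::spin_loop(); // locked by the writer side; retry
            cur = counter.load(Relaxed);
            continue;
        }
        // Acquire on success synchronizes with the Release store that
        // unlocks the counter, as in the real code.
        match counter.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
            Ok(_) => return cur + 1,
            Err(actual) => cur = actual,
        }
    }
}
```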
@@ -918,7 +921,7 @@ impl<T: ?Sized> Arc<T> {
     #[inline]
     #[stable(feature = "arc_counts", since = "1.15.0")]
     pub fn weak_count(this: &Self) -> usize {
-        let cnt = this.inner().weak.load(SeqCst);
+        let cnt = this.inner().meta.weak.load(SeqCst);
         // If the weak count is currently locked, the value of the
         // count was 0 just before taking the lock.
         if cnt == usize::MAX { 0 } else { cnt - 1 }
@@ -947,7 +950,7 @@ impl<T: ?Sized> Arc<T> {
     #[inline]
     #[stable(feature = "arc_counts", since = "1.15.0")]
     pub fn strong_count(this: &Self) -> usize {
-        this.inner().strong.load(SeqCst)
+        this.inner().meta.strong.load(SeqCst)
     }
 
     /// Increments the strong reference count on the `Arc<T>` associated with the
@@ -1112,8 +1115,8 @@ impl<T: ?Sized> Arc<T> {
         debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
 
         unsafe {
-            ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
-            ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
+            ptr::write(&mut (*inner).meta.strong, atomic::AtomicUsize::new(1));
+            ptr::write(&mut (*inner).meta.weak, atomic::AtomicUsize::new(1));
         }
 
         Ok(inner)
@@ -1276,7 +1279,7 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().meta.strong.fetch_add(1, Relaxed);
 
         // However we need to guard against massive refcounts in case someone
         // is `mem::forget`ing Arcs. If we don't do this the count can overflow
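The comment at the end of this hunk refers to the overflow guard that follows it in the file (not shown in the diff): once the pre-increment count exceeds a soft limit, the process aborts. A sketch of that guard, reconstructed here for context rather than quoted from this PR:

```rust
use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

// Matches std's convention: a limit far below usize::MAX, so that even
// many racing increments cannot wrap the counter before the abort fires.
const MAX_REFCOUNT: usize = isize::MAX as usize;

fn clone_count(strong: &AtomicUsize) {
    let old_size = strong.fetch_add(1, Relaxed);
    // If `mem::forget`-style leaks pushed the count this high, abort
    // rather than risk a use-after-free from counter overflow.
    if old_size > MAX_REFCOUNT {
        abort();
    }
}
```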
@@ -1352,7 +1355,7 @@ impl<T: Clone> Arc<T> {
         // before release writes (i.e., decrements) to `strong`. Since we hold a
         // weak count, there's no chance the ArcInner itself could be
         // deallocated.
-        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
+        if this.inner().meta.strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
             // Pre-allocate memory to allow writing the cloned value directly.
             let mut arc = Self::new_uninit();
@@ -1361,7 +1364,7 @@ impl<T: Clone> Arc<T> {
                 (**this).write_clone_into_raw(data.as_mut_ptr());
                 *this = arc.assume_init();
             }
-        } else if this.inner().weak.load(Relaxed) != 1 {
+        } else if this.inner().meta.weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
             // dropped. Worst case, we end up allocating a new Arc unnecessarily.
@@ -1388,7 +1391,7 @@ impl<T: Clone> Arc<T> {
         } else {
             // We were the sole reference of either kind; bump back up the
             // strong ref count.
-            this.inner().strong.store(1, Release);
+            this.inner().meta.strong.store(1, Release);
         }
 
         // As with `get_mut()`, the unsafety is ok because our reference was
@@ -1484,16 +1487,16 @@ impl<T: ?Sized> Arc<T> {
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
         // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
-        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+        if self.inner().meta.weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
             // counter in `drop` -- the only access that happens when any but the last reference
             // is being dropped.
-            let unique = self.inner().strong.load(Acquire) == 1;
+            let unique = self.inner().meta.strong.load(Acquire) == 1;
 
             // The release write here synchronizes with a read in `downgrade`,
             // effectively preventing the above read of `strong` from happening
             // after the write.
-            self.inner().weak.store(1, Release); // release the lock
+            self.inner().meta.weak.store(1, Release); // release the lock
             unique
         } else {
             false
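This is the writer side of the `usize::MAX` lock that `downgrade` spins on above: `is_unique` CASes `weak` from 1 to the sentinel, reads `strong` under the lock, then releases with a `Release` store. A standalone model of that sequence, under the same sentinel assumption:

```rust
use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

/// Returns true iff strong == 1 && weak == 1, i.e. no other strong or
/// weak reference exists, mirroring the locking scheme in `is_unique`.
fn is_unique_model(strong: &AtomicUsize, weak: &AtomicUsize) -> bool {
    // Lock the weak count: succeeds only when the single implicit weak
    // reference is the one we hold.
    if weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
        // Read `strong` while downgrades are excluded by the lock.
        let unique = strong.load(Acquire) == 1;
        // The Release store unlocks the counter and orders the read of
        // `strong` before it, for the spinning readers in `downgrade`.
        weak.store(1, Release);
        unique
    } else {
        false
    }
}
```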
@@ -1533,7 +1536,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, Release) != 1 {
+        if self.inner().meta.strong.fetch_sub(1, Release) != 1 {
             return;
         }
 
@@ -1565,7 +1568,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         // [2]: (https://github.com/rust-lang/rust/pull/41714)
-        acquire!(self.inner().strong);
+        acquire!(self.inner().meta.strong);
 
         unsafe {
             self.drop_slow();
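The `acquire!` macro used here is, in the common configuration, an `Acquire` fence (with a load-based fallback under ThreadSanitizer, which does not model bare fences). The drop protocol it completes is the classic one from the Boost atomics docs cited above; a minimal model:

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Release}};

/// Minimal model of Arc's drop path: a Release decrement, then an
/// Acquire fence before deletion so that every write made through
/// other clones happens-before the destructor runs.
fn drop_ref(strong: &AtomicUsize) -> bool {
    if strong.fetch_sub(1, Release) != 1 {
        return false; // other owners remain; nothing to delete
    }
    fence(Acquire); // what `acquire!(...)` expands to here
    true // the caller may now drop the payload and free the allocation
}
```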
@@ -1883,7 +1886,7 @@ impl<T: ?Sized> Weak<T> {
         // is dropped, the data field will be dropped in-place).
         Some(unsafe {
             let ptr = self.ptr.as_ptr();
-            WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+            WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak }
         })
     }
 }
@@ -2455,24 +2458,21 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {
 #[stable(feature = "pin", since = "1.33.0")]
 impl<T: ?Sized> Unpin for Arc<T> {}
 
+type ArcStructAlloc = StructAlloc<ArcInnerMetadata>;
+
 /// Get the offset within an `ArcInner` for the payload behind a pointer.
 ///
 /// # Safety
 ///
 /// The pointer must point to (and have valid metadata for) a previously
 /// valid instance of T, but the T is allowed to be dropped.
-unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
-    // Align the unsized value to the end of the ArcInner.
-    // Because RcBox is repr(C), it will always be the last field in memory.
-    // SAFETY: since the only unsized types possible are slices, trait objects,
-    // and extern types, the input safety requirement is currently enough to
-    // satisfy the requirements of align_of_val_raw; this is an implementation
-    // detail of the language that may not be relied upon outside of std.
-    unsafe { data_offset_align(align_of_val_raw(ptr)) }
-}
-
-#[inline]
-fn data_offset_align(align: usize) -> isize {
-    let layout = Layout::new::<ArcInner<()>>();
-    (layout.size() + layout.padding_needed_for(align)) as isize
+unsafe fn data_offset<T: ?Sized>(data_ptr: *const T) -> isize {
+    unsafe {
+        // SAFETY: since the only unsized types possible are slices, trait objects,
+        // and extern types, the input safety requirement is currently enough to
+        // satisfy the requirements of for_value_raw; this is an implementation
+        // detail of the language that may not be relied upon outside of std.
+        let data_layout = Layout::for_value_raw(data_ptr);
+        ArcStructAlloc::offset_of_data(data_layout) as isize
+    }
 }
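The old `data_offset_align` computed `size + padding_needed_for(align)` on `Layout::new::<ArcInner<()>>()`; the new code delegates the same arithmetic to `StructAlloc::offset_of_data`. Assuming `offset_of_data` is equivalent to `Layout::extend` on the metadata header (as sketched near the imports above), a worked example of the values involved:

```rust
use std::alloc::Layout;
use std::sync::atomic::AtomicUsize;

// Stand-in for ArcInnerMetadata: two atomic counters.
#[allow(dead_code)]
struct Meta {
    strong: AtomicUsize,
    weak: AtomicUsize,
}

// Assumed equivalent of ArcStructAlloc::offset_of_data.
fn offset_of_data(data: Layout) -> usize {
    Layout::new::<Meta>().extend(data).unwrap().1
}

fn main() {
    let meta_size = std::mem::size_of::<Meta>(); // 16 on 64-bit targets
    // A u8 payload needs no padding after the counters.
    assert_eq!(offset_of_data(Layout::new::<u8>()), meta_size);
    // A 32-byte-aligned payload pushes the offset to the next multiple of 32.
    let wide = Layout::from_size_align(4, 32).unwrap();
    assert_eq!(offset_of_data(wide), (meta_size + 31) & !31);
}
```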