@@ -693,7 +693,7 @@ impl<T> Arc<T> {
 
     /// Attempts to map the value in an `Arc`, reusing the allocation if possible.
     ///
-    /// `f` is called on a reference to the value in the box, and if the operation succeeds, the
+    /// `f` is called on a reference to the value in the `Arc`, and if the operation succeeds, the
     /// result is returned, also in an `Arc`.
     ///
     /// Note: this is an associated function, which means that you have
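
This hunk only corrects the doc wording for `Arc::try_map`. As a usage sketch of the documented behavior — assuming the nightly signature the docs imply (`f` receives a reference, and the result comes back in a new `Arc`); values here are illustrative:

```rust
#![feature(smart_pointer_try_map)]

use std::sync::Arc;

fn main() {
    let a = Arc::new(7_i32);
    // `f` sees `&i32`; on success the mapped value is returned in an `Arc`.
    let b: Arc<u32> = Arc::try_map(a, |i| u32::try_from(*i)).unwrap();
    assert_eq!(*b, 7);
}
```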
@@ -4505,6 +4505,127 @@ impl<T> UniqueArc<T, Global> {
     pub fn new(value: T) -> Self {
         Self::new_in(value, Global)
     }
+
+    /// Maps the value in a `UniqueArc`, reusing the allocation if possible.
+    ///
+    /// `f` is called on the value in the `UniqueArc`, and the result is returned,
+    /// also in a `UniqueArc`.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `UniqueArc::map(u, f)` instead of `u.map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    /// #![feature(unique_rc_arc)]
+    ///
+    /// use std::sync::UniqueArc;
+    ///
+    /// let r = UniqueArc::new(7);
+    /// let new = UniqueArc::map(r, |i| i + 7);
+    /// assert_eq!(*new, 14);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueArc<U> {
+        if size_of::<T>() == size_of::<U>()
+            && align_of::<T>() == align_of::<U>()
+            && UniqueArc::weak_count(&this) == 0
+        {
+            unsafe {
+                let ptr = UniqueArc::into_raw(this);
+                let value = ptr.read();
+                let mut allocation = UniqueArc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
+
+                allocation.write(f(value));
+                allocation.assume_init()
+            }
+        } else {
+            UniqueArc::new(f(UniqueArc::unwrap(this)))
+        }
+    }
+
+    /// Attempts to map the value in a `UniqueArc`, reusing the allocation if possible.
+    ///
+    /// `f` is called on the value in the `UniqueArc`, and if the operation succeeds,
+    /// the result is returned, also in a `UniqueArc`.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `UniqueArc::try_map(u, f)` instead of `u.try_map(f)`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(smart_pointer_try_map)]
+    /// #![feature(unique_rc_arc)]
+    ///
+    /// use std::sync::UniqueArc;
+    ///
+    /// let b = UniqueArc::new(7);
+    /// let new = UniqueArc::try_map(b, u32::try_from).unwrap();
+    /// assert_eq!(*new, 7);
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
+    pub fn try_map<R>(
+        this: Self,
+        f: impl FnOnce(T) -> R,
+    ) -> <R::Residual as Residual<UniqueArc<R::Output>>>::TryType
+    where
+        R: Try,
+        R::Residual: Residual<UniqueArc<R::Output>>,
+    {
+        if size_of::<T>() == size_of::<R::Output>()
+            && align_of::<T>() == align_of::<R::Output>()
+            && UniqueArc::weak_count(&this) == 0
+        {
+            unsafe {
+                let ptr = UniqueArc::into_raw(this);
+                let value = ptr.read();
+                let mut allocation = UniqueArc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
+
+                allocation.write(f(value)?);
+                try { allocation.assume_init() }
+            }
+        } else {
+            try { UniqueArc::new(f(UniqueArc::unwrap(this))?) }
+        }
+    }
+
+    fn unwrap(this: Self) -> T {
+        let this = ManuallyDrop::new(this);
+        let val: T = unsafe { ptr::read(&**this) };
+
+        // We know there's only one strong reference, so the ordering doesn't matter.
+        this.inner().strong.fetch_sub(1, Relaxed);
+        let _weak = Weak { ptr: this.ptr, alloc: Global };
+
+        val
+    }
+}
+
+impl<T: ?Sized> UniqueArc<T> {
+    unsafe fn from_raw(ptr: *const T) -> Self {
+        let offset = unsafe { data_offset(ptr) };
+
+        // Reverse the offset to find the original ArcInner.
+        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut ArcInner<T> };
+
+        Self {
+            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
+            _marker: PhantomData,
+            _marker2: PhantomData,
+            alloc: Global,
+        }
+    }
+
+    fn into_raw(this: Self) -> *const T {
+        let this = ManuallyDrop::new(this);
+        Self::as_ptr(&*this)
+    }
 }
 
 impl<T, A: Allocator> UniqueArc<T, A> {
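
The `map` added above reuses the allocation only when `T` and `U` have the same size and alignment and no weak references exist; otherwise it falls back to `UniqueArc::new`. A minimal sketch of both paths from the caller's side — the reuse is an internal optimization and is not observable here; the types are chosen purely for illustration:

```rust
#![feature(smart_pointer_try_map)]
#![feature(unique_rc_arc)]

use std::sync::UniqueArc;

fn main() {
    // i32 -> u32: identical size and alignment, so the in-place path can fire.
    let a = UniqueArc::new(7_i32);
    let b: UniqueArc<u32> = UniqueArc::map(a, |i| i as u32);
    assert_eq!(*b, 7);

    // i32 -> i64: layout differs, so `map` allocates fresh via `UniqueArc::new`.
    let c = UniqueArc::new(7_i32);
    let d: UniqueArc<i64> = UniqueArc::map(c, i64::from);
    assert_eq!(*d, 7);
}
```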
@@ -4558,6 +4679,35 @@ impl<T: ?Sized, A: Allocator> UniqueArc<T, A> {
             Arc::from_inner_in(this.ptr, alloc)
         }
     }
+
+    fn weak_count(this: &Self) -> usize {
+        this.inner().weak.load(Acquire) - 1
+    }
+
+    fn inner(&self) -> &ArcInner<T> {
+        // SAFETY: while this UniqueArc is alive we're guaranteed that the inner pointer is valid.
+        unsafe { self.ptr.as_ref() }
+    }
+
+    fn as_ptr(this: &Self) -> *const T {
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
+
+        // SAFETY: This cannot go through Deref::deref or UniqueArc::inner because
+        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+        // write through the pointer after the UniqueArc is recovered through `from_raw`.
+        unsafe { &raw mut (*ptr).data }
+    }
+
+    #[inline]
+    fn into_inner_with_allocator(this: Self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(this);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
+    #[inline]
+    unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
+        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
+    }
 }
 
 impl<T: ?Sized, A: Allocator + Clone> UniqueArc<T, A> {
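
Note the `- 1` in `weak_count` above: a `UniqueArc` itself holds one implicit weak reference (which `unwrap` releases via the temporary `Weak`), so the helper subtracts it to count only external `Weak`s. For the `try_map` failure path, a sketch under the same feature gates as the doc examples:

```rust
#![feature(smart_pointer_try_map)]
#![feature(unique_rc_arc)]

use std::sync::UniqueArc;

fn main() {
    // -1 cannot be converted to u32, so `f` fails and the error propagates
    // out of `try_map` instead of a new `UniqueArc` being returned.
    let a = UniqueArc::new(-1_i32);
    let res: Result<UniqueArc<u32>, _> = UniqueArc::try_map(a, u32::try_from);
    assert!(res.is_err());
}
```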
@@ -4588,6 +4738,13 @@ impl<T: ?Sized, A: Allocator + Clone> UniqueArc<T, A> {
     }
 }
 
+impl<T, A: Allocator> UniqueArc<mem::MaybeUninit<T>, A> {
+    unsafe fn assume_init(self) -> UniqueArc<T, A> {
+        let (ptr, alloc) = UniqueArc::into_inner_with_allocator(self);
+        unsafe { UniqueArc::from_inner_in(ptr.cast(), alloc) }
+    }
+}
+
 #[unstable(feature = "unique_rc_arc", issue = "112566")]
 impl<T: ?Sized, A: Allocator> Deref for UniqueArc<T, A> {
     type Target = T;
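
The private `assume_init` added here is the usual `MaybeUninit` two-step: write a value into the allocation, then reinterpret it as the initialized type. A standalone sketch of the same pattern on stable `Box` — not part of this PR, purely illustrative:

```rust
use std::mem::MaybeUninit;

fn main() {
    // Reserve uninitialized storage, write into it, then assert it is
    // initialized. This mirrors the write-then-assume_init dance that the
    // in-place `map`/`try_map` paths perform on the Arc allocation.
    let mut boxed: Box<MaybeUninit<u32>> = Box::new(MaybeUninit::uninit());
    boxed.write(14);
    // SAFETY: the value was initialized by the `write` above.
    let value: Box<u32> = unsafe { boxed.assume_init() };
    assert_eq!(*value, 14);
}
```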