
Commit 5f42947

Author: Julian Orth
Committed: Feb 12, 2021

Restructure Rc and Arc to support StructAlloc
1 parent e080a05 commit 5f42947

File tree: 4 files changed (+108, -104 lines)
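In outline, the restructuring shown in the diffs below groups the two reference counts into a dedicated metadata struct, so that an allocator parameterized over that struct (`StructAlloc<RcBoxMetadata>`, `StructAlloc<ArcInnerMetadata>`) can place the counts in front of the payload. A compilable before/after sketch of the `Rc` side; the module split and `pub` visibility are only for illustration:

    #![allow(dead_code)]
    use std::cell::Cell;

    // Before this commit: both counters and the payload in one repr(C) struct.
    mod before {
        use std::cell::Cell;

        #[repr(C)]
        pub struct RcBox<T: ?Sized> {
            pub strong: Cell<usize>,
            pub weak: Cell<usize>,
            pub value: T,
        }
    }

    // After: the counters form RcBoxMetadata, and repr(C) keeps `meta` first
    // and `value` last, so the payload offset depends only on the metadata
    // layout and the payload's alignment.
    mod after {
        use std::cell::Cell;

        pub struct RcBoxMetadata {
            pub strong: Cell<usize>,
            pub weak: Cell<usize>,
        }

        #[repr(C)]
        pub struct RcBox<T: ?Sized> {
            pub meta: RcBoxMetadata,
            pub value: T,
        }
    }

    fn main() {
        let b = after::RcBox {
            meta: after::RcBoxMetadata { strong: Cell::new(1), weak: Cell::new(1) },
            value: 7u32,
        };
        println!("strong = {}", b.meta.strong.get());
    }

sync.rs follows the same pattern with atomic counters in ArcInnerMetadata.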


library/alloc/src/rc.rs

Lines changed: 46 additions & 42 deletions
@@ -257,12 +257,13 @@ use core::hash::{Hash, Hasher};
 use core::intrinsics::abort;
 use core::iter;
 use core::marker::{self, PhantomData, Unpin, Unsize};
-use core::mem::{self, align_of_val_raw, forget, size_of_val};
+use core::mem::{self, forget, size_of_val};
 use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::slice::from_raw_parts_mut;

+use crate::alloc::struct_alloc::StructAlloc;
 use crate::alloc::{
     box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw,
 };
@@ -273,13 +274,31 @@ use crate::vec::Vec;
 #[cfg(test)]
 mod tests;

-// This is repr(C) to future-proof against possible field-reordering, which
-// would interfere with otherwise safe [into|from]_raw() of transmutable
-// inner types.
-#[repr(C)]
-struct RcBox<T: ?Sized> {
+struct RcBoxMetadata {
     strong: Cell<usize>,
     weak: Cell<usize>,
+}
+
+impl RcBoxMetadata {
+    // There is an implicit weak pointer owned by all the strong
+    // pointers, which ensures that the weak destructor never frees
+    // the allocation while the strong destructor is running, even
+    // if the weak pointer is stored inside the strong one.
+    #[inline]
+    fn new_strong() -> Self {
+        Self { strong: Cell::new(1), weak: Cell::new(1) }
+    }
+
+    #[inline]
+    fn new_weak() -> Self {
+        Self { strong: Cell::new(0), weak: Cell::new(1) }
+    }
+}
+
+// This is repr(C) to support StructAlloc
+#[repr(C)]
+struct RcBox<T: ?Sized> {
+    meta: RcBoxMetadata,
     value: T,
 }

@@ -340,13 +359,7 @@ impl<T> Rc<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new(value: T) -> Rc<T> {
-        // There is an implicit weak pointer owned by all the strong
-        // pointers, which ensures that the weak destructor never frees
-        // the allocation while the strong destructor is running, even
-        // if the weak pointer is stored inside the strong one.
-        Self::from_inner(
-            Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
-        )
+        Self::from_inner(Box::leak(box RcBox { meta: RcBoxMetadata::new_strong(), value }).into())
     }

     /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
@@ -378,8 +391,7 @@ impl<T> Rc<T> {
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
-            strong: Cell::new(0),
-            weak: Cell::new(1),
+            meta: RcBoxMetadata::new_weak(),
             value: mem::MaybeUninit::<T>::uninit(),
         })
         .into();
@@ -400,9 +412,9 @@ impl<T> Rc<T> {
             let inner = init_ptr.as_ptr();
             ptr::write(ptr::addr_of_mut!((*inner).value), data);

-            let prev_value = (*inner).strong.get();
+            let prev_value = (*inner).meta.strong.get();
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
+            (*inner).meta.strong.set(1);
         }

         let strong = Rc::from_inner(init_ptr);
@@ -489,13 +501,8 @@ impl<T> Rc<T> {
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
     pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
-        // There is an implicit weak pointer owned by all the strong
-        // pointers, which ensures that the weak destructor never frees
-        // the allocation while the strong destructor is running, even
-        // if the weak pointer is stored inside the strong one.
         Ok(Self::from_inner(
-            Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
-                .into(),
+            Box::leak(Box::try_new(RcBox { meta: RcBoxMetadata::new_strong(), value })?).into(),
         ))
     }

@@ -1170,8 +1177,8 @@ impl<T: ?Sized> Rc<T> {
         unsafe {
             debug_assert_eq!(Layout::for_value(&*inner), layout);

-            ptr::write(&mut (*inner).strong, Cell::new(1));
-            ptr::write(&mut (*inner).weak, Cell::new(1));
+            ptr::write(&mut (*inner).meta.strong, Cell::new(1));
+            ptr::write(&mut (*inner).meta.weak, Cell::new(1));
         }

         Ok(inner)
@@ -2087,7 +2094,7 @@ impl<T: ?Sized> Weak<T> {
             // is dropped, the data field will be dropped in-place).
             Some(unsafe {
                 let ptr = self.ptr.as_ptr();
-                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+                WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak }
             })
         }
     }
@@ -2296,12 +2303,12 @@ trait RcInnerPtr {
 impl<T: ?Sized> RcInnerPtr for RcBox<T> {
     #[inline(always)]
     fn weak_ref(&self) -> &Cell<usize> {
-        &self.weak
+        &self.meta.weak
     }

     #[inline(always)]
     fn strong_ref(&self) -> &Cell<usize> {
-        &self.strong
+        &self.meta.strong
     }
 }

@@ -2334,24 +2341,21 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {
 #[stable(feature = "pin", since = "1.33.0")]
 impl<T: ?Sized> Unpin for Rc<T> {}

+type RcStructAlloc = StructAlloc<RcBoxMetadata>;
+
 /// Get the offset within an `RcBox` for the payload behind a pointer.
 ///
 /// # Safety
 ///
 /// The pointer must point to (and have valid metadata for) a previously
 /// valid instance of T, but the T is allowed to be dropped.
-unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
-    // Align the unsized value to the end of the RcBox.
-    // Because RcBox is repr(C), it will always be the last field in memory.
-    // SAFETY: since the only unsized types possible are slices, trait objects,
-    // and extern types, the input safety requirement is currently enough to
-    // satisfy the requirements of align_of_val_raw; this is an implementation
-    // detail of the language that may not be relied upon outside of std.
-    unsafe { data_offset_align(align_of_val_raw(ptr)) }
-}
-
-#[inline]
-fn data_offset_align(align: usize) -> isize {
-    let layout = Layout::new::<RcBox<()>>();
-    (layout.size() + layout.padding_needed_for(align)) as isize
+unsafe fn data_offset<T: ?Sized>(data_ptr: *const T) -> isize {
+    unsafe {
+        // SAFETY: since the only unsized types possible are slices, trait objects,
+        // and extern types, the input safety requirement is currently enough to
+        // satisfy the requirements of for_value_raw; this is an implementation
+        // detail of the language that may not be relied upon outside of std.
+        let data_layout = Layout::for_value_raw(data_ptr);
+        RcStructAlloc::offset_of_data(data_layout) as isize
+    }
 }
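`StructAlloc` itself is not part of this commit, so `offset_of_data` only appears here as a call site. Below is a minimal sketch of the arithmetic it presumably performs, equivalent to the deleted `data_offset_align` (metadata size plus padding up to the payload's alignment) but driven by the payload's full `Layout`; the `Meta` type and the free function are illustrative, not the real API:

    use std::alloc::Layout;
    use std::cell::Cell;

    // Illustrative stand-in for RcBoxMetadata: two usize-sized cells.
    #[allow(dead_code)]
    struct Meta {
        strong: Cell<usize>,
        weak: Cell<usize>,
    }

    // Sketch of what StructAlloc::<Meta>::offset_of_data presumably computes:
    // the payload starts right after the metadata, padded up to the payload's
    // alignment -- the same "size + padding_needed_for(align)" arithmetic that
    // the removed data_offset_align performed.
    fn offset_of_data(data_layout: Layout) -> usize {
        let meta = Layout::new::<Meta>();
        // Layout::extend returns (combined layout, offset of the appended field).
        let (_, offset) = meta.extend(data_layout).expect("layout overflow");
        offset
    }

    fn main() {
        // str payload: align 1, so the payload lands directly after the
        // two-counter metadata (offset 16 on 64-bit targets).
        println!("{}", offset_of_data(Layout::for_value("hello")));
        // u64 payload: align 8; the metadata is already 8-aligned, so no
        // extra padding is needed and the offset is the same.
        println!("{}", offset_of_data(Layout::new::<u64>()));
    }

Working from the payload's full Layout rather than only its alignment (as the old code did) also gives an allocator enough information to size the combined allocation, which is presumably why the helper moved onto StructAlloc.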

library/alloc/src/sync.rs

Lines changed: 56 additions & 56 deletions
@@ -14,14 +14,15 @@ use core::hint;
 use core::intrinsics::abort;
 use core::iter;
 use core::marker::{PhantomData, Unpin, Unsize};
-use core::mem::{self, align_of_val_raw, size_of_val};
+use core::mem::{self, size_of_val};
 use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::slice::from_raw_parts_mut;
 use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};

+use crate::alloc::struct_alloc::StructAlloc;
 use crate::alloc::{
     box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw,
 };
@@ -296,18 +297,33 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
     }
 }

-// This is repr(C) to future-proof against possible field-reordering, which
-// would interfere with otherwise safe [into|from]_raw() of transmutable
-// inner types.
-#[repr(C)]
-struct ArcInner<T: ?Sized> {
+struct ArcInnerMetadata {
     strong: atomic::AtomicUsize,

     // the value usize::MAX acts as a sentinel for temporarily "locking" the
     // ability to upgrade weak pointers or downgrade strong ones; this is used
     // to avoid races in `make_mut` and `get_mut`.
     weak: atomic::AtomicUsize,
+}

+impl ArcInnerMetadata {
+    // Start the weak pointer count as 1 which is the weak pointer that's
+    // held by all the strong pointers (kinda), see std/rc.rs for more info
+    #[inline]
+    fn new_strong() -> Self {
+        Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) }
+    }
+
+    #[inline]
+    fn new_weak() -> Self {
+        Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) }
+    }
+}
+
+// This is repr(C) to support StructAlloc
+#[repr(C)]
+struct ArcInner<T: ?Sized> {
+    meta: ArcInnerMetadata,
     data: T,
 }

@@ -327,13 +343,7 @@ impl<T> Arc<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new(data: T) -> Arc<T> {
-        // Start the weak pointer count as 1 which is the weak pointer that's
-        // held by all the strong pointers (kinda), see std/rc.rs for more info
-        let x: Box<_> = box ArcInner {
-            strong: atomic::AtomicUsize::new(1),
-            weak: atomic::AtomicUsize::new(1),
-            data,
-        };
+        let x: Box<_> = box ArcInner { meta: ArcInnerMetadata::new_strong(), data };
         Self::from_inner(Box::leak(x).into())
     }

@@ -363,8 +373,7 @@ impl<T> Arc<T> {
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let uninit_ptr: NonNull<_> = Box::leak(box ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
+            meta: ArcInnerMetadata::new_weak(),
             data: mem::MaybeUninit::<T>::uninit(),
         })
         .into();
@@ -398,7 +407,7 @@ impl<T> Arc<T> {
             //
             // These side effects do not impact us in any way, and no other side effects are
             // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
+            let prev_value = (*inner).meta.strong.fetch_add(1, Release);
             debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
         }

@@ -494,13 +503,7 @@ impl<T> Arc<T> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new(data: T) -> Result<Arc<T>, AllocError> {
-        // Start the weak pointer count as 1 which is the weak pointer that's
-        // held by all the strong pointers (kinda), see std/rc.rs for more info
-        let x: Box<_> = Box::try_new(ArcInner {
-            strong: atomic::AtomicUsize::new(1),
-            weak: atomic::AtomicUsize::new(1),
-            data,
-        })?;
+        let x: Box<_> = Box::try_new(ArcInner { meta: ArcInnerMetadata::new_strong(), data })?;
         Ok(Self::from_inner(Box::leak(x).into()))
     }

@@ -593,11 +596,11 @@ impl<T> Arc<T> {
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
+        if this.inner().meta.strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
             return Err(this);
         }

-        acquire!(this.inner().strong);
+        acquire!(this.inner().meta.strong);

         unsafe {
             let elem = ptr::read(&this.ptr.as_ref().data);
@@ -867,13 +870,13 @@ impl<T: ?Sized> Arc<T> {
     pub fn downgrade(this: &Self) -> Weak<T> {
         // This Relaxed is OK because we're checking the value in the CAS
         // below.
-        let mut cur = this.inner().weak.load(Relaxed);
+        let mut cur = this.inner().meta.weak.load(Relaxed);

         loop {
             // check if the weak counter is currently "locked"; if so, spin.
             if cur == usize::MAX {
                 hint::spin_loop();
-                cur = this.inner().weak.load(Relaxed);
+                cur = this.inner().meta.weak.load(Relaxed);
                 continue;
             }

@@ -884,7 +887,7 @@ impl<T: ?Sized> Arc<T> {
             // Unlike with Clone(), we need this to be an Acquire read to
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
-            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
+            match this.inner().meta.weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                 Ok(_) => {
                     // Make sure we do not create a dangling Weak
                     debug_assert!(!is_dangling(this.ptr.as_ptr()));
@@ -918,7 +921,7 @@ impl<T: ?Sized> Arc<T> {
     #[inline]
     #[stable(feature = "arc_counts", since = "1.15.0")]
     pub fn weak_count(this: &Self) -> usize {
-        let cnt = this.inner().weak.load(SeqCst);
+        let cnt = this.inner().meta.weak.load(SeqCst);
         // If the weak count is currently locked, the value of the
         // count was 0 just before taking the lock.
         if cnt == usize::MAX { 0 } else { cnt - 1 }
@@ -947,7 +950,7 @@ impl<T: ?Sized> Arc<T> {
     #[inline]
     #[stable(feature = "arc_counts", since = "1.15.0")]
     pub fn strong_count(this: &Self) -> usize {
-        this.inner().strong.load(SeqCst)
+        this.inner().meta.strong.load(SeqCst)
     }

     /// Increments the strong reference count on the `Arc<T>` associated with the
@@ -1112,8 +1115,8 @@ impl<T: ?Sized> Arc<T> {
         debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);

         unsafe {
-            ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
-            ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
+            ptr::write(&mut (*inner).meta.strong, atomic::AtomicUsize::new(1));
+            ptr::write(&mut (*inner).meta.weak, atomic::AtomicUsize::new(1));
         }

         Ok(inner)
@@ -1276,7 +1279,7 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().meta.strong.fetch_add(1, Relaxed);

         // However we need to guard against massive refcounts in case someone
         // is `mem::forget`ing Arcs. If we don't do this the count can overflow
@@ -1352,7 +1355,7 @@ impl<T: Clone> Arc<T> {
         // before release writes (i.e., decrements) to `strong`. Since we hold a
         // weak count, there's no chance the ArcInner itself could be
         // deallocated.
-        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
+        if this.inner().meta.strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
             // Pre-allocate memory to allow writing the cloned value directly.
             let mut arc = Self::new_uninit();
@@ -1361,7 +1364,7 @@ impl<T: Clone> Arc<T> {
                 (**this).write_clone_into_raw(data.as_mut_ptr());
                 *this = arc.assume_init();
             }
-        } else if this.inner().weak.load(Relaxed) != 1 {
+        } else if this.inner().meta.weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
             // dropped. Worst case, we end up allocated a new Arc unnecessarily.
13881391
} else {
13891392
// We were the sole reference of either kind; bump back up the
13901393
// strong ref count.
1391-
this.inner().strong.store(1, Release);
1394+
this.inner().meta.strong.store(1, Release);
13921395
}
13931396

13941397
// As with `get_mut()`, the unsafety is ok because our reference was
@@ -1484,16 +1487,16 @@ impl<T: ?Sized> Arc<T> {
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
         // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
-        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+        if self.inner().meta.weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
             // counter in `drop` -- the only access that happens when any but the last reference
             // is being dropped.
-            let unique = self.inner().strong.load(Acquire) == 1;
+            let unique = self.inner().meta.strong.load(Acquire) == 1;

             // The release write here synchronizes with a read in `downgrade`,
             // effectively preventing the above read of `strong` from happening
             // after the write.
-            self.inner().weak.store(1, Release); // release the lock
+            self.inner().meta.weak.store(1, Release); // release the lock
             unique
         } else {
             false
@@ -1533,7 +1536,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, Release) != 1 {
+        if self.inner().meta.strong.fetch_sub(1, Release) != 1 {
             return;
         }

@@ -1565,7 +1568,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         // [2]: (https://github.com/rust-lang/rust/pull/41714)
-        acquire!(self.inner().strong);
+        acquire!(self.inner().meta.strong);

         unsafe {
             self.drop_slow();
18831886
// is dropped, the data field will be dropped in-place).
18841887
Some(unsafe {
18851888
let ptr = self.ptr.as_ptr();
1886-
WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
1889+
WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak }
18871890
})
18881891
}
18891892
}
@@ -2455,24 +2458,21 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {
 #[stable(feature = "pin", since = "1.33.0")]
 impl<T: ?Sized> Unpin for Arc<T> {}

+type ArcStructAlloc = StructAlloc<ArcInnerMetadata>;
+
 /// Get the offset within an `ArcInner` for the payload behind a pointer.
 ///
 /// # Safety
 ///
 /// The pointer must point to (and have valid metadata for) a previously
 /// valid instance of T, but the T is allowed to be dropped.
-unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
-    // Align the unsized value to the end of the ArcInner.
-    // Because RcBox is repr(C), it will always be the last field in memory.
-    // SAFETY: since the only unsized types possible are slices, trait objects,
-    // and extern types, the input safety requirement is currently enough to
-    // satisfy the requirements of align_of_val_raw; this is an implementation
-    // detail of the language that may not be relied upon outside of std.
-    unsafe { data_offset_align(align_of_val_raw(ptr)) }
-}
-
-#[inline]
-fn data_offset_align(align: usize) -> isize {
-    let layout = Layout::new::<ArcInner<()>>();
-    (layout.size() + layout.padding_needed_for(align)) as isize
+unsafe fn data_offset<T: ?Sized>(data_ptr: *const T) -> isize {
+    unsafe {
+        // SAFETY: since the only unsized types possible are slices, trait objects,
+        // and extern types, the input safety requirement is currently enough to
+        // satisfy the requirements of for_value_raw; this is an implementation
+        // detail of the language that may not be relied upon outside of std.
+        let data_layout = Layout::for_value_raw(data_ptr);
+        ArcStructAlloc::offset_of_data(data_layout) as isize
+    }
 }
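Both RcBox and ArcInner keep `#[repr(C)]`, now justified as "to support StructAlloc": with a C layout, `meta` sits at offset 0 and the payload begins at the padded end of the metadata, so the offset computed above is where `data` actually lives. A small self-contained check, with plain `usize` counters standing in for the atomics (the `Metadata`/`Inner` names are illustrative mirrors, not the real types):

    use std::mem;
    use std::ptr::addr_of;

    #[repr(C)]
    struct Metadata {
        strong: usize,
        weak: usize,
    }

    #[repr(C)]
    struct Inner<T: ?Sized> {
        meta: Metadata,
        data: T,
    }

    fn main() {
        let inner = Inner { meta: Metadata { strong: 1, weak: 1 }, data: 0u64 };
        let base = addr_of!(inner) as usize;
        let data = addr_of!(inner.data) as usize;
        // For this u64 payload (align 8, same as the metadata), the payload sits
        // immediately after the two counters: offset 16 on 64-bit targets.
        assert_eq!(data - base, mem::size_of::<Metadata>());
        println!("payload offset = {}", data - base);
    }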

src/etc/gdb_providers.py

Lines changed: 2 additions & 2 deletions
@@ -146,8 +146,8 @@ def __init__(self, valobj, is_atomic=False):
         self.is_atomic = is_atomic
         self.ptr = unwrap_unique_or_non_null(valobj["ptr"])
         self.value = self.ptr["data" if is_atomic else "value"]
-        self.strong = self.ptr["strong"]["v" if is_atomic else "value"]["value"]
-        self.weak = self.ptr["weak"]["v" if is_atomic else "value"]["value"] - 1
+        self.strong = self.ptr["meta"]["strong"]["v" if is_atomic else "value"]["value"]
+        self.weak = self.ptr["meta"]["weak"]["v" if is_atomic else "value"]["value"] - 1

     def to_string(self):
         if self.is_atomic:

src/etc/lldb_providers.py

Lines changed: 4 additions & 4 deletions
@@ -601,10 +601,10 @@ def __init__(self, valobj, dict, is_atomic=False):

         self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value")

-        self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex(
-            0).GetChildMemberWithName("value")
-        self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex(
-            0).GetChildMemberWithName("value")
+        self.strong = self.ptr.GetChildMemberWithName("meta")\
+            .GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value")
+        self.weak = self.ptr.GetChildMemberWithName("meta")\
+            .GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value")

         self.value_builder = ValueBuilder(valobj)
