Commit 8a5fb9b

Add RawRc type
1 parent 85fca42 commit 8a5fb9b

File tree

3 files changed (+402, -0 lines)


library/alloc/src/raw_rc/mod.rs

Lines changed: 1 addition & 0 deletions
@@ -65,6 +65,7 @@ use core::cell::UnsafeCell;
 use core::mem;
 use core::sync::atomic::Atomic;
 
+mod raw_rc;
 mod raw_weak;
 mod rc_alloc;
 mod rc_layout;

library/alloc/src/raw_rc/raw_rc.rs

Lines changed: 369 additions & 0 deletions
@@ -0,0 +1,369 @@
use core::alloc::Allocator;
use core::cell::UnsafeCell;
#[cfg(not(no_global_oom_handling))]
use core::clone::CloneToUninit;
use core::marker::PhantomData;
#[cfg(not(no_global_oom_handling))]
use core::mem::{self, DropGuard};
#[cfg(not(no_global_oom_handling))]
use core::ops::DerefMut;
#[cfg(not(no_global_oom_handling))]
use core::ptr;
use core::ptr::NonNull;

#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::MakeMutStrategy;
use crate::raw_rc::RefCounter;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::raw_weak;
use crate::raw_rc::raw_weak::RawWeak;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::rc_alloc;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::rc_layout::RcLayout;
use crate::raw_rc::rc_value_pointer::RcValuePointer;

/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`; the user should
/// call `RawRc::drop` manually to destroy this object.
#[repr(transparent)]
pub(crate) struct RawRc<T, A>
where
    T: ?Sized,
{
    /// A `RawRc` is just a `RawWeak` that has a strong reference count owned by the `RawRc`
    /// object. The weak pointer is always non-dangling.
    weak: RawWeak<T, A>,

    // Defines the ownership of `T` for drop-check.
    _phantom_data: PhantomData<T>,
}

impl<T, A> RawRc<T, A>
where
    T: ?Sized,
{
    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `A::default()`.
    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
    where
        A: Default,
    {
        unsafe { Self::from_raw_parts(ptr, A::default()) }
    }

    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `alloc`.
    pub(crate) unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
        unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) }
    }

    /// # Safety
    ///
    /// `weak` must have at least one unowned strong reference count. The newly created `RawRc`
    /// will take ownership of exactly one strong reference count.
    pub(super) unsafe fn from_weak(weak: RawWeak<T, A>) -> Self {
        Self { weak, _phantom_data: PhantomData }
    }

    pub(crate) fn allocator(&self) -> &A {
        &self.weak.allocator()
    }

    pub(crate) fn as_ptr(&self) -> NonNull<T> {
        self.weak.as_ptr()
    }

    pub(crate) unsafe fn cast<U>(self) -> RawRc<U, A> {
        unsafe { RawRc::from_weak(self.weak.cast()) }
    }

    #[inline]
    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawRc<U, A>
    where
        U: ?Sized,
        F: FnOnce(NonNull<T>) -> NonNull<U>,
    {
        unsafe { RawRc::from_weak(self.weak.cast_with(f)) }
    }

    #[inline]
    pub(crate) unsafe fn clone<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe {
            increment_strong_ref_count::<R>(self.value_ptr());

            Self::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    pub(crate) unsafe fn decrement_strong_count<R>(ptr: NonNull<T>)
    where
        A: Allocator + Default,
        R: RefCounter,
    {
        unsafe { Self::decrement_strong_count_in::<R>(ptr, A::default()) };
    }

    pub(crate) unsafe fn decrement_strong_count_in<R>(ptr: NonNull<T>, alloc: A)
    where
        A: Allocator,
        R: RefCounter,
    {
        unsafe { RawRc::from_raw_parts(ptr, alloc).drop::<R>() };
    }

    pub(crate) unsafe fn increment_strong_count<R>(ptr: NonNull<T>)
    where
        R: RefCounter,
    {
        unsafe { increment_strong_ref_count::<R>(RcValuePointer::from_value_ptr(ptr.cast())) };
    }

    pub(crate) unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe fn inner<R>(value_ptr: RcValuePointer)
        where
            R: RefCounter,
        {
            unsafe {
                R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()).downgrade_increment_weak();
            }
        }

        unsafe {
            inner::<R>(self.value_ptr());

            RawWeak::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    #[inline]
    pub(crate) unsafe fn drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };

        if is_last_strong_ref {
            unsafe { self.weak.assume_init_drop::<R>() }
        }
    }

    pub(crate) unsafe fn get_mut<R>(&mut self) -> Option<&mut T>
    where
        R: RefCounter,
    {
        unsafe fn inner<R>(value_ptr: RcValuePointer) -> Option<RcValuePointer>
        where
            R: RefCounter,
        {
            unsafe { is_unique::<R>(value_ptr) }.then_some(value_ptr)
        }

        let (ptr, metadata) = self.weak.as_ptr().to_raw_parts();

        unsafe { inner::<R>(RcValuePointer::from_value_ptr(ptr)) }
            .map(|ptr| unsafe { NonNull::from_raw_parts(ptr.as_ptr(), metadata).as_mut() })
    }

    /// Returns a mutable reference to the contained value.
    ///
    /// # Safety
    ///
    /// No other active references to the contained value should exist, and no new references to
    /// the contained value will be acquired for the duration of the returned borrow.
    pub(crate) unsafe fn get_mut_unchecked(&mut self) -> &mut T {
        // SAFETY: The caller guarantees that we can access the contained value exclusively. Note
        // that we can't create mutable references that have access to the reference counters,
        // because the caller only guarantees exclusive access to the contained value, not the
        // reference counters.
        unsafe { self.weak.as_ptr().as_mut() }
    }

    pub(crate) fn into_raw(self) -> NonNull<T> {
        self.weak.into_raw()
    }

    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
        self.weak.into_raw_parts()
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn is_unique<R>(&self) -> bool
    where
        R: RefCounter,
    {
        unsafe { is_unique::<R>(self.value_ptr()) }
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn make_mut<R>(&mut self) -> &mut T
    where
        T: CloneToUninit,
        A: Allocator + Clone,
        R: RefCounter,
    {
        /// Returns a drop guard that sets the pointer in `rc` to `ptr` on drop.
        ///
        /// # Safety
        ///
        /// - `ptr` must point to a valid reference-counted value that can be deallocated with
        ///   the allocator associated with `rc`.
        /// - The value pointed to by `ptr` must have an unowned strong reference count that can
        ///   be taken ownership of by `rc`.
        unsafe fn set_rc_ptr_on_drop<'a, T, A>(
            rc: &'a mut RawRc<T, A>,
            ptr: NonNull<T>,
        ) -> impl DerefMut<Target = &'a mut RawRc<T, A>>
        where
            T: ?Sized,
        {
            DropGuard::new(rc, move |rc| unsafe { rc.weak.set_ptr(ptr) })
        }

        unsafe {
            let ref_counts = self.ref_counts();

            if let Some(strategy) = R::make_mut(
                R::from_raw_counter(&ref_counts.strong),
                R::from_raw_counter(&ref_counts.weak),
            ) {
                let rc_layout = RcLayout::from_value_ptr_unchecked(self.weak.as_ptr());

                match strategy {
                    MakeMutStrategy::Move => {
                        // `R::make_mut` has set the strong reference count to zero, so the
                        // `RawRc` is essentially a `RawWeak` object whose value is initialized.
                        // This means we are the only owner of the value and can safely move it
                        // into a new allocation.

                        // `guard` ensures the old `RawRc` object is dropped even if the
                        // allocation panics.
                        let guard = raw_weak::new_weak_guard::<T, A, R>(&mut self.weak);

                        let new_ptr = rc_alloc::allocate_with_bytes_in::<A, 1>(
                            guard.as_ptr().cast(),
                            &guard.allocator(),
                            rc_layout,
                        );

                        // No panic occurred, defuse the guard.
                        mem::forget(guard);

                        let new_ptr = NonNull::from_raw_parts(
                            new_ptr.as_ptr(),
                            ptr::metadata(self.weak.as_ptr().as_ptr()),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr);

                        // `MakeMutStrategy::Move` guarantees that the strong count is zero, and
                        // we have copied the value to a new allocation, so we can treat the
                        // original `RawRc` as essentially a `RawWeak` object and call the
                        // `RawWeak` destructor to finish the cleanup.
                        update_ptr_on_drop.weak.drop_unchecked::<R>();
                    }
                    MakeMutStrategy::Clone => {
                        // There are multiple owners of the value, so we need to clone the value
                        // into a new allocation.

                        let new_ptr = rc_alloc::allocate_with_in::<A, _, 1>(
                            &self.allocator(),
                            rc_layout,
                            |dst_ptr| {
                                T::clone_to_uninit(
                                    self.as_ptr().as_ref(),
                                    dst_ptr.as_ptr().as_ptr().cast(),
                                )
                            },
                        );

                        let new_ptr = NonNull::from_raw_parts(
                            new_ptr.as_ptr(),
                            ptr::metadata(self.weak.as_ptr().as_ptr()),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr);

                        // Manually drop the old `RawRc`.
                        update_ptr_on_drop.drop::<R>();
                    }
                }
            }

            self.get_mut_unchecked()
        }
    }

    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
        RawWeak::ptr_eq(&self.weak, &other.weak)
    }

    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
        RawWeak::ptr_ne(&self.weak, &other.weak)
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn ref_counts(&self) -> &crate::raw_rc::RefCounts {
        unsafe { self.weak.ref_counts_unchecked() }
    }

    pub(crate) fn strong_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.strong_count_unchecked() }
    }

    pub(crate) fn weak_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.weak_count_unchecked() }
    }

    #[inline]
    fn value_ptr(&self) -> RcValuePointer {
        // SAFETY: `self.weak` is guaranteed to be non-dangling.
        unsafe { self.weak.value_ptr_unchecked() }
    }
}

/// Decrements the strong reference count in a reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
#[inline]
unsafe fn decrement_strong_ref_count<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).decrement() }
}

/// Increments the strong reference count in a reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
#[inline]
unsafe fn increment_strong_ref_count<R>(value_ptr: RcValuePointer)
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).increment() };
}

#[inline]
unsafe fn is_unique<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    let ref_counts = unsafe { value_ptr.ref_counts_ptr().as_ref() };

    unsafe {
        R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
    }
}

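The doc comment on `RawRc` states the central contract of this commit: the type has no `Drop` impl, and whatever embeds it must call `RawRc::drop` explicitly. As a minimal, self-contained sketch of that wrapping pattern (hypothetical `RawBox`/`OwningBox` types over a plain `Box` allocation, not the actual `raw_rc` API, and with reference counting omitted), a user-facing owner runs the manual destructor from its own `Drop` impl:

use std::marker::PhantomData;
use std::ptr::NonNull;

// Stand-in for `RawRc`: no `Drop` impl, so destruction is an explicit, manual call.
struct RawBox<T> {
    ptr: NonNull<T>,
    _phantom: PhantomData<T>, // ownership of `T` for drop-check, mirroring `_phantom_data`
}

impl<T> RawBox<T> {
    fn new(value: T) -> Self {
        Self { ptr: NonNull::from(Box::leak(Box::new(value))), _phantom: PhantomData }
    }

    // Analogue of `RawRc::drop`: the caller decides when the allocation is destroyed.
    unsafe fn drop(&mut self) {
        drop(unsafe { Box::from_raw(self.ptr.as_ptr()) });
    }
}

// A user-facing owner calls the manual destructor from its own `Drop` impl,
// the way a higher-level strong pointer would wrap `RawRc`.
struct OwningBox<T>(RawBox<T>);

impl<T> Drop for OwningBox<T> {
    fn drop(&mut self) {
        // SAFETY: `OwningBox` is the unique owner of the allocation.
        unsafe { self.0.drop() };
    }
}

fn main() {
    let owned = OwningBox(RawBox::new(String::from("hello")));
    drop(owned); // `OwningBox::drop` runs the manual destructor exactly once
}

In the real module the wrapper would presumably be `Rc`/`Arc`, which would also choose the `RefCounter` implementation `R` when forwarding calls such as `clone::<R>()` and `drop::<R>()`.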
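The `MakeMutStrategy::Move` and `MakeMutStrategy::Clone` branches in `make_mut` correspond to the clone-on-write behaviour already observable through the stable `Rc::make_mut`/`Arc::make_mut` API. A small sketch against today's `std::rc::Rc` (not the new `RawRc` code) shows the two cases:

use std::rc::Rc;

fn main() {
    // Clone case: another strong reference exists, so `make_mut` clones the value
    // into a new allocation and leaves the other owner untouched.
    let mut a = Rc::new(vec![1, 2, 3]);
    let b = Rc::clone(&a);
    Rc::make_mut(&mut a).push(4);
    assert_eq!(*a, [1, 2, 3, 4]);
    assert_eq!(*b, [1, 2, 3]);

    // Move case: only weak references remain, so the value is moved into a fresh
    // allocation instead of being cloned, and the weak references become dangling.
    let mut c = Rc::new(vec![5, 6]);
    let w = Rc::downgrade(&c);
    Rc::make_mut(&mut c).push(7);
    assert_eq!(*c, [5, 6, 7]);
    assert!(w.upgrade().is_none());
}

`RawRc::make_mut` implements the same decision: `R::make_mut` reports which path applies, and the drop guards keep the old allocation consistent if allocation or cloning panics.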