alloc/rc.rs
1//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
2//! Counted'.
3//!
4//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
5//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
6//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
7//! given allocation is destroyed, the value stored in that allocation (often
8//! referred to as "inner value") is also dropped.
9//!
10//! Shared references in Rust disallow mutation by default, and [`Rc`]
11//! is no exception: you cannot generally obtain a mutable reference to
12//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
13//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
14//! inside an `Rc`][mutability].
15//!
16//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
17//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
18//! does not implement [`Send`]. As a result, the Rust compiler
19//! will check *at compile time* that you are not sending [`Rc`]s between
20//! threads. If you need multi-threaded, atomic reference counting, use
21//! [`sync::Arc`][arc].
22//!
23//! The [`downgrade`][downgrade] method can be used to create a non-owning
24//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
25//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
26//! already been dropped. In other words, `Weak` pointers do not keep the value
27//! inside the allocation alive; however, they *do* keep the allocation
28//! (the backing store for the inner value) alive.
29//!
30//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
31//! [`Weak`] is used to break cycles. For example, a tree could have strong
32//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
33//! children back to their parents.
34//!
35//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
36//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
37//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
38//! functions, called using [fully qualified syntax]:
39//!
40//! ```
41//! use std::rc::Rc;
42//!
43//! let my_rc = Rc::new(());
44//! let my_weak = Rc::downgrade(&my_rc);
45//! ```
46//!
47//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
48//! fully qualified syntax. Some people prefer to use fully qualified syntax,
49//! while others prefer using method-call syntax.
50//!
51//! ```
52//! use std::rc::Rc;
53//!
54//! let rc = Rc::new(());
55//! // Method-call syntax
56//! let rc2 = rc.clone();
57//! // Fully qualified syntax
58//! let rc3 = Rc::clone(&rc);
59//! ```
60//!
61//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
62//! already been dropped.
63//!
64//! # Cloning references
65//!
66//! Creating a new reference to the same allocation as an existing reference counted pointer
67//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
68//!
69//! ```
70//! use std::rc::Rc;
71//!
72//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
73//! // The two syntaxes below are equivalent.
74//! let a = foo.clone();
75//! let b = Rc::clone(&foo);
76//! // a and b both point to the same memory location as foo.
77//! ```
78//!
79//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
80//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
82//!
83//! # Examples
84//!
85//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
86//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
87//! unique ownership, because more than one gadget may belong to the same
88//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
89//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
90//!
91//! ```
92//! use std::rc::Rc;
93//!
94//! struct Owner {
95//! name: String,
96//! // ...other fields
97//! }
98//!
99//! struct Gadget {
100//! id: i32,
101//! owner: Rc<Owner>,
102//! // ...other fields
103//! }
104//!
105//! fn main() {
106//! // Create a reference-counted `Owner`.
107//! let gadget_owner: Rc<Owner> = Rc::new(
108//! Owner {
109//! name: "Gadget Man".to_string(),
110//! }
111//! );
112//!
113//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
114//! // gives us a new pointer to the same `Owner` allocation, incrementing
115//! // the reference count in the process.
116//! let gadget1 = Gadget {
117//! id: 1,
118//! owner: Rc::clone(&gadget_owner),
119//! };
120//! let gadget2 = Gadget {
121//! id: 2,
122//! owner: Rc::clone(&gadget_owner),
123//! };
124//!
125//! // Dispose of our local variable `gadget_owner`.
126//! drop(gadget_owner);
127//!
128//! // Despite dropping `gadget_owner`, we're still able to print out the name
129//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
130//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
131//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
132//! // live. The field projection `gadget1.owner.name` works because
133//! // `Rc<Owner>` automatically dereferences to `Owner`.
134//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
135//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
136//!
137//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
138//! // with them the last counted references to our `Owner`. Gadget Man now
139//! // gets destroyed as well.
140//! }
141//! ```
142//!
143//! If our requirements change, and we also need to be able to traverse from
144//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
145//! to `Gadget` introduces a cycle. This means that their
146//! reference counts can never reach 0, and the allocation will never be destroyed:
147//! a memory leak. In order to get around this, we can use [`Weak`]
148//! pointers.
149//!
150//! Rust actually makes it somewhat difficult to produce this loop in the first
151//! place. In order to end up with two values that point at each other, one of
152//! them needs to be mutable. This is difficult because [`Rc`] enforces
153//! memory safety by only giving out shared references to the value it wraps,
154//! and these don't allow direct mutation. We need to wrap the part of the
155//! value we wish to mutate in a [`RefCell`], which provides *interior
156//! mutability*: a method to achieve mutability through a shared reference.
157//! [`RefCell`] enforces Rust's borrowing rules at runtime.
158//!
159//! ```
160//! use std::rc::Rc;
161//! use std::rc::Weak;
162//! use std::cell::RefCell;
163//!
164//! struct Owner {
165//! name: String,
166//! gadgets: RefCell<Vec<Weak<Gadget>>>,
167//! // ...other fields
168//! }
169//!
170//! struct Gadget {
171//! id: i32,
172//! owner: Rc<Owner>,
173//! // ...other fields
174//! }
175//!
176//! fn main() {
177//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
178//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
179//! // a shared reference.
180//! let gadget_owner: Rc<Owner> = Rc::new(
181//! Owner {
182//! name: "Gadget Man".to_string(),
183//! gadgets: RefCell::new(vec![]),
184//! }
185//! );
186//!
187//! // Create `Gadget`s belonging to `gadget_owner`, as before.
188//! let gadget1 = Rc::new(
189//! Gadget {
190//! id: 1,
191//! owner: Rc::clone(&gadget_owner),
192//! }
193//! );
194//! let gadget2 = Rc::new(
195//! Gadget {
196//! id: 2,
197//! owner: Rc::clone(&gadget_owner),
198//! }
199//! );
200//!
201//! // Add the `Gadget`s to their `Owner`.
202//! {
203//! let mut gadgets = gadget_owner.gadgets.borrow_mut();
204//! gadgets.push(Rc::downgrade(&gadget1));
205//! gadgets.push(Rc::downgrade(&gadget2));
206//!
207//! // `RefCell` dynamic borrow ends here.
208//! }
209//!
210//! // Iterate over our `Gadget`s, printing their details out.
211//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
212//!
213//! // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
214//! // guarantee the allocation still exists, we need to call
215//! // `upgrade`, which returns an `Option<Rc<Gadget>>`.
216//! //
217//! // In this case we know the allocation still exists, so we simply
218//! // `unwrap` the `Option`. In a more complicated program, you might
219//! // need graceful error handling for a `None` result.
220//!
221//! let gadget = gadget_weak.upgrade().unwrap();
222//! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
223//! }
224//!
225//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
226//! // are destroyed. There are now no strong (`Rc`) pointers to the
227//! // gadgets, so they are destroyed. This zeroes the reference count on
228//! // Gadget Man, so he gets destroyed as well.
229//! }
230//! ```
231//!
232//! [clone]: Clone::clone
233//! [`Cell`]: core::cell::Cell
234//! [`RefCell`]: core::cell::RefCell
235//! [arc]: crate::sync::Arc
236//! [`Deref`]: core::ops::Deref
237//! [downgrade]: Rc::downgrade
238//! [upgrade]: Weak::upgrade
239//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
240//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
241
242#![stable(feature = "rust1", since = "1.0.0")]
243
244use core::any::Any;
245use core::cell::{Cell, CloneFromCell};
246#[cfg(not(no_global_oom_handling))]
247use core::clone::TrivialClone;
248use core::clone::{CloneToUninit, UseCloned};
249use core::cmp::Ordering;
250use core::hash::{Hash, Hasher};
251use core::intrinsics::abort;
252#[cfg(not(no_global_oom_handling))]
253use core::iter;
254use core::marker::{PhantomData, Unsize};
255use core::mem::{self, Alignment, ManuallyDrop};
256use core::num::NonZeroUsize;
257use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
258#[cfg(not(no_global_oom_handling))]
259use core::ops::{Residual, Try};
260use core::panic::{RefUnwindSafe, UnwindSafe};
261#[cfg(not(no_global_oom_handling))]
262use core::pin::Pin;
263use core::pin::PinCoerceUnsized;
264use core::ptr::{self, NonNull, drop_in_place};
265#[cfg(not(no_global_oom_handling))]
266use core::slice::from_raw_parts_mut;
267use core::{borrow, fmt, hint};
268
269#[cfg(not(no_global_oom_handling))]
270use crate::alloc::handle_alloc_error;
271use crate::alloc::{AllocError, Allocator, Global, Layout};
272use crate::borrow::{Cow, ToOwned};
273use crate::boxed::Box;
274#[cfg(not(no_global_oom_handling))]
275use crate::string::String;
276#[cfg(not(no_global_oom_handling))]
277use crate::vec::Vec;
278
// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
// repr(align(2)) (forcing alignment to at least 2) is required because usize
// has 1-byte alignment on AVR.
#[repr(C, align(2))]
struct RcInner<T: ?Sized> {
    // Number of `Rc` pointers sharing this allocation. When it reaches zero,
    // `value` is dropped (see `drop_slow`), though the allocation may outlive
    // it while `Weak` pointers remain.
    strong: Cell<usize>,
    // Number of `Weak` pointers, plus one implicit weak reference owned
    // collectively by all strong pointers (see `Rc::new`).
    weak: Cell<usize>,
    // The shared value. Must be the last field because `T` may be unsized.
    value: T,
}
290
291/// Calculate layout for `RcInner<T>` using the inner value's layout
292fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
293 // Calculate layout using the given value layout.
294 // Previously, layout was calculated on the expression
295 // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
296 // reference (see #54908).
297 Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
298}
299
/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[doc(search_unbox)]
#[rustc_diagnostic_item = "Rc"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
#[diagnostic::on_move(
    message = "the type `{Self}` does not implement `Copy`",
    label = "this move could be avoided by cloning the original `{Self}`, which is inexpensive",
    note = "consider using `Rc::clone`"
)]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Pointer to the shared heap block holding the counts and the value.
    ptr: NonNull<RcInner<T>>,
    // Marks logical ownership of an `RcInner<T>` for drop check purposes.
    phantom: PhantomData<RcInner<T>>,
    // Allocator used for the `RcInner` allocation (and its deallocation).
    alloc: A,
}
328
// `Rc` uses non-atomic reference counts, so moving one to another thread
// could race on the counts; hence the explicit `!Send`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

// `Rc<T>` is unwind-safe whenever its contents (and allocator) are.
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
344
// Lets `Rc<T>` coerce to `Rc<U>` when `T` unsizes to `U`
// (e.g. `Rc<[i32; 3]>` -> `Rc<[i32]>`, or to `Rc<dyn Trait>`).
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

// Lets `Rc<Self>` work as a method receiver in `dyn Trait` dispatch.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

// SAFETY: `Rc::clone` doesn't access any `Cell`s which could contain the `Rc` being cloned.
#[unstable(feature = "cell_get_cloned", issue = "145329")]
unsafe impl<T: ?Sized> CloneFromCell for Rc<T> {}
354
impl<T: ?Sized> Rc<T> {
    // Builds an `Rc` from an existing `RcInner` in the `Global` allocator.
    //
    // SAFETY contract for callers: `ptr` must point to a live `RcInner<T>`
    // allocated by `Global`, and the caller must transfer ownership of one
    // strong reference (see e.g. `Rc::new`, which sets `strong` to 1 first).
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    // Like `from_inner`, but takes a raw pointer.
    //
    // SAFETY contract for callers: `ptr` must additionally be non-null.
    #[inline]
    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}
366
impl<T: ?Sized, A: Allocator> Rc<T, A> {
    // Shared access to the heap block holding the counts and the value.
    #[inline(always)]
    fn inner(&self) -> &RcInner<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    // Disassembles `this` into its raw pointer and allocator without running
    // `Drop`, so the reference counts are left untouched. The caller takes
    // over the strong reference that `this` held.
    #[inline]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        // `ManuallyDrop` suppresses `Rc`'s destructor; the allocator is then
        // moved out bytewise with `ptr::read` since we can't destructure.
        let this = mem::ManuallyDrop::new(this);
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    // Builds an `Rc` from a pointer plus the allocator that owns it.
    //
    // SAFETY contract for callers: `ptr` must point to a live `RcInner<T>`
    // allocated by `alloc`, with one strong reference transferred to the
    // new `Rc`.
    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    // Like `from_inner_in`, but takes a raw pointer.
    //
    // SAFETY contract for callers: `ptr` must additionally be non-null.
    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Reconstruct the "strong weak" pointer and drop it when this
        // variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };

        // Destroy the contained object.
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
        unsafe {
            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
        }
    }
}
406
impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Constructs a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Returns a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        Self::new_cyclic_in(data_fn, Global)
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        // Allocate space for the counts plus an uninitialized `T`; leaving
        // the payload uninitialized is sound because the `Rc` produced wraps
        // `MaybeUninit<T>`.
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        // Same as `new_uninit`, but the allocator zero-fills the block.
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcInner {
                    strong: Cell::new(1),
                    weak: Cell::new(1),
                    value,
                })?)
                .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        // Fallible counterpart of `new_uninit`: allocation failure is
        // surfaced as `AllocError` instead of aborting.
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        // Fallible counterpart of `new_zeroed`.
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        // SAFETY: the value lives on the heap behind the `Rc` and is never
        // moved out of the allocation by safe `Rc` APIs.
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }

    /// Maps the value in an `Rc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
    /// an `Rc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    ///
    /// use std::rc::Rc;
    ///
    /// let r = Rc::new(7);
    /// let new = Rc::map(r, |i| i + 7);
    /// assert_eq!(*new, 14);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
        // Fast path: when `T` and `U` have the same layout and `this` is the
        // unique reference, the existing allocation can be reused in place.
        if size_of::<T>() == size_of::<U>()
            && align_of::<T>() == align_of::<U>()
            && Rc::is_unique(&this)
        {
            unsafe {
                let ptr = Rc::into_raw(this);
                // Move the old value out; the allocation now logically holds
                // uninitialized storage of the same layout.
                let value = ptr.read();
                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

                // Write the mapped value back into the reused allocation.
                Rc::get_mut_unchecked(&mut allocation).write(f(&value));
                allocation.assume_init()
            }
        } else {
            // Slow path: shared or layout-incompatible — allocate fresh.
            Rc::new(f(&*this))
        }
    }

    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
    /// result is returned, also in an `Rc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    ///
    /// use std::rc::Rc;
    ///
    /// let b = Rc::new(7);
    /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
    /// assert_eq!(*new, 7);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn try_map<R>(
        this: Self,
        f: impl FnOnce(&T) -> R,
    ) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
    where
        R: Try,
        R::Residual: Residual<Rc<R::Output>>,
    {
        // Same reuse strategy as `Rc::map`, but `f` is fallible: on failure
        // the `?` propagates the residual and the reused allocation (now an
        // `Rc<MaybeUninit<_>>`) is dropped without dropping its contents.
        if size_of::<T>() == size_of::<R::Output>()
            && align_of::<T>() == align_of::<R::Output>()
            && Rc::is_unique(&this)
        {
            unsafe {
                let ptr = Rc::into_raw(this);
                let value = ptr.read();
                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

                Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
                try { allocation.assume_init() }
            }
        } else {
            try { Rc::new(f(&*this)?) }
        }
    }
}
738
739impl<T, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            // Allocation failed: report through the global OOM handler.
            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
        }
    }
763
    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        // Allocate the counts plus uninitialized payload from `alloc`, then
        // adopt the pointer together with that allocator so deallocation
        // later goes back to the same allocator.
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }
801
    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        // Same as `new_uninit_in`, except the allocator zero-fills the block.
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }
838
    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T, A>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// See [`new_cyclic`].
    ///
    /// [`new_cyclic`]: Rc::new_cyclic
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
    where
        F: FnOnce(&Weak<T, A>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference. `strong == 0` is what makes `upgrade` fail inside
        // the closure.
        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                weak: Cell::new(1),
                value: mem::MaybeUninit::<T>::uninit(),
            },
            alloc,
        ));
        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        // Erase the `MaybeUninit` wrapper from the pointee type; the value is
        // only read after it is written below.
        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(&raw mut (*inner).value, data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            // Strong references should collectively own a shared weak reference,
            // so don't run the destructor for our old weak reference.
            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
            // and forgetting the weak reference.
            let alloc = weak.into_raw_with_allocator().1;

            Rc::from_inner_in(init_ptr, alloc)
        };

        strong
    }
916
917 /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
918 /// fails
919 ///
920 /// # Examples
921 ///
922 /// ```
923 /// #![feature(allocator_api)]
924 /// use std::rc::Rc;
925 /// use std::alloc::System;
926 ///
927 /// let five = Rc::try_new_in(5, System);
928 /// # Ok::<(), std::alloc::AllocError>(())
929 /// ```
930 #[unstable(feature = "allocator_api", issue = "32838")]
931 #[inline]
932 pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
933 // There is an implicit weak pointer owned by all the strong
934 // pointers, which ensures that the weak destructor never frees
935 // the allocation while the strong destructor is running, even
936 // if the weak pointer is stored inside the strong one.
937 let (ptr, alloc) = Box::into_unique(Box::try_new_in(
938 RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
939 alloc,
940 )?);
941 Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
942 }
943
944 /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
945 /// error if the allocation fails
946 ///
947 /// # Examples
948 ///
949 /// ```
950 /// #![feature(allocator_api)]
951 /// #![feature(get_mut_unchecked)]
952 ///
953 /// use std::rc::Rc;
954 /// use std::alloc::System;
955 ///
956 /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
957 ///
958 /// let five = unsafe {
959 /// // Deferred initialization:
960 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
961 ///
962 /// five.assume_init()
963 /// };
964 ///
965 /// assert_eq!(*five, 5);
966 /// # Ok::<(), std::alloc::AllocError>(())
967 /// ```
968 #[unstable(feature = "allocator_api", issue = "32838")]
969 #[inline]
970 pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
971 unsafe {
972 Ok(Rc::from_ptr_in(
973 Rc::try_allocate_for_layout(
974 Layout::new::<T>(),
975 |layout| alloc.allocate(layout),
976 <*mut u8>::cast,
977 )?,
978 alloc,
979 ))
980 }
981 }
982
983 /// Constructs a new `Rc` with uninitialized contents, with the memory
984 /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
985 /// fails
986 ///
987 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
988 /// incorrect usage of this method.
989 ///
990 /// # Examples
991 ///
992 /// ```
993 /// #![feature(allocator_api)]
994 ///
995 /// use std::rc::Rc;
996 /// use std::alloc::System;
997 ///
998 /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
999 /// let zero = unsafe { zero.assume_init() };
1000 ///
1001 /// assert_eq!(*zero, 0);
1002 /// # Ok::<(), std::alloc::AllocError>(())
1003 /// ```
1004 ///
1005 /// [zeroed]: mem::MaybeUninit::zeroed
1006 #[unstable(feature = "allocator_api", issue = "32838")]
1007 #[inline]
1008 pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
1009 unsafe {
1010 Ok(Rc::from_ptr_in(
1011 Rc::try_allocate_for_layout(
1012 Layout::new::<T>(),
1013 |layout| alloc.allocate_zeroed(layout),
1014 <*mut u8>::cast,
1015 )?,
1016 alloc,
1017 ))
1018 }
1019 }
1020
1021 /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
1022 /// `value` will be pinned in memory and unable to be moved.
1023 #[cfg(not(no_global_oom_handling))]
1024 #[unstable(feature = "allocator_api", issue = "32838")]
1025 #[inline]
1026 pub fn pin_in(value: T, alloc: A) -> Pin<Self>
1027 where
1028 A: 'static,
1029 {
1030 unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
1031 }
1032
    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            // Suppress the normal `Rc` destructor; `this` is taken apart
            // manually below.
            let this = ManuallyDrop::new(this);

            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

            // Indicate to Weaks that they can't be promoted by decrementing
            // the strong count, and then remove the implicit "strong weak"
            // pointer while also handling drop logic by just crafting a
            // fake Weak.
            this.inner().dec_strong();
            let _weak = Weak { ptr: this.ptr, alloc };
            Ok(val)
        } else {
            Err(this)
        }
    }
1072
1073 /// Returns the inner value, if the `Rc` has exactly one strong reference.
1074 ///
1075 /// Otherwise, [`None`] is returned and the `Rc` is dropped.
1076 ///
1077 /// This will succeed even if there are outstanding weak references.
1078 ///
1079 /// If `Rc::into_inner` is called on every clone of this `Rc`,
1080 /// it is guaranteed that exactly one of the calls returns the inner value.
1081 /// This means in particular that the inner value is not dropped.
1082 ///
1083 /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
1084 /// And while they are meant for different use-cases, `Rc::into_inner(this)`
1085 /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
1086 /// (Note that the same kind of equivalence does **not** hold true for
1087 /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
1088 ///
1089 /// # Examples
1090 ///
1091 /// ```
1092 /// use std::rc::Rc;
1093 ///
1094 /// let x = Rc::new(3);
1095 /// assert_eq!(Rc::into_inner(x), Some(3));
1096 ///
1097 /// let x = Rc::new(4);
1098 /// let y = Rc::clone(&x);
1099 ///
1100 /// assert_eq!(Rc::into_inner(y), None);
1101 /// assert_eq!(Rc::into_inner(x), Some(4));
1102 /// ```
1103 #[inline]
1104 #[stable(feature = "rc_into_inner", since = "1.70.0")]
1105 pub fn into_inner(this: Self) -> Option<T> {
1106 Rc::try_unwrap(this).ok()
1107 }
1108}
1109
1110impl<T> Rc<[T]> {
1111 /// Constructs a new reference-counted slice with uninitialized contents.
1112 ///
1113 /// # Examples
1114 ///
1115 /// ```
1116 /// use std::rc::Rc;
1117 ///
1118 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1119 ///
1120 /// // Deferred initialization:
1121 /// let data = Rc::get_mut(&mut values).unwrap();
1122 /// data[0].write(1);
1123 /// data[1].write(2);
1124 /// data[2].write(3);
1125 ///
1126 /// let values = unsafe { values.assume_init() };
1127 ///
1128 /// assert_eq!(*values, [1, 2, 3])
1129 /// ```
1130 #[cfg(not(no_global_oom_handling))]
1131 #[stable(feature = "new_uninit", since = "1.82.0")]
1132 #[must_use]
1133 pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1134 unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
1135 }
1136
1137 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1138 /// filled with `0` bytes.
1139 ///
1140 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1141 /// incorrect usage of this method.
1142 ///
1143 /// # Examples
1144 ///
1145 /// ```
1146 /// use std::rc::Rc;
1147 ///
1148 /// let values = Rc::<[u32]>::new_zeroed_slice(3);
1149 /// let values = unsafe { values.assume_init() };
1150 ///
1151 /// assert_eq!(*values, [0, 0, 0])
1152 /// ```
1153 ///
1154 /// [zeroed]: mem::MaybeUninit::zeroed
1155 #[cfg(not(no_global_oom_handling))]
1156 #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
1157 #[must_use]
1158 pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1159 unsafe {
1160 Rc::from_ptr(Rc::allocate_for_layout(
1161 Layout::array::<T>(len).unwrap(),
1162 |layout| Global.allocate_zeroed(layout),
1163 |mem| {
1164 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1165 as *mut RcInner<[mem::MaybeUninit<T>]>
1166 },
1167 ))
1168 }
1169 }
1170}
1171
1172impl<T, A: Allocator> Rc<[T], A> {
1173 /// Constructs a new reference-counted slice with uninitialized contents.
1174 ///
1175 /// # Examples
1176 ///
1177 /// ```
1178 /// #![feature(get_mut_unchecked)]
1179 /// #![feature(allocator_api)]
1180 ///
1181 /// use std::rc::Rc;
1182 /// use std::alloc::System;
1183 ///
1184 /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
1185 ///
1186 /// let values = unsafe {
1187 /// // Deferred initialization:
1188 /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
1189 /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
1190 /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
1191 ///
1192 /// values.assume_init()
1193 /// };
1194 ///
1195 /// assert_eq!(*values, [1, 2, 3])
1196 /// ```
1197 #[cfg(not(no_global_oom_handling))]
1198 #[unstable(feature = "allocator_api", issue = "32838")]
1199 #[inline]
1200 pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1201 unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
1202 }
1203
1204 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1205 /// filled with `0` bytes.
1206 ///
1207 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1208 /// incorrect usage of this method.
1209 ///
1210 /// # Examples
1211 ///
1212 /// ```
1213 /// #![feature(allocator_api)]
1214 ///
1215 /// use std::rc::Rc;
1216 /// use std::alloc::System;
1217 ///
1218 /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
1219 /// let values = unsafe { values.assume_init() };
1220 ///
1221 /// assert_eq!(*values, [0, 0, 0])
1222 /// ```
1223 ///
1224 /// [zeroed]: mem::MaybeUninit::zeroed
1225 #[cfg(not(no_global_oom_handling))]
1226 #[unstable(feature = "allocator_api", issue = "32838")]
1227 #[inline]
1228 pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1229 unsafe {
1230 Rc::from_ptr_in(
1231 Rc::allocate_for_layout(
1232 Layout::array::<T>(len).unwrap(),
1233 |layout| alloc.allocate_zeroed(layout),
1234 |mem| {
1235 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1236 as *mut RcInner<[mem::MaybeUninit<T>]>
1237 },
1238 ),
1239 alloc,
1240 )
1241 }
1242 }
1243
1244 /// Converts the reference-counted slice into a reference-counted array.
1245 ///
1246 /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
1247 ///
1248 /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
1249 ///
1250 /// # Examples
1251 ///
1252 /// ```
1253 /// #![feature(alloc_slice_into_array)]
1254 /// use std::rc::Rc;
1255 ///
1256 /// let rc_slice: Rc<[i32]> = Rc::new([1, 2, 3]);
1257 ///
1258 /// let rc_array: Rc<[i32; 3]> = rc_slice.into_array().unwrap();
1259 /// ```
1260 #[unstable(feature = "alloc_slice_into_array", issue = "148082")]
1261 #[inline]
1262 #[must_use]
1263 pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N], A>> {
1264 if self.len() == N {
1265 let (ptr, alloc) = Self::into_raw_with_allocator(self);
1266 let ptr = ptr as *const [T; N];
1267
1268 // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
1269 let me = unsafe { Rc::from_raw_in(ptr, alloc) };
1270 Some(me)
1271 } else {
1272 None
1273 }
1274 }
1275}
1276
1277impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
1278 /// Converts to `Rc<T>`.
1279 ///
1280 /// # Safety
1281 ///
1282 /// As with [`MaybeUninit::assume_init`],
1283 /// it is up to the caller to guarantee that the inner value
1284 /// really is in an initialized state.
1285 /// Calling this when the content is not yet fully initialized
1286 /// causes immediate undefined behavior.
1287 ///
1288 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1289 ///
1290 /// # Examples
1291 ///
1292 /// ```
1293 /// use std::rc::Rc;
1294 ///
1295 /// let mut five = Rc::<u32>::new_uninit();
1296 ///
1297 /// // Deferred initialization:
1298 /// Rc::get_mut(&mut five).unwrap().write(5);
1299 ///
1300 /// let five = unsafe { five.assume_init() };
1301 ///
1302 /// assert_eq!(*five, 5)
1303 /// ```
1304 #[stable(feature = "new_uninit", since = "1.82.0")]
1305 #[inline]
1306 pub unsafe fn assume_init(self) -> Rc<T, A> {
1307 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1308 unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
1309 }
1310}
1311
1312impl<T: ?Sized + CloneToUninit> Rc<T> {
1313 /// Constructs a new `Rc<T>` with a clone of `value`.
1314 ///
1315 /// # Examples
1316 ///
1317 /// ```
1318 /// #![feature(clone_from_ref)]
1319 /// use std::rc::Rc;
1320 ///
1321 /// let hello: Rc<str> = Rc::clone_from_ref("hello");
1322 /// ```
1323 #[cfg(not(no_global_oom_handling))]
1324 #[unstable(feature = "clone_from_ref", issue = "149075")]
1325 pub fn clone_from_ref(value: &T) -> Rc<T> {
1326 Rc::clone_from_ref_in(value, Global)
1327 }
1328
1329 /// Constructs a new `Rc<T>` with a clone of `value`, returning an error if allocation fails
1330 ///
1331 /// # Examples
1332 ///
1333 /// ```
1334 /// #![feature(clone_from_ref)]
1335 /// #![feature(allocator_api)]
1336 /// use std::rc::Rc;
1337 ///
1338 /// let hello: Rc<str> = Rc::try_clone_from_ref("hello")?;
1339 /// # Ok::<(), std::alloc::AllocError>(())
1340 /// ```
1341 #[unstable(feature = "clone_from_ref", issue = "149075")]
1342 //#[unstable(feature = "allocator_api", issue = "32838")]
1343 pub fn try_clone_from_ref(value: &T) -> Result<Rc<T>, AllocError> {
1344 Rc::try_clone_from_ref_in(value, Global)
1345 }
1346}
1347
1348impl<T: ?Sized + CloneToUninit, A: Allocator> Rc<T, A> {
1349 /// Constructs a new `Rc<T>` with a clone of `value` in the provided allocator.
1350 ///
1351 /// # Examples
1352 ///
1353 /// ```
1354 /// #![feature(clone_from_ref)]
1355 /// #![feature(allocator_api)]
1356 /// use std::rc::Rc;
1357 /// use std::alloc::System;
1358 ///
1359 /// let hello: Rc<str, System> = Rc::clone_from_ref_in("hello", System);
1360 /// ```
1361 #[cfg(not(no_global_oom_handling))]
1362 #[unstable(feature = "clone_from_ref", issue = "149075")]
1363 //#[unstable(feature = "allocator_api", issue = "32838")]
1364 pub fn clone_from_ref_in(value: &T, alloc: A) -> Rc<T, A> {
1365 // `in_progress` drops the allocation if we panic before finishing initializing it.
1366 let mut in_progress: UniqueRcUninit<T, A> = UniqueRcUninit::new(value, alloc);
1367
1368 // Initialize with clone of value.
1369 let initialized_clone = unsafe {
1370 // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1371 value.clone_to_uninit(in_progress.data_ptr().cast());
1372 // Cast type of pointer, now that it is initialized.
1373 in_progress.into_rc()
1374 };
1375
1376 initialized_clone
1377 }
1378
1379 /// Constructs a new `Rc<T>` with a clone of `value` in the provided allocator, returning an error if allocation fails
1380 ///
1381 /// # Examples
1382 ///
1383 /// ```
1384 /// #![feature(clone_from_ref)]
1385 /// #![feature(allocator_api)]
1386 /// use std::rc::Rc;
1387 /// use std::alloc::System;
1388 ///
1389 /// let hello: Rc<str, System> = Rc::try_clone_from_ref_in("hello", System)?;
1390 /// # Ok::<(), std::alloc::AllocError>(())
1391 /// ```
1392 #[unstable(feature = "clone_from_ref", issue = "149075")]
1393 //#[unstable(feature = "allocator_api", issue = "32838")]
1394 pub fn try_clone_from_ref_in(value: &T, alloc: A) -> Result<Rc<T, A>, AllocError> {
1395 // `in_progress` drops the allocation if we panic before finishing initializing it.
1396 let mut in_progress: UniqueRcUninit<T, A> = UniqueRcUninit::try_new(value, alloc)?;
1397
1398 // Initialize with clone of value.
1399 let initialized_clone = unsafe {
1400 // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1401 value.clone_to_uninit(in_progress.data_ptr().cast());
1402 // Cast type of pointer, now that it is initialized.
1403 in_progress.into_rc()
1404 };
1405
1406 Ok(initialized_clone)
1407 }
1408}
1409
1410impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
1411 /// Converts to `Rc<[T]>`.
1412 ///
1413 /// # Safety
1414 ///
1415 /// As with [`MaybeUninit::assume_init`],
1416 /// it is up to the caller to guarantee that the inner value
1417 /// really is in an initialized state.
1418 /// Calling this when the content is not yet fully initialized
1419 /// causes immediate undefined behavior.
1420 ///
1421 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1422 ///
1423 /// # Examples
1424 ///
1425 /// ```
1426 /// use std::rc::Rc;
1427 ///
1428 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1429 ///
1430 /// // Deferred initialization:
1431 /// let data = Rc::get_mut(&mut values).unwrap();
1432 /// data[0].write(1);
1433 /// data[1].write(2);
1434 /// data[2].write(3);
1435 ///
1436 /// let values = unsafe { values.assume_init() };
1437 ///
1438 /// assert_eq!(*values, [1, 2, 3])
1439 /// ```
1440 #[stable(feature = "new_uninit", since = "1.82.0")]
1441 #[inline]
1442 pub unsafe fn assume_init(self) -> Rc<[T], A> {
1443 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1444 unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
1445 }
1446}
1447
impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] or [`Rc<U>::into_raw_with_allocator`][into_raw_with_allocator].
    ///
    /// # Safety
    ///
    /// * Creating a `Rc<T>` from a pointer other than one returned from
    ///   [`Rc<U>::into_raw`][into_raw] or [`Rc<U>::into_raw_with_allocator`][into_raw_with_allocator]
    ///   is undefined behavior.
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    /// * Note that if `U` or `U`'s data pointer is not `T` but has the same size
    ///   and alignment, this is basically like transmuting references of
    ///   different types. See [`mem::transmute`][transmute] for more information
    ///   on what restrictions apply in this case.
    /// * The raw pointer must point to a block of memory allocated by the global allocator
    /// * The user of `from_raw` has to make sure a specific value of `T` is only
    ///   dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [into_raw_with_allocator]: Rc::into_raw_with_allocator
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // Delegates to the allocator-aware constructor with `Global`.
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # // Prevent leaks for Miri.
    /// # drop(unsafe { Rc::from_raw(x_ptr) });
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        // ManuallyDrop inhibits the destructor so the strong count stays
        // untouched; the caller now owns that strong reference via the
        // returned raw pointer.
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     # // Prevent leaks for Miri.
    ///     # Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        // Delegates to the allocator-aware variant with `Global`.
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        // Delegates to the allocator-aware variant with `Global`.
        unsafe { Self::decrement_strong_count_in(ptr, Global) }
    }
}
1616
1617impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(this: &Self) -> &A {
        // The borrow is tied to `this`, so the allocator reference cannot
        // outlive the `Rc` it was taken from.
        &this.alloc
    }
1628
1629 /// Consumes the `Rc`, returning the wrapped pointer and allocator.
1630 ///
1631 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1632 /// [`Rc::from_raw_in`].
1633 ///
1634 /// # Examples
1635 ///
1636 /// ```
1637 /// #![feature(allocator_api)]
1638 /// use std::rc::Rc;
1639 /// use std::alloc::System;
1640 ///
1641 /// let x = Rc::new_in("hello".to_owned(), System);
1642 /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1643 /// assert_eq!(unsafe { &*ptr }, "hello");
1644 /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1645 /// assert_eq!(&*x, "hello");
1646 /// ```
1647 #[must_use = "losing the pointer will leak memory"]
1648 #[unstable(feature = "allocator_api", issue = "32838")]
1649 pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1650 let this = mem::ManuallyDrop::new(this);
1651 let ptr = Self::as_ptr(&this);
1652 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1653 let alloc = unsafe { ptr::read(&this.alloc) };
1654 (ptr, alloc)
1655 }
1656
    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(0);
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { *x_ptr }, 0);
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    #[rustc_never_returns_null_ptr]
    pub fn as_ptr(this: &Self) -> *const T {
        // Extract the raw inner pointer without creating an intermediate
        // shared reference to the whole `RcInner`.
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }
1683
    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to [`Rc<U,
    /// A>::into_raw`][into_raw] or [`Rc<U, A>::into_raw_with_allocator`][into_raw_with_allocator].
    ///
    /// # Safety
    ///
    /// * Creating a `Rc<T, A>` from a pointer other than one returned from
    ///   [`Rc<U, A>::into_raw`][into_raw] or [`Rc<U, A>::into_raw_with_allocator`][into_raw_with_allocator]
    ///   is undefined behavior.
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U, A>` was constructed
    ///   through `Rc<T, A>` and then converted to `Rc<U, A>` through an [unsized
    ///   coercion].
    /// * Note that if `U` or `U`'s data pointer is not `T` but has the same size
    ///   and alignment, this is basically like transmuting references of
    ///   different types. See [`mem::transmute`][transmute] for more information
    ///   on what restrictions apply in this case.
    /// * The raw pointer must point to a block of memory allocated by `alloc`
    /// * The user of `from_raw` has to make sure a specific value of `T` is only
    ///   dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T, A>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [into_raw_with_allocator]: Rc::into_raw_with_allocator
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw_in(x_ptr, System);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
    /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        // `ptr` points at the `value` field of an `RcInner`; compute how far
        // into the allocation that field lives.
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
    }
1763
1764 /// Creates a new [`Weak`] pointer to this allocation.
1765 ///
1766 /// # Examples
1767 ///
1768 /// ```
1769 /// use std::rc::Rc;
1770 ///
1771 /// let five = Rc::new(5);
1772 ///
1773 /// let weak_five = Rc::downgrade(&five);
1774 /// ```
1775 #[must_use = "this returns a new `Weak` pointer, \
1776 without modifying the original `Rc`"]
1777 #[stable(feature = "rc_weak", since = "1.4.0")]
1778 pub fn downgrade(this: &Self) -> Weak<T, A>
1779 where
1780 A: Clone,
1781 {
1782 this.inner().inc_weak();
1783 // Make sure we do not create a dangling Weak
1784 debug_assert!(!is_dangling(this.ptr.as_ptr()));
1785 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1786 }
1787
    /// Gets the number of [`Weak`] pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _weak_five = Rc::downgrade(&five);
    ///
    /// assert_eq!(1, Rc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        // Subtract the implicit weak reference that all strong pointers
        // collectively hold; it is bookkeeping, not a user-visible `Weak`.
        this.inner().weak() - 1
    }
1805
    /// Gets the number of strong (`Rc`) pointers to this allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let _also_five = Rc::clone(&five);
    ///
    /// assert_eq!(2, Rc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "rc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        // Direct read of the strong counter; `this` itself contributes 1.
        this.inner().strong()
    }
1823
    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by `alloc`.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    ///
    /// unsafe {
    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
    ///     Rc::increment_strong_count_in(ptr, System);
    ///
    ///     let five = Rc::from_raw_in(ptr, System);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     # // Prevent leaks for Miri.
    ///     # Rc::decrement_strong_count_in(ptr, System);
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
    where
        A: Clone,
    {
        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
        // Now increase refcount, but don't drop new refcount either.
        // `Clone` bumps the strong count as a side effect; the clone itself
        // is immediately forgotten via ManuallyDrop, leaving only the bump.
        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
    }
1868
1869 /// Decrements the strong reference count on the `Rc<T>` associated with the
1870 /// provided pointer by one.
1871 ///
1872 /// # Safety
1873 ///
1874 /// The pointer must have been obtained through `Rc::into_raw`and must satisfy the
1875 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1876 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1877 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1878 /// allocated by `alloc`. This method can be used to release the final `Rc` and
1879 /// backing storage, but **should not** be called after the final `Rc` has been released.
1880 ///
1881 /// [from_raw_in]: Rc::from_raw_in
1882 ///
1883 /// # Examples
1884 ///
1885 /// ```
1886 /// #![feature(allocator_api)]
1887 ///
1888 /// use std::rc::Rc;
1889 /// use std::alloc::System;
1890 ///
1891 /// let five = Rc::new_in(5, System);
1892 ///
1893 /// unsafe {
1894 /// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1895 /// Rc::increment_strong_count_in(ptr, System);
1896 ///
1897 /// let five = Rc::from_raw_in(ptr, System);
1898 /// assert_eq!(2, Rc::strong_count(&five));
1899 /// Rc::decrement_strong_count_in(ptr, System);
1900 /// assert_eq!(1, Rc::strong_count(&five));
1901 /// }
1902 /// ```
1903 #[inline]
1904 #[unstable(feature = "allocator_api", issue = "32838")]
1905 pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1906 unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1907 }
1908
1909 /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1910 /// this allocation.
1911 #[inline]
1912 fn is_unique(this: &Self) -> bool {
1913 Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1914 }
1915
1916 /// Returns a mutable reference into the given `Rc`, if there are
1917 /// no other `Rc` or [`Weak`] pointers to the same allocation.
1918 ///
1919 /// Returns [`None`] otherwise, because it is not safe to
1920 /// mutate a shared value.
1921 ///
1922 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1923 /// the inner value when there are other `Rc` pointers.
1924 ///
1925 /// [make_mut]: Rc::make_mut
1926 /// [clone]: Clone::clone
1927 ///
1928 /// # Examples
1929 ///
1930 /// ```
1931 /// use std::rc::Rc;
1932 ///
1933 /// let mut x = Rc::new(3);
1934 /// *Rc::get_mut(&mut x).unwrap() = 4;
1935 /// assert_eq!(*x, 4);
1936 ///
1937 /// let _y = Rc::clone(&x);
1938 /// assert!(Rc::get_mut(&mut x).is_none());
1939 /// ```
1940 #[inline]
1941 #[stable(feature = "rc_unique", since = "1.4.0")]
1942 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1943 if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1944 }
1945
1946 /// Returns a mutable reference into the given `Rc`,
1947 /// without any check.
1948 ///
1949 /// See also [`get_mut`], which is safe and does appropriate checks.
1950 ///
1951 /// [`get_mut`]: Rc::get_mut
1952 ///
1953 /// # Safety
1954 ///
1955 /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1956 /// they must not be dereferenced or have active borrows for the duration
1957 /// of the returned borrow, and their inner type must be exactly the same as the
1958 /// inner type of this Rc (including lifetimes). This is trivially the case if no
1959 /// such pointers exist, for example immediately after `Rc::new`.
1960 ///
1961 /// # Examples
1962 ///
1963 /// ```
1964 /// #![feature(get_mut_unchecked)]
1965 ///
1966 /// use std::rc::Rc;
1967 ///
1968 /// let mut x = Rc::new(String::new());
1969 /// unsafe {
1970 /// Rc::get_mut_unchecked(&mut x).push_str("foo")
1971 /// }
1972 /// assert_eq!(*x, "foo");
1973 /// ```
1974 /// Other `Rc` pointers to the same allocation must be to the same type.
1975 /// ```no_run
1976 /// #![feature(get_mut_unchecked)]
1977 ///
1978 /// use std::rc::Rc;
1979 ///
1980 /// let x: Rc<str> = Rc::from("Hello, world!");
1981 /// let mut y: Rc<[u8]> = x.clone().into();
1982 /// unsafe {
1983 /// // this is Undefined Behavior, because x's inner type is str, not [u8]
1984 /// Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1985 /// }
1986 /// println!("{}", &*x); // Invalid UTF-8 in a str
1987 /// ```
1988 /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1989 /// ```no_run
1990 /// #![feature(get_mut_unchecked)]
1991 ///
1992 /// use std::rc::Rc;
1993 ///
1994 /// let x: Rc<&str> = Rc::new("Hello, world!");
1995 /// {
1996 /// let s = String::from("Oh, no!");
1997 /// let mut y: Rc<&str> = x.clone();
1998 /// unsafe {
1999 /// // this is Undefined Behavior, because x's inner type
2000 /// // is &'long str, not &'short str
2001 /// *Rc::get_mut_unchecked(&mut y) = &s;
2002 /// }
2003 /// }
2004 /// println!("{}", &*x); // Use-after-free
2005 /// ```
2006 #[inline]
2007 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
2008 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
2009 // We are careful to *not* create a reference covering the "count" fields, as
2010 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
2011 unsafe { &mut (*this.ptr.as_ptr()).value }
2012 }
2013
2014 #[inline]
2015 #[stable(feature = "ptr_eq", since = "1.17.0")]
2016 /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
2017 /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
2018 ///
2019 /// # Examples
2020 ///
2021 /// ```
2022 /// use std::rc::Rc;
2023 ///
2024 /// let five = Rc::new(5);
2025 /// let same_five = Rc::clone(&five);
2026 /// let other_five = Rc::new(5);
2027 ///
2028 /// assert!(Rc::ptr_eq(&five, &same_five));
2029 /// assert!(!Rc::ptr_eq(&five, &other_five));
2030 /// ```
2031 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
2032 ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
2033 }
2034}
2035
2036#[cfg(not(no_global_oom_handling))]
2037impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
2038 /// Makes a mutable reference into the given `Rc`.
2039 ///
2040 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
2041 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
2042 /// referred to as clone-on-write.
2043 ///
2044 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
2045 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
2046 /// be cloned.
2047 ///
2048 /// See also [`get_mut`], which will fail rather than cloning the inner value
2049 /// or disassociating [`Weak`] pointers.
2050 ///
2051 /// [`clone`]: Clone::clone
2052 /// [`get_mut`]: Rc::get_mut
2053 ///
2054 /// # Examples
2055 ///
2056 /// ```
2057 /// use std::rc::Rc;
2058 ///
2059 /// let mut data = Rc::new(5);
2060 ///
2061 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
2062 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
2063 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
2064 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
2065 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
2066 ///
2067 /// // Now `data` and `other_data` point to different allocations.
2068 /// assert_eq!(*data, 8);
2069 /// assert_eq!(*other_data, 12);
2070 /// ```
2071 ///
2072 /// [`Weak`] pointers will be disassociated:
2073 ///
2074 /// ```
2075 /// use std::rc::Rc;
2076 ///
2077 /// let mut data = Rc::new(75);
2078 /// let weak = Rc::downgrade(&data);
2079 ///
2080 /// assert!(75 == *data);
2081 /// assert!(75 == *weak.upgrade().unwrap());
2082 ///
2083 /// *Rc::make_mut(&mut data) += 1;
2084 ///
2085 /// assert!(76 == *data);
2086 /// assert!(weak.upgrade().is_none());
2087 /// ```
2088 #[inline]
2089 #[stable(feature = "rc_unique", since = "1.4.0")]
2090 pub fn make_mut(this: &mut Self) -> &mut T {
2091 let size_of_val = size_of_val::<T>(&**this);
2092
2093 if Rc::strong_count(this) != 1 {
2094 // Gotta clone the data, there are other Rcs.
2095 *this = Rc::clone_from_ref_in(&**this, this.alloc.clone());
2096 } else if Rc::weak_count(this) != 0 {
2097 // Can just steal the data, all that's left is Weaks
2098
2099 // We don't need panic-protection like the above branch does, but we might as well
2100 // use the same mechanism.
2101 let mut in_progress: UniqueRcUninit<T, A> =
2102 UniqueRcUninit::new(&**this, this.alloc.clone());
2103 unsafe {
2104 // Initialize `in_progress` with move of **this.
2105 // We have to express this in terms of bytes because `T: ?Sized`; there is no
2106 // operation that just copies a value based on its `size_of_val()`.
2107 ptr::copy_nonoverlapping(
2108 ptr::from_ref(&**this).cast::<u8>(),
2109 in_progress.data_ptr().cast::<u8>(),
2110 size_of_val,
2111 );
2112
2113 this.inner().dec_strong();
2114 // Remove implicit strong-weak ref (no need to craft a fake
2115 // Weak here -- we know other Weaks can clean up for us)
2116 this.inner().dec_weak();
2117 // Replace `this` with newly constructed Rc that has the moved data.
2118 ptr::write(this, in_progress.into_rc());
2119 }
2120 }
2121 // This unsafety is ok because we're guaranteed that the pointer
2122 // returned is the *only* pointer that will ever be returned to T. Our
2123 // reference count is guaranteed to be 1 at this point, and we required
2124 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
2125 // reference to the allocation.
2126 unsafe { &mut this.ptr.as_mut().value }
2127 }
2128}
2129
2130impl<T: Clone, A: Allocator> Rc<T, A> {
2131 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
2132 /// clone.
2133 ///
2134 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
2135 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
2136 ///
2137 /// # Examples
2138 ///
2139 /// ```
2140 /// # use std::{ptr, rc::Rc};
2141 /// let inner = String::from("test");
2142 /// let ptr = inner.as_ptr();
2143 ///
2144 /// let rc = Rc::new(inner);
2145 /// let inner = Rc::unwrap_or_clone(rc);
2146 /// // The inner value was not cloned
2147 /// assert!(ptr::eq(ptr, inner.as_ptr()));
2148 ///
2149 /// let rc = Rc::new(inner);
2150 /// let rc2 = rc.clone();
2151 /// let inner = Rc::unwrap_or_clone(rc);
2152 /// // Because there were 2 references, we had to clone the inner value.
2153 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
2154 /// // `rc2` is the last reference, so when we unwrap it we get back
2155 /// // the original `String`.
2156 /// let inner = Rc::unwrap_or_clone(rc2);
2157 /// assert!(ptr::eq(ptr, inner.as_ptr()));
2158 /// ```
2159 #[inline]
2160 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
2161 pub fn unwrap_or_clone(this: Self) -> T {
2162 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
2163 }
2164}
2165
2166impl<A: Allocator> Rc<dyn Any, A> {
2167 /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
2168 ///
2169 /// # Examples
2170 ///
2171 /// ```
2172 /// use std::any::Any;
2173 /// use std::rc::Rc;
2174 ///
2175 /// fn print_if_string(value: Rc<dyn Any>) {
2176 /// if let Ok(string) = value.downcast::<String>() {
2177 /// println!("String ({}): {}", string.len(), string);
2178 /// }
2179 /// }
2180 ///
2181 /// let my_string = "Hello World".to_string();
2182 /// print_if_string(Rc::new(my_string));
2183 /// print_if_string(Rc::new(0i8));
2184 /// ```
2185 #[inline]
2186 #[stable(feature = "rc_downcast", since = "1.29.0")]
2187 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
2188 if (*self).is::<T>() {
2189 unsafe {
2190 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2191 Ok(Rc::from_inner_in(ptr.cast(), alloc))
2192 }
2193 } else {
2194 Err(self)
2195 }
2196 }
2197
2198 /// Downcasts the `Rc<dyn Any>` to a concrete type.
2199 ///
2200 /// For a safe alternative see [`downcast`].
2201 ///
2202 /// # Examples
2203 ///
2204 /// ```
2205 /// #![feature(downcast_unchecked)]
2206 ///
2207 /// use std::any::Any;
2208 /// use std::rc::Rc;
2209 ///
2210 /// let x: Rc<dyn Any> = Rc::new(1_usize);
2211 ///
2212 /// unsafe {
2213 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2214 /// }
2215 /// ```
2216 ///
2217 /// # Safety
2218 ///
2219 /// The contained value must be of type `T`. Calling this method
2220 /// with the incorrect type is *undefined behavior*.
2221 ///
2222 ///
2223 /// [`downcast`]: Self::downcast
2224 #[inline]
2225 #[unstable(feature = "downcast_unchecked", issue = "90850")]
2226 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2227 unsafe {
2228 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2229 Rc::from_inner_in(ptr.cast(), alloc)
2230 }
2231 }
2232}
2233
2234impl<T: ?Sized> Rc<T> {
2235 /// Allocates an `RcInner<T>` with sufficient space for
2236 /// a possibly-unsized inner value where the value has the layout provided.
2237 ///
2238 /// The function `mem_to_rc_inner` is called with the data pointer
2239 /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
2240 #[cfg(not(no_global_oom_handling))]
2241 unsafe fn allocate_for_layout(
2242 value_layout: Layout,
2243 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2244 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2245 ) -> *mut RcInner<T> {
2246 let layout = rc_inner_layout_for_value_layout(value_layout);
2247 unsafe {
2248 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2249 .unwrap_or_else(|_| handle_alloc_error(layout))
2250 }
2251 }
2252
2253 /// Allocates an `RcInner<T>` with sufficient space for
2254 /// a possibly-unsized inner value where the value has the layout provided,
2255 /// returning an error if allocation fails.
2256 ///
2257 /// The function `mem_to_rc_inner` is called with the data pointer
2258 /// and must return back a (potentially fat)-pointer for the `RcInner<T>`.
2259 #[inline]
2260 unsafe fn try_allocate_for_layout(
2261 value_layout: Layout,
2262 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2263 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2264 ) -> Result<*mut RcInner<T>, AllocError> {
2265 let layout = rc_inner_layout_for_value_layout(value_layout);
2266
2267 // Allocate for the layout.
2268 let ptr = allocate(layout)?;
2269
2270 // Initialize the RcInner
2271 let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2272 unsafe {
2273 debug_assert_eq!(Layout::for_value_raw(inner), layout);
2274
2275 (&raw mut (*inner).strong).write(Cell::new(1));
2276 (&raw mut (*inner).weak).write(Cell::new(1));
2277 }
2278
2279 Ok(inner)
2280 }
2281}
2282
2283impl<T: ?Sized, A: Allocator> Rc<T, A> {
2284 /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2285 #[cfg(not(no_global_oom_handling))]
2286 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2287 // Allocate for the `RcInner<T>` using the given value.
2288 unsafe {
2289 Rc::<T>::allocate_for_layout(
2290 Layout::for_value_raw(ptr),
2291 |layout| alloc.allocate(layout),
2292 |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2293 )
2294 }
2295 }
2296
2297 #[cfg(not(no_global_oom_handling))]
2298 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2299 unsafe {
2300 let value_size = size_of_val(&*src);
2301 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2302
2303 // Copy value as bytes
2304 ptr::copy_nonoverlapping(
2305 (&raw const *src) as *const u8,
2306 (&raw mut (*ptr).value) as *mut u8,
2307 value_size,
2308 );
2309
2310 // Free the allocation without dropping its contents
2311 let (bptr, alloc) = Box::into_raw_with_allocator(src);
2312 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2313 drop(src);
2314
2315 Self::from_ptr_in(ptr, alloc)
2316 }
2317 }
2318}
2319
2320impl<T> Rc<[T]> {
2321 /// Allocates an `RcInner<[T]>` with the given length.
2322 #[cfg(not(no_global_oom_handling))]
2323 unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2324 unsafe {
2325 Self::allocate_for_layout(
2326 Layout::array::<T>(len).unwrap(),
2327 |layout| Global.allocate(layout),
2328 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2329 )
2330 }
2331 }
2332
2333 /// Copy elements from slice into newly allocated `Rc<[T]>`
2334 ///
2335 /// Unsafe because the caller must either take ownership, bind `T: Copy` or
2336 /// bind `T: TrivialClone`.
2337 #[cfg(not(no_global_oom_handling))]
2338 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2339 unsafe {
2340 let ptr = Self::allocate_for_slice(v.len());
2341 ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2342 Self::from_ptr(ptr)
2343 }
2344 }
2345
2346 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2347 ///
2348 /// Behavior is undefined should the size be wrong.
2349 #[cfg(not(no_global_oom_handling))]
2350 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2351 // Panic guard while cloning T elements.
2352 // In the event of a panic, elements that have been written
2353 // into the new RcInner will be dropped, then the memory freed.
2354 struct Guard<T> {
2355 mem: NonNull<u8>,
2356 elems: *mut T,
2357 layout: Layout,
2358 n_elems: usize,
2359 }
2360
2361 impl<T> Drop for Guard<T> {
2362 fn drop(&mut self) {
2363 unsafe {
2364 let slice = from_raw_parts_mut(self.elems, self.n_elems);
2365 ptr::drop_in_place(slice);
2366
2367 Global.deallocate(self.mem, self.layout);
2368 }
2369 }
2370 }
2371
2372 unsafe {
2373 let ptr = Self::allocate_for_slice(len);
2374
2375 let mem = ptr as *mut _ as *mut u8;
2376 let layout = Layout::for_value_raw(ptr);
2377
2378 // Pointer to first element
2379 let elems = (&raw mut (*ptr).value) as *mut T;
2380
2381 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2382
2383 for (i, item) in iter.enumerate() {
2384 ptr::write(elems.add(i), item);
2385 guard.n_elems += 1;
2386 }
2387
2388 // All clear. Forget the guard so it doesn't free the new RcInner.
2389 mem::forget(guard);
2390
2391 Self::from_ptr(ptr)
2392 }
2393 }
2394}
2395
2396impl<T, A: Allocator> Rc<[T], A> {
2397 /// Allocates an `RcInner<[T]>` with the given length.
2398 #[inline]
2399 #[cfg(not(no_global_oom_handling))]
2400 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2401 unsafe {
2402 Rc::<[T]>::allocate_for_layout(
2403 Layout::array::<T>(len).unwrap(),
2404 |layout| alloc.allocate(layout),
2405 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2406 )
2407 }
2408 }
2409}
2410
2411#[cfg(not(no_global_oom_handling))]
2412/// Specialization trait used for `From<&[T]>`.
2413trait RcFromSlice<T> {
2414 fn from_slice(slice: &[T]) -> Self;
2415}
2416
2417#[cfg(not(no_global_oom_handling))]
2418impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2419 #[inline]
2420 default fn from_slice(v: &[T]) -> Self {
2421 unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2422 }
2423}
2424
2425#[cfg(not(no_global_oom_handling))]
2426impl<T: TrivialClone> RcFromSlice<T> for Rc<[T]> {
2427 #[inline]
2428 fn from_slice(v: &[T]) -> Self {
2429 // SAFETY: `T` implements `TrivialClone`, so this is sound and equivalent
2430 // to the above.
2431 unsafe { Rc::copy_from_slice(v) }
2432 }
2433}
2434
2435#[stable(feature = "rust1", since = "1.0.0")]
2436impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2437 type Target = T;
2438
2439 #[inline(always)]
2440 fn deref(&self) -> &T {
2441 &self.inner().value
2442 }
2443}
2444
2445#[unstable(feature = "pin_coerce_unsized_trait", issue = "150112")]
2446unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2447
2448//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2449#[unstable(feature = "pin_coerce_unsized_trait", issue = "150112")]
2450unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2451
2452#[unstable(feature = "deref_pure_trait", issue = "87121")]
2453unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2454
2455//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2456#[unstable(feature = "deref_pure_trait", issue = "87121")]
2457unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2458
2459#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2460impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2461
2462#[stable(feature = "rust1", since = "1.0.0")]
2463unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2464 /// Drops the `Rc`.
2465 ///
2466 /// This will decrement the strong reference count. If the strong reference
2467 /// count reaches zero then the only other references (if any) are
2468 /// [`Weak`], so we `drop` the inner value.
2469 ///
2470 /// # Examples
2471 ///
2472 /// ```
2473 /// use std::rc::Rc;
2474 ///
2475 /// struct Foo;
2476 ///
2477 /// impl Drop for Foo {
2478 /// fn drop(&mut self) {
2479 /// println!("dropped!");
2480 /// }
2481 /// }
2482 ///
2483 /// let foo = Rc::new(Foo);
2484 /// let foo2 = Rc::clone(&foo);
2485 ///
2486 /// drop(foo); // Doesn't print anything
2487 /// drop(foo2); // Prints "dropped!"
2488 /// ```
2489 #[inline]
2490 fn drop(&mut self) {
2491 unsafe {
2492 self.inner().dec_strong();
2493 if self.inner().strong() == 0 {
2494 self.drop_slow();
2495 }
2496 }
2497 }
2498}
2499
2500#[stable(feature = "rust1", since = "1.0.0")]
2501impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2502 /// Makes a clone of the `Rc` pointer.
2503 ///
2504 /// This creates another pointer to the same allocation, increasing the
2505 /// strong reference count.
2506 ///
2507 /// # Examples
2508 ///
2509 /// ```
2510 /// use std::rc::Rc;
2511 ///
2512 /// let five = Rc::new(5);
2513 ///
2514 /// let _ = Rc::clone(&five);
2515 /// ```
2516 #[inline]
2517 fn clone(&self) -> Self {
2518 unsafe {
2519 self.inner().inc_strong();
2520 Self::from_inner_in(self.ptr, self.alloc.clone())
2521 }
2522 }
2523}
2524
2525#[unstable(feature = "ergonomic_clones", issue = "132290")]
2526impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2527
2528#[cfg(not(no_global_oom_handling))]
2529#[stable(feature = "rust1", since = "1.0.0")]
2530impl<T: Default> Default for Rc<T> {
2531 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2532 ///
2533 /// # Examples
2534 ///
2535 /// ```
2536 /// use std::rc::Rc;
2537 ///
2538 /// let x: Rc<i32> = Default::default();
2539 /// assert_eq!(*x, 0);
2540 /// ```
2541 #[inline]
2542 fn default() -> Self {
2543 unsafe {
2544 Self::from_inner(
2545 Box::leak(Box::write(
2546 Box::new_uninit(),
2547 RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2548 ))
2549 .into(),
2550 )
2551 }
2552 }
2553}
2554
2555#[cfg(not(no_global_oom_handling))]
2556#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2557impl Default for Rc<str> {
2558 /// Creates an empty `str` inside an `Rc`.
2559 ///
2560 /// This may or may not share an allocation with other Rcs on the same thread.
2561 #[inline]
2562 fn default() -> Self {
2563 let rc = Rc::<[u8]>::default();
2564 // `[u8]` has the same layout as `str`.
2565 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2566 }
2567}
2568
2569#[cfg(not(no_global_oom_handling))]
2570#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2571impl<T> Default for Rc<[T]> {
2572 /// Creates an empty `[T]` inside an `Rc`.
2573 ///
2574 /// This may or may not share an allocation with other Rcs on the same thread.
2575 #[inline]
2576 fn default() -> Self {
2577 let arr: [T; 0] = [];
2578 Rc::from(arr)
2579 }
2580}
2581
2582#[cfg(not(no_global_oom_handling))]
2583#[stable(feature = "pin_default_impls", since = "1.91.0")]
2584impl<T> Default for Pin<Rc<T>>
2585where
2586 T: ?Sized,
2587 Rc<T>: Default,
2588{
2589 #[inline]
2590 fn default() -> Self {
2591 unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2592 }
2593}
2594
2595#[stable(feature = "rust1", since = "1.0.0")]
2596trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2597 fn eq(&self, other: &Rc<T, A>) -> bool;
2598 fn ne(&self, other: &Rc<T, A>) -> bool;
2599}
2600
2601#[stable(feature = "rust1", since = "1.0.0")]
2602impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2603 #[inline]
2604 default fn eq(&self, other: &Rc<T, A>) -> bool {
2605 **self == **other
2606 }
2607
2608 #[inline]
2609 default fn ne(&self, other: &Rc<T, A>) -> bool {
2610 **self != **other
2611 }
2612}
2613
2614// Hack to allow specializing on `Eq` even though `Eq` has a method.
2615#[rustc_unsafe_specialization_marker]
2616pub(crate) trait MarkerEq: PartialEq<Self> {}
2617
2618impl<T: Eq> MarkerEq for T {}
2619
2620/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2621/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
2622/// store large values, that are slow to clone, but also heavy to check for equality, causing this
2623/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
2624/// the same value, than two `&T`s.
2625///
2626/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2627#[stable(feature = "rust1", since = "1.0.0")]
2628impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2629 #[inline]
2630 fn eq(&self, other: &Rc<T, A>) -> bool {
2631 Rc::ptr_eq(self, other) || **self == **other
2632 }
2633
2634 #[inline]
2635 fn ne(&self, other: &Rc<T, A>) -> bool {
2636 !Rc::ptr_eq(self, other) && **self != **other
2637 }
2638}
2639
2640#[stable(feature = "rust1", since = "1.0.0")]
2641impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2642 /// Equality for two `Rc`s.
2643 ///
2644 /// Two `Rc`s are equal if their inner values are equal, even if they are
2645 /// stored in different allocation.
2646 ///
2647 /// If `T` also implements `Eq` (implying reflexivity of equality),
2648 /// two `Rc`s that point to the same allocation are
2649 /// always equal.
2650 ///
2651 /// # Examples
2652 ///
2653 /// ```
2654 /// use std::rc::Rc;
2655 ///
2656 /// let five = Rc::new(5);
2657 ///
2658 /// assert!(five == Rc::new(5));
2659 /// ```
2660 #[inline]
2661 fn eq(&self, other: &Rc<T, A>) -> bool {
2662 RcEqIdent::eq(self, other)
2663 }
2664
2665 /// Inequality for two `Rc`s.
2666 ///
2667 /// Two `Rc`s are not equal if their inner values are not equal.
2668 ///
2669 /// If `T` also implements `Eq` (implying reflexivity of equality),
2670 /// two `Rc`s that point to the same allocation are
2671 /// always equal.
2672 ///
2673 /// # Examples
2674 ///
2675 /// ```
2676 /// use std::rc::Rc;
2677 ///
2678 /// let five = Rc::new(5);
2679 ///
2680 /// assert!(five != Rc::new(6));
2681 /// ```
2682 #[inline]
2683 fn ne(&self, other: &Rc<T, A>) -> bool {
2684 RcEqIdent::ne(self, other)
2685 }
2686}
2687
2688#[stable(feature = "rust1", since = "1.0.0")]
2689impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2690
2691#[stable(feature = "rust1", since = "1.0.0")]
2692impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2693 /// Partial comparison for two `Rc`s.
2694 ///
2695 /// The two are compared by calling `partial_cmp()` on their inner values.
2696 ///
2697 /// # Examples
2698 ///
2699 /// ```
2700 /// use std::rc::Rc;
2701 /// use std::cmp::Ordering;
2702 ///
2703 /// let five = Rc::new(5);
2704 ///
2705 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2706 /// ```
2707 #[inline(always)]
2708 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2709 (**self).partial_cmp(&**other)
2710 }
2711
2712 /// Less-than comparison for two `Rc`s.
2713 ///
2714 /// The two are compared by calling `<` on their inner values.
2715 ///
2716 /// # Examples
2717 ///
2718 /// ```
2719 /// use std::rc::Rc;
2720 ///
2721 /// let five = Rc::new(5);
2722 ///
2723 /// assert!(five < Rc::new(6));
2724 /// ```
2725 #[inline(always)]
2726 fn lt(&self, other: &Rc<T, A>) -> bool {
2727 **self < **other
2728 }
2729
2730 /// 'Less than or equal to' comparison for two `Rc`s.
2731 ///
2732 /// The two are compared by calling `<=` on their inner values.
2733 ///
2734 /// # Examples
2735 ///
2736 /// ```
2737 /// use std::rc::Rc;
2738 ///
2739 /// let five = Rc::new(5);
2740 ///
2741 /// assert!(five <= Rc::new(5));
2742 /// ```
2743 #[inline(always)]
2744 fn le(&self, other: &Rc<T, A>) -> bool {
2745 **self <= **other
2746 }
2747
2748 /// Greater-than comparison for two `Rc`s.
2749 ///
2750 /// The two are compared by calling `>` on their inner values.
2751 ///
2752 /// # Examples
2753 ///
2754 /// ```
2755 /// use std::rc::Rc;
2756 ///
2757 /// let five = Rc::new(5);
2758 ///
2759 /// assert!(five > Rc::new(4));
2760 /// ```
2761 #[inline(always)]
2762 fn gt(&self, other: &Rc<T, A>) -> bool {
2763 **self > **other
2764 }
2765
2766 /// 'Greater than or equal to' comparison for two `Rc`s.
2767 ///
2768 /// The two are compared by calling `>=` on their inner values.
2769 ///
2770 /// # Examples
2771 ///
2772 /// ```
2773 /// use std::rc::Rc;
2774 ///
2775 /// let five = Rc::new(5);
2776 ///
2777 /// assert!(five >= Rc::new(5));
2778 /// ```
2779 #[inline(always)]
2780 fn ge(&self, other: &Rc<T, A>) -> bool {
2781 **self >= **other
2782 }
2783}
2784
2785#[stable(feature = "rust1", since = "1.0.0")]
2786impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2787 /// Comparison for two `Rc`s.
2788 ///
2789 /// The two are compared by calling `cmp()` on their inner values.
2790 ///
2791 /// # Examples
2792 ///
2793 /// ```
2794 /// use std::rc::Rc;
2795 /// use std::cmp::Ordering;
2796 ///
2797 /// let five = Rc::new(5);
2798 ///
2799 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2800 /// ```
2801 #[inline]
2802 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2803 (**self).cmp(&**other)
2804 }
2805}
2806
2807#[stable(feature = "rust1", since = "1.0.0")]
2808impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2809 fn hash<H: Hasher>(&self, state: &mut H) {
2810 (**self).hash(state);
2811 }
2812}
2813
2814#[stable(feature = "rust1", since = "1.0.0")]
2815impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2816 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2817 fmt::Display::fmt(&**self, f)
2818 }
2819}
2820
2821#[stable(feature = "rust1", since = "1.0.0")]
2822impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2823 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2824 fmt::Debug::fmt(&**self, f)
2825 }
2826}
2827
2828#[stable(feature = "rust1", since = "1.0.0")]
2829impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2830 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2831 fmt::Pointer::fmt(&(&raw const **self), f)
2832 }
2833}
2834
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "from_for_ptrs", since = "1.6.0")]
impl<T> From<T> for Rc<T> {
    /// Converts a generic type `T` into an `Rc<T>`
    ///
    /// The conversion allocates on the heap and moves `t`
    /// from the stack into it.
    ///
    /// # Example
    /// ```rust
    /// # use std::rc::Rc;
    /// let x = 5;
    /// let rc = Rc::new(5);
    ///
    /// assert_eq!(Rc::from(x), rc);
    /// ```
    fn from(t: T) -> Self {
        // `Rc::new` performs the single heap allocation.
        Rc::new(t)
    }
}
2855
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_array", since = "1.74.0")]
impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
    /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
    ///
    /// The conversion moves the array into a newly allocated `Rc`.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: [i32; 3] = [1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: [T; N]) -> Rc<[T]> {
        // Build an `Rc<[T; N]>` first; it then unsizes to `Rc<[T]>` at the
        // return boundary.
        Rc::<[T; N]>::from(v)
    }
}
2876
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: Clone> From<&[T]> for Rc<[T]> {
    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: &[i32] = &[1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &[T]) -> Rc<[T]> {
        // `RcFromSlice` is a specialization helper defined elsewhere in
        // this file; it picks the best copy strategy for `T`.
        <Self as RcFromSlice<T>>::from_slice(v)
    }
}
2895
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
impl<T: Clone> From<&mut [T]> for Rc<[T]> {
    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let mut original = [1, 2, 3];
    /// let original: &mut [i32] = &mut original;
    /// let shared: Rc<[i32]> = Rc::from(original);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: &mut [T]) -> Rc<[T]> {
        // Reborrow as a shared slice and defer to the `From<&[T]>` impl.
        Rc::from(&*v)
    }
}
2915
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl From<&str> for Rc<str> {
    /// Allocates a reference-counted string slice and copies `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let shared: Rc<str> = Rc::from("statue");
    /// assert_eq!("statue", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &str) -> Rc<str> {
        // Copy the bytes into an `Rc<[u8]>`, then reinterpret the fat
        // pointer as `Rc<str>`.
        let rc = Rc::<[u8]>::from(v.as_bytes());
        // SAFETY: `str` has the same layout as `[u8]`, and the bytes were
        // copied from a valid `&str`, so they are valid UTF-8.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
    }
}
2934
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
impl From<&mut str> for Rc<str> {
    /// Allocates a reference-counted string slice and copies `v` into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let mut original = String::from("statue");
    /// let original: &mut str = &mut original;
    /// let shared: Rc<str> = Rc::from(original);
    /// assert_eq!("statue", &shared[..]);
    /// ```
    #[inline]
    fn from(v: &mut str) -> Rc<str> {
        // Reborrow as `&str` and defer to the `From<&str>` impl.
        Rc::from(&*v)
    }
}
2954
2955#[cfg(not(no_global_oom_handling))]
2956#[stable(feature = "shared_from_slice", since = "1.21.0")]
2957impl From<String> for Rc<str> {
2958 /// Allocates a reference-counted string slice and copies `v` into it.
2959 ///
2960 /// # Example
2961 ///
2962 /// ```
2963 /// # use std::rc::Rc;
2964 /// let original: String = "statue".to_owned();
2965 /// let shared: Rc<str> = Rc::from(original);
2966 /// assert_eq!("statue", &shared[..]);
2967 /// ```
2968 #[inline]
2969 fn from(v: String) -> Rc<str> {
2970 Rc::from(&v[..])
2971 }
2972}
2973
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
    /// Move a boxed object to a new, reference counted, allocation.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let original: Box<i32> = Box::new(1);
    /// let shared: Rc<i32> = Rc::from(original);
    /// assert_eq!(1, *shared);
    /// ```
    #[inline]
    fn from(v: Box<T, A>) -> Rc<T, A> {
        // Delegates to the internal `from_box_in` helper (defined elsewhere
        // in this file).
        Rc::from_box_in(v)
    }
}
2992
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
    /// Allocates a reference-counted slice and moves `v`'s items into it.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let unique: Vec<i32> = vec![1, 2, 3];
    /// let shared: Rc<[i32]> = Rc::from(unique);
    /// assert_eq!(&[1, 2, 3], &shared[..]);
    /// ```
    #[inline]
    fn from(v: Vec<T, A>) -> Rc<[T], A> {
        unsafe {
            // Take the vector apart so nothing is dropped automatically.
            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();

            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
            // Move (bitwise-copy) the elements into the new allocation;
            // ownership transfers here, so they must not be dropped below.
            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);

            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
            // without dropping its contents or the allocator
            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);

            Self::from_ptr_in(rc_ptr, alloc)
        }
    }
}
3022
#[stable(feature = "shared_from_cow", since = "1.45.0")]
impl<'a, B> From<Cow<'a, B>> for Rc<B>
where
    B: ToOwned + ?Sized,
    Rc<B>: From<&'a B> + From<B::Owned>,
{
    /// Creates a reference-counted pointer from a clone-on-write pointer by
    /// copying its content.
    ///
    /// # Example
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// # use std::borrow::Cow;
    /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
    /// let shared: Rc<str> = Rc::from(cow);
    /// assert_eq!("eggplant", &shared[..]);
    /// ```
    #[inline]
    fn from(cow: Cow<'a, B>) -> Rc<B> {
        // Either way the data ends up copied/moved into a fresh `Rc`
        // allocation; the two arms just pick the matching `From` impl.
        match cow {
            Cow::Borrowed(s) => Rc::from(s),
            Cow::Owned(s) => Rc::from(s),
        }
    }
}
3049
#[stable(feature = "shared_from_str", since = "1.62.0")]
impl From<Rc<str>> for Rc<[u8]> {
    /// Converts a reference-counted string slice into a byte slice.
    ///
    /// This is a pointer reinterpretation: no data is copied and the
    /// reference counts are unchanged.
    ///
    /// # Example
    ///
    /// ```
    /// # use std::rc::Rc;
    /// let string: Rc<str> = Rc::from("eggplant");
    /// let bytes: Rc<[u8]> = Rc::from(string);
    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
    /// ```
    #[inline]
    fn from(rc: Rc<str>) -> Self {
        // SAFETY: `str` has the same layout as `[u8]`.
        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
    }
}
3068
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
    type Error = Rc<[T], A>;

    /// Converts an `Rc<[T]>` into an `Rc<[T; N]>` when the slice has exactly
    /// `N` elements; otherwise returns the input unchanged as the error.
    ///
    /// On success this is just a pointer cast: no allocation is performed and
    /// the reference counts are unchanged.
    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
            // SAFETY: the length was just checked, so the allocation has the
            // layout of an `RcInner<[T; N]>`.
            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
        } else {
            Err(boxed_slice)
        }
    }
}
3082
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_iter", since = "1.37.0")]
impl<T> FromIterator<T> for Rc<[T]> {
    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
    ///
    /// # Performance characteristics
    ///
    /// ## The general case
    ///
    /// In the general case, collecting into `Rc<[T]>` is done by first
    /// collecting into a `Vec<T>`. That is, when writing the following:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// this behaves as if we wrote:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
    /// ```
    ///
    /// This will allocate as many times as needed for constructing the `Vec<T>`
    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
    ///
    /// ## Iterators of known length
    ///
    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
    /// a single allocation will be made for the `Rc<[T]>`. For example:
    ///
    /// ```rust
    /// # use std::rc::Rc;
    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
    /// ```
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        // Defer to the `ToRcSlice` specialization trait below, which picks
        // the single-allocation path for `TrustedLen` iterators.
        ToRcSlice::to_rc_slice(iter.into_iter())
    }
}
3128
/// Specialization trait used for collecting into `Rc<[T]>`.
#[cfg(not(no_global_oom_handling))]
trait ToRcSlice<T>: Iterator<Item = T> + Sized {
    // Consumes the iterator and produces a reference-counted slice.
    fn to_rc_slice(self) -> Rc<[T]>;
}
3134
#[cfg(not(no_global_oom_handling))]
impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
    default fn to_rc_slice(self) -> Rc<[T]> {
        // Fallback path: buffer into a `Vec` first, then convert
        // (one extra allocation plus whatever the `Vec` needed).
        self.collect::<Vec<T>>().into()
    }
}
3141
#[cfg(not(no_global_oom_handling))]
impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
    // Specialized path: a `TrustedLen` iterator reports its exact length,
    // so the slice can be allocated once, up front.
    fn to_rc_slice(self) -> Rc<[T]> {
        // This is the case for a `TrustedLen` iterator.
        let (low, high) = self.size_hint();
        if let Some(high) = high {
            debug_assert_eq!(
                low,
                high,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );

            unsafe {
                // SAFETY: We need to ensure that the iterator has an exact length and we have.
                Rc::from_iter_exact(self, low)
            }
        } else {
            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
            // length exceeding `usize::MAX`.
            // The default implementation would collect into a vec which would panic.
            // Thus we panic here immediately without invoking `Vec` code.
            panic!("capacity overflow");
        }
    }
}
3168
/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
/// managed allocation.
///
/// The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Rc`] pointers, since mutual owning references
/// would never allow either [`Rc`] to be dropped. For example, a tree could
/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
#[rustc_diagnostic_item = "RcWeak"]
pub struct Weak<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because RcInner has alignment at least 2.
    ptr: NonNull<RcInner<T>>,
    // The allocator the backing `RcInner` was (or will never be, for a
    // dangling `Weak`) allocated with; used for the final deallocation.
    alloc: A,
}
3205
// `Weak` shares `Rc`'s non-atomic reference counts, so it can be neither
// sent to nor shared with another thread.
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}

// Permits unsizing coercions such as `Weak<[T; N]>` -> `Weak<[T]>` and
// `Weak<T>` -> `Weak<dyn Trait>`.
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}

// Permits `self: Weak<Self>` receivers to be dispatched on trait objects.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}

// SAFETY: `Weak::clone` doesn't access any `Cell`s which could contain the `Weak` being cloned.
#[unstable(feature = "cell_get_cloned", issue = "145329")]
unsafe impl<T: ?Sized> CloneFromCell for Weak<T> {}
3220
impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[inline]
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
    #[must_use]
    pub const fn new() -> Weak<T> {
        // `usize::MAX` is the dangling sentinel: no real `RcInner` can live
        // at that address because its alignment is at least 2.
        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
    }
}
3243
impl<T, A: Allocator> Weak<T, A> {
    /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
    /// allocator.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64, System> = Weak::new_in(System);
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_in(alloc: A) -> Weak<T, A> {
        // Same dangling sentinel as `Weak::new`; the allocator is stored but
        // never used unless this `Weak` is overwritten.
        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
    }
}
3265
/// Returns `true` when `ptr` carries the `usize::MAX` sentinel address used
/// by `Weak::new`, i.e. it never pointed at a real allocation.
pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
    // Discard the metadata (for wide pointers) and compare only the address.
    let addr = ptr.cast::<()>().addr();
    addr == usize::MAX
}
3269
/// Helper type to allow accessing the reference counts without
/// making any assertions about the data field.
struct WeakInner<'a> {
    // Borrow of the weak-count cell inside the `RcInner`.
    weak: &'a Cell<usize>,
    // Borrow of the strong-count cell inside the `RcInner`.
    strong: &'a Cell<usize>,
}
3276
impl<T: ?Sized> Weak<T> {
    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    #[inline]
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: the caller upholds `from_raw_in`'s contract; the pointer
        // came from the global allocator per this method's own contract.
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: Weak::from_raw
    /// [`as_ptr`]: Weak::as_ptr
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    pub fn into_raw(self) -> *const T {
        // `ManuallyDrop` suppresses `Weak::drop`, so the weak count this
        // handle owns is transferred to the returned raw pointer.
        mem::ManuallyDrop::new(self).as_ptr()
    }
}
3359
impl<T: ?Sized, A: Allocator> Weak<T, A> {
    /// Returns a reference to the underlying allocator.
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    /// use std::ptr;
    ///
    /// let strong = Rc::new("hello".to_owned());
    /// let weak = Rc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behavior.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: ptr::null
    #[must_use]
    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
    pub fn as_ptr(&self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);

        if is_dangling(ptr) {
            // If the pointer is dangling, we return the sentinel directly. This cannot be
            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
            ptr as *const T
        } else {
            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
            // The payload may be dropped at this point, and we have to maintain provenance,
            // so use raw pointer manipulation.
            unsafe { &raw mut (*ptr).value }
        }
    }

    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
    ///
    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
    /// one weak reference (the weak count is not modified by this operation). It can be turned
    /// back into the `Weak<T>` with [`from_raw_in`].
    ///
    /// The same restrictions of accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::{Rc, Weak};
    /// use std::alloc::System;
    ///
    /// let strong = Rc::new_in("hello".to_owned(), System);
    /// let weak = Rc::downgrade(&strong);
    /// let (raw, alloc) = weak.into_raw_with_allocator();
    ///
    /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
    /// assert_eq!(0, Rc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw_in`]: Weak::from_raw_in
    /// [`as_ptr`]: Weak::as_ptr
    #[must_use = "losing the pointer will leak memory"]
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn into_raw_with_allocator(self) -> (*const T, A) {
        let this = mem::ManuallyDrop::new(self);
        let result = this.as_ptr();
        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
        let alloc = unsafe { ptr::read(&this.alloc) };
        (result, alloc)
    }

    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
    /// as these don't own anything; the method still works on them).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from the [`into_raw`] and must still own its potential
    /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
    /// count is not modified by this operation) and therefore it must be paired with a previous
    /// call to [`into_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let strong = Rc::new("hello".to_owned());
    ///
    /// let raw_1 = Rc::downgrade(&strong).into_raw();
    /// let raw_2 = Rc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Rc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Rc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`into_raw`]: Weak::into_raw
    /// [`upgrade`]: Weak::upgrade
    /// [`new`]: Weak::new
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
        // See Weak::as_ptr for context on how the input pointer is derived.

        let ptr = if is_dangling(ptr) {
            // This is a dangling Weak.
            ptr as *mut RcInner<T>
        } else {
            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
            let offset = unsafe { data_offset(ptr) };
            // Thus, we reverse the offset to get the whole RcInner.
            // SAFETY: the pointer originated from a Weak, so this offset is safe.
            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
        };

        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
    }

    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
    /// dropping of the inner value if successful.
    ///
    /// Returns [`None`] if the inner value has since been dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// let weak_five = Rc::downgrade(&five);
    ///
    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
    /// assert!(strong_five.is_some());
    ///
    /// // Destroy all strong pointers.
    /// drop(strong_five);
    /// drop(five);
    ///
    /// assert!(weak_five.upgrade().is_none());
    /// ```
    #[must_use = "this returns a new `Rc`, \
                  without modifying the original weak pointer"]
    #[stable(feature = "rc_weak", since = "1.4.0")]
    pub fn upgrade(&self) -> Option<Rc<T, A>>
    where
        A: Clone,
    {
        // `inner()` is `None` for a dangling `Weak` (created by `Weak::new`).
        let inner = self.inner()?;

        if inner.strong() == 0 {
            // The value has already been dropped; only the allocation remains.
            None
        } else {
            unsafe {
                inner.inc_strong();
                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
            }
        }
    }

    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
    ///
    /// If `self` was created using [`Weak::new`], this will return 0.
    #[must_use]
    #[stable(feature = "weak_counts", since = "1.41.0")]
    pub fn strong_count(&self) -> usize {
        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
    }

    /// Gets the number of `Weak` pointers pointing to this allocation.
    ///
    /// If no strong pointers remain, this will return zero.
    #[must_use]
    #[stable(feature = "weak_counts", since = "1.41.0")]
    pub fn weak_count(&self) -> usize {
        if let Some(inner) = self.inner() {
            if inner.strong() > 0 {
                inner.weak() - 1 // subtract the implicit weak ptr
            } else {
                0
            }
        } else {
            0
        }
    }

    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`,
    /// (i.e., when this `Weak` was created by `Weak::new`).
    #[inline]
    fn inner(&self) -> Option<WeakInner<'_>> {
        if is_dangling(self.ptr.as_ptr()) {
            None
        } else {
            // We are careful to *not* create a reference covering the "data" field, as
            // the field may be mutated concurrently (for example, if the last `Rc`
            // is dropped, the data field will be dropped in-place).
            Some(unsafe {
                let ptr = self.ptr.as_ptr();
                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
            })
        }
    }

    /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
    /// both don't point to any allocation (because they were created with `Weak::new()`). However,
    /// this function ignores the metadata of `dyn Trait` pointers.
    ///
    /// # Notes
    ///
    /// Since this compares pointers it means that `Weak::new()` will equal each
    /// other, even though they don't point to any allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let first_rc = Rc::new(5);
    /// let first = Rc::downgrade(&first_rc);
    /// let second = Rc::downgrade(&first_rc);
    ///
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(5);
    /// let third = Rc::downgrade(&third_rc);
    ///
    /// assert!(!first.ptr_eq(&third));
    /// ```
    ///
    /// Comparing `Weak::new`.
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let first = Weak::new();
    /// let second = Weak::new();
    /// assert!(first.ptr_eq(&second));
    ///
    /// let third_rc = Rc::new(());
    /// let third = Rc::downgrade(&third_rc);
    /// assert!(!first.ptr_eq(&third));
    /// ```
    #[inline]
    #[must_use]
    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        // `addr_eq` compares addresses only, ignoring wide-pointer metadata.
        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }
}
3644
#[stable(feature = "rc_weak", since = "1.4.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // A dangling `Weak` (from `Weak::new`) owns no allocation: nothing to do.
        let inner = if let Some(inner) = self.inner() { inner } else { return };

        inner.dec_weak();
        // the weak count starts at 1, and will only go to zero if all
        // the strong pointers have disappeared.
        if inner.weak() == 0 {
            unsafe {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
3684
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
    /// Makes a clone of the `Weak` pointer that points to the same allocation.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let weak_five = Rc::downgrade(&Rc::new(5));
    ///
    /// let _ = Weak::clone(&weak_five);
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T, A> {
        // A dangling `Weak` has no counts to bump; otherwise take one more
        // weak reference.
        if let Some(inner) = self.inner() {
            inner.inc_weak()
        }
        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
    }
}
3706
// Opts `Weak` into ergonomic (`use`-style) cloning; cloning only bumps a count.
#[unstable(feature = "ergonomic_clones", issue = "132290")]
impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3709
3710#[stable(feature = "rc_weak", since = "1.4.0")]
3711impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3712 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3713 write!(f, "(Weak)")
3714 }
3715}
3716
#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: Weak::upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        // Identical to `Weak::new`: a dangling sentinel, no allocation.
        Weak::new()
    }
}
3736
3737// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3738// is not decremented, meaning the ref-count can overflow, and then you can
3739// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3740// unsound. We abort because this is such a degenerate scenario that we don't
3741// care about what happens -- no real program should ever experience this.
3742//
3743// This should have negligible overhead since you don't actually need to
3744// clone these much in Rust thanks to ownership and move-semantics.
3745
#[doc(hidden)]
trait RcInnerPtr {
    // Accessors for the two reference-count cells; implemented both for the
    // real `RcInner` and for the borrowed `WeakInner` view.
    fn weak_ref(&self) -> &Cell<usize>;
    fn strong_ref(&self) -> &Cell<usize>;

    /// Current strong count.
    #[inline]
    fn strong(&self) -> usize {
        self.strong_ref().get()
    }

    /// Increments the strong count, aborting the process on overflow.
    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We insert an `assume` here to hint LLVM at an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(strong != 0);
        }

        let strong = strong.wrapping_add(1);
        self.strong_ref().set(strong);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(strong == 0) {
            abort();
        }
    }

    /// Decrements the strong count. Callers must ensure the count is
    /// nonzero; otherwise the subtraction underflows.
    #[inline]
    fn dec_strong(&self) {
        self.strong_ref().set(self.strong() - 1);
    }

    /// Current weak count (includes the implicit weak held by the strong refs).
    #[inline]
    fn weak(&self) -> usize {
        self.weak_ref().get()
    }

    /// Increments the weak count, aborting the process on overflow.
    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We insert an `assume` here to hint LLVM at an otherwise
        // missed optimization.
        // SAFETY: The reference count will never be zero when this is
        // called.
        unsafe {
            hint::assert_unchecked(weak != 0);
        }

        let weak = weak.wrapping_add(1);
        self.weak_ref().set(weak);

        // We want to abort on overflow instead of dropping the value.
        // Checking for overflow after the store instead of before
        // allows for slightly better code generation.
        if core::intrinsics::unlikely(weak == 0) {
            abort();
        }
    }

    /// Decrements the weak count. Callers must ensure the count is
    /// nonzero; otherwise the subtraction underflows.
    #[inline]
    fn dec_weak(&self) {
        self.weak_ref().set(self.weak() - 1);
    }
}
3817
// `RcInner` stores both counts inline, so these are simple field projections.
impl<T: ?Sized> RcInnerPtr for RcInner<T> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        &self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        &self.strong
    }
}
3829
// `WeakInner` already holds borrowed references to the count cells, so these
// just pass them through.
impl<'a> RcInnerPtr for WeakInner<'a> {
    #[inline(always)]
    fn weak_ref(&self) -> &Cell<usize> {
        self.weak
    }

    #[inline(always)]
    fn strong_ref(&self) -> &Cell<usize> {
        self.strong
    }
}
3841
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
    // Borrows the inner value by going through `Deref`.
    fn borrow(&self) -> &T {
        &**self
    }
}
3848
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
    // Borrows the inner value by going through `Deref`.
    fn as_ref(&self) -> &T {
        &**self
    }
}
3855
// Moving the `Rc` handle never moves the heap-allocated value it points to,
// so `Rc` is always `Unpin` regardless of `T`.
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3858
/// Gets the offset within an `RcInner` for the payload behind a pointer.
///
/// Returns the byte offset from the start of the `RcInner` allocation to the
/// `value` field, for a value with the alignment of `*ptr`.
///
/// # Safety
///
/// The pointer must point to (and have valid metadata for) a previously
/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
    // Align the unsized value to the end of the RcInner.
    // Because RcInner is repr(C), it will always be the last field in memory.
    // SAFETY: since the only unsized types possible are slices, trait objects,
    // and extern types, the input safety requirement is currently enough to
    // satisfy the requirements of Alignment::of_val_raw; this is an implementation
    // detail of the language that must not be relied upon outside of std.
    unsafe { data_offset_alignment(Alignment::of_val_raw(ptr)) }
}
3874
3875#[inline]
3876fn data_offset_alignment(alignment: Alignment) -> usize {
3877 let layout = Layout::new::<RcInner<()>>();
3878 layout.size() + layout.padding_needed_for(alignment)
3879}
3880
/// A uniquely owned [`Rc`].
///
/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
///
/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
/// use case is to have an object be mutable during its initialization phase but then have it become
/// immutable and converted to a normal `Rc`.
///
/// This can be used as a flexible way to create cyclic data structures, as in the example below.
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
/// }
///
/// fn create_gadget() -> Option<Rc<Gadget>> {
///     let mut rc = UniqueRc::new(Gadget {
///         me: Weak::new(),
///     });
///     rc.me = UniqueRc::downgrade(&rc);
///     Some(UniqueRc::into_rc(rc))
/// }
///
/// create_gadget().unwrap();
/// ```
///
/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
/// including fallible or async constructors.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
pub struct UniqueRc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Pointer to the shared `RcInner` holding the counts and the value.
    ptr: NonNull<RcInner<T>>,
    // Define the ownership of `RcInner<T>` for drop-check
    _marker: PhantomData<RcInner<T>>,
    // Invariance is necessary for soundness: once other `Weak`
    // references exist, we already have a form of shared mutability!
    _marker2: PhantomData<*mut T>,
    // Allocator used to create (and eventually free) the backing allocation.
    alloc: A,
}
3930
// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages. (Like `Rc`, the non-atomic counts
// make cross-thread use unsound.)
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3936
// Not necessary for correctness since `UniqueRc` contains `NonNull`,
// but having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages. (Like `Rc`, the non-atomic counts
// make cross-thread use unsound.)
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3942
// Allow unsizing coercions such as `UniqueRc<[i32; 3]>` -> `UniqueRc<[i32]>`,
// mirroring the corresponding impl for `Rc`.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
    for UniqueRc<T, A>
{
}
3948
//#[unstable(feature = "unique_rc_arc", issue = "112566")]
// Lets `self: UniqueRc<Self>` work as an object-safe method receiver,
// mirroring the corresponding impl for `Rc`.
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3952
3953#[unstable(feature = "unique_rc_arc", issue = "112566")]
3954impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3955 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3956 fmt::Display::fmt(&**self, f)
3957 }
3958}
3959
3960#[unstable(feature = "unique_rc_arc", issue = "112566")]
3961impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3962 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3963 fmt::Debug::fmt(&**self, f)
3964 }
3965}
3966
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
    // Prints the address of the inner value, not of the `UniqueRc` handle.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&raw const **self), f)
    }
}
3973
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
    // Borrows the inner value by going through `Deref`.
    fn borrow(&self) -> &T {
        &**self
    }
}
3980
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
    // Mutable borrows are fine here: unlike `Rc`, a `UniqueRc` is the sole
    // strong owner, so `DerefMut` is available.
    fn borrow_mut(&mut self) -> &mut T {
        &mut **self
    }
}
3987
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
    // Borrows the inner value by going through `Deref`.
    fn as_ref(&self) -> &T {
        &**self
    }
}
3994
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
    // Mutable access is sound because a `UniqueRc` is the sole strong owner.
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}
4001
// Moving the `UniqueRc` handle never moves the heap-allocated value,
// so `UniqueRc` is always `Unpin`, just like `Rc`.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
4004
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T> From<T> for UniqueRc<T> {
    /// Converts a `T` into a `UniqueRc<T>`.
    ///
    /// The conversion moves the value into a newly allocated `UniqueRc`,
    /// equivalent to calling [`UniqueRc::new`].
    #[inline(always)]
    fn from(value: T) -> Self {
        Self::new(value)
    }
}
4013
4014#[unstable(feature = "unique_rc_arc", issue = "112566")]
4015impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
4016 /// Equality for two `UniqueRc`s.
4017 ///
4018 /// Two `UniqueRc`s are equal if their inner values are equal.
4019 ///
4020 /// # Examples
4021 ///
4022 /// ```
4023 /// #![feature(unique_rc_arc)]
4024 /// use std::rc::UniqueRc;
4025 ///
4026 /// let five = UniqueRc::new(5);
4027 ///
4028 /// assert!(five == UniqueRc::new(5));
4029 /// ```
4030 #[inline]
4031 fn eq(&self, other: &Self) -> bool {
4032 PartialEq::eq(&**self, &**other)
4033 }
4034
4035 /// Inequality for two `UniqueRc`s.
4036 ///
4037 /// Two `UniqueRc`s are not equal if their inner values are not equal.
4038 ///
4039 /// # Examples
4040 ///
4041 /// ```
4042 /// #![feature(unique_rc_arc)]
4043 /// use std::rc::UniqueRc;
4044 ///
4045 /// let five = UniqueRc::new(5);
4046 ///
4047 /// assert!(five != UniqueRc::new(6));
4048 /// ```
4049 #[inline]
4050 fn ne(&self, other: &Self) -> bool {
4051 PartialEq::ne(&**self, &**other)
4052 }
4053}
4054
4055#[unstable(feature = "unique_rc_arc", issue = "112566")]
4056impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
4057 /// Partial comparison for two `UniqueRc`s.
4058 ///
4059 /// The two are compared by calling `partial_cmp()` on their inner values.
4060 ///
4061 /// # Examples
4062 ///
4063 /// ```
4064 /// #![feature(unique_rc_arc)]
4065 /// use std::rc::UniqueRc;
4066 /// use std::cmp::Ordering;
4067 ///
4068 /// let five = UniqueRc::new(5);
4069 ///
4070 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
4071 /// ```
4072 #[inline(always)]
4073 fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
4074 (**self).partial_cmp(&**other)
4075 }
4076
4077 /// Less-than comparison for two `UniqueRc`s.
4078 ///
4079 /// The two are compared by calling `<` on their inner values.
4080 ///
4081 /// # Examples
4082 ///
4083 /// ```
4084 /// #![feature(unique_rc_arc)]
4085 /// use std::rc::UniqueRc;
4086 ///
4087 /// let five = UniqueRc::new(5);
4088 ///
4089 /// assert!(five < UniqueRc::new(6));
4090 /// ```
4091 #[inline(always)]
4092 fn lt(&self, other: &UniqueRc<T, A>) -> bool {
4093 **self < **other
4094 }
4095
4096 /// 'Less than or equal to' comparison for two `UniqueRc`s.
4097 ///
4098 /// The two are compared by calling `<=` on their inner values.
4099 ///
4100 /// # Examples
4101 ///
4102 /// ```
4103 /// #![feature(unique_rc_arc)]
4104 /// use std::rc::UniqueRc;
4105 ///
4106 /// let five = UniqueRc::new(5);
4107 ///
4108 /// assert!(five <= UniqueRc::new(5));
4109 /// ```
4110 #[inline(always)]
4111 fn le(&self, other: &UniqueRc<T, A>) -> bool {
4112 **self <= **other
4113 }
4114
4115 /// Greater-than comparison for two `UniqueRc`s.
4116 ///
4117 /// The two are compared by calling `>` on their inner values.
4118 ///
4119 /// # Examples
4120 ///
4121 /// ```
4122 /// #![feature(unique_rc_arc)]
4123 /// use std::rc::UniqueRc;
4124 ///
4125 /// let five = UniqueRc::new(5);
4126 ///
4127 /// assert!(five > UniqueRc::new(4));
4128 /// ```
4129 #[inline(always)]
4130 fn gt(&self, other: &UniqueRc<T, A>) -> bool {
4131 **self > **other
4132 }
4133
4134 /// 'Greater than or equal to' comparison for two `UniqueRc`s.
4135 ///
4136 /// The two are compared by calling `>=` on their inner values.
4137 ///
4138 /// # Examples
4139 ///
4140 /// ```
4141 /// #![feature(unique_rc_arc)]
4142 /// use std::rc::UniqueRc;
4143 ///
4144 /// let five = UniqueRc::new(5);
4145 ///
4146 /// assert!(five >= UniqueRc::new(5));
4147 /// ```
4148 #[inline(always)]
4149 fn ge(&self, other: &UniqueRc<T, A>) -> bool {
4150 **self >= **other
4151 }
4152}
4153
4154#[unstable(feature = "unique_rc_arc", issue = "112566")]
4155impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
4156 /// Comparison for two `UniqueRc`s.
4157 ///
4158 /// The two are compared by calling `cmp()` on their inner values.
4159 ///
4160 /// # Examples
4161 ///
4162 /// ```
4163 /// #![feature(unique_rc_arc)]
4164 /// use std::rc::UniqueRc;
4165 /// use std::cmp::Ordering;
4166 ///
4167 /// let five = UniqueRc::new(5);
4168 ///
4169 /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
4170 /// ```
4171 #[inline]
4172 fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
4173 (**self).cmp(&**other)
4174 }
4175}
4176
// `UniqueRc` equality delegates to the inner value, so `Eq` holds whenever
// `T: Eq`.
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
4179
4180#[unstable(feature = "unique_rc_arc", issue = "112566")]
4181impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
4182 fn hash<H: Hasher>(&self, state: &mut H) {
4183 (**self).hash(state);
4184 }
4185}
4186
// Depends on A = Global
impl<T> UniqueRc<T> {
    /// Creates a new `UniqueRc`.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new(value: T) -> Self {
        Self::new_in(value, Global)
    }

    /// Maps the value in a `UniqueRc`, reusing the allocation if possible.
    ///
    /// `f` is called with the value moved out of the `UniqueRc` (it takes ownership), and the
    /// result is returned, also in a `UniqueRc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    /// #![feature(unique_rc_arc)]
    ///
    /// use std::rc::UniqueRc;
    ///
    /// let r = UniqueRc::new(7);
    /// let new = UniqueRc::map(r, |i| i + 7);
    /// assert_eq!(*new, 14);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
        // Fast path: the allocation can only be reused when `U` fits exactly
        // where `T` was, and no outstanding `Weak`s could observe the
        // in-place rewrite of the value.
        if size_of::<T>() == size_of::<U>()
            && align_of::<T>() == align_of::<U>()
            && UniqueRc::weak_count(&this) == 0
        {
            unsafe {
                let ptr = UniqueRc::into_raw(this);
                // Move the value out; the slot is logically uninitialized
                // from here until the `write` below.
                let value = ptr.read();
                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

                allocation.write(f(value));
                allocation.assume_init()
            }
        } else {
            // Layouts differ (or weak refs exist): take the value out and
            // put the result in a fresh allocation.
            UniqueRc::new(f(UniqueRc::unwrap(this)))
        }
    }

    /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
    ///
    /// `f` is called with the value moved out of the `UniqueRc` (it takes ownership), and if the
    /// operation succeeds, the result is returned, also in a `UniqueRc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    /// #![feature(unique_rc_arc)]
    ///
    /// use std::rc::UniqueRc;
    ///
    /// let b = UniqueRc::new(7);
    /// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
    /// assert_eq!(*new, 7);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn try_map<R>(
        this: Self,
        f: impl FnOnce(T) -> R,
    ) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
    where
        R: Try,
        R::Residual: Residual<UniqueRc<R::Output>>,
    {
        // Same fast-path conditions as `map`: matching layout and no `Weak`s.
        if size_of::<T>() == size_of::<R::Output>()
            && align_of::<T>() == align_of::<R::Output>()
            && UniqueRc::weak_count(&this) == 0
        {
            unsafe {
                let ptr = UniqueRc::into_raw(this);
                let value = ptr.read();
                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

                // On failure, `?` drops `allocation` (which frees the memory
                // without dropping the moved-out value) and propagates the
                // residual.
                allocation.write(f(value)?);
                try { allocation.assume_init() }
            }
        } else {
            try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
        }
    }

    /// Takes the value out of the `UniqueRc`, releasing the backing
    /// allocation through the implicit weak reference.
    #[cfg(not(no_global_oom_handling))]
    fn unwrap(this: Self) -> T {
        // Prevent `UniqueRc::drop` from running: it would drop the value we
        // are about to move out.
        let this = ManuallyDrop::new(this);
        // SAFETY: we are the sole owner of the value and `this` is never
        // dereferenced again after this read.
        let val: T = unsafe { ptr::read(&**this) };

        // Hand the implicit weak reference over to a real `Weak`, whose drop
        // releases the allocation without touching the (moved-out) value.
        let _weak = Weak { ptr: this.ptr, alloc: Global };

        val
    }
}
4300
impl<T: ?Sized> UniqueRc<T> {
    /// Reconstructs a `UniqueRc<T>` from a raw payload pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by [`UniqueRc::into_raw`], and ownership
    /// of the allocation must be transferred back exactly once.
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: upheld by the caller; `ptr` points at the `value` field of
        // a live `RcInner<T>`.
        let offset = unsafe { data_offset(ptr) };

        // Reverse the offset to find the original RcInner.
        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

        Self {
            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
            _marker: PhantomData,
            _marker2: PhantomData,
            alloc: Global,
        }
    }

    /// Consumes the `UniqueRc` and returns a raw pointer to the inner value.
    /// The allocation is not freed; pass the pointer back to
    /// [`UniqueRc::from_raw`] to reclaim ownership.
    #[cfg(not(no_global_oom_handling))]
    fn into_raw(this: Self) -> *const T {
        // Suppress drop; ownership of the allocation now lives in the pointer.
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }
}
4323
impl<T, A: Allocator> UniqueRc<T, A> {
    /// Creates a new `UniqueRc` in the provided allocator.
    ///
    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
    /// point to the new [`Rc`].
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn new_in(value: T, alloc: A) -> Self {
        let (ptr, alloc) = Box::into_unique(Box::new_in(
            RcInner {
                // The strong count stays 0 until `into_rc` sets it to 1,
                // which is what keeps weak upgrades failing while unique.
                strong: Cell::new(0),
                // keep one weak reference so if all the weak pointers that are created are dropped
                // the UniqueRc still stays valid.
                weak: Cell::new(1),
                value,
            },
            alloc,
        ));
        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
    }
}
4347
impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
    /// Converts the `UniqueRc` into a regular [`Rc`].
    ///
    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the `value` that
    /// is passed to `into_rc`.
    ///
    /// Any weak references created before this method is called can now be upgraded to strong
    /// references.
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn into_rc(this: Self) -> Rc<T, A> {
        // Suppress `UniqueRc::drop`; ownership moves into the returned `Rc`.
        let mut this = ManuallyDrop::new(this);

        // Move the allocator out.
        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
        // a `ManuallyDrop`.
        let alloc: A = unsafe { ptr::read(&this.alloc) };

        // SAFETY: This pointer was allocated at creation time so we know it is valid.
        unsafe {
            // Convert our weak reference into a strong reference
            this.ptr.as_mut().strong.set(1);
            Rc::from_inner_in(this.ptr, alloc)
        }
    }

    /// Returns the number of `Weak` pointers to this allocation, excluding
    /// the implicit weak reference held by the `UniqueRc` itself.
    #[cfg(not(no_global_oom_handling))]
    fn weak_count(this: &Self) -> usize {
        this.inner().weak() - 1
    }

    /// Borrows the backing `RcInner`.
    #[cfg(not(no_global_oom_handling))]
    fn inner(&self) -> &RcInner<T> {
        // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    /// Returns a raw pointer to the inner value without touching the counts.
    #[cfg(not(no_global_oom_handling))]
    fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }

    /// Disassembles the `UniqueRc` into its raw parts without running `Drop`.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        // SAFETY: `this` is never dropped, so the allocator is read out exactly once.
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    /// Reassembles a `UniqueRc` from raw parts.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a live `RcInner<T>` allocated by `alloc`, and
    /// ownership of that allocation must transfer to the returned `UniqueRc`.
    #[inline]
    #[cfg(not(no_global_oom_handling))]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
    }
}
4407
impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
    /// Creates a new weak reference to the `UniqueRc`.
    ///
    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
    /// to a [`Rc`] using [`UniqueRc::into_rc`].
    #[unstable(feature = "unique_rc_arc", issue = "112566")]
    pub fn downgrade(this: &Self) -> Weak<T, A> {
        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
        // one strong reference before converting to a regular Rc.
        // Note: the weak count starts at 1 (the implicit self-reference made
        // in `new_in`), so this bump accounts for the new `Weak` only.
        unsafe {
            this.ptr.as_ref().inc_weak();
        }
        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
    }
}
4423
#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
    /// Converts to `UniqueRc<T, A>`, reusing the allocation.
    ///
    /// # Safety
    ///
    /// The contained `MaybeUninit<T>` must have been fully initialized.
    unsafe fn assume_init(self) -> UniqueRc<T, A> {
        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
        // SAFETY: `MaybeUninit<T>` and `T` have the same layout, and the
        // caller guarantees the value is initialized.
        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
    }
}
4431
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
    type Target = T;

    // Shared access needs no count bookkeeping.
    fn deref(&self) -> &T {
        // SAFETY: This pointer was allocated at creation time so we know it is valid.
        unsafe { &self.ptr.as_ref().value }
    }
}
4441
#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
    // Unlike `Rc`, mutable access is always available: uniqueness is
    // guaranteed by construction.
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
        // have unique ownership and therefore it's safe to make a mutable reference because
        // `UniqueRc` owns the only strong reference to itself.
        unsafe { &mut (*self.ptr.as_ptr()).value }
    }
}
4451
#[unstable(feature = "unique_rc_arc", issue = "112566")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
    fn drop(&mut self) {
        // The strong count is still 0 here (it only becomes 1 in `into_rc`),
        // so only the value and the implicit weak reference need cleanup.
        unsafe {
            // destroy the contained object
            drop_in_place(DerefMut::deref_mut(self));

            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
            self.ptr.as_ref().dec_weak();

            // If no other `Weak`s remain, free the backing allocation too.
            if self.ptr.as_ref().weak() == 0 {
                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
            }
        }
    }
}
4468
/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
/// but will deallocate it (without dropping the value) when dropped.
///
/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
/// which `MaybeUninit` does not.
struct UniqueRcUninit<T: ?Sized, A: Allocator> {
    ptr: NonNull<RcInner<T>>,
    // Layout of the *value* (not the whole `RcInner`); used to recompute the
    // payload offset and the allocation layout on deallocation.
    layout_for_value: Layout,
    // `Option` so `into_rc` can move the allocator out while `Drop` can still
    // take it on the panic path.
    alloc: Option<A>,
}
4480
impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it.
    #[cfg(not(no_global_oom_handling))]
    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
        let layout = Layout::for_value(for_value);
        let ptr = unsafe {
            Rc::allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                // Carry over `for_value`'s pointer metadata (e.g. slice length)
                // onto the freshly allocated memory.
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )
        };
        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
    }

    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it,
    /// returning an error if allocation fails.
    fn try_new(for_value: &T, alloc: A) -> Result<UniqueRcUninit<T, A>, AllocError> {
        let layout = Layout::for_value(for_value);
        let ptr = unsafe {
            Rc::try_allocate_for_layout(
                layout,
                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
            )?
        };
        Ok(Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) })
    }

    /// Returns the pointer to be written into to initialize the [`Rc`].
    fn data_ptr(&mut self) -> *mut T {
        let offset = data_offset_alignment(self.layout_for_value.alignment());
        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
    }

    /// Upgrade this into a normal [`Rc`].
    ///
    /// # Safety
    ///
    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
    unsafe fn into_rc(self) -> Rc<T, A> {
        // `ManuallyDrop` prevents our `Drop` impl from deallocating the
        // now-owned-by-`Rc` memory.
        let mut this = ManuallyDrop::new(self);
        let ptr = this.ptr;
        let alloc = this.alloc.take().unwrap();

        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
        // for having initialized the data.
        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
    }
}
4531
impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
    fn drop(&mut self) {
        // SAFETY:
        // * new() produced a pointer safe to deallocate.
        // * We own the pointer unless into_rc() was called, which forgets us.
        // `alloc` is still `Some` here: `into_rc` wraps `self` in
        // `ManuallyDrop` before taking it, so this drop never runs after it.
        unsafe {
            self.alloc.take().unwrap().deallocate(
                self.ptr.cast(),
                rc_inner_layout_for_value_layout(self.layout_for_value),
            );
        }
    }
}
4545
// An `Rc` of an allocator is itself an allocator: every method delegates to
// the pointed-to allocator, so cheap `Rc` clones can share one allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl<T: ?Sized + Allocator, A: Allocator> Allocator for Rc<T, A> {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: the safety contract must be upheld by the caller
        unsafe { (**self).deallocate(ptr, layout) }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the safety contract must be upheld by the caller
        unsafe { (**self).grow(ptr, old_layout, new_layout) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the safety contract must be upheld by the caller
        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: the safety contract must be upheld by the caller
        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
    }
}