Lines matching refs:inner

15103 // Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec
15895 inner: Arc<i32>,
16109 inner: Weak<ZeroRefs>,
16111 let zero_refs = Arc::new_cyclic(|inner| {
16112 assert_eq!(inner.strong_count(), 0);
16113 assert!(inner.upgrade().is_none());
16114 ZeroRefs { inner: Weak::new() }
16119 assert_eq!(zero_refs.inner.strong_count(), 0);
16120 assert_eq!(zero_refs.inner.weak_count(), 0);
16126 inner: Weak<OneRef>,
16128 let one_ref = Arc::new_cyclic(|inner| {
16129 assert_eq!(inner.strong_count(), 0);
16130 assert!(inner.upgrade().is_none());
16131 OneRef { inner: inner.clone() }
16137 let one_ref2 = Weak::upgrade(&one_ref.inner).unwrap();
16150 let two_refs = Arc::new_cyclic(|inner| {
16151 assert_eq!(inner.strong_count(), 0);
16152 assert!(inner.upgrade().is_none());
16154 let inner1 = inner.clone();
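The matches above come from the `Arc::new_cyclic` tests (the `ZeroRefs`/`OneRef`/`TwoRefs` structs). As a rough sketch of the pattern those tests exercise, here is a minimal example; the `Node` type and its `self_ref` field are made up for illustration, while `Arc::new_cyclic`, `Weak::upgrade`, and `Weak::strong_count` are the real std APIs:

```rust
use std::sync::{Arc, Weak};

struct Node {
    // Weak back-reference to the node itself (illustrative field name).
    self_ref: Weak<Node>,
}

fn main() {
    let node = Arc::new_cyclic(|weak| {
        // While the closure runs, no strong reference exists yet,
        // so upgrading the provided Weak yields None.
        assert_eq!(weak.strong_count(), 0);
        assert!(weak.upgrade().is_none());
        Node { self_ref: weak.clone() }
    });

    // Once new_cyclic returns, the stored Weak upgrades normally.
    let again = node.self_ref.upgrade().unwrap();
    assert_eq!(Arc::strong_count(&again), 2);
}
```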
19503 //! referred to as "inner value") is also dropped.
19523 //! (the backing store for the inner value) alive.
19556 //! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
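The module docs matched above note that `Weak<T>` does not auto-dereference and that the inner value is dropped with the last strong reference. A minimal illustration (the integer payload is arbitrary):

```rust
use std::rc::{Rc, Weak};

fn main() {
    let strong = Rc::new(5);
    let weak: Weak<i32> = Rc::downgrade(&strong);

    // Weak does not implement Deref; the value is only reachable
    // through upgrade(), which returns Option<Rc<T>>.
    assert_eq!(*weak.upgrade().unwrap(), 5);

    drop(strong);
    // With the last strong reference gone, the inner value has been
    // dropped and upgrade() reports that by returning None.
    assert!(weak.upgrade().is_none());
}
```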
19773 // inner types.
19788 /// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
19811 fn inner(&self) -> &RcBox<T> {
19813 // that the inner pointer is valid.
19873 // Construct the inner in the "uninitialized" state with a single
19895 let inner = init_ptr.as_ptr();
19896 ptr::write(ptr::addr_of_mut!((*inner).value), data);
19898 let prev_value = (*inner).strong.get();
19900 (*inner).strong.set(1);
20070 /// Returns the inner value, if the `Rc` has exactly one strong reference.
20100 this.inner().dec_strong();
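For context on the `try_unwrap` lines above ("Returns the inner value, if the `Rc` has exactly one strong reference"), a small sketch of the documented behaviour, using arbitrary integer values:

```rust
use std::rc::Rc;

fn main() {
    let unique = Rc::new(3);
    // Exactly one strong reference: the inner value is moved out.
    assert_eq!(Rc::try_unwrap(unique), Ok(3));

    let shared = Rc::new(4);
    let _other = Rc::clone(&shared);
    // A second strong reference exists, so the Rc is handed back in Err.
    assert_eq!(*Rc::try_unwrap(shared).unwrap_err(), 4);
}
```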
20181 /// it is up to the caller to guarantee that the inner value
20220 /// it is up to the caller to guarantee that the inner value
20299 // SAFETY: This cannot go through Deref::deref or Rc::inner because
20366 this.inner().inc_weak();
20387 this.inner().weak() - 1
20405 this.inner().strong()
20489 /// the inner value when there are other pointers.
20575 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
20594 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
20595 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
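The `make_mut` doc comments above ("Won't clone inner data" / "Clones inner data") describe clone-on-write behaviour. A short sketch with arbitrary values:

```rust
use std::rc::Rc;

fn main() {
    let mut data = Rc::new(5);
    let other = Rc::clone(&data);

    // Shared, so make_mut clones the inner value into a new allocation
    // before returning &mut; `other` keeps seeing the old value.
    *Rc::make_mut(&mut data) += 1;

    assert_eq!(*data, 6);
    assert_eq!(*other, 5);

    // Now unique again: no further clone is needed.
    *Rc::make_mut(&mut data) += 1;
    assert_eq!(*data, 7);
}
```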
20639 this.inner().dec_strong();
20642 this.inner().dec_weak();
20689 /// a possibly-unsized inner value where the value has the layout provided.
20710 /// a possibly-unsized inner value where the value has the layout provided,
20731 let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
20733 debug_assert_eq!(Layout::for_value(&*inner), layout);
20735 ptr::write(&mut (*inner).strong, Cell::new(1));
20736 ptr::write(&mut (*inner).weak, Cell::new(1));
20739 Ok(inner)
20742 /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
20874 &self.inner().value
20887 /// [`Weak`], so we `drop` the inner value.
20910 self.inner().dec_strong();
20911 if self.inner().strong() == 0 {
20917 self.inner().dec_weak();
20919 if self.inner().weak() == 0 {
20945 self.inner().inc_strong();
21017 /// Two `Rc`s are equal if their inner values are equal, even if they are
21040 /// Two `Rc`s are unequal if their inner values are unequal.
21068 /// The two are compared by calling `partial_cmp()` on their inner values.
21087 /// The two are compared by calling `<` on their inner values.
21105 /// The two are compared by calling `<=` on their inner values.
21123 /// The two are compared by calling `>` on their inner values.
21141 /// The two are compared by calling `>=` on their inner values.
21162 /// The two are compared by calling `cmp()` on their inner values.
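The comparison docs matched above all delegate to the inner values. A brief illustration (arbitrary values, with `Rc::ptr_eq` shown for contrast, since it compares allocations rather than contents):

```rust
use std::rc::Rc;

fn main() {
    let five = Rc::new(5);

    // ==, <, etc. are forwarded to the inner values, so Rcs backed by
    // different allocations compare equal when their contents match.
    assert_eq!(five, Rc::new(5));
    assert!(five < Rc::new(6));

    // Allocation identity is a separate question, answered by ptr_eq.
    assert!(!Rc::ptr_eq(&five, &Rc::new(5)));
}
```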
21433 /// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
21635 /// dropping of the inner value if successful.
21637 /// Returns [`None`] if the inner value has since been dropped.
21659 let inner = self.inner()?;
21660 if inner.strong() == 0 {
21663 inner.inc_strong();
21673 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
21681 self.inner()
21682 .map(|inner| {
21683 if inner.strong() > 0 {
21684 inner.weak() - 1 // subtract the implicit weak ptr
21695 fn inner(&self) -> Option<WeakInner<'_>> {
21784 let inner = if let Some(inner) = self.inner() { inner } else { return };
21786 inner.dec_weak();
21789 if inner.weak() == 0 {
21812 if let Some(inner) = self.inner() {
21813 inner.inc_weak()
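The `Weak::strong_count`/`weak_count` lines above (returning 0 once the value is gone, and subtracting the implicit weak pointer) are observable from safe code; a sketch with an arbitrary payload:

```rust
use std::rc::Rc;

fn main() {
    let strong = Rc::new("hello");
    let weak = Rc::downgrade(&strong);

    // One explicit weak handle; the implicit weak kept by the strong
    // side is not counted.
    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 1);

    drop(strong);

    // Once no strong references remain, both counts observed through
    // the Weak read as zero, matching the listing above.
    assert_eq!(weak.strong_count(), 0);
    assert_eq!(weak.weak_count(), 0);
}
```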
23338 /// referred to as "inner value") is also dropped.
23427 /// [`Weak<T>`][Weak] does not auto-dereference to `T`, because the inner value may have
23532 /// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
23571 // inner types.
23633 // Construct the inner in the "uninitialized" state with a single
23653 // Now we can properly initialize the inner value and turn our weak
23656 let inner = init_ptr.as_ptr();
23657 ptr::write(ptr::addr_of_mut!((*inner).data), data);
23671 let prev_value = (*inner).strong.fetch_add(1, Release);
23844 /// Returns the inner value, if the `Arc` has exactly one strong reference.
23866 if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
23870 acquire!(this.inner().strong);
23954 /// it is up to the caller to guarantee that the inner value
23993 /// it is up to the caller to guarantee that the inner value
24070 // SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because
24140 let mut cur = this.inner().weak.load(Relaxed);
24146 cur = this.inner().weak.load(Relaxed);
24157 match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
24191 let cnt = this.inner().weak.load(SeqCst);
24220 this.inner().strong.load(SeqCst)
24295 fn inner(&self) -> &ArcInner<T> {
24297 // that the inner pointer is valid. Furthermore, we know that the
24298 // `ArcInner` structure itself is `Sync` because the inner data is
24341 /// a possibly-unsized inner value where the value has the layout provided.
24362 /// a possibly-unsized inner value where the value has the layout provided,
24381 let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr());
24382 debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
24385 ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
24386 ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
24389 Ok(inner)
24392 /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
24549 let old_size = self.inner().strong.fetch_add(1, Relaxed);
24574 &self.inner().data
24585 /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
24605 /// let mut other_data = Arc::clone(&data); // Won't clone inner data
24606 /// *Arc::make_mut(&mut data) += 1; // Clones inner data
24625 if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
24634 } else if this.inner().weak.load(Relaxed) != 1 {
24661 this.inner().strong.store(1, Release);
24678 /// the inner value when there are other pointers.
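The `Arc::make_mut`/`get_mut` lines above describe the same clone-on-write split for `Arc`: `get_mut` refuses when other pointers exist, `make_mut` clones instead. A small sketch with arbitrary values:

```rust
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(10);

    // Unique ownership: get_mut hands out &mut directly.
    *Arc::get_mut(&mut data).unwrap() += 1;

    let other = Arc::clone(&data);

    // Shared: get_mut declines, while make_mut clones the inner value
    // so this handle gets its own allocation.
    assert!(Arc::get_mut(&mut data).is_none());
    *Arc::make_mut(&mut data) += 1;

    assert_eq!(*data, 12);
    assert_eq!(*other, 11);
}
```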
24703 // reference to the inner data.
24757 if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
24761 let unique = self.inner().strong.load(Acquire) == 1;
24766 self.inner().weak.store(1, Release); // release the lock
24780 /// [`Weak`], so we `drop` the inner value.
24806 if self.inner().strong.fetch_sub(1, Release) != 1 {
24838 acquire!(self.inner().strong);
25049 /// dropping of the inner value if successful.
25051 /// Returns [`None`] if the inner value has since been dropped.
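The `Weak::upgrade` matches above are from the `Arc` side. A sketch showing an upgrade performed on another thread (the 42 payload is arbitrary):

```rust
use std::sync::{Arc, Weak};
use std::thread;

fn main() {
    let strong = Arc::new(42);
    let weak: Weak<i32> = Arc::downgrade(&strong);

    // The worker only holds a Weak and must upgrade to reach the value.
    let handle = thread::spawn(move || weak.upgrade().map(|arc| *arc));

    // `strong` is still alive until after join, so the upgrade succeeds;
    // had it been dropped first, the closure could observe None instead.
    assert_eq!(handle.join().unwrap(), Some(42));
    drop(strong);
}
```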
25076 let inner = self.inner()?;
25082 let mut n = inner.strong.load(Relaxed);
25095 // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner
25098 match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
25110 if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
25126 self.inner()
25127 .map(|inner| {
25128 let weak = inner.weak.load(SeqCst);
25129 let strong = inner.strong.load(SeqCst);
25147 fn inner(&self) -> Option<WeakInner<'_>> {
25224 let inner = if let Some(inner) = self.inner() {
25225 inner
25233 let old_size = inner.weak.fetch_add(1, Relaxed);
25300 let inner = if let Some(inner) = self.inner() { inner } else { return };
25302 if inner.weak.fetch_sub(1, Release) == 1 {
25303 acquire!(inner.weak);
25351 /// Two `Arc`s are equal if their inner values are equal, even if they are
25373 /// Two `Arc`s are unequal if their inner values are unequal.
25397 /// The two are compared by calling `partial_cmp()` on their inner values.
25415 /// The two are compared by calling `<` on their inner values.
25432 /// The two are compared by calling `<=` on their inner values.
25449 /// The two are compared by calling `>` on their inner values.
25466 /// The two are compared by calling `>=` on their inner values.
25485 /// The two are compared by calling `cmp()` on their inner values.
26883 inner: Weak<ZeroRefs>,
26886 let zero_refs = Rc::new_cyclic(|inner| {
26887 assert_eq!(inner.strong_count(), 0);
26888 assert!(inner.upgrade().is_none());
26889 ZeroRefs { inner: Weak::new() }
26894 assert_eq!(zero_refs.inner.strong_count(), 0);
26895 assert_eq!(zero_refs.inner.weak_count(), 0);
26901 inner: Weak<OneRef>,
26904 let one_ref = Rc::new_cyclic(|inner| {
26905 assert_eq!(inner.strong_count(), 0);
26906 assert!(inner.upgrade().is_none());
26907 OneRef { inner: inner.clone() }
26913 let one_ref2 = Weak::upgrade(&one_ref.inner).unwrap();
26916 assert_eq!(one_ref.inner.strong_count(), 2);
26917 assert_eq!(one_ref.inner.weak_count(), 1);
26923 inner: Weak<TwoRefs>,
26927 let two_refs = Rc::new_cyclic(|inner| {
26928 assert_eq!(inner.strong_count(), 0);
26929 assert!(inner.upgrade().is_none());
26930 TwoRefs { inner: inner.clone(), inner1: inner.clone() }
26936 let two_ref3 = Weak::upgrade(&two_refs.inner).unwrap();
30113 let range = Range { inner: self.range.reborrow() };
30141 inner: Iter<'a, K, V>,
30159 inner: Iter<'a, K, V>,
30177 inner: IterMut<'a, K, V>,
30183 f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
30195 inner: IntoIter<K, V>,
30201 f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish()
30213 inner: IntoIter<K, V>,
30219 f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
30231 inner: LeafRange<marker::Immut<'a>, K, V>,
30249 inner: LeafRange<marker::ValMut<'a>, K, V>,
30258 let range = Range { inner: self.inner.reborrow() };
30839 Range { inner: root.reborrow().range_search(range) }
30841 Range { inner: LeafRange::none() }
30883 RangeMut { inner: root.borrow_valmut().range_search(range), _marker: PhantomData }
30885 RangeMut { inner: LeafRange::none(), _marker: PhantomData }
31010 DrainFilter { pred, inner: self.drain_filter_inner() }
31050 IntoKeys { inner: self.into_iter() }
31073 IntoValues { inner: self.into_iter() }
31318 self.inner.next().map(|(k, _)| k)
31322 self.inner.size_hint()
31341 self.inner.next_back().map(|(k, _)| k)
31348 self.inner.len()
31358 Keys { inner: self.inner.clone() }
31367 self.inner.next().map(|(_, v)| v)
31371 self.inner.size_hint()
31382 self.inner.next_back().map(|(_, v)| v)
31389 self.inner.len()
31399 Values { inner: self.inner.clone() }
31412 inner: DrainFilterInner<'a, K, V>,
31446 f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish()
31458 self.inner.next(&mut self.pred)
31462 self.inner.size_hint()
31515 if self.inner.is_empty() { None } else { Some(unsafe { self.next_unchecked() }) }
31536 self.inner.next().map(|(_, v)| v)
31540 self.inner.size_hint()
31551 self.inner.next_back().map(|(_, v)| v)
31558 self.inner.len()
31567 unsafe { self.inner.front.as_mut().unwrap_unchecked().next_unchecked() }
31576 self.inner.next().map(|(k, _)| k)
31580 self.inner.size_hint()
31599 self.inner.next_back().map(|(k, _)| k)
31606 self.inner.len()
31618 self.inner.next().map(|(_, v)| v)
31622 self.inner.size_hint()
31633 self.inner.next_back().map(|(_, v)| v)
31640 self.inner.len()
31650 if self.inner.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
31656 unsafe { self.inner.back.as_mut().unwrap_unchecked().next_back_unchecked() }
31666 Range { inner: LeafRange { front: self.inner.front, back: self.inner.back } }
31675 if self.inner.is_empty() { None } else { Some(unsafe { self.next_unchecked() }) }
31693 unsafe { self.inner.front.as_mut().unwrap_unchecked().next_unchecked() }
31699 Range { inner: self.inner.reborrow() }
31706 if self.inner.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
31715 unsafe { self.inner.back.as_mut().unwrap_unchecked().next_back_unchecked() }
31851 Iter { range: Range { inner: full_range }, length: self.length }
31853 Iter { range: Range { inner: LeafRange::none() }, length: 0 }
31884 range: RangeMut { inner: full_range, _marker: PhantomData },
31889 range: RangeMut { inner: LeafRange::none(), _marker: PhantomData },
31913 Keys { inner: self.iter() }
31934 Values { inner: self.iter() }
31960 ValuesMut { inner: self.iter_mut() }
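The `Keys`, `Values`, and `ValuesMut` matches above are thin wrappers over the map's entry iterator (the `inner` field). A short sketch with made-up entries:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut map = BTreeMap::new();
    map.insert(2, "b");
    map.insert(1, "a");

    // Keys and Values wrap the entry iterator and project out one side
    // of each (key, value) pair, in key order.
    let keys: Vec<_> = map.keys().copied().collect();
    let values: Vec<_> = map.values().copied().collect();
    assert_eq!(keys, [1, 2]);
    assert_eq!(values, ["a", "b"]);

    // ValuesMut uses the same machinery but yields &mut V.
    for v in map.values_mut() {
        *v = "z";
    }
    assert_eq!(map.values().copied().collect::<Vec<_>>(), ["z", "z"]);
}
```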
32296 inner: DifferenceInner<'a, T>,
32316 f.debug_tuple("Difference").field(&self.inner).finish()
32344 inner: IntersectionInner<'a, T>,
32364 f.debug_tuple("Intersection").field(&self.inner).finish()
32475 return Difference { inner: DifferenceInner::Iterate(self.iter()) };
32481 return Difference { inner: DifferenceInner::Iterate(self.iter()) };
32484 inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
32564 return Intersection { inner: IntersectionInner::Answer(None) };
32570 return Intersection { inner: IntersectionInner::Answer(None) };
32573 inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
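The `Difference`/`Intersection` matches above select an `inner` strategy by comparing the sets' min/max bounds and sizes, but the observable behaviour is simply the set operation. A sketch with made-up elements:

```rust
use std::collections::BTreeSet;

fn main() {
    let a: BTreeSet<i32> = [1, 2, 3, 4].into_iter().collect();
    let b: BTreeSet<i32> = [3, 4, 5].into_iter().collect();

    // Whatever internal strategy is picked, the results are the plain
    // set difference and intersection, in ascending order.
    let diff: Vec<_> = a.difference(&b).copied().collect();
    let both: Vec<_> = a.intersection(&b).copied().collect();

    assert_eq!(diff, [1, 2]);
    assert_eq!(both, [3, 4]);
}
```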
33143 DrainFilter { pred, inner: self.map.drain_filter_inner() }
33265 inner: super::map::DrainFilterInner<'a, T, ()>,
33285 f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish()
33299 self.inner.next(&mut mapped_pred).map(|(k, _)| k)
33303 self.inner.size_hint()
33560 inner: match &self.inner {
33578 match &mut self.inner {
33605 let (self_len, other_len) = match &self.inner {
33662 inner: match &self.inner {
33679 match &mut self.inner {
33702 match &self.inner {
40019 pub(crate) inner: VecDeque<T>,
40025 f.debug_tuple("IntoIter").field(&self.inner).finish()
40035 self.inner.pop_front()
40040 let len = self.inner.len();
40055 let idx = self.inner.wrap_add(self.inner.tail, idx);
40056 self.inner.buffer_read(idx)
40065 self.inner.pop_back()
40072 self.inner.is_empty()
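The `VecDeque` `IntoIter` matches above show the iterator popping from the front of the wrapped deque. A small sketch:

```rust
use std::collections::VecDeque;

fn main() {
    let deque: VecDeque<i32> = (1..=3).collect();

    // IntoIter stores the deque as its `inner` field and pops from the
    // front, so consuming iteration preserves front-to-back order.
    let collected: Vec<i32> = deque.into_iter().collect();
    assert_eq!(collected, [1, 2, 3]);
}
```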
43117 IntoIter { inner: self }
45028 DrainSorted { inner: self }
45108 IntoIterSorted { inner: self }
45592 inner: BinaryHeap<T>,
45601 self.inner.pop()
45606 let exact = self.inner.len();
45674 inner: &'a mut BinaryHeap<T>,
45685 while self.0.inner.pop().is_some() {}
45689 while let Some(item) = self.inner.pop() {
45703 self.inner.pop()
45708 let exact = self.inner.len();
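The `IntoIterSorted`/`DrainSorted` matches above wrap the heap as `inner` and call `pop()` repeatedly; the same largest-first order can be seen with a plain `pop` loop, sketched here with arbitrary values:

```rust
use std::collections::BinaryHeap;

fn main() {
    let mut heap = BinaryHeap::from([3, 1, 4, 1, 5]);

    // Popping until empty yields elements in descending order, which is
    // exactly what the sorted adapters in the listing produce.
    let mut drained = Vec::new();
    while let Some(x) = heap.pop() {
        drained.push(x);
    }
    assert_eq!(drained, [5, 4, 3, 1, 1]);
}
```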
46708 let inner = iterator.as_inner().as_into_iter();
46710 inner.buf.as_ptr(),
46711 inner.ptr,
46712 inner.buf.as_ptr() as *mut T,
46713 inner.end as *const T,
46714 inner.cap,
46778 let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
46793 let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
48025 /// the inner vectors were not freed prior to the `set_len` call:
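The `set_len` doc line above warns that shrinking the length does not run destructors for the skipped elements. A sketch of the leak it describes (the nested vectors are arbitrary; leaking is safe, just wasteful):

```rust
fn main() {
    let mut vec = vec![vec![1, 0, 0], vec![0, 1, 0], vec![0, 0, 1]];

    // SAFETY: shrinking the length to 0 never exposes uninitialized
    // elements. It does, however, skip the inner Vecs' destructors,
    // so their heap buffers are leaked rather than freed.
    unsafe {
        vec.set_len(0);
    }

    assert!(vec.is_empty());
}
```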
50094 pub(super) inner: *mut T,
50100 unsafe { self.dst.offset_from(self.inner) as usize }
50108 ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len()));
50998 /// is so that there is no conflict with a method on the inner type.
51043 /// is so that there is no conflict with a method on the inner type.
51103 /// is so that there is no conflict with a method on the inner type.
51124 /// is so that there is no conflict with a method on the inner type.
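The repeated "no conflict with a method on the inner type" docs above explain why `Box`'s operations are associated functions rather than methods. A sketch using `Box::leak` and `Box::from_raw` (the string contents are arbitrary):

```rust
fn main() {
    let boxed = Box::new(String::from("hi"));

    // Box's own operations are associated functions (Box::leak,
    // Box::into_raw, ...), so method calls on `boxed` always reach the
    // inner String through Deref instead of hitting something on Box.
    let leaked: &'static mut String = Box::leak(boxed);
    leaked.push_str(" there");
    assert_eq!(leaked.as_str(), "hi there");

    // Reclaim the leaked allocation so the example is leak-free.
    unsafe { drop(Box::from_raw(leaked)) };
}
```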
51620 // It's not possible to extract the inner Uniq directly from the Box,