Lines Matching defs:ptr

2031 use std::{mem, ptr};
2149 ptr::write_bytes(vp, 0, 1024);
6312 // ptr::offset because LLVM is the worst.
7610 let mut to_free: *mut Droppable = core::ptr::null_mut();
7658 let ptr = vec.as_ptr();
7661 assert_eq!(vec.as_ptr(), ptr);
7663 let ptr = &vec[1] as *const _;
7668 assert!(ptr != vec.as_ptr());
7991 // ptr::offset because LLVM is the worst.
8295 let null_raw_dyn = ptr_from_raw_parts(std::ptr::null_mut(), vtable);
8304 fn ptr_metadata(ptr: *mut dyn Fn()) -> *mut () {
8305 unsafe { std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable }
9774 // small rotations in large slice, uses ptr::copy
9809 // small rotations in large slice, uses ptr::copy
12370 // ptr::offset because LLVM is the worst.
13774 use std::ptr::NonNull;
13856 for &ptr in &pointers {
13858 (ptr.as_non_null_ptr().as_ptr() as usize) % align,
13865 for &ptr in &pointers {
13867 ptr.as_non_null_ptr(),
14009 use core::ptr::Unique;
14011 use core::ptr::{self, NonNull};
14032 fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
14034 fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
14075 /// let ptr = alloc(layout);
14077 /// *(ptr as *mut u16) = 42;
14078 /// assert_eq!(*(ptr as *mut u16), 42);
14080 /// dealloc(ptr, layout);
14103 pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
14104 unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
14121 pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
14122 unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
14145 /// let ptr = alloc_zeroed(layout);
14147 /// assert_eq!(*(ptr as *mut u16), 0);
14149 /// dealloc(ptr, layout);
14167 let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
14168 Ok(NonNull::slice_from_raw_parts(ptr, size))
14177 ptr: NonNull<u8>,
14198 let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
14199 let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
14203 Ok(NonNull::slice_from_raw_parts(ptr, new_size))
14213 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
14214 self.deallocate(ptr, old_layout);
14235 unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
14239 unsafe { dealloc(ptr.as_ptr(), layout) }
14246 ptr: NonNull<u8>,
14251 unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
14257 ptr: NonNull<u8>,
14262 unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
14268 ptr: NonNull<u8>,
14280 self.deallocate(ptr, old_layout);
14289 let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
14290 let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
14291 Ok(NonNull::slice_from_raw_parts(ptr, new_size))
14301 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
14302 self.deallocate(ptr, old_layout);
14317 Ok(ptr) => ptr.as_mut_ptr(),
14329 pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
14331 let size = size_of_val(ptr.as_ref());
14332 let align = min_align_of_val(ptr.as_ref());
14334 alloc.deallocate(ptr.cast().into(), layout)
14431 use core::ptr::{self, NonNull, Unique};
14460 /// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
14472 ptr: Unique<T>,
14522 /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
14525 /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
14527 pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
14528 unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
14551 Self { ptr: Unique::dangling(), cap: 0, alloc }
14597 let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
14598 Box::from_raw_in(slice, ptr::read(&me.alloc))
14620 let ptr = match result {
14621 Ok(ptr) => ptr,
14626 ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
14627 cap: Self::capacity_from_bytes(ptr.len()),
14637 /// The `ptr` must be allocated (via the given allocator `alloc`), and with the given
14641 /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is
14644 pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
14645 Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc }
14652 pub fn ptr(&self) -> *mut T {
14653 self.ptr.as_ptr()
14679 Some((self.ptr.cast().into(), layout))
14709 /// # use std::ptr;
14723 /// ptr::write(self.buf.ptr().add(self.len), x.clone());
14820 fn set_ptr(&mut self, ptr: NonNull<[u8]>) {
14821 self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
14822 self.cap = Self::capacity_from_bytes(ptr.len());
14853 let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
14854 self.set_ptr(ptr);
14872 let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
14873 self.set_ptr(ptr);
14880 let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
14883 let ptr = unsafe {
14885 self.alloc.shrink(ptr, layout, new_layout).map_err(|_| TryReserveError::AllocError {
14890 self.set_ptr(ptr);
14913 let memory = if let Some((ptr, old_layout)) = current_memory {
14918 alloc.grow(ptr, old_layout, new_layout)
14930 if let Some((ptr, layout)) = self.current_memory() {
14931 unsafe { self.alloc.deallocate(ptr, layout) }
15002 use core::ptr;
15679 let ptr = Arc::into_raw(arc.clone());
15680 let arc2 = unsafe { Arc::from_raw(ptr) };
15682 assert_eq!(unsafe { &*ptr }, "foo");
15687 let ptr = Arc::into_raw(arc.clone());
15688 let arc2 = unsafe { Arc::from_raw(ptr) };
15690 assert_eq!(unsafe { &*ptr }.to_string(), "123");
15720 let ptr = Weak::into_raw(weak.clone());
15721 let weak2 = unsafe { Weak::from_raw(ptr) };
15723 assert_eq!(unsafe { &*ptr }, "foo");
15729 let ptr = Weak::into_raw(weak.clone());
15730 let weak2 = unsafe { Weak::from_raw(ptr) };
15732 assert_eq!(unsafe { &*ptr }.to_string(), "123");
16222 use core::ptr;
16369 /// let ptr = story.as_mut_ptr();
16376 /// // We can re-build a String out of ptr, len, and capacity. This is all
16379 /// let s = unsafe { String::from_raw_parts(ptr, len, capacity) };
16827 /// let (ptr, len, cap) = s.into_raw_parts();
16829 /// let rebuilt = unsafe { String::from_raw_parts(ptr, len, cap) };
16873 /// let ptr = s.as_mut_ptr();
16877 /// let s = String::from_raw_parts(ptr, len, capacity);
17370 ptr::copy(self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next);
17421 ptr::copy(
17488 ptr::copy(
17544 ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
17545 ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
19483 #[cfg(target_has_atomic = "ptr")]
19485 #[cfg(target_has_atomic = "ptr")]
19758 use core::ptr::{self, NonNull};
19794 ptr: NonNull<RcBox<T>>,
19814 unsafe { self.ptr.as_ref() }
19817 fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
19818 Self { ptr, phantom: PhantomData }
19821 unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
19822 Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
19884 let weak = Weak { ptr: init_ptr };
19896 ptr::write(ptr::addr_of_mut!((*inner).value), data);
20094 let val = ptr::read(&*this); // copy the contained object
20101 let _weak = Weak { ptr: this.ptr };
20167 ptr::slice_from_raw_parts_mut(mem as *mut T, len)
20210 Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
20251 unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
20274 let ptr = Self::as_ptr(&this);
20276 ptr
20297 let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
20302 unsafe { ptr::addr_of_mut!((*ptr).value) }
20343 pub unsafe fn from_raw(ptr: *const T) -> Self {
20344 let offset = unsafe { data_offset(ptr) };
20348 unsafe { (ptr as *mut RcBox<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) };
20368 debug_assert!(!is_dangling(this.ptr.as_ptr()));
20369 Weak { ptr: this.ptr }
20425 /// let ptr = Rc::into_raw(five);
20426 /// Rc::increment_strong_count(ptr);
20428 /// let five = Rc::from_raw(ptr);
20434 pub unsafe fn increment_strong_count(ptr: *const T) {
20436 let rc = unsafe { mem::ManuallyDrop::new(Rc::<T>::from_raw(ptr)) };
20460 /// let ptr = Rc::into_raw(five);
20461 /// Rc::increment_strong_count(ptr);
20463 /// let five = Rc::from_raw(ptr);
20465 /// Rc::decrement_strong_count(ptr);
20471 pub unsafe fn decrement_strong_count(ptr: *const T) {
20472 unsafe { mem::drop(Rc::from_raw(ptr)) };
20544 unsafe { &mut (*this.ptr.as_ptr()).value }
20550 /// (in a vein similar to [`ptr::eq`]).
20565 /// [`ptr::eq`]: core::ptr::eq
20567 this.ptr.as_ptr() == other.ptr.as_ptr()
20643 ptr::write(this, rc.assume_init());
20651 unsafe { &mut this.ptr.as_mut().value }
20678 let ptr = self.ptr.cast::<RcBox<T>>();
20680 Ok(Rc::from_inner(ptr))
20700 // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
20723 // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
20728 let ptr = allocate(layout)?;
20731 let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
20735 ptr::write(&mut (*inner).strong, Cell::new(1));
20736 ptr::write(&mut (*inner).weak, Cell::new(1));
20743 unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
20747 Layout::for_value(&*ptr),
20749 |mem| (ptr as *mut RcBox<T>).set_ptr_value(mem),
20760 let ptr = Self::allocate_for_ptr(bptr);
20763 ptr::copy_nonoverlapping(
20765 &mut (*ptr).value as *mut _ as *mut u8,
20772 Self::from_ptr(ptr)
20784 |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
20794 let ptr = Self::allocate_for_slice(v.len());
20795 ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
20796 Self::from_ptr(ptr)
20818 ptr::drop_in_place(slice);
20826 let ptr = Self::allocate_for_slice(len);
20828 let mem = ptr as *mut _ as *mut u8;
20829 let layout = Layout::for_value(&*ptr);
20832 let elems = &mut (*ptr).value as *mut [T] as *mut T;
20837 ptr::write(elems.add(i), item);
20844 Self::from_ptr(ptr)
20913 ptr::drop_in_place(Self::get_mut_unchecked(self));
20920 Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
20946 Self::from_inner(self.ptr)
21450 ptr: NonNull<RcBox<T>>,
21480 Weak { ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0") }
21484 pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
21485 let address = ptr as *mut () as usize;
21506 /// use std::ptr;
21511 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
21521 /// [`null`]: core::ptr::null
21524 let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
21526 if is_dangling(ptr) {
21529 ptr as *const T
21534 unsafe { ptr::addr_of_mut!((*ptr).value) }
21615 pub unsafe fn from_raw(ptr: *const T) -> Self {
21618 let ptr = if is_dangling(ptr as *mut T) {
21620 ptr as *mut RcBox<T>
21623 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
21624 let offset = unsafe { data_offset(ptr) };
21627 unsafe { (ptr as *mut RcBox<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) }
21631 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
21664 Some(Rc::from_inner(self.ptr))
21684 inner.weak() - 1 // subtract the implicit weak ptr
21696 if is_dangling(self.ptr.as_ptr()) {
21703 let ptr = self.ptr.as_ptr();
21704 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
21710 /// [`ptr::eq`]), or if both don't point to any allocation
21749 /// [`ptr::eq`]: core::ptr::eq
21753 self.ptr.as_ptr() == other.ptr.as_ptr()
21791 Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
21815 Weak { ptr: self.ptr }
21957 unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
21964 unsafe { data_offset_align(align_of_val_raw(ptr)) }
22059 use core::ptr;
22185 // ptr::copy_to_non_overlapping below.
22517 ptr::copy_nonoverlapping(
22538 ptr::copy_nonoverlapping(
22832 let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
22845 ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
22851 ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
22867 ptr::copy_nonoverlapping(self.src, self.dest, 1);
22910 ptr::copy_nonoverlapping(v, buf, mid);
22928 ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
22934 ptr::copy_nonoverlapping(v_mid, buf, len - mid);
22952 ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
22959 unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
22960 let old = *ptr;
22961 *ptr = unsafe { ptr.offset(1) };
22965 unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
22966 *ptr = unsafe { ptr.offset(-1) };
22967 *ptr
22982 ptr::copy_nonoverlapping(self.start, self.dest, len);
23290 use core::ptr::{self, NonNull};
23496 ptr: NonNull<ArcInner<T>>,
23512 fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
23513 Self { ptr, phantom: PhantomData }
23516 unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
23517 unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
23549 ptr: NonNull<ArcInner<T>>,
23643 let weak = Weak { ptr: init_ptr };
23657 ptr::write(ptr::addr_of_mut!((*inner).data), data);
23873 let elem = ptr::read(&this.ptr.as_ref().data);
23876 let _weak = Weak { ptr: this.ptr };
23940 ptr::slice_from_raw_parts_mut(mem as *mut T, len)
23983 Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
24024 unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
24045 let ptr = Self::as_ptr(&this);
24047 ptr
24068 let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
24073 unsafe { ptr::addr_of_mut!((*ptr).data) }
24114 pub unsafe fn from_raw(ptr: *const T) -> Self {
24116 let offset = data_offset(ptr);
24119 let arc_ptr = (ptr as *mut ArcInner<T>).set_ptr_value((ptr as *mut u8).offset(-offset));
24160 debug_assert!(!is_dangling(this.ptr.as_ptr()));
24161 return Weak { ptr: this.ptr };
24240 /// let ptr = Arc::into_raw(five);
24241 /// Arc::increment_strong_count(ptr);
24245 /// let five = Arc::from_raw(ptr);
24251 pub unsafe fn increment_strong_count(ptr: *const T) {
24253 let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
24277 /// let ptr = Arc::into_raw(five);
24278 /// Arc::increment_strong_count(ptr);
24282 /// let five = Arc::from_raw(ptr);
24284 /// Arc::decrement_strong_count(ptr);
24290 pub unsafe fn decrement_strong_count(ptr: *const T) {
24291 unsafe { mem::drop(Arc::from_raw(ptr)) };
24301 unsafe { self.ptr.as_ref() }
24309 unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
24312 drop(Weak { ptr: self.ptr });
24318 /// (in a vein similar to [`ptr::eq`]).
24333 /// [`ptr::eq`]: core::ptr::eq
24335 this.ptr.as_ptr() == other.ptr.as_ptr()
24352 // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
24374 // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
24378 let ptr = allocate(layout)?;
24381 let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr());
24385 ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
24386 ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
24393 unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
24397 Layout::for_value(&*ptr),
24399 |mem| (ptr as *mut ArcInner<T>).set_ptr_value(mem) as *mut ArcInner<T>,
24410 let ptr = Self::allocate_for_ptr(bptr);
24413 ptr::copy_nonoverlapping(
24415 &mut (*ptr).data as *mut _ as *mut u8,
24422 Self::from_ptr(ptr)
24434 |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
24444 let ptr = Self::allocate_for_slice(v.len());
24446 ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
24448 Self::from_ptr(ptr)
24470 ptr::drop_in_place(slice);
24478 let ptr = Self::allocate_for_slice(len);
24480 let mem = ptr as *mut _ as *mut u8;
24481 let layout = Layout::for_value(&*ptr);
24484 let elems = &mut (*ptr).data as *mut [T] as *mut T;
24489 ptr::write(elems.add(i), item);
24496 Self::from_ptr(ptr)
24564 Self::from_inner(self.ptr)
24649 let _weak = Weak { ptr: this.ptr };
24656 ptr::write(this, arc.assume_init());
24742 unsafe { &mut (*this.ptr.as_ptr()).data }
24872 let ptr = self.ptr.cast::<ArcInner<T>>();
24874 Ok(Arc::from_inner(ptr))
24897 Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
24918 /// use std::ptr;
24923 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
24933 /// [`null`]: core::ptr::null
24936 let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr);
24938 if is_dangling(ptr) {
24941 ptr as *const T
24946 unsafe { ptr::addr_of_mut!((*ptr).data) }
25027 pub unsafe fn from_raw(ptr: *const T) -> Self {
25030 let ptr = if is_dangling(ptr as *mut T) {
25032 ptr as *mut ArcInner<T>
25035 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
25036 let offset = unsafe { data_offset(ptr) };
25039 unsafe { (ptr as *mut ArcInner<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) }
25043 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
25099 Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
25148 if is_dangling(self.ptr.as_ptr()) {
25155 let ptr = self.ptr.as_ptr();
25156 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
25162 /// [`ptr::eq`]), or if both don't point to any allocation
25201 /// [`ptr::eq`]: core::ptr::eq
25205 self.ptr.as_ptr() == other.ptr.as_ptr()
25227 return Weak { ptr: self.ptr };
25240 Weak { ptr: self.ptr }
25304 unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) }
25786 unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
25793 unsafe { data_offset_align(align_of_val_raw(ptr)) }
26561 let ptr = Rc::into_raw(rc.clone());
26562 let rc2 = unsafe { Rc::from_raw(ptr) };
26564 assert_eq!(unsafe { &*ptr }, "foo");
26569 let ptr = Rc::into_raw(rc.clone());
26570 let rc2 = unsafe { Rc::from_raw(ptr) };
26572 assert_eq!(unsafe { &*ptr }.to_string(), "123");
26602 let ptr = Weak::into_raw(weak.clone());
26603 let weak2 = unsafe { Weak::from_raw(ptr) };
26605 assert_eq!(unsafe { &*ptr }, "foo");
26611 let ptr = Weak::into_raw(weak.clone());
26612 let weak2 = unsafe { Weak::from_raw(ptr) };
26614 assert_eq!(unsafe { &*ptr }.to_string(), "123");
26965 use core::ptr::NonNull;
27263 // Fix the head ptr of the second part
27303 // Fix the tail ptr of the first part
28938 // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
29437 use std::ptr;
29502 assert!(ptr::eq(self.1, other.1));
29510 assert!(ptr::eq(self.1, other.1));
29801 use core::ptr;
30001 let root = ptr::read(&subtree.root);
31238 unsafe { ptr::read(&this.front).deallocating_end() }
32014 use core::ptr::NonNull;
32027 ptr: NonNull<T>,
32039 let ptr = NonNull::from(t);
32042 let new_ref = unsafe { &mut *ptr.as_ptr() };
32043 (new_ref, Self { ptr, _marker: PhantomData })
32054 unsafe { &mut *self.ptr.as_ptr() }
32062 use core::ptr;
32087 let value = unsafe { ptr::read(v) };
32090 ptr::write(v, new_value);
33751 use core::ptr;
33810 let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
33898 let self2 = unsafe { ptr::read(&self) };
33910 let self2 = unsafe { ptr::read(&self) };
34003 let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
34004 let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
34031 let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
34032 let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
34095 (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
34110 (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
37957 use core::ptr::{self, NonNull};
37996 ptr::addr_of_mut!((*this).parent).write(None);
37997 ptr::addr_of_mut!((*this).len).write(0);
38038 LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
38175 /// Returns a raw ptr to avoid invalidating other references to this node.
38185 let ptr = Self::as_internal_ptr(self);
38186 unsafe { &mut *ptr }
38217 /// Returns a raw ptr to avoid invalidating other references to this node.
38293 let ptr = Self::as_leaf_ptr(&self);
38295 unsafe { &*ptr }
38348 let ptr = Self::as_leaf_ptr(self);
38350 unsafe { &mut *ptr }
38355 let ptr = Self::as_leaf_ptr(&mut self);
38357 unsafe { &mut *ptr }
38415 let keys = unsafe { ptr::addr_of!((*leaf).keys) };
38416 let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
38804 let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
38807 child.set_parent_link(ptr, idx);
38877 (InsertResult::Fit(handle), ptr) => {
38878 return (InsertResult::Fit(handle.forget_node_type()), ptr);
39067 let self1 = unsafe { ptr::read(&self) };
39068 let self2 = unsafe { ptr::read(&self) };
39093 match unsafe { ptr::read(&self) }.ascend() {
39096 parent: unsafe { ptr::read(&left_parent_kv) },
39102 parent: unsafe { ptr::read(&right_parent_kv) },
39574 ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1);
39591 ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1);
39603 ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance);
39614 ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance);
39624 ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
40112 use core::ptr::{self, NonNull};
40214 self.iter.next().map(|elt| unsafe { ptr::read(elt) })
40227 self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
40236 use core::ptr::{self};
40279 ptr::slice_from_raw_parts_mut(self.as_mut_ptr().wrapping_add(from), len)
40284 let ptr = self.as_mut_ptr();
40287 ptr::slice_from_raw_parts_mut(ptr, mid),
40288 ptr::slice_from_raw_parts_mut(ptr.wrapping_add(mid), len - mid),
40374 use core::ptr::{self, NonNull};
40483 ptr::drop_in_place(self.0);
40492 ptr::drop_in_place(front);
40510 fn ptr(&self) -> *mut T {
40511 self.buf.ptr()
40525 /// Turn ptr into a slice
40528 unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
40531 /// Turn ptr into a mut slice
40534 unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
40540 unsafe { ptr::read(self.ptr().add(off)) }
40547 ptr::write(self.ptr().add(off), value);
40598 ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
40622 ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
40859 unsafe { Some(&*self.ptr().add(idx)) }
40888 unsafe { Some(&mut *self.ptr().add(idx)) }
40923 unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) }
41228 ptr::drop_in_place(self.0);
41250 ptr::drop_in_place(drop_back);
41259 ptr::drop_in_place(drop_front);
41308 ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()),
41501 ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()),
41547 // Drain will ptr::read out the values to remove.
42342 ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first);
42345 ptr::copy_nonoverlapping(
42347 other.ptr().add(amount_in_first),
42355 ptr::copy_nonoverlapping(
42357 other.ptr(),
42557 let buf = self.buf.ptr();
42572 ptr::copy(buf, buf.add(tail_len), self.head);
42574 ptr::copy_nonoverlapping(buf.add(self.tail), buf, tail_len);
42594 ptr::copy(buf.add(self.tail), buf.add(self.head), tail_len);
42596 ptr::copy_nonoverlapping(buf, buf.add(self.head + tail_len), self.head);
42626 ptr::swap(buf.add(i), buf.offset(src));
43251 /// let ptr = deque.as_slices().0.as_ptr();
43254 /// assert_eq!(vec.as_ptr(), ptr);
43260 /// let ptr = deque.as_slices().1.as_ptr();
43263 /// assert_eq!(vec.as_ptr(), ptr);
43270 let buf = other.buf.ptr();
43275 ptr::copy(buf.add(other.tail), buf, len);
44417 use core::ptr;
44775 // It is safe to access index 0 (i.e. `ptr`), because
44778 let ptr = self.data.as_mut_ptr();
44779 ptr::swap(ptr, ptr.add(end));
45399 let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
45432 let ptr = self.data.as_mut_ptr();
45433 let index_ptr: *const _ = ptr.add(index);
45434 let hole_ptr = ptr.add(self.pos);
45435 ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
45447 ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
45854 use core::ptr::{self, NonNull};
45877 pub(super) ptr: *const T,
45902 unsafe { slice::from_raw_parts(self.ptr, self.len()) }
45931 ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
45941 /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
45950 self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
45951 self.ptr = self.buf.as_ptr();
45955 ptr::drop_in_place(remaining);
45978 if self.ptr as *const _ == self.end {
45981 // purposefully don't use 'ptr.offset' because for
45984 self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };
45989 let old = self.ptr;
45990 self.ptr = unsafe { self.ptr.offset(1) };
45992 Some(unsafe { ptr::read(old) })
45999 (self.end as usize).wrapping_sub(self.ptr as usize)
46001 unsafe { self.end.offset_from(self.ptr) as usize }
46016 // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
46024 if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
46033 if self.end == self.ptr {
46036 // See above for why 'ptr.offset' isn't used
46044 Some(unsafe { ptr::read(self.end) })
46052 self.ptr == self.end
46064 // T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
46094 let alloc = ptr::read(&self.0.alloc);
46104 ptr::drop_in_place(guard.0.as_raw_mut_slice());
46139 use core::ptr::{self};
46174 let mut ptr = self.as_mut_ptr().add(self.len());
46177 ptr::write(ptr, element);
46178 ptr = ptr.offset(1);
46199 iterator.ptr = iterator.end;
46224 use core::ptr::{self};
46249 ptr::write_bytes(v.as_mut_ptr(), elem as u8, n);
46264 ptr::write_bytes(v.as_mut_ptr(), elem, n);
46286 use core::ptr::{self, NonNull};
46367 self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
46379 self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
46405 ptr::copy(src, dst, self.0.tail_len);
46438 use core::ptr::{self};
46465 ptr::write(vector.as_mut_ptr(), element);
46568 use core::ptr::{self};
46616 let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
46621 ptr::copy(it.ptr, it.buf.as_ptr(), it.len());
46666 use core::ptr::{self};
46711 inner.ptr,
46727 if src.ptr != src_ptr {
46729 unsafe { dst_buf.add(len) as *const _ } <= src.ptr,
46754 ptr::write(sink.dst, item);
46801 ptr::write(dst, self.__iterator_get_unchecked(i));
46873 use core::ptr::{self, NonNull};
47102 /// ptr len capacity
47260 /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
47262 /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
47277 /// The ownership of `ptr` is effectively transferred to the
47289 /// use std::ptr;
47307 /// ptr::write(p.offset(i), 4 + i);
47317 pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
47318 unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) }
47398 /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
47400 /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
47415 /// The ownership of `ptr` is effectively transferred to the
47431 /// use std::ptr;
47453 /// ptr::write(p.offset(i), 4 + i);
47463 pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
47464 unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } }
47488 /// let (ptr, len, cap) = v.into_raw_parts();
47493 /// let ptr = ptr as *mut u32;
47495 /// Vec::from_raw_parts(ptr, len, cap)
47531 /// let (ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
47536 /// let ptr = ptr as *mut u32;
47538 /// Vec::from_raw_parts_in(ptr, len, cap, alloc)
47548 let ptr = me.as_mut_ptr();
47549 let alloc = unsafe { ptr::read(me.allocator()) };
47550 (ptr, len, capacity, alloc)
47778 let buf = ptr::read(&me.buf);
47843 let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
47845 ptr::drop_in_place(s);
47913 let ptr = self.buf.ptr();
47915 assume(!ptr.is_null());
47917 ptr
47949 let ptr = self.buf.ptr();
47951 assume(!ptr.is_null());
47953 ptr
48087 let last = ptr::read(self.as_ptr().add(len - 1));
48090 ptr::replace(hole, last)
48135 ptr::copy(p, p.offset(1), len - index);
48138 ptr::write(p, element);
48175 let ptr = self.as_mut_ptr().add(index);
48178 ret = ptr::read(ptr);
48181 ptr::copy(ptr.offset(1), ptr, len - index - 1);
48245 ptr::copy(
48269 unsafe { ptr::drop_in_place(cur) };
48278 ptr::copy_nonoverlapping(cur, hole_slot, 1);
48361 let ptr = self.vec.as_mut_ptr();
48369 let dropped_ptr = ptr.add(self.write);
48371 let valid_ptr = ptr.add(self.read);
48375 ptr::copy(valid_ptr, dropped_ptr, items_left);
48387 let ptr = gap.vec.as_mut_ptr();
48396 let read_ptr = ptr.add(gap.read);
48397 let prev_ptr = ptr.add(gap.write.wrapping_sub(1));
48401 ptr::drop_in_place(read_ptr);
48403 let write_ptr = ptr.add(gap.write);
48408 ptr::copy(read_ptr, write_ptr, 1);
48448 ptr::write(end, value);
48471 Some(ptr::read(self.as_ptr().add(self.len())))
48506 unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
48546 // Drain will ptr::read out the values to remove.
48671 ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
48856 let Range { start: ptr, end: spare_ptr } = self.as_mut_ptr_range();
48861 // - `ptr` is guaranteed to be valid for `len` elements
48864 let initialized = slice::from_raw_parts_mut(ptr, self.len);
49004 let mut ptr = self.as_mut_ptr().add(self.len());
49006 // may not realize the store through `ptr` through self.set_len()
49012 ptr::write(ptr, value.next());
49013 ptr = ptr.offset(1);
49020 ptr::write(ptr, value.last());
49113 unsafe { ptr::copy_nonoverlapping(source.as_ptr(), spare.as_mut_ptr() as _, count) };
49238 let alloc = ptr::read(me.allocator());
49251 ptr: begin,
49314 ptr::write(self.as_mut_ptr().add(len), element);
49475 ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
49702 let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
49735 use core::ptr::{self};
49813 return Some(ptr::read(&v[i]));
49818 ptr::copy_nonoverlapping(src, dst, 1);
49856 let ptr = self.drain.vec.as_mut_ptr();
49857 let src = ptr.add(self.drain.idx);
49956 use core::ptr::{self};
50064 unsafe { ptr::write(place, new_item) };
50083 ptr::copy(src, dst, self.tail_len);
50088 use core::ptr::{self};
50108 ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len()));
50151 let ptr =
50154 let mut i = ptr.as_non_null_ptr().as_ptr();
50160 Global.deallocate(ptr.as_non_null_ptr(), layout);
50240 //! be used is to use [`ptr::NonNull::dangling`].
50301 //! [valid]: ptr#safety
50319 use core::ptr::{self, Unique};
50595 let ptr = alloc.allocate(layout)?.cast();
50596 unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
50657 let ptr = alloc.allocate_zeroed(layout)?.cast();
50658 unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
50904 /// let ptr = Box::into_raw(x);
50905 /// let x = unsafe { Box::from_raw(ptr) };
50912 /// let ptr = alloc(Layout::new::<i32>()) as *mut i32;
50914 /// // the (uninitialized) previous contents of `ptr`, though for this
50915 /// // simple example `*ptr = 5` would have worked as well.
50916 /// ptr.write(5);
50917 /// let x = Box::from_raw(ptr);
50956 /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
50957 /// let x = unsafe { Box::from_raw_in(ptr, alloc) };
50966 /// let ptr = System.allocate(Layout::new::<i32>())?.as_mut_ptr() as *mut i32;
50968 /// // the (uninitialized) previous contents of `ptr`, though for this
50969 /// // simple example `*ptr = 5` would have worked as well.
50970 /// ptr.write(5);
50971 /// let x = Box::from_raw_in(ptr, System);
51005 /// let ptr = Box::into_raw(x);
51006 /// let x = unsafe { Box::from_raw(ptr) };
51012 /// use std::ptr;
51017 /// ptr::drop_in_place(p);
51054 /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
51055 /// let x = unsafe { Box::from_raw_in(ptr, alloc) };
51063 /// use std::ptr::{self, NonNull};
51066 /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
51068 /// ptr::drop_in_place(ptr);
51069 /// let non_null = NonNull::new_unchecked(ptr);
51095 let alloc = unsafe { ptr::read(&b.1) };
51406 ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
51622 let ptr: *const T = &**self;
51623 fmt::Pointer::fmt(&ptr, f)
51902 unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
51903 unsafe { Global.deallocate(ptr, layout) }